Play color video with FFMPEG

Sch 2023-08-01 02:45:29 +08:00
parent e6386ab601
commit 5fabd46016
8 changed files with 224 additions and 121 deletions

View File: Cut5/Utils/FFMPEGUtils.cpp

@@ -0,0 +1,95 @@
#include "FFMPEGUtils.h"
FString FFFMPEGUtils::LoadMedia(const FString& Path, FTimelinePropertyData* PropertyData)
{
AVFormatContext* FormatContext = nullptr;
if (avformat_open_input(&FormatContext, TCHAR_TO_UTF8(*Path), nullptr, nullptr) != 0)
{
check(false)
}
// Without this call, duration and codecpar used below may be unpopulated for some containers.
if (avformat_find_stream_info(FormatContext, nullptr) < 0)
{
check(false)
}
int32 VideoStream = -1;
int32 AudioStream = -1;
for (unsigned int i = 0; i < FormatContext->nb_streams; i++) {
if (FormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
VideoStream = i;
} else if (FormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
AudioStream = i;
}
}
if (VideoStream != -1)
{
AVCodecContext* VideoCodecContext = avcodec_alloc_context3(nullptr);
avcodec_parameters_to_context(VideoCodecContext, FormatContext->streams[VideoStream]->codecpar);
AVCodec* VideoCodec = avcodec_find_decoder(VideoCodecContext->codec_id);
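// The 1/30 time base below encodes the project-wide 30 fps assumption; note that gop_size is an encoder-side option that decoders ignore.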
VideoCodecContext->time_base = AVRational({1, 30});
VideoCodecContext->gop_size = 1;
if (avcodec_open2(VideoCodecContext, VideoCodec, nullptr) < 0)
{
check(false)
}
PropertyData->VideoCodec = VideoCodec;
PropertyData->VideoCodecContext = VideoCodecContext;
}
if (AudioStream != -1)
{
AVCodecContext* AudioCodecContext = avcodec_alloc_context3(nullptr);
avcodec_parameters_to_context(AudioCodecContext, FormatContext->streams[AudioStream]->codecpar);
AVCodec* AudioCodec = avcodec_find_decoder(AudioCodecContext->codec_id);
if (avcodec_open2(AudioCodecContext, AudioCodec, nullptr) < 0)
{
check(false)
}
PropertyData->AudioCodecContext = AudioCodecContext;
PropertyData->AudioCodec = AudioCodec;
}
PropertyData->VideoStream = VideoStream;
PropertyData->AudioStream = AudioStream;
PropertyData->Context = FormatContext;
if (VideoStream != -1)
{
PropertyData->Type = ETrackType::VideoTrack;
}
else if (AudioStream != -1)
{
PropertyData->Type = ETrackType::AudioTrack;
}
PropertyData->Name = FPaths::GetBaseFilename(Path);
PropertyData->MoviePath = Path;
PropertyData->MovieFrameLength = FormatContext->duration * 30 / AV_TIME_BASE; // multiply before dividing so sub-second durations are not truncated to zero
return {};
}
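The manual stream loop above could also be replaced by av_find_best_stream, which selects a stream and its decoder in one call. A minimal sketch under that assumption (the wrapper name FindVideoStream is illustrative, not part of this commit):

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
}

// Returns the index of the preferred video stream (or a negative AVERROR)
// and writes the matching decoder into OutDecoder.
static int FindVideoStream(AVFormatContext* FormatContext, AVCodec** OutDecoder)
{
    // av_find_best_stream prefers streams with decodable frames and a known decoder.
    return av_find_best_stream(FormatContext, AVMEDIA_TYPE_VIDEO,
        /*wanted_stream_nb*/ -1, /*related_stream*/ -1, OutDecoder, 0);
}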
FString FFFMPEGUtils::ConvertMediaGoPto1(const FString& Path)
{
AVFormatContext* FormatContext = nullptr;
if (avformat_open_input(&FormatContext, TCHAR_TO_UTF8(*Path), nullptr, nullptr) != 0)
{
check(false)
}
int32 VideoStream = -1;
int32 AudioStream = -1;
for (unsigned int i = 0; i < FormatContext->nb_streams; i++) {
if (FormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
VideoStream = i;
} else if (FormatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
AudioStream = i;
}
}
// TODO: the actual GOP-1 remux/transcode is not implemented yet; so far this only probes the streams.
avformat_close_input(&FormatContext); // avoid leaking the probe context
return {};
}
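ConvertMediaGoPto1 is still a stub. Its name suggests re-encoding so that every frame is a keyframe (GOP size 1), which would make the frame-accurate seeking in STimelineClip::Seek much cheaper. A minimal sketch of the encoder side of such a conversion, assuming H.264 output and the project's 30 fps convention (the helper name AllocIntraOnlyEncoder is illustrative):

static AVCodecContext* AllocIntraOnlyEncoder(int Width, int Height)
{
    AVCodec* Encoder = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!Encoder)
    {
        return nullptr;
    }
    AVCodecContext* Ctx = avcodec_alloc_context3(Encoder);
    Ctx->width = Width;
    Ctx->height = Height;
    Ctx->time_base = AVRational{1, 30}; // project-wide 30 fps assumption
    Ctx->pix_fmt = AV_PIX_FMT_YUV420P;
    Ctx->gop_size = 1;      // intra-only: every frame is a keyframe, so any frame is directly seekable
    Ctx->max_b_frames = 0;  // no B-frames, so decode order equals display order
    if (avcodec_open2(Ctx, Encoder, nullptr) < 0)
    {
        avcodec_free_context(&Ctx);
        return nullptr;
    }
    return Ctx;
}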

View File: Cut5/Utils/FFMPEGUtils.h

@@ -0,0 +1,16 @@
#pragma once
#include "Cut5/Widgets/DefineGlobal.h"
extern "C"{
#include <libavformat/avformat.h>
}
struct FFFMPEGUtils
{
/**
* @brief Opens the media at Path, prepares the video/audio decoders, and fills PropertyData.
* @param Path Full path to the media file.
* @param PropertyData Receives the format context, codec contexts, and stream indices.
* @return Currently always an empty string; reserved for a result path.
*/
static FString LoadMedia(const FString& Path, FTimelinePropertyData* PropertyData);
static FString ConvertMediaGoPto1(const FString& Path);
};
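A hypothetical call site, for reference (the path is illustrative):

FTimelinePropertyData PropertyData;
FFFMPEGUtils::LoadMedia(TEXT("C:/Footage/Sample.mp4"), &PropertyData);
// PropertyData now holds the AVFormatContext plus opened video/audio decoder contexts.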

View File: Cut5/Widgets/DefineGlobal.h

@@ -183,8 +183,12 @@ struct CUT5_API FTimelinePropertyData
FGuid Guid = FGuid::NewGuid();
AVFormatContext* Context = nullptr;
AVCodecContext* CodecContext = nullptr;
AVCodec* Codec = nullptr;
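// Video and audio are decoded independently, so each stream gets its own codec and context.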
AVCodecContext* VideoCodecContext = nullptr;
AVCodecContext* AudioCodecContext = nullptr;
AVCodec* VideoCodec = nullptr;
AVCodec* AudioCodec = nullptr;
int32 VideoStream = -1;
int32 AudioStream = -1;
// Movie Data
FString MoviePath = "";

View File: DragDropOperator.cpp

@@ -74,6 +74,7 @@ void DragDropOperator::OnDrop(const FGeometry& MyGeometry, const FDragDropEvent&
NewClipData.ClipStartFrame = MyGeometry.AbsoluteToLocal(DragDropEvent.GetScreenSpacePosition()).X / FGlobalData::DefaultTimeTickSpace;
NewClipData.ClipColors.Add(FLinearColor(1, 1, 1, 1));
NewClipData.ResourcePropertyGuid = ClipDragOperation.TimelinePropertyData->Guid;
NewClipData.ResourcePropertyDataPtr = ClipDragOperation.TimelinePropertyData;
if (ClipDragOperation.TimelinePropertyData->Type == ETrackType::VideoTrack)
{
// If the dragged item is a video, handle each track type differently

View File: SCustomInputPanel.cpp

@@ -17,6 +17,7 @@
#include "RuntimeAudioImporterLibrary.h"
#include "Cut5/Interface/SoundInterface.h"
#include "Cut5/Utils/FFMPEGUtils.h"
#include "Engine/Engine.h"
#include "Cut5/Utils/OpencvUtils.h"
@@ -151,95 +152,20 @@ void SCustomInputPanel::Construct(const FArguments& InArgs)
{
if (FPaths::GetExtension(OpenFileName[i]) == "mp3")
{
AVFormatContext* FormatContext = nullptr;
if (avformat_open_input(&FormatContext, TCHAR_TO_UTF8(*OpenFileName[i]), nullptr, nullptr) != 0)
{
check(false)
}
if (avformat_find_stream_info(FormatContext, nullptr) < 0)
{
check(false)
}
const int64 Duration = FormatContext->duration / AV_TIME_BASE;
UE_LOG(LogTemp, Warning, TEXT("Duration: %lld"), Duration);
FTimelinePropertyData PropertyData;
PropertyData.Name = OpenFileName[i];
PropertyData.Type = ETrackType::AudioTrack;
PropertyData.MoviePath = OpenFileName[i];
PropertyData.MovieFrameLength = Duration * 30;
PropertyData.Context = FormatContext;
const AVCodecParameters* CodecParam = FormatContext->streams[0]->codecpar;
AVCodec* Codec = avcodec_find_decoder(CodecParam->codec_id);
AVCodecContext* CodecContext = avcodec_alloc_context3(Codec);
avcodec_open2(CodecContext, Codec, nullptr);
// void* Stream = nullptr;
// Pa_Initialize();
// Pa_OpenDefaultStream(&Stream, 0, 2, paFloat32, 44100, 0, nullptr, nullptr);
// Pa_StartStream(Stream);
TArray<uint8> DataResult;
AVPacket Packet = *av_packet_alloc();
AVFrame* Frame = av_frame_alloc();
while (1)
{
if (av_read_frame(FormatContext, &Packet) < 0)
{
if (av_read_frame(FormatContext, &Packet) < 0)
{
break;
}
}
avcodec_send_packet(CodecContext, &Packet);
if (avcodec_receive_frame(CodecContext, Frame) >= 0)
{
const uint8* Result = FUtils::ConvertTwoChannelSound2PortAudioSound(Frame->data[0], Frame->data[1], Frame->nb_samples);
if (Result != nullptr)
{
DataResult.Append(Result, Frame->nb_samples * 4 * 2);
// Pa_WriteStream(Stream, Result, Frame->nb_samples);
}
delete[] Result;
}
}
PropertyData.AudioData = DataResult;
PropertyData.CodecContext = CodecContext;
PropertyData.Codec = Codec;
FTimelinePropertyData Data;
FFFMPEGUtils::LoadMedia(OpenFileName[i], &Data);
return FReply::Handled();
}
else
{
FTimelinePropertyData Data;
FFFMPEGUtils::LoadMedia(OpenFileName[i], &Data);
GridPanel->AddSlot(GridPanel->GetChildren()->Num() % 3, GridPanel->GetChildren()->Num() / 3)
[
SNew(SCustomInputResource)
.PropertyData(PropertyData)
.PropertyData(Data)
];
return FReply::Handled();
}
Async(EAsyncExecution::Thread, [&, this, OpenFileName, i]
{
cv::VideoCapture NewCapture(TCHAR_TO_UTF8(*OpenFileName[i]));
const int32 FrameCount = NewCapture.get(cv::CAP_PROP_FRAME_COUNT);
FGraphEventRef Task = FFunctionGraphTask::CreateAndDispatchWhenReady([&]()
{
FTimelinePropertyData PropertyData;
PropertyData.Name = OpenFileName[i];
PropertyData.Type = ETrackType::VideoTrack;
PropertyData.MoviePath = OpenFileName[i];
PropertyData.MovieFrameLength = FrameCount;
GridPanel->AddSlot(GridPanel->GetChildren()->Num() % 3, GridPanel->GetChildren()->Num() / 3)
[
SNew(SCustomInputResource)
.PropertyData(PropertyData)
.VideoCapture(NewCapture)
];
}, TStatId(), nullptr, ENamedThreads::GameThread);
FTaskGraphInterface::Get().WaitUntilTaskCompletes(Task);
});
}
return FReply::Handled();
})
@@ -391,4 +317,5 @@ int SCustomInputPanel::AudioCallback(const void* input, void* output, unsigned l
return 0;
}
END_SLATE_FUNCTION_BUILD_OPTIMIZATION
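The mp3 branch that this hunk removes read packets in a loop without ever unreferencing them, and double-called av_read_frame as an ad-hoc retry. For reference, a leak-free shape for such a decode loop looks roughly like this (consuming the frame is left abstract):

AVPacket* Packet = av_packet_alloc();
AVFrame* Frame = av_frame_alloc();
while (av_read_frame(FormatContext, Packet) >= 0)
{
    if (avcodec_send_packet(CodecContext, Packet) == 0)
    {
        // One packet may yield zero or several frames; drain them all.
        while (avcodec_receive_frame(CodecContext, Frame) == 0)
        {
            // ... append Frame->data / Frame->nb_samples to the output buffer ...
            av_frame_unref(Frame);
        }
    }
    av_packet_unref(Packet); // required every iteration, otherwise each read leaks
}
av_frame_free(&Frame);
av_packet_free(&Packet);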

View File: SCustomInputPanel.h

@@ -6,6 +6,7 @@
#include "CoreMinimal.h"
#include "DefineGlobal.h"
#include "Widgets/SCompoundWidget.h"
#include "Widgets/Layout/SGridPanel.h"
class FSoundThread;
@@ -47,4 +48,6 @@ public:
TSharedPtr<SWidgetSwitcher> Switcher;
};

View File: STimelineClip.cpp

@@ -2,6 +2,10 @@
#include "STimelineClip.h"
#include "AudioDevice.h"
#include "RuntimeAudioImporterLibrary.h"
#include "SlateOptMacros.h"
@@ -16,7 +20,9 @@ extern "C"
{
#include "libavformat/avformat.h"
#include "libswresample/swresample.h"
#include <libswscale/swscale.h>
#include "portaudio.h"
#include <libswscale/swscale.h>
}
BEGIN_SLATE_FUNCTION_BUILD_OPTIMIZATION
@@ -106,60 +112,111 @@ void STimelineClip::Seek(int32 Frame)
{
case ETrackType::VideoTrack:
{
if (!ClipData->VideoCapture || Frame > ClipData->ClipEndFrame || Frame < 0)
{
return;
}
FDateTime A = FDateTime::Now();
// FDateTime A = FDateTime::Now();
const int32 Offset = Frame - (ClipData->ClipStartFrame);
const int32 SeekMovieFrame = ClipData->VideoStartFrame + Offset;
if (SeekMovieFrame > LastSeekFrame)
const int64 Timestamp = av_rescale_q(static_cast<int64>(SeekMovieFrame / 30.0 * AV_TIME_BASE), AVRational{1, AV_TIME_BASE}, ClipData->ResourcePropertyDataPtr->Context->streams[ClipData->ResourcePropertyDataPtr->VideoStream]->time_base); // frame index -> microseconds (30 fps assumed) -> stream time_base units
if (SeekMovieFrame - LastSeekFrame > 1 || SeekMovieFrame - LastSeekFrame < -1)
{
const int32 SeekOffset = SeekMovieFrame - LastSeekFrame;
for (int32 i = 0; i < SeekOffset; i++)
AVRational frame_rate = ClipData->ResourcePropertyDataPtr->Context->streams[ClipData->ResourcePropertyDataPtr->VideoStream]->avg_frame_rate;
if (av_seek_frame(ClipData->ResourcePropertyDataPtr->Context, ClipData->ResourcePropertyDataPtr->VideoStream, Timestamp, AVSEEK_FLAG_BACKWARD) < 0)
{
ClipData->VideoCapture->grab();
}
}
else if (SeekMovieFrame < LastSeekFrame)
{
ClipData->VideoCapture->set(cv::CAP_PROP_POS_FRAMES, 0);
for (int32 i = 0; i < SeekMovieFrame; i++)
{
ClipData->VideoCapture->grab();
}
GEngine->AddOnScreenDebugMessage(-1, 10.0f, FColor::Blue, TEXT("Seek Failed"));
}
avcodec_flush_buffers(ClipData->ResourcePropertyDataPtr->VideoCodecContext); // drop any frames decoded before the seek
}
LastSeekFrame = SeekMovieFrame;
// GEngine->AddOnScreenDebugMessage(-1, 10.0F, FColor::Red, FString::Printf(TEXT("Read Time: %f"), (FDateTime::Now() - A).GetTotalMilliseconds()));
cv::Mat Read;
ClipData->VideoCapture->retrieve(Read);
// ClipData->VideoCapture->read(Read);
UTexture2D* Texture = UTexture2D::CreateTransient(Read.cols, Read.rows, PF_B8G8R8A8);
AVPacket* Packet = av_packet_alloc();
AVFrame* AllocatedFrame = av_frame_alloc();
// av_packet_alloc() already returns an initialized packet, so av_init_packet() is redundant here.
// Drain a frame possibly left in the decoder from the previous Seek call.
avcodec_receive_frame(ClipData->ResourcePropertyDataPtr->VideoCodecContext, AllocatedFrame);
int32 Times = 0;
while (av_read_frame(ClipData->ResourcePropertyDataPtr->Context, Packet) >= 0)
{
if (Packet->stream_index == ClipData->ResourcePropertyDataPtr->VideoStream)
{
int32 Response = avcodec_send_packet(ClipData->ResourcePropertyDataPtr->VideoCodecContext, Packet);
if (Response < 0)
{
UE_LOG(LogTemp, Error, TEXT("Error while sending a packet to the decoder: %s"), *FString(FString::FromInt(Response)));
return;
}
Response = avcodec_receive_frame(ClipData->ResourcePropertyDataPtr->VideoCodecContext, AllocatedFrame);
if (Response == AVERROR(EAGAIN) || Response == AVERROR_EOF)
{
av_packet_unref(Packet); // decoder needs more input; release this packet before reading the next
continue;
}
else if (Response < 0)
{
UE_LOG(LogTemp, Error, TEXT("Error while receiving a frame from the decoder: %s"), *FString(FString::FromInt(Response)));
return;
}
if (AllocatedFrame->best_effort_timestamp >= Timestamp)
{
av_packet_unref(Packet);
break;
}
}
av_packet_unref(Packet); // every packet from av_read_frame must be released, including non-video ones
}
av_packet_free(&Packet); // unreferences any remaining data and frees the packet itself
// GEngine->AddOnScreenDebugMessage(-1, 1.00f, FColor::Green, FString::FromInt(AllocatedFrame->best_effort_timestamp));
AVCodecContext* VideoCodecContext = ClipData->ResourcePropertyDataPtr->VideoCodecContext;
struct SwsContext* swsCtx = sws_getContext(
AllocatedFrame->width, AllocatedFrame->height, VideoCodecContext->pix_fmt,
AllocatedFrame->width, AllocatedFrame->height, AV_PIX_FMT_BGRA, // PF_B8G8R8A8 textures are BGRA in memory; converting to RGBA would swap red and blue
SWS_BILINEAR, NULL, NULL, NULL
);
if (!swsCtx)
{
UE_LOG(LogTemp, Error, TEXT("Error creating swsContext"));
return;
}
uint8* RawData = new uint8[AllocatedFrame->width * AllocatedFrame->height * 4];
uint8* dest[4] = {RawData, 0, 0, 0};
int32 dest_linesize[4] = {AllocatedFrame->width * 4, 0, 0, 0};
sws_scale(swsCtx, AllocatedFrame->data, AllocatedFrame->linesize, 0, AllocatedFrame->height, dest, dest_linesize);
sws_freeContext(swsCtx);
UTexture2D* Texture = UTexture2D::CreateTransient(AllocatedFrame->width, AllocatedFrame->height, PF_B8G8R8A8);
if (Texture)
{
A = FDateTime::Now();
uint8* RGBAData = new uint8[Read.cols * Read.rows * 4];
for (int i = 0; i < Read.cols * Read.rows; i++)
{
RGBAData[i * 4 + 0] = Read.data[i * 3 + 0];
RGBAData[i * 4 + 1] = Read.data[i * 3 + 1];
RGBAData[i * 4 + 2] = Read.data[i * 3 + 2];
RGBAData[i * 4 + 3] = 255;
}
// GEngine->AddOnScreenDebugMessage(-1, 10.0F, FColor::Red, FString::Printf(TEXT("RGBA Time: %f"), (FDateTime::Now() - A).GetTotalMilliseconds()));
void* MipData = Texture->GetPlatformData()->Mips[0].BulkData.Lock(LOCK_READ_WRITE);
FMemory::Memcpy(MipData, RGBAData, Read.cols * Read.rows * 4);
FMemory::Memcpy(MipData, RawData, AllocatedFrame->width * AllocatedFrame->height * 4);
Texture->GetPlatformData()->Mips[0].BulkData.Unlock();
Texture->UpdateResource();
MainWidgetInterface->OnUpdateVideo(FGuid::NewGuid(), Texture);
}
delete[] RawData; // array new must pair with array delete; freed even when CreateTransient fails
av_frame_free(&AllocatedFrame);
// AVFrame* frameRGBA = av_frame_alloc();
// int numBytes = avpicture_get_size(AV_PIX_FMT_RGBA, VideoCodecContext->width, VideoCodecContext->height);
// uint8_t* buffer = (uint8_t*)av_malloc(numBytes * sizeof(uint8_t));
// avpicture_fill((AVPicture*)frameRGBA, buffer, AV_PIX_FMT_RGBA, VideoCodecContext->width, VideoCodecContext->height);
// sws_scale(swsCtx, AllocatedFrame->data, AllocatedFrame->linesize, 0, VideoCodecContext->height, frameRGBA->data, frameRGBA->linesize);
//
// UTexture2D* texture = UTexture2D::CreateTransient(VideoCodecContext->width, VideoCodecContext->height);
// FUpdateTextureRegion2D region(0, 0, 0, 0, VideoCodecContext->width, VideoCodecContext->height);
// texture->UpdateTextureRegions(0, 1, &region, VideoCodecContext->width * 4, 4, frameRGBA->data[0]);
// MainWidgetInterface->OnUpdateVideo(FGuid::NewGuid(), texture);
}
break;
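The video branch above is the standard accurate-seek pattern: seek the demuxer to the keyframe at or before the target, flush the decoder, then decode forward and discard frames until the target timestamp is reached. A condensed sketch with shortened names (Fmt, VideoCtx, Pkt, Frm are stand-ins, not the committed variables):

av_seek_frame(Fmt, VideoStream, Timestamp, AVSEEK_FLAG_BACKWARD); // land on the prior keyframe
avcodec_flush_buffers(VideoCtx);                                  // forget pre-seek decoder state
while (av_read_frame(Fmt, Pkt) >= 0)
{
    const bool bGotTarget =
        Pkt->stream_index == VideoStream &&
        avcodec_send_packet(VideoCtx, Pkt) == 0 &&
        avcodec_receive_frame(VideoCtx, Frm) == 0 &&
        Frm->best_effort_timestamp >= Timestamp;
    av_packet_unref(Pkt);
    if (bGotTarget)
    {
        break; // Frm now holds the frame to convert and display
    }
}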

View File: STimelineClip.h

@@ -48,7 +48,7 @@ public:
virtual FReply OnMouseMove(const FGeometry& MyGeometry, const FPointerEvent& MouseEvent) override;
TSharedPtr<IWidgetInterface> Body;
int32 LastSeekFrame = 0;
int64 LastTimeStamp = 0;
virtual int32 OnPaint(const FPaintArgs& Args, const FGeometry& AllottedGeometry, const FSlateRect& MyCullingRect, FSlateWindowElementList& OutDrawElements, int32 LayerId, const FWidgetStyle& InWidgetStyle, bool bParentEnabled) const override;
FDecodedAudioStruct DecodedAudioStruct;
PaStream* Stream = nullptr;