// ------------------------------------------------ // Copyright Joe Marshall 2024- All Rights Reserved // ------------------------------------------------ // // The main IMediaPlayer implementation. This is a // shim which provides Unreal interfaces, and then // defers everything vulkan related to the main // AndroidVulkanVideoImpl class. // ------------------------------------------------ #include "AndroidVulkanMediaPlayer.h" #include "AndroidVulkanTextureSample.h" #include "IVulkanImpl.h" #include "VideoMediaSampleHolder.h" #include "UnrealArchiveFileSource.h" #include "UnrealAudioOut.h" #include "HAL/FileManager.h" #include "HAL/PlatformFilemanager.h" #include "IPlatformFilePak.h" #include "Misc/ConfigCacheIni.h" // for GConfig access #include "UnrealLogging.h" #define LOCTEXT_NAMESPACE "FAndroidVulkanVideoModule" #include static void *implDLL = NULL; // logging object passed to impl static UnrealLogger logger; typedef IVulkanImpl *(*CreateImplType)(); typedef void (*DestroyImplType)(IVulkanImpl *); static CreateImplType createImpl = NULL; static DestroyImplType destroyImpl = NULL; FAndroidVulkanMediaPlayer::FAndroidVulkanMediaPlayer(IMediaEventSink &InEventSink) : SampleQueue(MakeShared()), EventSink(InEventSink) { if (implDLL == NULL) { implDLL = dlopen("libVkLayer_OverrideLib.so", RTLD_NOW | RTLD_LOCAL); createImpl = (CreateImplType)(dlsym(implDLL, "createImpl")); destroyImpl = (DestroyImplType)(dlsym(implDLL, "destroyImpl")); } impl = createImpl(); impl->setDataCallback(this); impl->setLogger(&logger); impl->setLooping(Looping); // get log format bitmask from defaultEngine.ini int32 LogVisibility; if (GConfig->GetInt(TEXT("DirectVideo"), TEXT("LogBitmask"), LogVisibility, GEngineIni)) { logger.SetLogVisibilityBitmask((int64)LogVisibility); } else { logger.SetLogVisibilityBitmask(ILogger::LogTypes::ALL_LOGS_BITMASK); } // get output format from defaultEngine.ini FString OutFormat; EMediaTextureSampleFormat SampleFormat = 
EMediaTextureSampleFormat::CharBGR10A2; if (GConfig->GetString(TEXT("DirectVideo"), TEXT("OutputFormat"), OutFormat, GEngineIni)) { if (OutFormat.Equals(TEXT("CharBGR10A2"), ESearchCase::IgnoreCase)) { SampleFormat = EMediaTextureSampleFormat::CharBGR10A2; } else if (OutFormat.Equals(TEXT("CharBGRA"), ESearchCase::IgnoreCase)) { SampleFormat = EMediaTextureSampleFormat::CharBGRA; } else if (OutFormat.Equals(TEXT("CharRGBA"), ESearchCase::IgnoreCase)) { SampleFormat = EMediaTextureSampleFormat::CharRGBA; } else if (OutFormat.Equals(TEXT("RGBA16"), ESearchCase::IgnoreCase)) { SampleFormat = EMediaTextureSampleFormat::RGBA16; } else if (OutFormat.Equals(TEXT("FloatRGB"), ESearchCase::IgnoreCase)) { SampleFormat = EMediaTextureSampleFormat::FloatRGB; } else if (OutFormat.Equals(TEXT("FloatRGBA"), ESearchCase::IgnoreCase)) { SampleFormat = EMediaTextureSampleFormat::FloatRGBA; } else { UE_LOGFMT(LogDirectVideo, Error, "Bad sample format in defaultEngine.ini"); } } AndroidVulkanTextureSample::SetVideoFormat(SampleFormat); AudioOut = new UnrealAudioOut(NULL); impl->setAudioOut(AudioOut); PlayState = EMediaState::Closed; CurInfo.Empty(); DelegateEnterBackground = FCoreDelegates::ApplicationWillEnterBackgroundDelegate.AddRaw( this, &FAndroidVulkanMediaPlayer::OnEnterBackground); DelegateEnterForeground = FCoreDelegates::ApplicationHasEnteredForegroundDelegate.AddRaw( this, &FAndroidVulkanMediaPlayer::OnEnterForeground); SeekIndex = 0; SentBlankFrame = false; Seeking = false; } FAndroidVulkanMediaPlayer::~FAndroidVulkanMediaPlayer() { Close(); SentBlankFrame = true; if (DelegateEnterBackground.IsValid()) { FCoreDelegates::ApplicationWillEnterBackgroundDelegate.Remove(DelegateEnterBackground); } if (DelegateEnterForeground.IsValid()) { FCoreDelegates::ApplicationHasEnteredForegroundDelegate.Remove(DelegateEnterForeground); } destroyImpl(impl); impl = NULL; delete AudioOut; AudioOut = NULL; } // IMediaPlayer // -------------------------------- void 
FAndroidVulkanMediaPlayer::Close()
{
    // Shut down any current playback session. Safe to call when already
    // closed - it then only resets the blank-frame marker.
    SentBlankFrame = false;
    if (PlayState != EMediaState::Closed)
    {
        UE_LOGFMT(LogDirectVideo, VeryVerbose, "Media player close called");
        SampleQueue->FlushSamples();
        VideoSamplePool.ReleaseEverything();
        SeekIndex = 0;
        Seeking = false;
        EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged);
        EventSink.ReceiveMediaEvent(EMediaEvent::MediaClosed);
        PlayState = EMediaState::Closed;
        // close the impl last because it will
        // kill all the texture images that we release in the pool above
        impl->close();
    }
}

// Info string about the current media (CurInfo is only ever emptied here).
FString FAndroidVulkanMediaPlayer::GetInfo() const
{
    return CurInfo;
}

// Stable GUID identifying this player plugin to the media framework.
FGuid FAndroidVulkanMediaPlayer::GetPlayerPluginGUID() const
{
    static FGuid OurGUID(0x9bf2d7c6, 0xb2b84d26, 0xb6ae5a3a, 0xc9883569);
    return OurGUID;
}

FString FAndroidVulkanMediaPlayer::GetStats() const
{
    return TEXT("Not implemented");
}

FString FAndroidVulkanMediaPlayer::GetUrl() const
{
    return VideoURL;
}

// Open a URL or file path. Non-file URLs go straight to the impl; file paths
// are resolved through the Android platform file layer (which reports a root
// path / offset / size), with a fallback to a custom data source for files
// only reachable via a pak file. Returns true when the impl accepted it.
bool FAndroidVulkanMediaPlayer::Open(const FString &Url, const IMediaOptions *Options)
{
    Close();
    UE_LOGFMT(LogDirectVideo, VeryVerbose, "Try open Url {0}", Url);
    SentBlankFrame = false;
    bool started = false;
    FString fullPath = Url;
    if (fullPath.StartsWith("file://"))
    {
        // strip the scheme, leaving a plain path
        fullPath = fullPath.RightChop(7);
    }
    if (fullPath.Contains("://"))
    {
        // a (non-file url)
        started = impl->startVideoURL(TCHAR_TO_UTF8(*fullPath), false);
    }
    else
    {
        // a file path - check if it is local or not
        if (fullPath.StartsWith("./"))
        {
            // "./" paths are taken as relative to the project content dir
            fullPath = FPaths::ProjectContentDir() + fullPath.RightChop(2);
        }
        FPaths::NormalizeFilename(fullPath);
        UE_LOGFMT(LogDirectVideo, VeryVerbose, "Start video {0}", fullPath);
        IAndroidPlatformFile &PlatformFile = IAndroidPlatformFile::GetPlatformPhysical();
        if (PlatformFile.FileExists(*fullPath))
        {
            // directly readable file - the impl is given the containing
            // file's root path plus the offset/size of the asset within it
            int64 FileOffset = PlatformFile.FileStartOffset(*fullPath);
            int64 FileSize = PlatformFile.FileSize(*fullPath);
            FString FileRootPath = PlatformFile.FileRootPath(*fullPath);
            UE_LOGFMT(LogDirectVideo, VeryVerbose, "File exists: {0} {1} {2} {3}", FileRootPath, FileOffset,
FileSize, fullPath); started = impl->startVideoFile(TCHAR_TO_UTF8(*FileRootPath), FileOffset, FileSize, false); } else { // check if the file exists in an archive / encrypted etc. FPakPlatformFile *PakPlatformFile = (FPakPlatformFile *)(FPlatformFileManager::Get().FindPlatformFile( FPakPlatformFile::GetTypeName())); TRefCountPtr PakFile; FPakEntry FileEntry; if (PakPlatformFile != nullptr && PakPlatformFile->FindFileInPakFiles(*fullPath, &PakFile, &FileEntry)) { // we have the file in a pak file // use a custom media data source to read it // n.b. if pak file is uncompressed and the file isn't encrypted we could skip this // step but this should work whatever TSharedRef Archive = MakeShareable(IFileManager::Get().CreateFileReader(*fullPath)); UnrealArchiveFileSource *fs = new UnrealArchiveFileSource(Archive); started = impl->startVideoCustomSource(fs, false); } } } if (started) { PlayState = EMediaState::Stopped; EventSink.ReceiveMediaEvent(EMediaEvent::TracksChanged); EventSink.ReceiveMediaEvent(EMediaEvent::MediaOpened); UE_LOGFMT(LogDirectVideo, Verbose, "Opening {0}", *Url); SeekIndex = 0; Seeking = false; } else { // couldn't open UE_LOGFMT(LogDirectVideo, Error, "Can't find video file {0}", *Url); } return started; } bool FAndroidVulkanMediaPlayer::Open(const TSharedRef &Archive, const FString &OriginalUrl, const IMediaOptions *Options) { UE_LOGFMT(LogDirectVideo, Error, "Opening archive {0} not supported", *OriginalUrl); return false; } void FAndroidVulkanMediaPlayer::SetGuid(const FGuid &Guid) { PlayerGUID = Guid; } void FAndroidVulkanMediaPlayer::TickFetch(FTimespan DeltaTime, FTimespan Timecode) { UE_LOGFMT(LogDirectVideo, VeryVerbose, "TickFetch"); } void FAndroidVulkanMediaPlayer::TickInput(FTimespan DeltaTime, FTimespan Timecode) { UE_LOGFMT(LogDirectVideo, VeryVerbose, "TickInput start"); /* int64_t presTimeNs = impl->getTimeNS(); SampleQueue->SetLastTimeStamp( FMediaTimeStamp(DeltaTime, #if UE_VERSION_OLDER_THAN(5, 3, 0) static_cast(SeekIndex) << 
                        32));
    #else
                        FMediaTimeStamp::MakeSequenceIndex(SeekIndex, 0)));
    #endif
    */
    HasVideoThisFrame = false;
    // give finished samples back to the pool
    VideoSamplePool.Tick();
    if (!SentBlankFrame && impl->numVideoTracks() == 0)
    {
        // media with no video track: queue a single blank frame (once) so the
        // bound media texture has defined contents
        UE_LOGFMT(LogDirectVideo, VeryVerbose, "blank frame");
        SentBlankFrame = true;
        auto textureSample = VideoSamplePool.AcquireShared();
        textureSample->InitNoVideo();
        SampleQueue->AddVideo(textureSample);
    }
    UE_LOGFMT(LogDirectVideo, VeryVerbose, "TickInput dt:{0} tc {1} num {2}", DeltaTime.GetTicks(),
              Timecode.GetTicks(), SampleQueue->NumVideoSamples());
}

// Feature flags telling the media facade how to drive this player.
bool FAndroidVulkanMediaPlayer::GetPlayerFeatureFlag(EFeatureFlag flag) const
{
    switch (flag)
    {
    case EFeatureFlag::PlayerUsesInternalFlushOnSeek:
        return true;
    case EFeatureFlag::AlwaysPullNewestVideoFrame:
        return true;
    case EFeatureFlag::UsePlaybackTimingV2:
        return true;
    case EFeatureFlag::IsTrackSwitchSeamless:
        return true;
    case EFeatureFlag::UseRealtimeWithVideoOnly:
        UE_LOGFMT(LogDirectVideo, VeryVerbose, "Feature flag query");
        return true;
    default:
        return false;
    }
}

// IMediaControl
// --------------------------------

// Report which transport controls are valid for the current play state.
bool FAndroidVulkanMediaPlayer::CanControl(EMediaControl Control) const
{
    switch (Control)
    {
    case EMediaControl::Pause:
        return PlayState == EMediaState::Playing;
    case EMediaControl::Resume:
        return PlayState == EMediaState::Stopped;
    case EMediaControl::Seek:
        return PlayState != EMediaState::Closed;
    }
    return false;
}

// Media duration; MaxValue when unknown or when the player is closed.
FTimespan FAndroidVulkanMediaPlayer::GetDuration() const
{
    int64_t timeNS = impl->getDurationNS();
    if (timeNS >= 0 && PlayState != EMediaState::Closed)
    {
        UE_LOGFMT(LogDirectVideo, VeryVerbose, "Get Duration {0}", int64(timeNS));
        // impl reports nanoseconds; FTimespan ticks are 100ns
        return FTimespan(timeNS / 100LL);
    }
    else
    {
        UE_LOGFMT(LogDirectVideo, VeryVerbose, "Get Duration empty");
        return FTimespan::MaxValue();
    }
}

// Current playback rate (0 whenever not actively playing).
float FAndroidVulkanMediaPlayer::GetRate() const
{
    if (PlayState == EMediaState::Playing)
    {
        return impl->getRate();
    }
    else
    {
        return 0.0;
    }
}

EMediaState FAndroidVulkanMediaPlayer::GetState() const
{
    return PlayState;
}

EMediaStatus
FAndroidVulkanMediaPlayer::GetStatus() const { // not supported yet return EMediaStatus::None; } TRangeSet FAndroidVulkanMediaPlayer::GetSupportedRates(EMediaRateThinning Thinning) const { TRangeSet Retval; if (impl->numAudioTracks() > 0) { Retval.Add(TRange(0.0f)); Retval.Add(TRange(1.0f)); } else { Retval.Add(TRange(TRange::BoundsType::Inclusive(0.0f), TRange::BoundsType::Inclusive(10.0f))); } return Retval; } FTimespan FAndroidVulkanMediaPlayer::GetTime() const { int64_t timeNS = impl->getTimeNS(); UE_LOGFMT(LogDirectVideo, VeryVerbose, "Get Time {0}", int64(timeNS)); if (timeNS >= 0) { return FTimespan(timeNS / 100LL); } return FTimespan::Zero(); } bool FAndroidVulkanMediaPlayer::IsLooping() const { return Looping; } bool FAndroidVulkanMediaPlayer::Seek(const FTimespan &Time) { UE_LOGFMT(LogDirectVideo, Verbose, "Seek index:{0}", SeekIndex); Seeking = true; int64_t ticks = Time.GetTicks(); int64_t nanoseconds = ticks * 100LL; impl->seek(nanoseconds); return true; } bool FAndroidVulkanMediaPlayer::SetLooping(bool Loop) { Looping = Loop; impl->setLooping(Looping); return true; } bool FAndroidVulkanMediaPlayer::SetRate(float Rate) { bool retval = false; int iInitialState = (int)PlayState; switch (PlayState) { case EMediaState::Playing: if (Rate == 0.0) { impl->setPlaying(false); PlayState = EMediaState::Stopped; EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackSuspended); retval = true; } else { return impl->setRate(Rate); retval = true; } break; case EMediaState::Stopped: if (Rate == 0.0) { retval = true; } else if (Rate > 0.0) { impl->setPlaying(true); PlayState = EMediaState::Playing; retval = impl->setRate(Rate); if (retval) { EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackResumed); } } break; }; int iFinalState = (int)PlayState; UE_LOGFMT(LogDirectVideo, Verbose, "Set rate {0} {1} state:{2}->{3}", Rate, retval, iInitialState, iFinalState); // return false; return retval; } bool FAndroidVulkanMediaPlayer::SetNativeVolume(float Volume) { return 
AudioOut->setVolume(Volume); } // IMediaTracks // -------------------------------- bool FAndroidVulkanMediaPlayer::GetAudioTrackFormat(int32 TrackIndex, int32 FormatIndex, FMediaAudioTrackFormat &OutFormat) const { if (FormatIndex != 0 || PlayState == EMediaState::Closed) { return false; } int32 bitsPerSample; int32 channels; int32 rate; if (!impl->getAudioTrackFormat(TrackIndex, &bitsPerSample, &channels, &rate)) { return false; } OutFormat.BitsPerSample = bitsPerSample; OutFormat.NumChannels = channels; OutFormat.SampleRate = rate; OutFormat.TypeName = TEXT("Native"); UE_LOGFMT(LogDirectVideo, VeryVerbose, "Get audio trackformat {0} {1} {2} {3}", TrackIndex, bitsPerSample, channels, rate); return true; } int32 FAndroidVulkanMediaPlayer::GetNumTracks(EMediaTrackType TrackType) const { UE_LOGFMT(LogDirectVideo, VeryVerbose, "Get num tracks type: {0}", int(TrackType)); // TODO: support audio / video only switch (TrackType) { case EMediaTrackType::Audio: return impl->numAudioTracks(); case EMediaTrackType::Video: return impl->numVideoTracks(); default: return 0; } } int32 FAndroidVulkanMediaPlayer::GetNumTrackFormats(EMediaTrackType TrackType, int32 TrackIndex) const { if (TrackIndex >= GetNumTracks(TrackType)) { return 0; } switch (TrackType) { case EMediaTrackType::Audio: return 1; case EMediaTrackType::Video: return 1; default: return 0; } } int32 FAndroidVulkanMediaPlayer::GetSelectedTrack(EMediaTrackType TrackType) const { if (TrackType == EMediaTrackType::Audio) { UE_LOGFMT(LogDirectVideo, VeryVerbose, "Returning no selected audio track"); return INDEX_NONE; } UE_LOGFMT(LogDirectVideo, VeryVerbose, "Returning selected track type {0}", int(TrackType)); return 0; } FText FAndroidVulkanMediaPlayer::GetTrackDisplayName(EMediaTrackType TrackType, int32 TrackIndex) const { // TODO: pass through display names return FText::Format(LOCTEXT("TrackName", "Track {0} type {1}"), (int)TrackType, TrackIndex); } int32 FAndroidVulkanMediaPlayer::GetTrackFormat(EMediaTrackType 
TrackType, int32 TrackIndex) const { return 0; } FString FAndroidVulkanMediaPlayer::GetTrackLanguage(EMediaTrackType TrackType, int32 TrackIndex) const { // TODO - pass language through return TEXT(""); } FString FAndroidVulkanMediaPlayer::GetTrackName(EMediaTrackType TrackType, int32 TrackIndex) const { if (TrackIndex == 0 && PlayState != EMediaState::Closed) { return TEXT("TRACK"); } else { return TEXT(""); } } bool FAndroidVulkanMediaPlayer::GetVideoTrackFormat(int32 TrackIndex, int32 FormatIndex, FMediaVideoTrackFormat &OutFormat) const { if (FormatIndex != 0 || PlayState == EMediaState::Closed) { return false; } int w = 0, h = 0; float frameRate = 0; if (!impl->getVideoTrackFormat(TrackIndex, &w, &h, &frameRate)) { return false; } OutFormat.Dim.X = w; OutFormat.Dim.Y = h; OutFormat.FrameRate = frameRate; OutFormat.FrameRates = TRange(frameRate); OutFormat.TypeName = TEXT("Vulkan Video Frame"); UE_LOGFMT(LogDirectVideo, VeryVerbose, "Got out video format"); return true; } bool FAndroidVulkanMediaPlayer::SelectTrack(EMediaTrackType TrackType, int32 TrackIndex) { if (TrackType == EMediaTrackType::Audio) { UE_LOGFMT(LogDirectVideo, VeryVerbose, "Not selecting audio track"); return false; } UE_LOGFMT(LogDirectVideo, VeryVerbose, "Selecting track type {0} = {0}", int(TrackType), TrackIndex); return true; // return TrackIndex == 0 && PlayState != EMediaState::Closed; } bool FAndroidVulkanMediaPlayer::SetTrackFormat(EMediaTrackType TrackType, int32 TrackIndex, int32 FormatIndex) { // todo: support multiple formats / track return TrackIndex == 0 && FormatIndex == 0 && PlayState != EMediaState::Closed; } IMediaSamples &FAndroidVulkanMediaPlayer::GetSamples() { return *SampleQueue.Get(); } void FAndroidVulkanMediaPlayer::onVideoFrame(void *frameHwBuffer, int w, int h, int64_t presTimeNs) { if (PlayState == EMediaState::Closed) { UE_LOGFMT(LogDirectVideo, VeryVerbose, "Releasing frame as we are closed"); impl->releaseFrame(frameHwBuffer); return; } if (HasVideoThisFrame) 
{ impl->releaseFrame(frameHwBuffer); return; } HasVideoThisFrame = true; auto textureSample = VideoSamplePool.AcquireShared(); textureSample->Init(impl, frameHwBuffer, w, h, FMediaTimeStamp(FTimespan(presTimeNs / 100LL), FMediaTimeStamp::MakeSequenceIndex(SeekIndex, 0))); SampleQueue->AddVideo(textureSample); UE_LOGFMT(LogDirectVideo, VeryVerbose, "Add video sample"); // UE_LOGFMT(LogDirectVideo, VeryVerbose, "Video samples {0}", // SampleQueue->NumVideoSamples()); // NB: this needs to happen after the sample is in the queue, so that // the queue has something in when media player facade samples it for playback // time if (Seeking) { SeekIndex += 1; UE_LOGFMT(LogDirectVideo, Verbose, "Seek completed {0}", SeekIndex); Seeking = false; EventSink.ReceiveMediaEvent(EMediaEvent::SeekCompleted); } } void FAndroidVulkanMediaPlayer::OnEnterBackground() { // going into backgroud - if playing, pause implementation player if (PlayState == EMediaState::Playing) { impl->setPlaying(false); EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackSuspended); UE_LOGFMT(LogDirectVideo, Verbose, "Enter suspend"); } } void FAndroidVulkanMediaPlayer::OnEnterForeground() { // back into foreground - if playing, start if (PlayState == EMediaState::Playing) { impl->setPlaying(true); EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackResumed); UE_LOGFMT(LogDirectVideo, Verbose, "Enter foreground"); } } void FAndroidVulkanMediaPlayer::ProcessVideoSamples() { UE_LOGFMT(LogDirectVideo, VeryVerbose, "Process video samples"); } void *FAndroidVulkanMediaPlayer::getVkDeviceProcAddr(const char *name) { void *result = static_cast( (static_cast(GDynamicRHI))->RHIGetVkDeviceProcAddr(name)); return result; } VkDevice FAndroidVulkanMediaPlayer::getVkDevice() { IVulkanDynamicRHI *rhi = static_cast(GDynamicRHI); if (rhi == NULL) { return VK_NULL_HANDLE; } return rhi->RHIGetVkDevice(); } const VkAllocationCallbacks *FAndroidVulkanMediaPlayer::getVkAllocationCallbacks() { IVulkanDynamicRHI *rhi = 
static_cast(GDynamicRHI); if (rhi == NULL) { return NULL; } return rhi->RHIGetVkAllocationCallbacks(); } VkPhysicalDevice FAndroidVulkanMediaPlayer::getNativePhysicalDevice() { return static_cast(GDynamicRHI->RHIGetNativePhysicalDevice()); } void FAndroidVulkanMediaPlayer::onPlaybackEnd(bool looping) { EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackEndReached); if (!looping) { PlayState = EMediaState::Stopped; EventSink.ReceiveMediaEvent(EMediaEvent::PlaybackSuspended); } } #undef LOCTEXT_NAMESPACE