Add AV video backend for macOS and iOS

Wojtek Figat
2024-05-22 11:53:46 +02:00
parent 97be8ee8cc
commit 2af4e8fe10
8 changed files with 346 additions and 9 deletions
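The backend drives playback through AVFoundation: each video player wraps an AVPlayer, attaches an AVPlayerItemVideoOutput to its current item, and a task-graph job polls that output every update for newly decoded pixel buffers, copying them into the engine's frame texture. A minimal sketch of that polling pattern, assuming an output configured for 32-bit BGRA (illustrative only; the names below are not part of this commit):

AVPlayer* player = [AVPlayer playerWithURL:url];
NSDictionary* attributes = @{ (NSString*)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
AVPlayerItemVideoOutput* output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes];
[[player currentItem] addOutput:output];
[player play];
// Poll once per engine update (e.g. from the task-graph job):
CMTime now = [player currentTime];
if ([output hasNewPixelBufferForItemTime:now])
{
    CVPixelBufferRef buffer = [output copyPixelBufferForItemTime:now itemTimeForDisplay:nullptr];
    // ... lock the buffer, copy rows into the engine texture, unlock ...
    CVPixelBufferRelease(buffer);
}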


@@ -0,0 +1,289 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#if VIDEO_API_AV
#include "VideoBackendAV.h"
#include "Engine/Platform/Apple/AppleUtils.h"
#include "Engine/Profiler/ProfilerCPU.h"
#include "Engine/Threading/TaskGraph.h"
#include "Engine/Core/Log.h"
#include "Engine/Engine/Globals.h"
#include <AVFoundation/AVFoundation.h>
#define VIDEO_API_AV_ERROR(api, err) LOG(Warning, "[VideoBackendAV] {} failed with error 0x{:x}", TEXT(#api), (uint64)err)
struct VideoPlayerAV
{
AVPlayer* Player;
AVPlayerItemVideoOutput* Output;
int8 PendingPlay : 1;
int8 PendingPause : 1;
int8 PendingSeek : 1;
TimeSpan SeekTime;
};
namespace AV
{
Array<VideoBackendPlayer*> Players;
TimeSpan ConvertTime(const CMTime& t)
{
return TimeSpan::FromSeconds(t.timescale != 0 ? (t.value / (double)t.timescale) : 0.0);
}
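// Converts engine time into a CMTime with a fixed 100000 timescale (10-microsecond precision); roughly equivalent to CMTimeMakeWithSeconds(t.GetTotalSeconds(), 100000).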
CMTime ConvertTime(const TimeSpan& t)
{
return CMTime{(CMTimeValue)(100000.0 * t.GetTotalSeconds()), (CMTimeScale)100000, kCMTimeFlags_Valid, {}};
}
void UpdatePlayer(int32 index)
{
PROFILE_CPU();
auto& player = *Players[index];
ZoneText(player.DebugUrl, player.DebugUrlLen);
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
// Update format
AVPlayerItem* playerItem = [playerAV.Player currentItem];
if (!playerItem)
return;
if (player.Width == 0)
{
CGSize size = [playerItem presentationSize];
player.Width = player.VideoFrameWidth = size.width;
player.Height = player.VideoFrameHeight = size.height;
NSArray* tracks = [playerItem tracks];
for (NSUInteger i = 0; i < [tracks count]; i++)
{
AVPlayerItemTrack* track = (AVPlayerItemTrack*)[tracks objectAtIndex:i];
AVAssetTrack* assetTrack = track.assetTrack;
NSString* mediaType = assetTrack.mediaType;
if ([mediaType isEqualToString:AVMediaTypeVideo] && playerAV.Output == nullptr)
{
player.FrameRate = assetTrack.nominalFrameRate;
if (player.FrameRate <= 0.0f)
{
CMTime frameDuration = assetTrack.minFrameDuration;
if ((frameDuration.flags & kCMTimeFlags_Valid) != 0)
player.FrameRate = (float)frameDuration.timescale / (float)frameDuration.value;
else
player.FrameRate = 25;
}
CGSize frameSize = assetTrack.naturalSize;
player.Width = player.VideoFrameWidth = frameSize.width;
player.Height = player.VideoFrameHeight = frameSize.height;
CMFormatDescriptionRef desc = (CMFormatDescriptionRef)[assetTrack.formatDescriptions objectAtIndex:0];
CMVideoCodecType codec = CMFormatDescriptionGetMediaSubType(desc);
int32 pixelFormat = kCVPixelFormatType_32BGRA; // TODO: use packed video format
player.Format = PixelFormat::B8G8R8A8_UNorm;
NSMutableDictionary* attributes = [NSMutableDictionary dictionary];
[attributes setObject:[NSNumber numberWithInt: pixelFormat] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithInteger:1] forKey:(NSString*)kCVPixelBufferBytesPerRowAlignmentKey];
playerAV.Output = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:attributes];
[playerItem addOutput: playerAV.Output];
}
else if ([mediaType isEqualToString:AVMediaTypeAudio])
{
CMFormatDescriptionRef desc = (CMFormatDescriptionRef)[assetTrack.formatDescriptions objectAtIndex:0];
const AudioStreamBasicDescription* audioDesc = CMAudioFormatDescriptionGetStreamBasicDescription(desc);
player.AudioInfo.SampleRate = audioDesc->mSampleRate;
player.AudioInfo.NumChannels = audioDesc->mChannelsPerFrame;
player.AudioInfo.BitDepth = audioDesc->mBitsPerChannel > 0 ? audioDesc->mBitsPerChannel : 16;
}
}
}
// Wait for the video to be ready
//AVPlayerStatus status = [playerAV.Player status];
//AVPlayerTimeControlStatus timeControlStatus = [playerAV.Player timeControlStatus];
if (playerAV.Output == nullptr)
return;
// Control playback
if (playerAV.PendingPlay)
{
playerAV.PendingPlay = 0;
[playerAV.Player play];
}
else if (playerAV.PendingPause)
{
playerAV.PendingPause = 0;
[playerAV.Player pause];
}
if (playerAV.PendingSeek)
{
playerAV.PendingSeek = 0;
[playerAV.Player seekToTime:AV::ConvertTime(playerAV.SeekTime)];
//[playerAV.Player seekToTime:time toleranceBefore:time toleranceAfter:time];
}
// Check if there is a new video frame to process
CMTime currentTime = [playerAV.Player currentTime];
if (playerAV.Output && [playerAV.Output hasNewPixelBufferForItemTime: currentTime])
{
CVPixelBufferRef buffer = [playerAV.Output copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nullptr];
if (buffer)
{
const int32 bufferWidth = CVPixelBufferGetWidth(buffer);
const int32 bufferHeight = CVPixelBufferGetHeight(buffer);
const int32 bufferStride = CVPixelBufferGetBytesPerRow(buffer);
const int32 bufferSize = bufferStride * bufferHeight;
// TODO: use Metal Texture Cache for faster GPU-based video processing
if (CVPixelBufferLockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly) == kCVReturnSuccess)
{
uint8* bufferData = (uint8*)CVPixelBufferGetBaseAddress(buffer);
player.UpdateVideoFrame(Span<byte>(bufferData, bufferSize), ConvertTime(currentTime), TimeSpan::FromSeconds(1.0f / player.FrameRate));
CVPixelBufferUnlockBaseAddress(buffer, kCVPixelBufferLock_ReadOnly);
}
CVPixelBufferRelease(buffer);
}
}
player.Tick();
}
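// Note: a possible shape for the Metal Texture Cache TODO above (illustrative sketch only, not part of
// this commit; assumes a CVMetalTextureCacheRef created once against the device used by the GPU backend,
// with 'buffer' being the locked CVPixelBufferRef from UpdatePlayer):
//   CVMetalTextureRef texture = nullptr;
//   CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, buffer, nullptr,
//       MTLPixelFormatBGRA8Unorm, bufferWidth, bufferHeight, 0, &texture);
//   id<MTLTexture> mtlTexture = CVMetalTextureGetTexture(texture);
//   // ... bind mtlTexture directly instead of copying pixels through UpdateVideoFrame() ...
//   CVBufferRelease(texture);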
}
bool VideoBackendAV::Player_Create(const VideoBackendPlayerInfo& info, VideoBackendPlayer& player)
{
PROFILE_CPU();
player = VideoBackendPlayer();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
// Load media
NSURL* url;
if (info.Url.StartsWith(TEXT("http"), StringSearchCase::IgnoreCase))
{
url = [NSURL URLWithString:(NSString*)AppleUtils::ToString(info.Url)];
}
else
{
#if PLATFORM_MAC
if (info.Url.StartsWith(TEXT("Content/"), StringSearchCase::CaseSensitive))
url = [NSURL fileURLWithPath:(NSString*)AppleUtils::ToString(Globals::ProjectFolder / info.Url) isDirectory:NO];
else
url = [NSURL fileURLWithPath:(NSString*)AppleUtils::ToString(info.Url) isDirectory:NO];
#else
url = [NSURL fileURLWithPath:(NSString*)AppleUtils::ToString(StringUtils::GetFileName(info.Url)) isDirectory:NO];
#endif
}
playerAV.Player = [AVPlayer playerWithURL:url];
if (playerAV.Player == nullptr)
{
return true;
}
[playerAV.Player retain];
// Configure player
//[playerAV.Player addObserver:playerStatusObserver.get() forKeyPath:"status" options:NSKeyValueObservingOptionNew context:&player];
playerAV.Player.actionAtItemEnd = info.Loop ? AVPlayerActionAtItemEndNone : AVPlayerActionAtItemEndPause;
[playerAV.Player setVolume: info.Volume];
// Setup player data
player.Backend = this;
player.Created(info);
AV::Players.Add(&player);
return false;
}
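// Note: the commented-out status observer above would typically be backed by a small NSObject subclass
// implementing key-value observing (illustrative sketch only, not part of this commit):
//   @interface PlayerStatusObserver : NSObject
//   @end
//   @implementation PlayerStatusObserver
//   - (void)observeValueForKeyPath:(NSString*)keyPath ofObject:(id)object change:(NSDictionary*)change context:(void*)context
//   {
//       if ([keyPath isEqualToString:@"status"])
//       {
//           // e.g. flag the VideoBackendPlayer passed via 'context' as ready or failed
//       }
//   }
//   @end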
void VideoBackendAV::Player_Destroy(VideoBackendPlayer& player)
{
PROFILE_CPU();
player.ReleaseResources();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
if (playerAV.PendingPause)
[playerAV.Player pause];
if (playerAV.Output)
[playerAV.Output release];
[playerAV.Player release];
AV::Players.Remove(&player);
player = VideoBackendPlayer();
}
void VideoBackendAV::Player_UpdateInfo(VideoBackendPlayer& player, const VideoBackendPlayerInfo& info)
{
PROFILE_CPU();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
playerAV.Player.actionAtItemEnd = info.Loop ? AVPlayerActionAtItemEndNone : AVPlayerActionAtItemEndPause;
// TODO: spatial audio
// TODO: audio pan
[playerAV.Player setVolume: info.Volume];
player.Updated(info);
}
void VideoBackendAV::Player_Play(VideoBackendPlayer& player)
{
PROFILE_CPU();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
playerAV.PendingPlay = true;
playerAV.PendingPause = false;
player.PlayAudio();
}
void VideoBackendAV::Player_Pause(VideoBackendPlayer& player)
{
PROFILE_CPU();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
playerAV.PendingPlay = false;
playerAV.PendingPause = true;
player.PauseAudio();
}
void VideoBackendAV::Player_Stop(VideoBackendPlayer& player)
{
PROFILE_CPU();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
playerAV.PendingPlay = false;
playerAV.PendingPause = true;
playerAV.PendingSeek = true;
playerAV.SeekTime = TimeSpan::Zero();
player.StopAudio();
}
void VideoBackendAV::Player_Seek(VideoBackendPlayer& player, TimeSpan time)
{
PROFILE_CPU();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
playerAV.PendingSeek = true;
playerAV.SeekTime = time;
}
TimeSpan VideoBackendAV::Player_GetTime(const VideoBackendPlayer& player)
{
PROFILE_CPU();
auto& playerAV = player.GetBackendState<VideoPlayerAV>();
if (playerAV.PendingSeek)
return playerAV.SeekTime;
return AV::ConvertTime([playerAV.Player currentTime]);
}
const Char* VideoBackendAV::Base_Name()
{
return TEXT("AVFoundation");
}
bool VideoBackendAV::Base_Init()
{
return false;
}
void VideoBackendAV::Base_Update(TaskGraph* graph)
{
// Schedule work to update all videos in async
Function<void(int32)> job;
job.Bind(AV::UpdatePlayer);
graph->DispatchJob(job, AV::Players.Count());
}
void VideoBackendAV::Base_Dispose()
{
}
#endif


@@ -0,0 +1,30 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#pragma once
#if VIDEO_API_AV
#include "../VideoBackend.h"
/// <summary>
/// The AVFoundation video backend.
/// </summary>
class VideoBackendAV : public VideoBackend
{
public:
// [VideoBackend]
bool Player_Create(const VideoBackendPlayerInfo& info, VideoBackendPlayer& player) override;
void Player_Destroy(VideoBackendPlayer& player) override;
void Player_UpdateInfo(VideoBackendPlayer& player, const VideoBackendPlayerInfo& info) override;
void Player_Play(VideoBackendPlayer& player) override;
void Player_Pause(VideoBackendPlayer& player) override;
void Player_Stop(VideoBackendPlayer& player) override;
void Player_Seek(VideoBackendPlayer& player, TimeSpan time) override;
TimeSpan Player_GetTime(const VideoBackendPlayer& player) override;
const Char* Base_Name() override;
bool Base_Init() override;
void Base_Update(TaskGraph* graph) override;
void Base_Dispose() override;
};
#endif


@@ -574,7 +574,7 @@ bool VideoBackendAndroid::Base_Init()
void VideoBackendAndroid::Base_Update(TaskGraph* graph)
{
// Schedule work to update all videos models in async
// Schedule work to update all videos in async
Function<void(int32)> job;
job.Bind(Android::UpdatePlayer);
graph->DispatchJob(job, Android::Players.Count());


@@ -582,7 +582,7 @@ bool VideoBackendMF::Base_Init()
void VideoBackendMF::Base_Update(TaskGraph* graph)
{
// Schedule work to update all videos models in async
// Schedule work to update all videos in async
Function<void(int32)> job;
job.Bind(MF::UpdatePlayer);
graph->DispatchJob(job, MF::Players.Count());


@@ -34,6 +34,12 @@ public class Video : EngineModule
options.OutputFiles.Add("mfreadwrite.lib");
options.OutputFiles.Add("mfuuid.lib");
break;
case TargetPlatform.Mac:
case TargetPlatform.iOS:
// AVFoundation
options.SourcePaths.Add(Path.Combine(FolderPath, "AV"));
options.CompileEnv.PreprocessorDefinitions.Add("VIDEO_API_AV");
break;
case TargetPlatform.PS4:
options.SourcePaths.Add(Path.Combine(Globals.EngineRoot, "Source", "Platforms", "PS4", "Engine", "Video"));
options.CompileEnv.PreprocessorDefinitions.Add("VIDEO_API_PS4");


@@ -22,6 +22,9 @@
#if VIDEO_API_MF
#include "MF/VideoBackendMF.h"
#endif
#if VIDEO_API_AV
#include "AV/VideoBackendAV.h"
#endif
#if VIDEO_API_ANDROID
#include "Android/VideoBackendAndroid.h"
#endif
@@ -109,13 +112,17 @@ protected:
context->GPU->SetState(pso);
context->GPU->DrawFullscreenTriangle();
}
else
else if (frame->Format() == _player->Format)
{
// Raw texture data upload
uint32 rowPitch, slicePitch;
frame->ComputePitch(0, rowPitch, slicePitch);
context->GPU->UpdateTexture(frame, 0, 0, _player->VideoFrameMemory.Get(), rowPitch, slicePitch);
}
else
{
LOG(Warning, "Incorrect video player data format {} for player texture format {}", ScriptingEnum::ToString(_player->Format), ScriptingEnum::ToString(_player->Frame->Format()));
}
// Frame has been updated
_player->FramesCount++;
@@ -161,7 +168,6 @@ public:
}
bool Init() override;
void Update() override;
void Dispose() override;
};
@@ -187,11 +193,6 @@ bool VideoService::Init()
return false;
}
void VideoService::Update()
{
PROFILE_CPU_NAMED("Video.Update");
}
void VideoService::Dispose()
{
PROFILE_CPU_NAMED("Video.Dispose");
@@ -223,6 +224,9 @@ bool Video::CreatePlayerBackend(const VideoBackendPlayerInfo& info, VideoBackend
#if VIDEO_API_MF
TRY_USE_BACKEND(VideoBackendMF);
#endif
#if VIDEO_API_AV
TRY_USE_BACKEND(VideoBackendAV);
#endif
#if VIDEO_API_ANDROID
TRY_USE_BACKEND(VideoBackendAndroid);
#endif
@@ -335,6 +339,8 @@ void VideoBackendPlayer::UpdateVideoFrame(Span<byte> data, TimeSpan time, TimeSp
// Update output frame texture
InitVideoFrame();
auto desc = GPUTextureDescription::New2D(Width, Height, PixelFormat::R8G8B8A8_UNorm, GPUTextureFlags::ShaderResource | GPUTextureFlags::RenderTarget);
if (!PixelFormatExtensions::IsVideo(Format))
desc.Format = Format; // Use raw format reported by the backend (e.g. BGRA)
if (Frame->GetDescription() != desc)
{
if (Frame->Init(desc))


@@ -45,10 +45,13 @@ namespace Flax.Build.Platforms
options.LinkEnv.InputLibraries.Add("bz2");
options.LinkEnv.InputLibraries.Add("CoreFoundation.framework");
options.LinkEnv.InputLibraries.Add("CoreGraphics.framework");
options.LinkEnv.InputLibraries.Add("CoreMedia.framework");
options.LinkEnv.InputLibraries.Add("CoreVideo.framework");
options.LinkEnv.InputLibraries.Add("SystemConfiguration.framework");
options.LinkEnv.InputLibraries.Add("IOKit.framework");
options.LinkEnv.InputLibraries.Add("Cocoa.framework");
options.LinkEnv.InputLibraries.Add("QuartzCore.framework");
options.LinkEnv.InputLibraries.Add("AVFoundation.framework");
}
protected override void AddArgsCommon(BuildOptions options, List<string> args)


@@ -47,10 +47,13 @@ namespace Flax.Build.Platforms
options.LinkEnv.InputLibraries.Add("Foundation.framework");
options.LinkEnv.InputLibraries.Add("CoreFoundation.framework");
options.LinkEnv.InputLibraries.Add("CoreGraphics.framework");
options.LinkEnv.InputLibraries.Add("CoreMedia.framework");
options.LinkEnv.InputLibraries.Add("CoreVideo.framework");
options.LinkEnv.InputLibraries.Add("SystemConfiguration.framework");
options.LinkEnv.InputLibraries.Add("IOKit.framework");
options.LinkEnv.InputLibraries.Add("UIKit.framework");
options.LinkEnv.InputLibraries.Add("QuartzCore.framework");
options.LinkEnv.InputLibraries.Add("AVFoundation.framework");
}
protected override void AddArgsCommon(BuildOptions options, List<string> args)