Add Video module

This commit is contained in:
Wojtek Figat
2024-05-01 01:25:16 +02:00
parent 8a45dda98c
commit 0d8c9f6626
22 changed files with 1699 additions and 4 deletions

View File

@@ -0,0 +1,28 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.

using FlaxEngine;

namespace FlaxEditor.Content
{
    /// <summary>
    /// Content item that contains video media file.
    /// </summary>
    /// <seealso cref="FlaxEditor.Content.FileItem" />
    public sealed class VideoItem : FileItem
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="VideoItem"/> class.
        /// </summary>
        /// <param name="path">The file path.</param>
        public VideoItem(string path)
        : base(path)
        {
        }

        /// <inheritdoc />
        public override string TypeDescription => "Video";

        /// <inheritdoc />
        // TODO confirm: color-wheel icon looks like a placeholder until a dedicated video thumbnail exists
        public override SpriteHandle DefaultThumbnail => Editor.Instance.Icons.ColorWheel128;
    }
}

View File

@@ -0,0 +1,48 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.

using FlaxEditor.Windows;
using FlaxEditor.Windows.Assets;
using FlaxEngine;

namespace FlaxEditor.Content
{
    /// <summary>
    /// A video media file proxy object.
    /// </summary>
    public class VideoProxy : ContentProxy
    {
        // File extension handled by this proxy instance (one proxy per registered extension).
        private readonly string _extension;

        internal VideoProxy(string extension)
        {
            _extension = extension;
        }

        /// <inheritdoc />
        public override string Name => "Video";

        /// <inheritdoc />
        public override string FileExtension => _extension;

        /// <inheritdoc />
        public override Color AccentColor => Color.FromRGB(0x11f7f1);

        /// <inheritdoc />
        public override bool IsProxyFor(ContentItem item) => item is VideoItem;

        /// <inheritdoc />
        public override ContentItem ConstructItem(string path) => new VideoItem(path);

        /// <inheritdoc />
        public override EditorWindow Open(Editor editor, ContentItem item) => new VideoWindow(editor, (VideoItem)item);
    }
}

View File

@@ -25,6 +25,7 @@ namespace FlaxEditor.CustomEditors.Editors
new OptionType("Linear Gradient", typeof(LinearGradientBrush)),
new OptionType("Texture 9-Slicing", typeof(Texture9SlicingBrush)),
new OptionType("Sprite 9-Slicing", typeof(Sprite9SlicingBrush)),
new OptionType("Video", typeof(VideoBrush)),
};
}
}

View File

@@ -1090,6 +1090,7 @@ namespace FlaxEditor.Modules
Proxy.Add(new VisualScriptProxy());
Proxy.Add(new BehaviorTreeProxy());
Proxy.Add(new LocalizedStringTableProxy());
Proxy.Add(new VideoProxy("mp4"));
Proxy.Add(new WidgetProxy());
Proxy.Add(new FileProxy());
Proxy.Add(new SpawnableJsonAssetProxy<PhysicalMaterial>());

View File

@@ -21,6 +21,7 @@
#include "Engine/Level/Actors/Sky.h"
#include "Engine/Level/Actors/SkyLight.h"
#include "Engine/Level/Actors/SpotLight.h"
#include "Engine/Video/VideoPlayer.h"
#define ICON_RADIUS 7.0f
@@ -283,6 +284,7 @@ bool ViewportIconsRendererService::Init()
MAP_TYPE(Sky, Skybox);
MAP_TYPE(SkyLight, SkyLight);
MAP_TYPE(SpotLight, PointLight);
MAP_TYPE(VideoPlayer, SceneAnimationPlayer);
#undef MAP_TYPE
return false;

View File

@@ -0,0 +1,232 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.

using FlaxEditor.Content;
using FlaxEngine;
using FlaxEngine.GUI;

namespace FlaxEditor.Windows.Assets
{
    /// <summary>
    /// Editor window to view video media.
    /// </summary>
    public sealed class VideoWindow : EditorWindow, IContentItemOwner
    {
        private VideoItem _item;
        private Image _frame;
        private VideoPlayer _videoPlayer;
        private Image _seekBegin, _seekEnd, _seekLeft, _seekRight, _playPause, _stop;

        /// <inheritdoc />
        public VideoWindow(Editor editor, VideoItem item)
        : base(editor, false, ScrollBars.None)
        {
            _item = item;
            _item.AddReference(this);
            Title = _item.ShortName;

            // Setup video player (used only as a frame source for the preview brush; destroyed with the window)
            _videoPlayer = new VideoPlayer
            {
                PlayOnStart = false,
                Url = item.Path,
            };

            // Setup UI
            var style = Style.Current;
            var icons = Editor.Icons;
            var playbackButtonsSize = 24.0f;
            var playbackButtonsMouseOverColor = Color.FromBgra(0xFFBBBBBB);
            _frame = new Image
            {
                Brush = new VideoBrush(_videoPlayer),
                AnchorPreset = AnchorPresets.StretchAll,
                Offsets = new Margin(0.0f, 0.0f, 0.0f, playbackButtonsSize),
                Parent = this,
            };
            var playbackButtonsArea = new ContainerControl
            {
                AutoFocus = false,
                ClipChildren = false,
                BackgroundColor = style.LightBackground,
                AnchorPreset = AnchorPresets.HorizontalStretchBottom,
                Offsets = new Margin(0, 0, -playbackButtonsSize, playbackButtonsSize),
                Parent = this
            };
            var playbackButtonsPanel = new ContainerControl
            {
                AutoFocus = false,
                ClipChildren = false,
                AnchorPreset = AnchorPresets.VerticalStretchCenter,
                Offsets = Margin.Zero,
                Parent = playbackButtonsArea,
            };
            // Buttons are laid out left-to-right; the panel grows by one button size per control and is centered at the end
            _seekBegin = new Image(playbackButtonsPanel.Width, 0, playbackButtonsSize, playbackButtonsSize)
            {
                TooltipText = "Rewind to timeline start (Home)",
                Brush = new SpriteBrush(icons.Skip64),
                MouseOverColor = playbackButtonsMouseOverColor,
                Rotation = 180.0f, // Reuse the "skip" icon flipped for seeking back
                Parent = playbackButtonsPanel
            };
            _seekBegin.Clicked += (image, button) => SeekBegin();
            playbackButtonsPanel.Width += playbackButtonsSize;
            _seekLeft = new Image(playbackButtonsPanel.Width, 0, playbackButtonsSize, playbackButtonsSize)
            {
                TooltipText = "Move one frame back (Left Arrow)",
                Brush = new SpriteBrush(icons.Left32),
                MouseOverColor = playbackButtonsMouseOverColor,
                Parent = playbackButtonsPanel
            };
            _seekLeft.Clicked += (image, button) => SeekLeft();
            playbackButtonsPanel.Width += playbackButtonsSize;
            _stop = new Image(playbackButtonsPanel.Width, 0, playbackButtonsSize, playbackButtonsSize)
            {
                TooltipText = "Stop playback",
                Brush = new SpriteBrush(icons.Stop64),
                MouseOverColor = playbackButtonsMouseOverColor,
                Parent = playbackButtonsPanel
            };
            _stop.Clicked += (image, button) => Stop();
            playbackButtonsPanel.Width += playbackButtonsSize;
            _playPause = new Image(playbackButtonsPanel.Width, 0, playbackButtonsSize, playbackButtonsSize)
            {
                TooltipText = "Play/pause playback (Space)",
                Brush = new SpriteBrush(icons.Play64),
                MouseOverColor = playbackButtonsMouseOverColor,
                Parent = playbackButtonsPanel
            };
            _playPause.Clicked += (image, button) => PlayPause();
            playbackButtonsPanel.Width += playbackButtonsSize;
            _seekRight = new Image(playbackButtonsPanel.Width, 0, playbackButtonsSize, playbackButtonsSize)
            {
                TooltipText = "Move one frame forward (Right Arrow)",
                Brush = new SpriteBrush(icons.Right32),
                MouseOverColor = playbackButtonsMouseOverColor,
                Parent = playbackButtonsPanel
            };
            _seekRight.Clicked += (image, button) => SeekRight();
            playbackButtonsPanel.Width += playbackButtonsSize;
            _seekEnd = new Image(playbackButtonsPanel.Width, 0, playbackButtonsSize, playbackButtonsSize)
            {
                TooltipText = "Rewind to timeline end (End)",
                Brush = new SpriteBrush(icons.Skip64),
                MouseOverColor = playbackButtonsMouseOverColor,
                Parent = playbackButtonsPanel
            };
            _seekEnd.Clicked += (image, button) => SeekEnd();
            playbackButtonsPanel.Width += playbackButtonsSize;
            playbackButtonsPanel.X = (playbackButtonsPanel.Parent.Width - playbackButtonsPanel.Width) * 0.5f;
        }

        // Toggles between playing and paused states.
        private void PlayPause()
        {
            if (_videoPlayer.State == VideoPlayer.States.Playing)
                _videoPlayer.Pause();
            else
                _videoPlayer.Play();
        }

        // Stops playback and rewinds to the beginning.
        private void Stop()
        {
            _videoPlayer.Stop();
        }

        // Seeks to the start of the media.
        private void SeekBegin()
        {
            _videoPlayer.Time = 0.0f;
        }

        // Seeks to the end of the media.
        private void SeekEnd()
        {
            _videoPlayer.Time = _videoPlayer.Duration;
        }

        // Steps a single frame back (paused only; guard against unknown frame rate to avoid division by zero).
        private void SeekLeft()
        {
            var fps = _videoPlayer.FrameRate;
            if (_videoPlayer.State == VideoPlayer.States.Paused && fps > 0)
                _videoPlayer.Time -= 1.0f / fps;
        }

        // Steps a single frame forward (paused only; guard against unknown frame rate to avoid division by zero).
        private void SeekRight()
        {
            var fps = _videoPlayer.FrameRate;
            if (_videoPlayer.State == VideoPlayer.States.Paused && fps > 0)
                _videoPlayer.Time += 1.0f / fps;
        }

        /// <inheritdoc />
        public override bool OnKeyDown(KeyboardKeys key)
        {
            if (base.OnKeyDown(key))
                return true;
            switch (key)
            {
            case KeyboardKeys.ArrowLeft:
                SeekLeft();
                return true;
            case KeyboardKeys.ArrowRight:
                SeekRight();
                return true;
            case KeyboardKeys.Home:
                SeekBegin();
                return true;
            case KeyboardKeys.End:
                SeekEnd();
                return true;
            case KeyboardKeys.Spacebar:
                PlayPause();
                return true;
            }
            return false;
        }

        /// <inheritdoc />
        public override void Update(float deltaTime)
        {
            base.Update(deltaTime);

            // Update UI to reflect the current playback state
            var state = _videoPlayer.State;
            var icons = Editor.Icons;
            _stop.Enabled = state != VideoPlayer.States.Stopped;
            _seekLeft.Enabled = _seekRight.Enabled = state != VideoPlayer.States.Playing;
            ((SpriteBrush)_playPause.Brush).Sprite = state == VideoPlayer.States.Playing ? icons.Pause64 : icons.Play64;
        }

        /// <inheritdoc />
        public override void OnDestroy()
        {
            if (IsDisposing)
                return;

            // Fix: destroy the video player object so the native playback resources get released with the window
            _videoPlayer.Stop();
            Object.Destroy(ref _videoPlayer);

            _item.RemoveReference(this);
            _item = null;

            base.OnDestroy();
        }

        /// <inheritdoc />
        public void OnItemDeleted(ContentItem item)
        {
            if (item == _item)
                Close();
        }

        /// <inheritdoc />
        public void OnItemRenamed(ContentItem item)
        {
        }

        /// <inheritdoc />
        public void OnItemReimported(ContentItem item)
        {
        }

        /// <inheritdoc />
        public void OnItemDispose(ContentItem item)
        {
            if (item == _item)
                Close();
        }
    }
}

View File

@@ -17,6 +17,7 @@ public class Engine : EngineModule
options.PublicDependencies.Add("AI");
options.PublicDependencies.Add("Animations");
options.PublicDependencies.Add("Audio");
options.PublicDependencies.Add("Video");
options.PublicDependencies.Add("Content");
options.PublicDependencies.Add("Debug");
options.PublicDependencies.Add("Foliage");

View File

@@ -34,8 +34,11 @@ GPUTasksContext::~GPUTasksContext()
for (int32 i = 0; i < tasks.Count(); i++)
{
auto task = tasks[i];
LOG(Warning, "{0} has been canceled before a sync", task->ToString());
tasks[i]->CancelSync();
if (task->GetSyncPoint() <= _currentSyncPoint && task->GetState() != TaskState::Finished)
{
LOG(Warning, "{0} has been canceled before a sync", task->ToString());
task->CancelSync();
}
}
#if GPU_TASKS_USE_DEDICATED_CONTEXT

View File

@@ -296,6 +296,7 @@ struct GPUDevice::PrivateData
AssetReference<Shader> QuadShader;
GPUPipelineState* PS_CopyLinear = nullptr;
GPUPipelineState* PS_Clear = nullptr;
GPUPipelineState* PS_DecodeYUY2 = nullptr;
GPUBuffer* FullscreenTriangleVB = nullptr;
AssetReference<Material> DefaultMaterial;
SoftAssetReference<Material> DefaultDeformableMaterial;
@@ -489,6 +490,7 @@ void GPUDevice::preDispose()
_res->DefaultBlackTexture = nullptr;
SAFE_DELETE_GPU_RESOURCE(_res->PS_CopyLinear);
SAFE_DELETE_GPU_RESOURCE(_res->PS_Clear);
SAFE_DELETE_GPU_RESOURCE(_res->PS_DecodeYUY2);
SAFE_DELETE_GPU_RESOURCE(_res->FullscreenTriangleVB);
Locker.Unlock();
@@ -701,6 +703,18 @@ GPUPipelineState* GPUDevice::GetClearPS() const
return _res->PS_Clear;
}
GPUPipelineState* GPUDevice::GetDecodeYUY2PS() const
{
    // Lazily create the YUY2->RGBA decode pipeline on first use (matches GetClearPS/GetCopyLinearPS pattern)
    if (_res->PS_DecodeYUY2 == nullptr)
    {
        auto psDesc = GPUPipelineState::Description::DefaultFullscreenTriangle;
        // Fix: QuadShader lives in PrivateData (see GPUDevice::PrivateData), so access it via _res
        psDesc.PS = _res->QuadShader->GetPS("PS_DecodeYUY2");
        _res->PS_DecodeYUY2 = const_cast<GPUDevice*>(this)->CreatePipelineState();
        _res->PS_DecodeYUY2->Init(psDesc);
    }
    return _res->PS_DecodeYUY2;
}
GPUBuffer* GPUDevice::GetFullscreenTriangleVB() const
{
return _res->FullscreenTriangleVB;

View File

@@ -270,6 +270,11 @@ public:
/// </summary>
GPUPipelineState* GetClearPS() const;
/// <summary>
/// Gets the shader pipeline state object for YUY2 frame decoding to RGBA.
/// </summary>
GPUPipelineState* GetDecodeYUY2PS() const;
/// <summary>
/// Gets the fullscreen-triangle vertex buffer.
/// </summary>

View File

@@ -17,7 +17,7 @@ namespace FlaxEngine.GUI
/// <summary>
/// The texture sampling filter mode.
/// </summary>
[ExpandGroups, Tooltip("The texture sampling filter mode.")]
[ExpandGroups]
public BrushFilter Filter = BrushFilter.Linear;
/// <summary>
@@ -37,7 +37,7 @@ namespace FlaxEngine.GUI
}
/// <inheritdoc />
public Float2 Size => Texture != null ? Texture.Size : Float2.Zero;
public Float2 Size => Texture != null ? Texture.Size : Float2.One;
/// <inheritdoc />
public void Draw(Rectangle rect, Color color)

View File

@@ -0,0 +1,61 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.

namespace FlaxEngine.GUI
{
    /// <summary>
    /// Implementation of <see cref="IBrush"/> for <see cref="FlaxEngine.VideoPlayer"/> frame displaying.
    /// </summary>
    /// <seealso cref="IBrush" />
    public sealed class VideoBrush : IBrush
    {
        /// <summary>
        /// The video player to display frame from it.
        /// </summary>
        public VideoPlayer Player;

        /// <summary>
        /// The texture sampling filter mode.
        /// </summary>
        [ExpandGroups]
        public BrushFilter Filter = BrushFilter.Linear;

        /// <summary>
        /// Initializes a new instance of the <see cref="VideoBrush"/> class.
        /// </summary>
        public VideoBrush()
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="VideoBrush"/> class.
        /// </summary>
        /// <param name="player">The video player to preview.</param>
        public VideoBrush(VideoPlayer player)
        {
            Player = player;
        }

        /// <inheritdoc />
        public Float2 Size
        {
            get
            {
                if (Player && Player.Size.LengthSquared > 0)
                    return (Float2)Player.Size;
                // Fallback size before the player reports valid media dimensions
                return new Float2(1920, 1080);
            }
        }

        /// <inheritdoc />
        public void Draw(Rectangle rect, Color color)
        {
            var texture = Player?.Frame;
            if (texture == null || !texture.IsAllocated)
                texture = GPUDevice.Instance.DefaultBlackTexture; // No frame yet - draw black
            if (Filter == BrushFilter.Point)
                Render2D.DrawTexturePoint(texture, rect, color);
            else
                Render2D.DrawTexture(texture, rect, color);
        }
    }
}

View File

@@ -0,0 +1,489 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#if VIDEO_API_MF
#include "VideoBackendMF.h"
#include "Engine/Profiler/ProfilerCPU.h"
#include "Engine/Core/Log.h"
#include "Engine/Engine/Time.h"
#include "Engine/Audio/Types.h"
#if USE_EDITOR
#include "Editor/Editor.h"
#endif
#include <mfapi.h>
#include <mfidl.h>
#include <mfreadwrite.h>
#define VIDEO_API_MF_ERROR(api, err) LOG(Warning, "[VideoBackendMF] {} failed with error 0x{:x}", TEXT(#api), (uint64)err)
struct VideoPlayerMF
{
IMFSourceReader* SourceReader;
uint8 Loop : 1;
uint8 Playing : 1;
uint8 FirstFrame : 1;
uint8 Seek : 1;
TimeSpan Time;
};
namespace
{
    // Players registered in this backend; iterated on update (see Base_Update).
    Array<VideoBackendPlayer*> Players;

    // Queries the media type of the given stream and fills the player description
    // (size, frame rate, pixel format, audio info). Returns true on failure.
    bool Configure(VideoBackendPlayer& player, VideoPlayerMF& playerMF, DWORD streamIndex)
    {
        PROFILE_CPU_NAMED("Configure");
        IMFMediaType *mediaType = nullptr, *nativeType = nullptr;
        bool result = true;

        // Find the native format of the stream
        // NOTE(review): always queries the first video stream even when configuring audio - confirm intent
        HRESULT hr = playerMF.SourceReader->GetNativeMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, MF_SOURCE_READER_CURRENT_TYPE_INDEX, &nativeType);
        if (FAILED(hr))
        {
            VIDEO_API_MF_ERROR(GetNativeMediaType, hr);
            goto END;
        }
        hr = playerMF.SourceReader->GetCurrentMediaType(streamIndex, &mediaType);
        if (FAILED(hr))
        {
            VIDEO_API_MF_ERROR(GetCurrentMediaType, hr);
            goto END;
        }
        GUID majorType, subtype;
        hr = mediaType->GetGUID(MF_MT_MAJOR_TYPE, &majorType);
        if (FAILED(hr))
        {
            VIDEO_API_MF_ERROR(GetGUID, hr);
            goto END;
        }
        hr = mediaType->GetGUID(MF_MT_SUBTYPE, &subtype);
        if (FAILED(hr))
        {
            VIDEO_API_MF_ERROR(GetGUID, hr);
            goto END;
        }

        // Extract media information
        if (majorType == MFMediaType_Video)
        {
            UINT32 width = 0, height = 0; // Fix: zero-init (reused below even if the size query failed)
            hr = MFGetAttributeSize(mediaType, MF_MT_FRAME_SIZE, &width, &height);
            if (SUCCEEDED(hr))
            {
                player.Width = player.VideoFrameWidth = width;
                player.Height = player.VideoFrameHeight = height;
            }
            MFVideoArea videoArea;
            hr = mediaType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&videoArea, sizeof(MFVideoArea), NULL);
            if (SUCCEEDED(hr) && videoArea.Area.cx > 0 && videoArea.Area.cy > 0)
            {
                // Video frame has different size in memory than for display (eg. 1080p video will use 1088 height due to H264 decoding)
                player.Width = videoArea.Area.cx;
                player.Height = videoArea.Area.cy;
            }
            player.AvgBitRate = MFGetAttributeUINT32(mediaType, MF_MT_AVG_BITRATE, 0);
            uint64_t fpsValue;
            hr = mediaType->GetUINT64(MF_MT_FRAME_RATE, &fpsValue);
            if (SUCCEEDED(hr))
            {
                // Frame rate is packed as numerator (high 32 bits) / denominator (low 32 bits)
                player.FrameRate = (float)HI32(fpsValue) / (float)LO32(fpsValue);
            }
            if (subtype == MFVideoFormat_RGB32)
                player.Format = PixelFormat::B8G8R8X8_UNorm;
            else if (subtype == MFVideoFormat_ARGB32)
                player.Format = PixelFormat::B8G8R8A8_UNorm;
            else if (subtype == MFVideoFormat_RGB565) // Fix: was a duplicated RGB555 check (565 maps to B5G6R5)
                player.Format = PixelFormat::B5G6R5_UNorm;
            else if (subtype == MFVideoFormat_RGB555)
                player.Format = PixelFormat::B5G5R5A1_UNorm;
            else if (subtype == MFVideoFormat_YUY2)
                player.Format = PixelFormat::YUY2;
#if (WDK_NTDDI_VERSION >= NTDDI_WIN10)
            else if (subtype == MFVideoFormat_A2R10G10B10)
                player.Format = PixelFormat::R10G10B10A2_UNorm;
            else if (subtype == MFVideoFormat_A16B16G16R16F)
                player.Format = PixelFormat::R16G16B16A16_Float;
#endif
            else
            {
                // Reconfigure decoder to output supported format by force
                IMFMediaType* customType = nullptr;
                hr = MFCreateMediaType(&customType);
                if (FAILED(hr))
                {
                    VIDEO_API_MF_ERROR(MFCreateMediaType, hr);
                    goto END;
                }
                customType->SetGUID(MF_MT_MAJOR_TYPE, majorType);
                customType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2);
                MFSetAttributeSize(customType, MF_MT_FRAME_SIZE, width, height);
                hr = playerMF.SourceReader->SetCurrentMediaType(streamIndex, nullptr, customType);
                customType->Release(); // Fix: release on both success and failure (was leaked on failure)
                if (FAILED(hr))
                {
                    VIDEO_API_MF_ERROR(SetCurrentMediaType, hr);
                    goto END;
                }
                player.Format = PixelFormat::YUY2;
            }
        }
        else if (majorType == MFMediaType_Audio)
        {
            player.AudioInfo.SampleRate = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_SAMPLES_PER_SECOND, 0);
            player.AudioInfo.NumChannels = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_NUM_CHANNELS, 0);
            player.AudioInfo.BitDepth = MFGetAttributeUINT32(mediaType, MF_MT_AUDIO_BITS_PER_SAMPLE, 16);
        }
        result = false;

    END:
        SAFE_RELEASE(mediaType);
        SAFE_RELEASE(nativeType); // Fix: nativeType was never released (COM reference leak)
        return result;
    }
}
// Creates a Media Foundation source reader for the given URL and fills the player state.
// Returns true on failure.
bool VideoBackendMF::Player_Create(const VideoBackendPlayerInfo& info, VideoBackendPlayer& player)
{
    PROFILE_CPU();
    player = VideoBackendPlayer();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();

    // Load media
    IMFAttributes* attributes = nullptr;
    HRESULT hr = MFCreateAttributes(&attributes, 1);
    if (FAILED(hr))
    {
        VIDEO_API_MF_ERROR(MFCreateAttributes, hr);
        return true;
    }
    attributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, 1);
    attributes->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, 1);
    IMFSourceReader* sourceReader = nullptr;
    hr = MFCreateSourceReaderFromURL(*info.Url, attributes, &sourceReader);
    attributes->Release();
    if (FAILED(hr) || !sourceReader)
    {
        VIDEO_API_MF_ERROR(MFCreateSourceReaderFromURL, hr);
        return true;
    }
    sourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 1);
    sourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, 1);
    playerMF.SourceReader = sourceReader;

    // Read media info
    if (Configure(player, playerMF, MF_SOURCE_READER_FIRST_VIDEO_STREAM) ||
        Configure(player, playerMF, MF_SOURCE_READER_FIRST_AUDIO_STREAM))
    {
        // Fix: release the reader on configuration failure (was leaked and left dangling in the state)
        sourceReader->Release();
        playerMF.SourceReader = nullptr;
        return true;
    }
    PROPVARIANT var;
    hr = sourceReader->GetPresentationAttribute(MF_SOURCE_READER_MEDIASOURCE, MF_PD_DURATION, &var);
    if (SUCCEEDED(hr))
    {
        // Duration is reported in 100ns units which matches TimeSpan ticks
        player.Duration.Ticks = var.vt == VT_UI8 ? var.uhVal.QuadPart : 0;
        PropVariantClear(&var);
    }

    // Setup player data
    player.Backend = this;
    playerMF.Loop = info.Loop;
    playerMF.FirstFrame = 1;
    Players.Add(&player);
    return false;
}
// Destroys the player: frees its GPU/frame resources, releases the MF source reader,
// unregisters it from updates and resets the state back to defaults.
void VideoBackendMF::Player_Destroy(VideoBackendPlayer& player)
{
    PROFILE_CPU();
    player.ReleaseResources();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();
    // Release the Media Foundation reader owned by this player
    playerMF.SourceReader->Release();
    Players.Remove(&player);
    // Reset to a default-constructed state so stale backend data cannot be reused
    player = VideoBackendPlayer();
}
// Applies updated player settings from the provided info descriptor.
void VideoBackendMF::Player_UpdateInfo(VideoBackendPlayer& player, const VideoBackendPlayerInfo& info)
{
    PROFILE_CPU();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();
    // Fix: honor the requested looping flag (was hard-coded to true, ignoring the info argument)
    playerMF.Loop = info.Loop;
}
// Starts (or resumes) playback; actual sample decoding happens in Base_Update.
void VideoBackendMF::Player_Play(VideoBackendPlayer& player)
{
    PROFILE_CPU();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();
    playerMF.Playing = 1;
}

// Pauses playback, keeping the current position.
void VideoBackendMF::Player_Pause(VideoBackendPlayer& player)
{
    PROFILE_CPU();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();
    playerMF.Playing = 0;
}

// Stops playback and rewinds to the start; marks a pending seek so the reader repositions on the next update.
void VideoBackendMF::Player_Stop(VideoBackendPlayer& player)
{
    PROFILE_CPU();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();
    playerMF.Time = TimeSpan::Zero();
    playerMF.Playing = 0;
    playerMF.FirstFrame = 1;
    playerMF.Seek = 1;
}

// Requests a seek to the given time; the actual SetCurrentPosition happens in Base_Update.
void VideoBackendMF::Player_Seek(VideoBackendPlayer& player, TimeSpan time)
{
    PROFILE_CPU();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();
    playerMF.Time = time;
    playerMF.Seek = 1;
}

// Returns the current playback position tracked by the backend.
TimeSpan VideoBackendMF::Player_GetTime(const VideoBackendPlayer& player)
{
    PROFILE_CPU();
    auto& playerMF = player.GetBackendState<VideoPlayerMF>();
    return playerMF.Time;
}

// Human-readable backend name (shown in logs).
const Char* VideoBackendMF::Base_Name()
{
    return TEXT("Media Foundation");
}
// Initializes COM and Media Foundation for this process. Returns true on failure.
bool VideoBackendMF::Base_Init()
{
    PROFILE_CPU();

    // Init COM (tolerate the thread already being initialized in a different apartment mode)
    HRESULT hr = CoInitializeEx(0, COINIT_MULTITHREADED);
    if (FAILED(hr) && hr != 0x80010106) // 0x80010106 = Cannot change thread mode after it is set.
    {
        VIDEO_API_MF_ERROR(CoInitializeEx, hr);
        return true;
    }

    // Init Media Foundation (paired with MFShutdown in Base_Dispose)
    hr = MFStartup(MF_VERSION);
    if (FAILED(hr))
    {
        VIDEO_API_MF_ERROR(MFStartup, hr);
        return true;
    }
    return false;
}
void VideoBackendMF::Base_Update()
{
PROFILE_CPU();
// TODO: use async Task Graph to update videos
HRESULT hr;
for (auto* e : Players)
{
auto& player = *e;
auto& playerMF = player.GetBackendState<VideoPlayerMF>();
// Skip paused player
if (!playerMF.Playing && !playerMF.Seek)
continue;
bool useTimeScale = true;
#if USE_EDITOR
if (!Editor::IsPlayMode)
useTimeScale = false;
#endif
TimeSpan dt = useTimeScale ? Time::Update.DeltaTime : Time::Update.UnscaledDeltaTime;
// Update playback time
if (playerMF.FirstFrame)
{
playerMF.FirstFrame = 0;
playerMF.Seek = 1;
}
else if (playerMF.Playing)
{
playerMF.Time += dt;
}
if (playerMF.Time > player.Duration)
{
if (playerMF.Loop)
{
// Loop
playerMF.Time.Ticks %= player.Duration.Ticks;
playerMF.Seek = 1;
}
else
{
// End
playerMF.Time = player.Duration;
}
}
// Update current position
int32 seeks = 0;
SEEK_START:
if (playerMF.Seek)
{
seeks++;
playerMF.Seek = 0;
PROPVARIANT var;
PropVariantInit(&var);
var.vt = VT_I8;
var.hVal.QuadPart = playerMF.Time.Ticks;
PROFILE_CPU_NAMED("SetCurrentPosition");
playerMF.SourceReader->SetCurrentPosition(GUID_NULL, var);
// Note:
// SetCurrentPosition method does not guarantee exact seeking.
// The accuracy of the seek depends on the media content.
// If the media content contains a video stream, the SetCurrentPosition method typically seeks to the nearest key frame before the desired position.
// After seeking, the application should call ReadSample and advance to the desired position.
}
// Check if the current frame is valid (eg. when playing 24fps video at 60fps)
if (player.VideoFrameDuration.Ticks > 0 &&
Math::IsInRange(playerMF.Time, player.VideoFrameTime, player.VideoFrameTime + player.VideoFrameDuration))
{
continue;
}
// Read samples until frame is matching the current time
int32 samplesLeft = 500;
for (; samplesLeft > 0; samplesLeft--)
{
// Read sample
DWORD streamIndex = 0, flags = 0;
LONGLONG samplePos = 0, sampleDuration = 0;
IMFSample* videoSample = nullptr;
{
PROFILE_CPU_NAMED("ReadSample");
hr = playerMF.SourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, &streamIndex, &flags, &samplePos, &videoSample);
if (FAILED(hr))
{
VIDEO_API_MF_ERROR(ReadSample, hr);
break;
}
}
TimeSpan frameTime((int64)samplePos);
TimeSpan franeDuration = player.FrameRate > 0 ? TimeSpan::FromSeconds(1.0 / player.FrameRate) : dt;
if (videoSample && videoSample->GetSampleDuration(&sampleDuration) == S_OK && sampleDuration > 0)
{
franeDuration.Ticks = sampleDuration;
}
//const int32 framesToTime = (playerMF.Time.Ticks - frameTime.Ticks) / franeDuration.Ticks;
const bool isGoodSample = Math::IsInRange(playerMF.Time, frameTime, frameTime + franeDuration);
// Process sample
if (videoSample && isGoodSample)
{
PROFILE_CPU_NAMED("ProcessSample");
// Lock sample buffer memory (try to use 2D buffer for more direct memory access)
IMFMediaBuffer* buffer = nullptr;
IMF2DBuffer* buffer2D = nullptr;
BYTE* bufferData = nullptr;
LONG bufferStride = 0;
if (videoSample->GetBufferByIndex(0, &buffer) == S_OK && buffer->QueryInterface(IID_PPV_ARGS(&buffer2D)) == S_OK)
{
LONG bufferPitch = 0;
hr = buffer2D->Lock2D(&bufferData, &bufferPitch);
if (FAILED(hr))
{
VIDEO_API_MF_ERROR(GetCurrentLength, hr);
goto PROCESS_SAMPLE_END;
}
if (bufferPitch < 0)
bufferPitch = -bufferPitch; // Flipped image
bufferStride = bufferPitch * player.VideoFrameHeight;
}
else
{
if (buffer)
{
buffer->Release();
buffer = nullptr;
}
DWORD bufferLength;
hr = videoSample->ConvertToContiguousBuffer(&buffer);
if (FAILED(hr))
{
VIDEO_API_MF_ERROR(ConvertToContiguousBuffer, hr);
goto PROCESS_SAMPLE_END;
}
hr = buffer->GetCurrentLength(&bufferLength);
if (FAILED(hr))
{
VIDEO_API_MF_ERROR(GetCurrentLength, hr);
goto PROCESS_SAMPLE_END;
}
DWORD bufferMaxLen = 0, bufferCurrentLength = 0;
hr = buffer->Lock(&bufferData, &bufferMaxLen, &bufferCurrentLength);
if (FAILED(hr))
{
VIDEO_API_MF_ERROR(Lock, hr);
goto PROCESS_SAMPLE_END;
}
bufferStride = bufferCurrentLength;
}
// Send pixels to the texture
player.UpdateVideoFrame(Span<byte>(bufferData, bufferStride), frameTime, franeDuration);
// Unlock sample buffer memory
if (buffer2D)
{
hr = buffer2D->Unlock2D();
if (FAILED(hr))
{
VIDEO_API_MF_ERROR(Unlock2D, hr);
}
}
else
{
hr = buffer->Unlock();
if (FAILED(hr))
{
VIDEO_API_MF_ERROR(Unlock, hr);
}
}
PROCESS_SAMPLE_END:
buffer->Release();
}
if (videoSample)
videoSample->Release();
if (flags & MF_SOURCE_READERF_ENDOFSTREAM)
{
// Media ended
break;
}
if (flags & MF_SOURCE_READERF_NATIVEMEDIATYPECHANGED || flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED)
{
// Format/metadata might have changed so update the stream
Configure(player, playerMF, streamIndex);
}
// End loop if got good sample or need to seek back
if (isGoodSample)
break;
}
if (samplesLeft == 0 && seeks < 2)
{
// Failed to pick a valid sample so try again with seeking
playerMF.Seek = 1;
goto SEEK_START;
}
}
}
// Shuts down Media Foundation (counterpart of the MFStartup call in Base_Init).
void VideoBackendMF::Base_Dispose()
{
    PROFILE_CPU();

    // Shutdown
    MFShutdown();
}
#endif

View File

@@ -0,0 +1,30 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#pragma once
#if VIDEO_API_MF
#include "../VideoBackend.h"
/// <summary>
/// The Media Foundation video backend.
/// </summary>
class VideoBackendMF : public VideoBackend
{
public:
    // [VideoBackend]
    // Per-player lifecycle and playback control (implemented in VideoBackendMF.cpp).
    bool Player_Create(const VideoBackendPlayerInfo& info, VideoBackendPlayer& player) override;
    void Player_Destroy(VideoBackendPlayer& player) override;
    void Player_UpdateInfo(VideoBackendPlayer& player, const VideoBackendPlayerInfo& info) override;
    void Player_Play(VideoBackendPlayer& player) override;
    void Player_Pause(VideoBackendPlayer& player) override;
    void Player_Stop(VideoBackendPlayer& player) override;
    void Player_Seek(VideoBackendPlayer& player, TimeSpan time) override;
    TimeSpan Player_GetTime(const VideoBackendPlayer& player) override;
    // Backend-wide setup/teardown and the per-frame update that decodes samples.
    const Char* Base_Name() override;
    bool Base_Init() override;
    void Base_Update() override;
    void Base_Dispose() override;
};
#endif

View File

@@ -0,0 +1,56 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#pragma once
#include "Engine/Core/Types/BaseTypes.h"
#include "Engine/Core/Types/TimeSpan.h"
#include "Engine/Core/Types/DataContainer.h"
#include "Engine/Audio/Types.h"
#include "Engine/Graphics/PixelFormat.h"
class Video;
class VideoPlayer;
class VideoBackend;
struct VideoBackendPlayer;
struct VideoBackendPlayerInfo;
class GPUTexture;
class GPUBuffer;
class GPUPipelineState;
/// <summary>
/// Video player instance created by backend.
/// </summary>
struct VideoBackendPlayer
{
    // Backend that owns this player (set on successful creation).
    VideoBackend* Backend = nullptr;
    // Output texture holding the latest decoded frame.
    GPUTexture* Frame = nullptr;
    // Intermediate buffer used to upload compressed frame data (eg. YUY2) before GPU decode.
    GPUBuffer* FrameUpload = nullptr;
    // Display size, average bitrate and number of frames uploaded so far.
    int32 Width = 0, Height = 0, AvgBitRate = 0, FramesCount = 0;
    // In-memory frame size (may differ from display size, eg. H264 padding).
    int32 VideoFrameWidth = 0, VideoFrameHeight = 0;
    PixelFormat Format = PixelFormat::Unknown;
    float FrameRate = 0.0f;
    TimeSpan Duration = TimeSpan(0);
    // Timestamp and duration of the frame currently held in Frame/VideoFrameMemory.
    TimeSpan VideoFrameTime = TimeSpan(0), VideoFrameDuration = TimeSpan(0);
    AudioDataInfo AudioInfo = {};
    // CPU-side copy of the latest frame pixels pending GPU upload.
    BytesContainer VideoFrameMemory;
    class GPUUploadVideoFrameTask* UploadVideoFrameTask = nullptr;
    // Opaque per-backend storage; reinterpreted via GetBackendState<T> (eg. VideoPlayerMF).
    uintptr BackendState[8] = {};

    // Reinterprets the backend storage as the given state type (size-checked at compile time).
    template<typename T>
    FORCE_INLINE T& GetBackendState()
    {
        static_assert(sizeof(T) <= sizeof(BackendState), "Increase state data to fit per-backend storage.");
        return *(T*)BackendState;
    }

    template<typename T>
    FORCE_INLINE const T& GetBackendState() const
    {
        static_assert(sizeof(T) <= sizeof(BackendState), "Increase state data to fit per-backend storage.");
        return *(const T*)BackendState;
    }

    void InitVideoFrame();
    void UpdateVideoFrame(Span<byte> frame, TimeSpan time, TimeSpan duration);
    void ReleaseResources();
};

View File

@@ -0,0 +1,45 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
using System.Collections.Generic;
using System.IO;
using Flax.Build;
using Flax.Build.NativeCpp;
/// <summary>
/// Video module.
/// </summary>
public class Video : EngineModule
{
    /// <inheritdoc />
    public override void Setup(BuildOptions options)
    {
        base.Setup(options);

        // Only the top-level sources are shared; platform-specific folders are added per-target below
        options.SourcePaths.Clear();
        options.SourceFiles.AddRange(Directory.GetFiles(FolderPath, "*.*", SearchOption.TopDirectoryOnly));

        switch (options.Platform.Target)
        {
        case TargetPlatform.Windows:
        case TargetPlatform.UWP:
        case TargetPlatform.XboxOne:
        case TargetPlatform.XboxScarlett:
            // Media Foundation
            options.SourcePaths.Add(Path.Combine(FolderPath, "MF"));
            options.CompileEnv.PreprocessorDefinitions.Add("VIDEO_API_MF");
            foreach (var lib in new[] { "mf.lib", "mfcore.lib", "mfplat.lib", "mfplay.lib", "mfreadwrite.lib", "mfuuid.lib" })
                options.OutputFiles.Add(lib);
            break;
        }
    }

    /// <inheritdoc />
    public override void GetFilesToDeploy(List<string> files)
    {
        // Ship the public headers of this module
        files.AddRange(Directory.GetFiles(FolderPath, "*.h", SearchOption.TopDirectoryOnly));
    }
}

View File

@@ -0,0 +1,245 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#include "Video.h"
#include "VideoBackend.h"
#include "Engine/Core/Log.h"
#include "Engine/Profiler/ProfilerCPU.h"
#include "Engine/Engine/EngineService.h"
#include "Engine/Graphics/GPUDevice.h"
#include "Engine/Graphics/GPUBuffer.h"
#include "Engine/Graphics/GPUResource.h"
#include "Engine/Graphics/GPUPipelineState.h"
#include "Engine/Graphics/PixelFormatExtensions.h"
#include "Engine/Graphics/RenderTools.h"
#include "Engine/Graphics/Async/GPUTask.h"
#include "Engine/Graphics/Shaders/GPUShader.h"
#include "Engine/Graphics/Textures/GPUTexture.h"
#include "Engine/Scripting/Enums.h"
#if VIDEO_API_MF
#include "MF/VideoBackendMF.h"
#endif
/// <summary>
/// Video frame upload task to the GPU.
/// </summary>
class GPUUploadVideoFrameTask : public GPUTask
{
private:
VideoBackendPlayer* _player;
public:
GPUUploadVideoFrameTask(VideoBackendPlayer* player)
: GPUTask(Type::UploadTexture, 0)
, _player(player)
{
}
public:
// [GPUTask]
bool HasReference(Object* resource) const override
{
return _player && _player->Frame == resource;
}
protected:
// [GPUTask]
// [GPUTask]
// Uploads the player's sampled frame bytes to the GPU; compressed video formats (YUY2) go through
// an intermediate buffer and a decode pixel shader, raw formats are uploaded directly into the texture.
Result run(GPUTasksContext* context) override
{
// Abort when the player got unlinked or there is no sampled frame data to upload
if (!_player || _player->VideoFrameMemory.IsInvalid())
return Result::MissingResources;
GPUTexture* frame = _player->Frame;
if (!frame->IsAllocated())
return Result::MissingResources;
if (PixelFormatExtensions::IsVideo(_player->Format))
{
// Allocate compressed frame uploading texture
if (!_player->FrameUpload)
_player->FrameUpload = GPUDevice::Instance->CreateBuffer(TEXT("VideoFrameUpload"));
// Dynamic raw buffer viewed as R32_UInt so the shader can read packed YUY2 words
auto desc = GPUBufferDescription::Buffer(_player->VideoFrameMemory.Length(), GPUBufferFlags::ShaderResource, PixelFormat::R32_UInt, nullptr, 4, GPUResourceUsage::Dynamic);
// TODO: add support for Transient textures (single frame data upload)
if (_player->FrameUpload->GetDescription() != desc)
{
// (Re)create the upload buffer when the frame memory size changed
if (_player->FrameUpload->Init(desc))
return Result::Failed;
}
// Upload compressed texture data
context->GPU->UpdateBuffer(_player->FrameUpload, _player->VideoFrameMemory.Get(), _player->VideoFrameMemory.Length());
// Decompress data into RGBA texture
auto cb = GPUDevice::Instance->QuadShader->GetCB(0);
QuadShaderData cbData;
// Pass frame dimensions to the decode shader via the Color constant (xy = width, height)
cbData.Color = Float4((float)_player->Width, (float)_player->Height, 0, 0);
context->GPU->UpdateCB(cb, &cbData);
context->GPU->BindCB(0, cb);
context->GPU->SetViewportAndScissors((float)_player->Width, (float)_player->Height);
context->GPU->SetRenderTarget(frame->View());
context->GPU->BindSR(0, _player->FrameUpload->View());
// Only YUY2 decoding is implemented here; other packed video formats would need their own shader
ASSERT_LOW_LAYER(_player->Format == PixelFormat::YUY2);
context->GPU->SetState(GPUDevice::Instance->GetDecodeYUY2PS());
context->GPU->DrawFullscreenTriangle();
}
else
{
// Raw texture data upload
uint32 rowPitch, slicePitch;
frame->ComputePitch(0, rowPitch, slicePitch);
context->GPU->UpdateTexture(frame, 0, 0, _player->VideoFrameMemory.Get(), rowPitch, slicePitch);
}
// Frame has been updated
_player->FramesCount++;
return Result::Ok;
}
// Detaches the task from the owning player before the base class finalizes it.
void OnEnd() override
{
    if (_player != nullptr)
    {
        // Unlink from the player (only if it still points at this task)
        if (_player->UploadVideoFrameTask == this)
            _player->UploadVideoFrameTask = nullptr;
        _player = nullptr;
    }
    GPUTask::OnEnd();
}
};
// Engine service that owns the active video backends and drives their per-frame update/shutdown.
class VideoService : public EngineService
{
public:
VideoService()
: EngineService(TEXT("Video"), -40)
{
}
// Lazily-initialized backend instances, indexed per backend API (see Video::CreatePlayerBackend)
VideoBackend* Backends[4] = {};
// Initializes the given backend and registers it under the given slot index.
// NOTE(review): the backend is registered even when Base_Init() fails (only a warning is logged) —
// callers will still attempt playback on a backend that failed to initialize; confirm this is intended.
void InitBackend(int32 index, VideoBackend* backend)
{
LOG(Info, "Video initialization... (backend: {0})", backend->Base_Name());
if (backend->Base_Init())
{
LOG(Warning, "Failed to initialize Video backend.");
}
Backends[index] = backend;
}
void Update() override;
void Dispose() override;
};
// Global singleton instance of the video service (registered with the engine services list)
VideoService VideoServiceInstance;
// Ticks all initialized video backends once per engine update.
void VideoService::Update()
{
    PROFILE_CPU_NAMED("Video.Update");

    // Update backends (iterate this service's own array rather than going through the global singleton)
    for (VideoBackend*& backend : Backends)
    {
        if (backend)
            backend->Base_Update();
    }
}
// Shuts down and deletes all initialized video backends on engine exit.
void VideoService::Dispose()
{
    PROFILE_CPU_NAMED("Video.Dispose");

    // Dispose backends (mirror InitBackend: give each backend a chance to release
    // its resources via Base_Dispose before deleting it)
    for (VideoBackend*& backend : Backends)
    {
        if (backend)
        {
            backend->Base_Dispose();
            delete backend;
            backend = nullptr;
        }
    }
}
// Creates a backend player for the given source info by probing the available backend APIs in order.
// Each backend gets a stable slot in VideoServiceInstance.Backends and is lazily initialized on first use.
// Returns true on failure (engine convention), false when a backend accepted the player.
bool Video::CreatePlayerBackend(const VideoBackendPlayerInfo& info, VideoBackendPlayer& player)
{
    // Pick the first backend to support the player info
    int32 index = 0;
    VideoBackend* backend;
    // Advance the slot index after every probe so each backend type keeps its own entry
    // (without the increment every TRY_USE_BACKEND expansion would overwrite slot 0)
#define TRY_USE_BACKEND(type) \
    backend = VideoServiceInstance.Backends[index]; \
    if (!backend) \
        VideoServiceInstance.InitBackend(index, backend = new type()); \
    if (!backend->Player_Create(info, player)) \
        return false; \
    index++;
#if VIDEO_API_MF
    TRY_USE_BACKEND(VideoBackendMF);
#endif
#undef TRY_USE_BACKEND
    LOG(Error, "Failed to setup Video playback backend for '{}'", info.Url);
    return true;
}
// Lazily creates the output video frame texture (no-op without an active GPU device).
void VideoBackendPlayer::InitVideoFrame()
{
    if (GPUDevice::Instance == nullptr)
        return;
    if (Frame == nullptr)
        Frame = GPUDevice::Instance->CreateTexture(TEXT("VideoFrame"));
}
// Accepts a freshly sampled video frame from the backend: validates its size, copies it into the
// player-owned staging memory, and schedules a GPU task to upload it into the output texture.
void VideoBackendPlayer::UpdateVideoFrame(Span<byte> frame, TimeSpan time, TimeSpan duration)
{
PROFILE_CPU();
VideoFrameTime = time;
VideoFrameDuration = duration;
// Skip GPU work when rendering is disabled (eg. headless/null device)
if (!GPUDevice::Instance || GPUDevice::Instance->GetRendererType() == RendererType::Null)
return;
// Ensure that sampled frame data matches the target texture size
uint32 rowPitch, slicePitch;
RenderTools::ComputePitch(Format, VideoFrameWidth, VideoFrameHeight, rowPitch, slicePitch);
if (slicePitch != frame.Length())
{
LOG(Warning, "Incorrect video frame stride {}, doesn't match stride {} of video {}x{} in format {}", frame.Length(), slicePitch, Width, Height, ScriptingEnum::ToString(Format));
return;
}
// Copy frame into buffer for video frames uploading
// (grow-only allocation: the buffer is reused across frames and released in ReleaseResources)
if (VideoFrameMemory.Length() < (int32)slicePitch)
{
VideoFrameMemory.Allocate(slicePitch);
if (VideoFrameMemory.IsInvalid())
{
OUT_OF_MEMORY;
return;
}
}
Platform::MemoryCopy(VideoFrameMemory.Get(), frame.Get(), slicePitch);
// Update output frame texture
InitVideoFrame();
// Output is always RGBA8 render target (compressed formats get decoded by a pixel shader)
auto desc = GPUTextureDescription::New2D(Width, Height, PixelFormat::R8G8B8A8_UNorm, GPUTextureFlags::ShaderResource | GPUTextureFlags::RenderTarget);
if (Frame->GetDescription() != desc)
{
if (Frame->Init(desc))
{
LOG(Error, "Failed to allocate video frame texture");
return;
}
}
// Start texture upload task (if not already - only one is needed to upload the latest frame)
if (!UploadVideoFrameTask)
{
UploadVideoFrameTask = New<GPUUploadVideoFrameTask>(this);
UploadVideoFrameTask->Start();
}
}
// Releases player-owned GPU/CPU resources. The pending upload task is cancelled first so it
// cannot touch the frame memory or textures while they are being freed.
void VideoBackendPlayer::ReleaseResources()
{
if (UploadVideoFrameTask)
UploadVideoFrameTask->Cancel();
VideoFrameMemory.Release();
SAFE_DELETE_GPU_RESOURCE(Frame);
SAFE_DELETE_GPU_RESOURCE(FrameUpload);
}

View File

@@ -0,0 +1,14 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#pragma once
#include "Types.h"
/// <summary>
/// The video service used for video media playback.
/// </summary>
class Video
{
public:
    /// <summary>
    /// Creates a backend player for the given source info using the first backend that supports it.
    /// </summary>
    /// <param name="info">The description of the media source (url, looping).</param>
    /// <param name="player">The backend player data to initialize.</param>
    /// <returns>True if failed, otherwise false.</returns>
static bool CreatePlayerBackend(const VideoBackendPlayerInfo& info, VideoBackendPlayer& player);
};

View File

@@ -0,0 +1,43 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#pragma once
#include "Types.h"
#include "Engine/Core/Types/BaseTypes.h"
#include "Engine/Core/Types/StringView.h"
/// <summary>
/// Description for new video player creation by backend.
/// </summary>
struct VideoBackendPlayerInfo
{
// The media source url (local file path or streamed resource)
StringView Url;
// True if playback should restart from the beginning when the media ends
bool Loop;
};
/// <summary>
/// The helper class for that handles active Video backend operations.
/// </summary>
class VideoBackend
{
public:
virtual ~VideoBackend()
{
}
// Player
// Creates a backend player for the given info; returns true on failure (engine convention).
virtual bool Player_Create(const VideoBackendPlayerInfo& info, VideoBackendPlayer& player) = 0;
// Destroys the backend player and frees its backend-side resources.
virtual void Player_Destroy(VideoBackendPlayer& player) = 0;
// Applies updated source info (url, looping) to an existing player.
virtual void Player_UpdateInfo(VideoBackendPlayer& player, const VideoBackendPlayerInfo& info) = 0;
virtual void Player_Play(VideoBackendPlayer& player) = 0;
virtual void Player_Pause(VideoBackendPlayer& player) = 0;
virtual void Player_Stop(VideoBackendPlayer& player) = 0;
// Seeks the playback to the given absolute time position.
virtual void Player_Seek(VideoBackendPlayer& player, TimeSpan time) = 0;
// Gets the current playback position.
virtual TimeSpan Player_GetTime(const VideoBackendPlayer& player) = 0;
// Base
// Display name of the backend (for logging).
virtual const Char* Base_Name() = 0;
// One-time backend initialization; returns true on failure (engine convention).
virtual bool Base_Init() = 0;
// Per-frame backend update.
virtual void Base_Update() = 0;
// Backend shutdown; releases backend-global resources before deletion.
virtual void Base_Dispose() = 0;
};

View File

@@ -0,0 +1,186 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#include "VideoPlayer.h"
#include "Video.h"
#include "VideoBackend.h"
#include "Engine/Core/Log.h"
#include "Engine/Core/Math/Vector2.h"
#if USE_EDITOR
#include "Engine/Engine/Time.h"
#include "Engine/Level/Scene/SceneRendering.h"
#endif
VideoPlayer::VideoPlayer(const SpawnParams& params)
: Actor(params)
{
}
// Stops playback and destroys the backend player so no backend resources outlive the actor.
// NOTE(review): OnDisable() also calls Player_Destroy — assumes Player_Destroy clears _player.Backend
// (or is safe to call twice); confirm against the backend implementation.
VideoPlayer::~VideoPlayer()
{
// Ensure to free player memory
Stop();
if (_player.Backend)
_player.Backend->Player_Destroy(_player);
}
// Changes the looping mode and, when it actually changed, pushes the new settings to the backend player.
void VideoPlayer::SetIsLooping(bool value)
{
    if (_loop != value)
    {
        _loop = value;
        UpdateInfo();
    }
}
// Starts (or resumes) playback. On first use this lazily creates the backend player from the
// current Url and pre-allocates the output frame texture.
void VideoPlayer::Play()
{
auto state = _state;
if (state == States::Playing)
return;
if (!_player.Backend)
{
// Cannot create a backend player without a media source
if (Url.IsEmpty())
{
LOG(Warning, "Cannot play Video source without an url ({0})", GetNamePath());
return;
}
// Create video player
VideoBackendPlayerInfo info;
GetInfo(info);
if (Video::CreatePlayerBackend(info, _player))
return;
// Pre-allocate output video texture
_player.InitVideoFrame();
}
_player.Backend->Player_Play(_player);
_state = States::Playing;
}
// Pauses playback; only a currently-playing video can be paused.
void VideoPlayer::Pause()
{
    if (_state == States::Playing)
    {
        _state = States::Paused;
        if (_player.Backend != nullptr)
            _player.Backend->Player_Pause(_player);
    }
}
// Stops playback and rewinds to the start; no-op when already stopped.
void VideoPlayer::Stop()
{
    if (_state != States::Stopped)
    {
        _state = States::Stopped;
        if (_player.Backend != nullptr)
            _player.Backend->Player_Stop(_player);
    }
}
float VideoPlayer::GetTime() const
{
if (_state == States::Stopped || _player.Backend == nullptr)
return 0.0f;
return _player.Backend->Player_GetTime(_player).GetTotalSeconds();
}
void VideoPlayer::SetTime(float time)
{
if (_state == States::Stopped || _player.Backend == nullptr)
return;
TimeSpan timeSpan = TimeSpan::FromSeconds(time);
timeSpan.Ticks = Math::Clamp<int64>(timeSpan.Ticks, 0, _player.Duration.Ticks);
_player.Backend->Player_Seek(_player, timeSpan);
}
// Media duration in seconds (0 until the backend reads the media info).
float VideoPlayer::GetDuration() const
{
return _player.Duration.GetTotalSeconds();
}
// Media frame rate (frames per second) reported by the backend.
float VideoPlayer::GetFrameRate() const
{
return _player.FrameRate;
}
// Amount of frames decoded and uploaded to the GPU so far.
int32 VideoPlayer::GetFramesCount() const
{
return _player.FramesCount;
}
// Video player actor has no pickable geometry.
bool VideoPlayer::IntersectsItself(const Ray& ray, Real& distance, Vector3& normal)
{
return false;
}
// Video frame dimensions in pixels.
Int2 VideoPlayer::GetSize() const
{
return Int2(_player.Width, _player.Height);
}
// Output frame texture (null until playback starts and the texture gets created).
GPUTexture* VideoPlayer::GetFrame() const
{
return _player.Frame;
}
// Fills the backend player info from the actor's current properties.
void VideoPlayer::GetInfo(VideoBackendPlayerInfo& info) const
{
info.Url = Url;
info.Loop = _loop;
}
// Pushes the actor's current properties down to the backend player (no-op without a backend).
void VideoPlayer::UpdateInfo()
{
    if (_player.Backend == nullptr)
        return;
    VideoBackendPlayerInfo info;
    GetInfo(info);
    _player.Backend->Player_UpdateInfo(_player, info);
}
void VideoPlayer::OnEnable()
{
#if USE_EDITOR
// Show the actor icon in the editor viewport
GetSceneRendering()->AddViewportIcon(this);
#endif
Actor::OnEnable();
}
// Stops playback and destroys the backend player when the actor gets disabled.
// NOTE(review): the destructor also calls Player_Destroy — assumes Player_Destroy clears
// _player.Backend (or is safe to call twice); confirm against the backend implementation.
void VideoPlayer::OnDisable()
{
Stop();
if (_player.Backend)
_player.Backend->Player_Destroy(_player);
#if USE_EDITOR
GetSceneRendering()->RemoveViewportIcon(this);
#endif
Actor::OnDisable();
}
void VideoPlayer::OnTransformChanged()
{
Actor::OnTransformChanged();
// The player has no physical extent: use a point bounds at the actor position
_box = BoundingBox(_transform.Translation);
_sphere = BoundingSphere(_transform.Translation, 0.0f);
}
// Auto-starts playback on level begin when PlayOnStart is enabled, optionally seeking to StartTime.
void VideoPlayer::BeginPlay(SceneBeginData* data)
{
Actor::BeginPlay(data);
// Play on start
if (IsActiveInHierarchy() && PlayOnStart)
{
#if USE_EDITOR
// Don't auto-play while the editor has the game paused
if (Time::GetGamePaused())
return;
#endif
Play();
if (StartTime > 0)
SetTime(StartTime);
}
}

View File

@@ -0,0 +1,162 @@
// Copyright (c) 2012-2024 Wojciech Figat. All rights reserved.
#pragma once
#include "Engine/Level/Actor.h"
#include "Engine/Content/AssetReference.h"
#include "Types.h"
/// <summary>
/// Video playback utility. Video content can be presented in UI (via VideoBrush), used in materials (via texture parameter bind) or used manually in shaders.
/// </summary>
API_CLASS(Attributes="ActorContextMenu(\"New/Visuals/Video Player\"), ActorToolbox(\"Visuals\")")
class FLAXENGINE_API VideoPlayer : public Actor
{
DECLARE_SCENE_OBJECT(VideoPlayer);
API_AUTO_SERIALIZATION();
public:
/// <summary>
/// Valid states in which VideoPlayer can be in.
/// </summary>
API_ENUM() enum class States
{
/// <summary>
/// The video is currently stopped (play will resume from start).
/// </summary>
Stopped = 0,
/// <summary>
/// The video is currently playing.
/// </summary>
Playing = 1,
/// <summary>
/// The video is currently paused (play will resume from paused point).
/// </summary>
Paused = 2,
};
private:
// Backend-side player data (frame texture, duration, backend link)
VideoBackendPlayer _player;
// Current playback state
States _state = States::Stopped;
// Cached looping mode (mirrored into the backend via UpdateInfo)
bool _loop = false;
public:
// Stops playback and destroys the backend player
~VideoPlayer();
/// <summary>
/// The video clip Url path used as a source of the media. Can be local file (absolute or relative path), or streamed resource ('http://').
/// </summary>
API_FIELD(Attributes="EditorOrder(10), DefaultValue(\"\"), EditorDisplay(\"Video Player\")")
String Url;
/// <summary>
/// Determines whether the video clip should loop when it finishes playing.
/// </summary>
API_PROPERTY(Attributes="EditorOrder(20), DefaultValue(false), EditorDisplay(\"Video Player\")")
FORCE_INLINE bool GetIsLooping() const
{
return _loop;
}
/// <summary>
/// Determines whether the video clip should loop when it finishes playing.
/// </summary>
API_PROPERTY() void SetIsLooping(bool value);
/// <summary>
/// Determines whether the video clip should auto play on level start.
/// </summary>
API_FIELD(Attributes="EditorOrder(30), DefaultValue(false), EditorDisplay(\"Video Player\", \"Play On Start\")")
bool PlayOnStart = false;
/// <summary>
/// Determines the time (in seconds) at which the video clip starts playing if Play On Start is enabled.
/// </summary>
API_FIELD(Attributes = "EditorOrder(35), DefaultValue(0.0f), Limit(0, float.MaxValue, 0.01f), EditorDisplay(\"Video Player\"), VisibleIf(nameof(PlayOnStart))")
float StartTime = 0.0f;
public:
/// <summary>
/// Starts playing the currently assigned video Url.
/// </summary>
API_FUNCTION() void Play();
/// <summary>
/// Pauses the video playback.
/// </summary>
API_FUNCTION() void Pause();
/// <summary>
/// Stops video playback, rewinding it to the start.
/// </summary>
API_FUNCTION() void Stop();
/// <summary>
/// Gets the current state of the video playback (playing/paused/stopped).
/// </summary>
API_PROPERTY() FORCE_INLINE VideoPlayer::States GetState() const
{
return _state;
}
/// <summary>
/// Gets the current time of playback. If playback has not yet started, it specifies the time at which playback will start at. The time is in seconds, in range [0, Duration].
/// </summary>
API_PROPERTY(Attributes="HideInEditor, NoSerialize") float GetTime() const;
/// <summary>
/// Sets the current time of playback. If playback has not yet started, it specifies the time at which playback will start at. The time is in seconds, in range [0, Duration].
/// </summary>
/// <param name="time">The time.</param>
API_PROPERTY() void SetTime(float time);
/// <summary>
/// Gets the media duration of playback (in seconds).
/// </summary>
API_PROPERTY() float GetDuration() const;
/// <summary>
/// Gets the media frame rate of playback (amount of frames to be played per second).
/// </summary>
API_PROPERTY() float GetFrameRate() const;
/// <summary>
/// Gets the amount of video frames decoded and send to GPU during playback. Can be used to detect if video has started playback with any visible changes (for video frame texture contents).
/// </summary>
API_PROPERTY() int32 GetFramesCount() const;
/// <summary>
/// Gets the video frame dimensions (in pixels).
/// </summary>
API_PROPERTY() Int2 GetSize() const;
/// <summary>
/// Gets the video frame texture (GPU resource). Created on the playback start. Can be binded to materials and shaders to display the video image.
/// </summary>
API_PROPERTY() GPUTexture* GetFrame() const;
private:
// Fills the backend player info from the actor's current properties
void GetInfo(VideoBackendPlayerInfo& info) const;
// Pushes the current properties down to the backend player (if any)
void UpdateInfo();
public:
// [Actor]
#if USE_EDITOR
BoundingBox GetEditorBox() const override
{
const Vector3 size(50);
return BoundingBox(_transform.Translation - size, _transform.Translation + size);
}
#endif
bool IntersectsItself(const Ray& ray, Real& distance, Vector3& normal) override;
protected:
// [Actor]
void OnEnable() override;
void OnDisable() override;
void OnTransformChanged() override;
void BeginPlay(SceneBeginData* data) override;
};

View File

@@ -82,3 +82,32 @@ float PS_DepthCopy(Quad_VS2PS input) : SV_Depth
}
#endif
#ifdef _PS_DecodeYUY2
// Raw memory with texture of format YUY2 and size passed in Color.xy
Buffer<uint> SourceYUY2 : register(t0);
// Pixel Shader for decoding a packed YUY2 (YUV 4:2:2) video frame into an RGBA render target
META_PS(true, FEATURE_LEVEL_ES2)
float4 PS_DecodeYUY2(Quad_VS2PS input) : SV_Target
{
// Read YUY2 pixel (two horizontal pixels share one 32-bit Y0 U Y1 V word)
uint p = (uint)input.Position.y * (uint)Color.x + (uint)input.Position.x;
uint data = SourceYUY2[p / 2];
// Unpack YUY components
uint v = (data & 0xff000000) >> 24;
uint y1 = (data & 0xff0000) >> 16;
uint u = (data & 0xff00) >> 8;
uint y0 = data & 0x000000FF;
// Even pixels take the first luma sample, odd pixels the second
uint y = p % 2 == 0 ? y0: y1;
// Convert yuv to rgb (BT.601-style coefficients, chroma centered at 128)
float r = (y + 1.402 * (v - 128.0));
float g = (y - 0.344 * (u - 128.0) - 0.714 * (v - 128.0));
float b = (y + 1.772 * (u - 128.0));
return float4(r, g, b, 256.0f) / 256.0f;
}
#endif