我遇到了以下问题——我的任务如下:我需要在3D对象上播放流视频(UDP协议上的原始H.264视频)。目前,我正在使用FFmpegInteropX将MediaSource设置给Windows.Media.Playback.MediaPlayer。媒体播放器在帧服务器模式下工作,然后我订阅VideoFrameAvailable事件并将结果帧传输给Unity。
问题是,Hololens2 (UWP)的性能很低,如果使用大于720x720的纹理大小,就无法获得足够的平滑度和较低的延迟。同时,如果我在PC上运行应用程序,我可以顺利地、毫不拖延地播放4096x4096的所有内容。也许有人对如何提高Hololens2的性能有一些想法?
// CPU-side staging bitmap the frame server copies each decoded frame into.
// NOTE(review): a C# instance-field initializer cannot reference another
// instance field (frameServerDest) — this is compile error CS0236; the
// Texture2D creation below must move into a constructor or an init method.
private SoftwareBitmap frameServerDest = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 1024,1024,BitmapAlphaMode.Premultiplied );
private UnityEngine.Texture2D tex = new UnityEngine.Texture2D(frameServerDest.PixelWidth, frameServerDest.PixelHeight, UnityEngine.TextureFormat.RGBA32, false);
/// <summary>
/// Configures FFmpegInteropX for low-latency decoding of a raw H.264 UDP stream
/// and starts a MediaPlayer in frame-server mode; decoded frames are delivered
/// through <see cref="MediaPlayer_VideoFrameAvailable"/>.
/// </summary>
/// <remarks>
/// Kept as async void because it is used as a fire-and-forget initializer;
/// exceptions thrown here are unobservable — consider wrapping the body in
/// try/catch with logging.
/// </remarks>
private async void InitializeMediaPlayer()
{
    FFmpegInteropLogging.SetDefaultLogProvider();
    FFmpegInteropConfig configuration = new FFmpegInteropConfig()
    {
        MaxVideoThreads = 8,
        SkipErrors = uint.MaxValue,
        DefaultBufferTime = TimeSpan.Zero,
        FastSeek = true,
        // Software decoding: per the discussion above, the hardware decoder
        // modes never raise VideoFrameAvailable on HoloLens 2.
        VideoDecoderMode = VideoDecoderMode.ForceFFmpegSoftwareDecoder,
    };
    // Zero-latency options forwarded verbatim to FFmpeg.
    configuration.FFmpegOptions.Add("tune", "zerolatency");
    configuration.FFmpegOptions.Add("flags", "low_delay");
    configuration.FFmpegOptions.Add("fflags", "discardcorrupt+shortest+sortdts+ignidx+nobuffer");
    // BUGFIX: the original snippet was missing the terminating ';' on this statement.
    decoder = await FFmpegInteropMSS.CreateFromUriAsync("udp://127.0.0.1:9005", configuration);
    var mediaStreamSource = decoder.GetMediaStreamSource();
    mediaStreamSource.BufferTime = TimeSpan.Zero; // consistent with DefaultBufferTime above
    Debug.WriteLine($"{decoder.CurrentVideoStream.CodecName} {decoder.CurrentVideoStream.DecoderEngine} {decoder.CurrentVideoStream.HardwareDecoderStatus} {decoder.CurrentVideoStream.PixelWidth} x {decoder.CurrentVideoStream.PixelHeight}");
    // Frame-server mode: frames are pulled manually via VideoFrameAvailable
    // instead of being rendered by the player itself.
    var FrameServer = new Windows.Media.Playback.MediaPlayer() { IsVideoFrameServerEnabled = true };
    FrameServer.Source = MediaSource.CreateFromMediaStreamSource(mediaStreamSource);
    FrameServer.RealTimePlayback = true;
    FrameServer.VideoFrameAvailable += MediaPlayer_VideoFrameAvailable;
    FrameServer.Play();
}
// FrameAvailable handler: copies the decoded frame into a CPU-backed
// CanvasBitmap, reads the pixels back, and uploads them to a Unity Texture2D
// on the app thread.
// NOTE(review): this allocates a CanvasBitmap plus a full pixel byte[] on
// every frame and forces GC.Collect() per frame — both are likely causes of
// the HoloLens 2 GC pressure described above; reuse buffers and drop the
// forced collection.
private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
{
CanvasDevice canvasDevice = CanvasDevice.GetSharedDevice();
using (CanvasBitmap canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDest))
{
// GPU -> CPU copy of the current video frame.
sender.CopyFrameToVideoSurface(canvasBitmap);
byte[] bytes = canvasBitmap.GetPixelBytes();
if (AppCallbacks.Instance.IsInitialized())
{
// Texture upload must happen on Unity's app thread.
AppCallbacks.Instance.InvokeOnAppThread(() =>
{
tex.LoadRawTextureData(bytes);
tex.Apply();
Display.GetComponent<UnityEngine.UI.RawImage>().texture = tex;
}, false);
}
GC.Collect();
}
}我的FFmpeg输出设置ffmpeg -r 60 -f gdigrab -i desktop -f h264 -framerate 60 -vcodec libx264 -preset ultrafast -tune zerolatency -threads 8 -thread_type slice udp://127.0.0.1:9005
更新:你好,我做了一些工作。我所做的如下。
为此,我找到了让d3d11设备由Unity使用的以下方法--为此,我必须使用库SharpDX和类似的线程https://forum.unity.com/threads/d3d11-texture2d-blitting-framerate.562552
但是,还有一些问题我还无法解决:1. FFmpeg只在VideoDecoderMode = VideoDecoderMode.Automatic或VideoDecoderMode.ForceFFmpegSoftwareDecoder模式下工作;2. 在事件处理程序(VideoFrameAvailable)中,垃圾收集器的负载仍然很大,显然这会导致性能问题,而且这种性能问题只在HoloLens 2上出现。在其他的视频解码模式中,流参数能够正确确定,但是VideoFrameAvailable事件从不触发。延迟接近于零,但性能仍然不是很好。
也许有一些想法可以解决Garbage Collector的问题?
// CPU staging bitmap for the frame server (780x780, premultiplied RGBA).
private SoftwareBitmap frameServerDist = new SoftwareBitmap(BitmapPixelFormat.Rgba8, 780, 780,
BitmapAlphaMode.Premultiplied);
private FFmpegInteropMSS decoder;
private UnityEngine.GameObject Display;
private UnityEngine.Texture2D targetTexture;
private UnityEngine.GameObject MainCamera;
// Destination device/context obtained from Unity's native texture pointer.
// NOTE(review): "dstContenxt" is a typo for "dstContext" — harmless but worth renaming.
private SharpDX.Direct3D11.Device dstDevice;
private SharpDX.Direct3D11.DeviceContext dstContenxt;
private SharpDX.Direct3D11.Texture2D m_DstTexture;
// Source device/context created on the decoder side; frames are uploaded here
// and then copied across via a shared resource.
private SharpDX.Direct3D11.Device srcDevice;
private SharpDX.Direct3D11.DeviceContext srcContext;
private static DataRectangle _rect;
// Description for the per-frame upload texture: immutable (initial data only),
// GPU-read-only shader resource, matching the RGBA32 pixel data.
private SharpDX.Direct3D11.Texture2DDescription Texture2DDescription = new SharpDX.Direct3D11.Texture2DDescription()
{
ArraySize = 1,
BindFlags = SharpDX.Direct3D11.BindFlags.ShaderResource,
Usage = SharpDX.Direct3D11.ResourceUsage.Immutable, //GPU Only
CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.None,
Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm,
MipLevels = 1,
OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None,
SampleDescription = new SharpDX.DXGI.SampleDescription()
{
Count = 1,
Quality = 0
}
};// This event occurs when the Unity engine has initialized.
// Wires a SharpDX shared texture between a decoder-side D3D11 device and
// Unity's own device, exposes it to Unity as an external texture, then starts
// the media player.
// NOTE(review): this snippet is scrape-mangled — the method's closing brace is
// missing and the next handler's signature is fused onto the "}, false);" line
// below; restore the braces before compiling.
private void AppCallbacks_Initialized()
{
srcDevice = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware);
srcContext = srcDevice.ImmediateContext;
UnityEngine.WSA.Application.InvokeOnAppThread(() =>
{
Display = UnityEngine.GameObject.Find("Display");
targetTexture = null;
// Create a throwaway Unity texture purely to obtain Unity's native D3D11
// device and immediate context via its native texture pointer.
UnityEngine.Texture2D deviceTexture = new UnityEngine.Texture2D(frameServerDist.PixelWidth, frameServerDist.PixelHeight, UnityEngine.TextureFormat.RGBA32, false);
IntPtr txPtr = deviceTexture.GetNativeTexturePtr();
SharpDX.Direct3D11.Texture2D dstTextureX = new SharpDX.Direct3D11.Texture2D(txPtr);
dstDevice = dstTextureX.Device;
dstContenxt = dstDevice.ImmediateContext;
// Create a shareable texture on Unity's device; the decoder-side device
// opens it through its DXGI shared handle in the frame handler below.
SharpDX.Direct3D11.Texture2DDescription sharedTextureDesc = dstTextureX.Description;
sharedTextureDesc.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.Shared;
m_DstTexture = new SharpDX.Direct3D11.Texture2D(dstDevice, sharedTextureDesc);
SharpDX.Direct3D11.ShaderResourceViewDescription rvdesc = new SharpDX.Direct3D11.ShaderResourceViewDescription
{
Format = sharedTextureDesc.Format,
Dimension = SharpDX.Direct3D.ShaderResourceViewDimension.Texture2D
};
rvdesc.Texture2D.MostDetailedMip = 0; rvdesc.Texture2D.MipLevels = 1;
SharpDX.Direct3D11.ShaderResourceView rvptr = new SharpDX.Direct3D11.ShaderResourceView(
dstDevice,
m_DstTexture, rvdesc);
// Hand the SRV to Unity as an external texture so the RawImage samples it.
targetTexture = UnityEngine.Texture2D.CreateExternalTexture(sharedTextureDesc.Width, sharedTextureDesc.Height, UnityEngine.TextureFormat.BGRA32, false, false, rvptr.NativePointer);
MainCamera = UnityEngine.GameObject.Find("Main Camera");
Display.GetComponent<UnityEngine.UI.RawImage>().texture = targetTexture;
InitializeMediaPlayer();
}, false); private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
{
// Per-frame path: frame -> CanvasBitmap (CPU) -> immutable upload texture ->
// GPU copy into the shared texture Unity displays.
// NOTE(review): still allocates GetPixelBytes()/DataStream/a new Texture2D and
// re-opens the shared resource on every frame — this is the remaining GC and
// CPU->GPU-copy bottleneck discussed above.
canvasBitmap = CanvasBitmap.CreateFromSoftwareBitmap(canvasDevice, frameServerDist);
sender.CopyFrameToVideoSurface(canvasBitmap);
var sharedResourceDst = m_DstTexture.QueryInterface<SharpDX.DXGI.Resource>();
var sharedTexDst = srcDevice.OpenSharedResource<SharpDX.Direct3D11.Texture2D>(sharedResourceDst.SharedHandle);
using (var _stream = DataStream.Create(canvasBitmap.GetPixelBytes(), true, false))
{
_rect.DataPointer = _stream.DataPointer;
_rect.Pitch = Texture2DDescription.Width * 4;
var srcTexture = new SharpDX.Direct3D11.Texture2D(srcDevice, Texture2DDescription, _rect);
srcContext.CopyResource(srcTexture, sharedTexDst);
srcContext.Flush();
sharedResourceDst.Dispose();
sharedTexDst.Dispose();
srcTexture.Dispose();
}
}发布于 2022-03-02 12:54:53
问题出在从CPU到GPU的复制上。SharpDX库允许将帧直接复制到IDirect3DSurface。我附上代码,也许它会有用。Direct3D11帮助程序可在Microsoft文档https://learn.microsoft.com/en-us/windows/uwp/audio-video-camera/screen-capture-video#helper-wrapper-classes中获得。
// Final solution: wrap the Unity-shared D3D11 texture in an IDirect3DSurface
// once at startup, then let CopyFrameToVideoSurface write decoded frames
// straight to it on the GPU — no per-frame CPU readback, no GC pressure.
// NOTE(review): scrape-mangled — the field declaration, both method bodies and
// the trailing URL are fused onto shared lines; restore line breaks before use.
private UnityEngine.GameObject MainCamera;
private UnityEngine.Texture2D targetTexture;
// GPU surface the media player renders into; shared with Unity's texture.
private IDirect3DSurface surface;
private SharpDX.Direct3D11.Device dstDevice;private void AppCallbacks_Initialized()
{
SharpDX.Direct3D11.Device srcDevice = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware);
UnityEngine.WSA.Application.InvokeOnAppThread(() =>
{
Display = UnityEngine.GameObject.Find("Display");
targetTexture = null;
// Throwaway Unity texture used only to reach Unity's native D3D11 device.
UnityEngine.Texture2D deviceTexture = new UnityEngine.Texture2D(2048, 2048, UnityEngine.TextureFormat.RGBA32, false);
IntPtr txPtr = deviceTexture.GetNativeTexturePtr();
SharpDX.Direct3D11.Texture2D dstTexture = new SharpDX.Direct3D11.Texture2D(txPtr);
dstDevice = dstTexture.Device;
// Shareable texture on Unity's device; opened below on the decoder device.
SharpDX.Direct3D11.Texture2DDescription sharedTextureDesc = dstTexture.Description;
sharedTextureDesc.OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.Shared;
SharpDX.Direct3D11.Texture2D m_DstTexture = new SharpDX.Direct3D11.Texture2D(dstDevice, sharedTextureDesc);
SharpDX.Direct3D11.ShaderResourceViewDescription rvdesc = new SharpDX.Direct3D11.ShaderResourceViewDescription
{
Format = sharedTextureDesc.Format,
Dimension = SharpDX.Direct3D.ShaderResourceViewDimension.Texture2D
};
rvdesc.Texture2D.MostDetailedMip = 0;
rvdesc.Texture2D.MipLevels = 1;
SharpDX.Direct3D11.ShaderResourceView rvptr = new SharpDX.Direct3D11.ShaderResourceView(
dstDevice,
m_DstTexture, rvdesc);
// Unity samples the shared texture through this external-texture wrapper.
targetTexture = UnityEngine.Texture2D.CreateExternalTexture(sharedTextureDesc.Width, sharedTextureDesc.Height, UnityEngine.TextureFormat.BGRA32, false, false, rvptr.NativePointer);
MainCamera = UnityEngine.GameObject.Find("Main Camera");
Display.GetComponent<UnityEngine.UI.RawImage>().texture = targetTexture;
// One-time setup: open the shared texture on the decoder device and wrap it
// as an IDirect3DSurface the MediaPlayer can copy frames into directly.
var sharedResourceDst = m_DstTexture.QueryInterface<SharpDX.DXGI.Resource>();
var sharedTexDst = srcDevice.OpenSharedResource<SharpDX.Direct3D11.Texture2D>(sharedResourceDst.SharedHandle);
surface = Direct3D11Helper.CreateDirect3DSurfaceFromSharpDXTexture(sharedTexDst);
sharedResourceDst.Dispose();
sharedTexDst.Dispose();
dstTexture.Dispose();
m_DstTexture.Dispose();
}, false);
InitializeMediaPlayer();
}private void MediaPlayer_VideoFrameAvailable(Windows.Media.Playback.MediaPlayer sender, object args)
{
Debug.WriteLine("frameAvail");
// GPU-to-GPU: the frame lands in the shared surface with no CPU readback.
sender.CopyFrameToVideoSurface(surface);
}https://stackoverflow.com/questions/71154248
复制相似问题