Media Foundation (EVR) on Multiple Screens

Asked by: Linus Asked: 10/27/2022 Last edited by: Linus Updated: 11/1/2022 Views: 192

Q:

I'm writing a Windows Forms application (C#/.NET) that uses Media Foundation (more precisely, the EVR) to display the output of a USB camera on screen.

Normally this works fine. However, on a Windows machine with multiple screens, the window showing the camera output turns black when I move it from the primary screen to a secondary screen.

For example, if 60% of the window is on the primary screen and 40% on the secondary screen, the output displays normally. If 60% is on the secondary screen and 40% on the primary screen, the window shows black.
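So the behavior seems to follow whichever screen contains the larger part of the window. For illustration only, that "dominant" screen can be computed with standard WinForms APIs; this hypothetical GetDominantScreen helper is not part of my app, and Screen.FromControl(form) performs the same largest-portion lookup:

using System.Drawing;
using System.Windows.Forms;

// Illustrative only: return the screen containing the largest portion of the
// form, which appears to be where the EVR keeps rendering.
private static Screen GetDominantScreen(Form form)
{
    Screen dominant = Screen.PrimaryScreen;
    int largestArea = 0;
    foreach (Screen s in Screen.AllScreens)
    {
        // Rectangle.Intersect returns an empty rectangle (0x0) if no overlap.
        Rectangle overlap = Rectangle.Intersect(s.Bounds, form.Bounds);
        int area = overlap.Width * overlap.Height;
        if (area > largestArea)
        {
            largestArea = area;
            dominant = s;
        }
    }
    return dominant;
}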

The sample code I use to initialize the EVR engine looks roughly like this:

private IMFActivate pActive = null;
private object objVideoSink = null, objVideoRenderer = null, objVideoDisplayControl = null;
private IMFMediaSink pVideoSink = null;
private IMFVideoRenderer pVideoRenderer = null;
private IMFVideoDisplayControl pVideoDisplayControl = null;
private IMFStreamSink pStreamSink = null;
private IMFMediaTypeHandler pSinkMediaTypeHandler = null;
private int sinkMediaTypeCount = 0;
private IMFMediaType pVideoOutType = null;
private IMFMediaEventGenerator pEventGenerator = null, pstreamSinkEventGenerator = null;
private SecureConference.AV.MediaEventHandler mediaEvtHandler = null, streamSinkMediaEvtHandler = null;
private IMFVideoSampleAllocator pVideoSampleAllocator = null;
private object objVideoSampleAllocator = null, objD3DManager = null;
private IMFSample pD3DVideoSample = null;
private IMFMediaBuffer pDstBuffer = null;
private IMF2DBuffer p2DBuffer = null;
private IMFPresentationClock pClock = null;
private IMFPresentationTimeSource pTimeSource = null;

// Fields referenced below but not declared in the original snippet (types assumed):
private IntPtr bitmapBuffer = IntPtr.Zero;       // pixel data pointer from LockBits
private int miImageWidth = 0, miImageHeight = 0; // current frame size
private long mlStartTimestamp = 0;               // timestamp of the first frame
private long sampleDuration = 0;                 // per-frame duration in 100-ns units
private int bitmapCount = 0;                     // frames submitted so far

private void Init()
{
    // Create the EVR activation object for this window and activate the media sink.
    HResult hr = MFExtern.MFCreateVideoRendererActivate(this.Handle, out pActive);
    hr = pActive.ActivateObject(typeof(IMFMediaSink).GUID, out objVideoSink);
    pVideoSink = (IMFMediaSink)objVideoSink;

    // Query the renderer service and initialize it with the default mixer and presenter.
    hr = MFExtern.MFGetService(
        pVideoSink,
        MFServices.MR_VIDEO_RENDER_SERVICE,
        typeof(IMFVideoRenderer).GUID,
        out objVideoRenderer
        );
    pVideoRenderer = (IMFVideoRenderer)objVideoRenderer;
    hr = pVideoRenderer.InitializeRenderer(null, null);
    
    // Get the display control, attach it to this window, and fill the client area.
    hr = MFExtern.MFGetService(
        pVideoSink,
        MFServices.MR_VIDEO_RENDER_SERVICE,
        typeof(IMFVideoDisplayControl).GUID,
        out objVideoDisplayControl
        );
    pVideoDisplayControl = objVideoDisplayControl as IMFVideoDisplayControl;
    hr = pVideoDisplayControl.SetVideoWindow(this.Handle);
    hr = pVideoDisplayControl.SetVideoPosition(null, new MediaFoundation.Misc.MFRect(0, 0, this.Width, this.Height));
    
    // Grab the first stream sink and configure its media type.
    hr = pVideoSink.GetStreamSinkByIndex(0, out pStreamSink);
    hr = pStreamSink.GetMediaTypeHandler(out pSinkMediaTypeHandler);
    hr = pSinkMediaTypeHandler.GetMediaTypeCount(out sinkMediaTypeCount);

    hr = MFExtern.MFCreateMediaType(out pVideoOutType);
    hr = pVideoOutType.SetGUID(MFAttributesClsid.MF_MT_MAJOR_TYPE, MFMediaType.Video);
    hr = pVideoOutType.SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, MFMediaType.RGB32);
    hr = pVideoOutType.SetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, 2); // 2 == MFVideoInterlace_Progressive
    hr = pVideoOutType.SetUINT32(MFAttributesClsid.MF_MT_ALL_SAMPLES_INDEPENDENT, 1);
    // to be adjusted
    hr = SetMFSize(MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    //hr = SetMFSize(MFAttributesClsid.MF_MT_FRAME_SIZE, this.Width, this.Height);
    hr = SetMFSize(MFAttributesClsid.MF_MT_FRAME_SIZE, miImageWidth, miImageHeight);
    // Equivalent raw UINT64 forms: 4294967297 == (1L << 32) | 1 (PAR 1:1),
    // 2748779069920 == (640L << 32) | 480 (640x480 frame size).
    //hr = pVideoOutType.SetUINT64(MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, 4294967297);
    //hr = pVideoOutType.SetUINT64(MFAttributesClsid.MF_MT_FRAME_SIZE, 2748779069920);

    hr = pSinkMediaTypeHandler.SetCurrentMediaType(pVideoOutType);
    
    // The media sink and stream sink both implement IMFMediaEventGenerator,
    // so subscribe to their events directly.
    mediaEvtHandler = new MediaEventHandler();
    streamSinkMediaEvtHandler = new MediaEventHandler();
    pEventGenerator = (IMFMediaEventGenerator)pVideoSink;
    hr = pEventGenerator.BeginGetEvent(mediaEvtHandler, pEventGenerator);

    pstreamSinkEventGenerator = (IMFMediaEventGenerator)pStreamSink;
    hr = pstreamSinkEventGenerator.BeginGetEvent(streamSinkMediaEvtHandler, pstreamSinkEventGenerator);
    
    // Get the EVR's video sample allocator and the Direct3D device manager,
    // then allocate a single reusable D3D-backed sample.
    hr = MFExtern.MFGetService(
        pStreamSink,
        MFServices.MR_VIDEO_ACCELERATION_SERVICE,
        typeof(IMFVideoSampleAllocator).GUID,
        out objVideoSampleAllocator
        );
    pVideoSampleAllocator = (IMFVideoSampleAllocator)objVideoSampleAllocator;
    hr = MFExtern.MFGetService(
        pVideoSink,
        MFServices.MR_VIDEO_ACCELERATION_SERVICE,
        Guid.Parse("a0cade0f-06d5-4cf4-a1c7-f3cdd725aa75"), // IID_IDirect3DDeviceManager9
        out objD3DManager
        );

    hr = pVideoSampleAllocator.SetDirectXManager(objD3DManager);
    hr = pVideoSampleAllocator.InitializeSampleAllocator(1, pVideoOutType);
    hr = pVideoSampleAllocator.AllocateSample(out pD3DVideoSample);
    hr = pD3DVideoSample.GetBufferByIndex(0, out pDstBuffer);
    p2DBuffer = (IMF2DBuffer)pDstBuffer;
    
    // Create a presentation clock driven by the system time source and start it.
    hr = MFExtern.MFCreatePresentationClock(out pClock);
    hr = MFExtern.MFCreateSystemTimeSource(out pTimeSource);
    hr = pClock.SetTimeSource(pTimeSource);
    hr = pVideoSink.SetPresentationClock(pClock);
    hr = pClock.Start(0);
}
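SetMFSize is a small helper that packs two 32-bit values into a single UINT64 attribute (high DWORD first), matching the commented-out SetUINT64 lines above. A minimal sketch of such a helper, assuming it writes to pVideoOutType; the exact body may differ:

// Sketch of the SetMFSize helper used above (assumed implementation):
// MF size/ratio attributes store the first value in the high DWORD and
// the second value in the low DWORD of a UINT64.
private HResult SetMFSize(Guid attributeKey, int high, int low)
{
    return pVideoOutType.SetUINT64(attributeKey, ((long)high << 32) | (uint)low);
}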

// And once I receive a bitmap (bmp2), I push it into the engine:

private void SetBitmap(Bitmap bmp2, long timestamp)
{
    // Lock the bitmap bits so the raw RGB32 pixels can be copied.
    Rectangle rect = new Rectangle(0, 0, bmp2.Width, bmp2.Height);
    BitmapData bd = bmp2.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
    bitmapBuffer = bd.Scan0;
    int bitmapBufferLength = 4 * bmp2.Width * bmp2.Height; // 4 bytes per pixel
    HResult hr = HResult.S_OK;

    // If the incoming frame size changed, update the media type's frame size.
    if (bmp2.Width != miImageWidth || bmp2.Height != miImageHeight)
    {
        miImageWidth = bmp2.Width;
        miImageHeight = bmp2.Height;
        hr = SetMFSize(MFAttributesClsid.MF_MT_FRAME_SIZE, miImageWidth, miImageHeight);
    }

    // Sample times are relative to the first frame.
    if (mlStartTimestamp == 0)
    {
        mlStartTimestamp = timestamp;
        hr = pD3DVideoSample.SetSampleTime(0);
    }
    else
    {
        hr = pD3DVideoSample.SetSampleTime(timestamp - mlStartTimestamp);
    }
    hr = pD3DVideoSample.SetSampleDuration(sampleDuration);

    // Copy the pixels into the D3D-backed buffer and hand the sample to the sink.
    hr = p2DBuffer.ContiguousCopyFrom(bitmapBuffer, bitmapBufferLength);

    hr = pD3DVideoSample.SetUINT32(MFAttributesClsid.MFSampleExtension_FrameCorruption, 0);
    if (bitmapCount > 0)
    {
        hr = pD3DVideoSample.SetUINT32(MFAttributesClsid.MFSampleExtension_Discontinuity, 0);
    }
    hr = pD3DVideoSample.SetUINT32(MFAttributesClsid.MFSampleExtension_CleanPoint, 1);

    hr = pStreamSink.ProcessSample(pD3DVideoSample);

    bmp2.UnlockBits(bd);
}
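One experiment I considered (not a confirmed fix for the black window) is to re-apply the destination rectangle and force a repaint whenever the form moves or resizes; IMFVideoDisplayControl exposes RepaintVideo() for this kind of refresh. A hypothetical handler, wired to the form's Move and Resize events:

// Hypothetical Move/Resize handler (not from my working code): re-apply the
// destination rectangle and ask the EVR to repaint after the window moves.
private void OnMoveOrResize(object sender, EventArgs e)
{
    if (pVideoDisplayControl != null)
    {
        pVideoDisplayControl.SetVideoPosition(null,
            new MediaFoundation.Misc.MFRect(0, 0, this.Width, this.Height));
        pVideoDisplayControl.RepaintVideo();
    }
}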

How can I fix this? Thanks a lot!

C# Windows DirectShow MS-Media-Foundation directshow.net

Comments

0 votes Linus 10/28/2022
I tried the MFVideoEVRWebCam sample from github.com/sipsorcery/mediafoundationsamples and it has the same problem! My code is similar to that project.
0 votes Roman R. 10/28/2022
Generally speaking, this is by-design behavior: video presentation is only guaranteed on the monitor that hosts the largest portion of the window, or on a monitor you explicitly point to (VMR-7 and VMR-9 support that, the EVR does not). If you need the video to reliably span multiple monitors, you have to split it yourself and present the parts through multiple renderers.
0 votes Javid 11/4/2022
I have exactly the same problem. If I could somehow switch which screen makes the EVR go black, that would solve my problem, but I don't even know why it goes black, let alone how to flip the behavior.
0 votes Javid 11/4/2022
@RomanR. What do you mean by splitting, and how do you configure renderers for different monitors?
1 vote Linus 11/7/2022
@Javid. I haven't solved this yet. I tried displaying the video with a PictureBox instead of the EVR, but some of my other threads are timing-sensitive, and the PictureBox seems to consume enough PC resources to affect them. So I kept the EVR and simply notify the user that only the primary screen works (a sketch of that check follows below).
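A minimal sketch of that check, assuming a WinForms form and an assumed Label named lblWarning; Screen.FromControl returns the screen hosting the largest portion of the control:

// Hypothetical LocationChanged handler implementing the workaround above:
// warn the user whenever the window's dominant screen is not the primary one.
private void MainForm_LocationChanged(object sender, EventArgs e)
{
    bool onPrimary = Screen.FromControl(this).Primary;
    lblWarning.Visible = !onPrimary; // lblWarning: assumed warning Label
}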

A: No answers yet