我正在尝试使用Windows Desktop Duplication API来捕获屏幕并将原始输出保存为视频。我正在使用超时值非常高(999ms)的AcquireNextFrame。这样一来,我就应该在windows上一有新的帧就从它那里得到每个新的帧,这自然应该是60fps的。我最终得到了一切看起来都很好的序列(第6-11帧),然后得到了看起来很糟糕的序列(第12-14帧)。如果我选中AccumulatedFrames
lFrameInfo.AccumulatedFrames该值通常为2或更高。据我所知,这意味着windows会说“嘿,等等,我还没有为你准备好帧”,因为调用AcquireNextFrame的时间太长了。但是,一旦windows最终给了我一帧,它就会说“嘿,你实际上太慢了,刚才错过了一帧”。如果我能以某种方式得到这些帧,我想我就能达到60 fps。
这一点可以通过日志进一步阐明:
I0608 10:40:16.964375 4196 window_capturer_dd.cc:438] 206 - Frame 6 start acquire
I0608 10:40:16.973867 4196 window_capturer_dd.cc:451] 216 - Frame 6 acquired
I0608 10:40:16.981364 4196 window_capturer_dd.cc:438] 223 - Frame 7 start acquire
I0608 10:40:16.990864 4196 window_capturer_dd.cc:451] 233 - Frame 7 acquired
I0608 10:40:16.998364 4196 window_capturer_dd.cc:438] 240 - Frame 8 start acquire
I0608 10:40:17.007876 4196 window_capturer_dd.cc:451] 250 - Frame 8 acquired
I0608 10:40:17.015393 4196 window_capturer_dd.cc:438] 257 - Frame 9 start acquire
I0608 10:40:17.023905 4196 window_capturer_dd.cc:451] 266 - Frame 9 acquired
I0608 10:40:17.032411 4196 window_capturer_dd.cc:438] 274 - Frame 10 start acquire
I0608 10:40:17.039912 4196 window_capturer_dd.cc:451] 282 - Frame 10 acquired
I0608 10:40:17.048925 4196 window_capturer_dd.cc:438] 291 - Frame 11 start acquire
I0608 10:40:17.058428 4196 window_capturer_dd.cc:451] 300 - Frame 11 acquired
I0608 10:40:17.065943 4196 window_capturer_dd.cc:438] 308 - Frame 12 start acquire
I0608 10:40:17.096945 4196 window_capturer_dd.cc:451] 336 - Frame 12 acquired
I0608 10:40:17.098947 4196 window_capturer_dd.cc:464] 1 FRAMES MISSED on frame: 12
I0608 10:40:17.101444 4196 window_capturer_dd.cc:438] 343 - Frame 13 start acquire
I0608 10:40:17.128958 4196 window_capturer_dd.cc:451] 368 - Frame 13 acquired
I0608 10:40:17.130957 4196 window_capturer_dd.cc:464] 1 FRAMES MISSED on frame: 13
I0608 10:40:17.135459 4196 window_capturer_dd.cc:438] 377 - Frame 14 start acquire
I0608 10:40:17.160959 4196 window_capturer_dd.cc:451] 399 - Frame 14 acquired
I0608 10:40:17.162958 4196 window_capturer_dd.cc:464] 1 FRAMES MISSED on frame: 14帧6-11看起来很好,相邻两次获取大约相隔17ms。第12帧应在(300+17=317ms)处获取。第12帧在308开始等待,但直到336ms才得到任何内容。直到(300+17+17~=336ms)之后的那一帧,Windows才有任何东西给我。好吧,当然,也许windows刚刚错过了一帧,但当我最终得到它时,我可以检查AccumulatedFrames,它的值是2(这意味着我错过了一帧,因为我在调用AcquireNextFrame之前等待了太久)。按照我的理解,只有当AcquireNextFrame立即返回时,AccumulatedFrames的值大于1才说得通。
此外,我可以在我的捕获软件运行时使用PresentMon。日志显示每一帧的MsBetweenDisplayChange,相当稳定在16.666ms (有几个异常值,但比我的捕获软件看到的要少得多)。
这些人(1,2)似乎已经能够达到60fps,所以我想知道我做错了什么。
我的代码是基于this的
// Captures FPS * video_length_sec desktop frames via CaptureSingleFrame().
// NOTE(review): Init() is never called here — presumably the real program
// sets up the duplication interface before this loop runs; confirm.
int main() {
    const int FPS = 60;
    const int video_length_sec = 5;
    const int total_frames = FPS * video_length_sec;
    // The original retried a failed capture forever (`i--` on every failure),
    // which spins indefinitely if acquisition keeps failing (e.g. the
    // duplication interface was lost). Cap consecutive retries instead.
    int consecutive_failures = 0;
    for (int i = 0; i < total_frames; i++) {
        if (!CaptureSingleFrame()) {
            if (++consecutive_failures < 1000) {
                i--;  // retry the same frame index, as the original did
            }
            // else: give up on this frame index and move on
            continue;
        }
        consecutive_failures = 0;
    }
}
// Globals shared between Init() and CaptureSingleFrame().
ComPtr<ID3D11Device> lDevice;                    // D3D11 device created in Init()
ComPtr<ID3D11DeviceContext> lImmediateContext;   // immediate context for CopyResource/Map
ComPtr<IDXGIOutputDuplication> lDeskDupl;        // desktop duplication interface
ComPtr<ID3D11Texture2D> lAcquiredDesktopImage;   // last frame acquired from AcquireNextFrame
ComPtr<ID3D11Texture2D> lGDIImage;               // GDI-compatible render-target texture (created in Init)
ComPtr<ID3D11Texture2D> lDestImage;              // NOTE(review): declared but never assigned in the code shown
DXGI_OUTPUT_DESC lOutputDesc;                    // description of the duplicated output
DXGI_OUTDUPL_DESC lOutputDuplDesc;               // duplication mode description (width/height/format)
D3D11_TEXTURE2D_DESC desc;                       // left configured as the CPU staging desc at the end of Init()
// Driver types supported
D3D_DRIVER_TYPE gDriverTypes[] = {
D3D_DRIVER_TYPE_HARDWARE
};
UINT gNumDriverTypes = ARRAYSIZE(gDriverTypes);
// Feature levels supported, tried in this order by D3D11CreateDevice
D3D_FEATURE_LEVEL gFeatureLevels[] = {
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_1
};
UINT gNumFeatureLevels = ARRAYSIZE(gFeatureLevels);
// Initializes D3D11 and the Desktop Duplication interface for output 0 of
// the default adapter, creates the GDI-compatible texture, and leaves the
// global `desc` describing the CPU staging texture that CaptureSingleFrame()
// creates per frame. Returns false on any failure.
bool Init() {
int lresult(-1);  // NOTE(review): never used
D3D_FEATURE_LEVEL lFeatureLevel;
HRESULT hr(E_FAIL);
// Create device — try each driver type until one succeeds.
for (UINT DriverTypeIndex = 0; DriverTypeIndex < gNumDriverTypes; ++DriverTypeIndex)
{
hr = D3D11CreateDevice(
nullptr,
gDriverTypes[DriverTypeIndex],
nullptr,
0,
gFeatureLevels,
gNumFeatureLevels,
D3D11_SDK_VERSION,
&lDevice,
&lFeatureLevel,
&lImmediateContext);
if (SUCCEEDED(hr))
{
// Device creation success, no need to loop anymore
break;
}
// Reset partially-created objects before retrying with the next driver type.
lDevice.Reset();
lImmediateContext.Reset();
}
if (FAILED(hr))
return false;
if (lDevice == nullptr)
return false;
// Get DXGI device
ComPtr<IDXGIDevice> lDxgiDevice;
hr = lDevice.As(&lDxgiDevice);
if (FAILED(hr))
return false;
// Get DXGI adapter
ComPtr<IDXGIAdapter> lDxgiAdapter;
hr = lDxgiDevice->GetParent(
__uuidof(IDXGIAdapter), &lDxgiAdapter);
if (FAILED(hr))
return false;
lDxgiDevice.Reset();
UINT Output = 0;
// Get output 0 (the primary output of this adapter).
ComPtr<IDXGIOutput> lDxgiOutput;
hr = lDxgiAdapter->EnumOutputs(
Output,
&lDxgiOutput);
if (FAILED(hr))
return false;
lDxgiAdapter.Reset();
hr = lDxgiOutput->GetDesc(
&lOutputDesc);
if (FAILED(hr))
return false;
// QI for IDXGIOutput1, which exposes DuplicateOutput.
ComPtr<IDXGIOutput1> lDxgiOutput1;
hr = lDxgiOutput.As(&lDxgiOutput1);
if (FAILED(hr))
return false;
lDxgiOutput.Reset();
// Create desktop duplication
hr = lDxgiOutput1->DuplicateOutput(
lDevice.Get(), // the D3D device that duplicated frames will be accessed on
&lDeskDupl);
if (FAILED(hr))
return false;
lDxgiOutput1.Reset();
// Create GUI drawing texture, matching the duplicated output's mode.
lDeskDupl->GetDesc(&lOutputDuplDesc);
desc.Width = lOutputDuplDesc.ModeDesc.Width;
desc.Height = lOutputDuplDesc.ModeDesc.Height;
desc.Format = lOutputDuplDesc.ModeDesc.Format;
desc.ArraySize = 1;
desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET;
desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.MipLevels = 1;
desc.CPUAccessFlags = 0;
desc.Usage = D3D11_USAGE_DEFAULT;
hr = lDevice->CreateTexture2D(&desc, NULL, &lGDIImage);
if (FAILED(hr))
return false;
if (lGDIImage == nullptr)
return false;
// Reconfigure `desc` as a CPU-readable staging texture. No texture is
// created here: CaptureSingleFrame() reuses this desc to create one
// staging texture per captured frame.
desc.Width = lOutputDuplDesc.ModeDesc.Width;
desc.Height = lOutputDuplDesc.ModeDesc.Height;
desc.Format = lOutputDuplDesc.ModeDesc.Format;
std::cout << desc.Width << "x" << desc.Height << "\n\n\n";
desc.ArraySize = 1;
desc.BindFlags = 0;
desc.MiscFlags = 0;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.MipLevels = 1;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
desc.Usage = D3D11_USAGE_STAGING;
return true;
}
// Maps the CPU staging texture and forwards the raw pixel pointer to the
// WriteFrameToCaptureFile(char*, ...) overload, which writes it to the
// capture file. Runs on the writer thread (scheduled by CaptureSingleFrame).
// NOTE(review): `texture` is created per frame by the caller and is never
// Released anywhere shown — confirm ownership; releasing it here after the
// write would plug that leak.
void WriteFrameToCaptureFile(ID3D11Texture2D* texture) {
    // Stack allocation instead of `new` — the original leaked one
    // D3D11_MAPPED_SUBRESOURCE heap object per frame.
    D3D11_MAPPED_SUBRESOURCE mapped = {};
    UINT subresource = D3D11CalcSubresource(0, 0, 0);
    // The original ignored Map's result and dereferenced pData blindly.
    HRESULT hr = lImmediateContext->Map(texture, subresource, D3D11_MAP_READ_WRITE, 0, &mapped);
    if (FAILED(hr)) {
        return;  // nothing to write; pData would be invalid
    }
    char* data = reinterpret_cast<char*>(mapped.pData);
    // writes data to file
    WriteFrameToCaptureFile(data, 0);
    // The original never unmapped; the texture must not stay mapped once
    // the CPU is done reading it.
    lImmediateContext->Unmap(texture, subresource);
}
// Acquires one desktop frame via the Desktop Duplication API, copies it into
// a newly created CPU staging texture, and schedules an asynchronous file
// write. Returns false when no usable frame was obtained (timeout/error, or
// a mouse-only update).
// NOTE(review): `current_frame`, `writer_thread`, `pending_write_counts_`
// and `this` are not declared in the code shown — presumably members of an
// enclosing class; confirm against the full source.
bool CaptureSingleFrame()
{
HRESULT hr(E_FAIL);
ComPtr<IDXGIResource> lDesktopResource = nullptr;
DXGI_OUTDUPL_FRAME_INFO lFrameInfo;
ID3D11Texture2D* currTexture;
// Block for up to 999 ms waiting for the next desktop update.
hr = lDeskDupl->AcquireNextFrame(
999,
&lFrameInfo,
&lDesktopResource);
if (FAILED(hr)) {
LOG(INFO) << "Failed to acquire new frame";
return false;
}
// LastPresentTime == 0 means this update carried no new desktop image.
if (lFrameInfo.LastPresentTime.HighPart == 0) {
// not interested in just mouse updates, which can happen much faster than 60fps if you really shake the mouse
hr = lDeskDupl->ReleaseFrame();
return false;
}
// AccumulatedFrames > 1 means the OS coalesced updates: at least one frame
// was presented (and lost) while we were not holding AcquireNextFrame.
int accum_frames = lFrameInfo.AccumulatedFrames;
if (accum_frames > 1 && current_frame != 1) {
// TOO MANY OF THESE is the problem
// especially after having to wait >17ms in AcquireNextFrame()
}
// QI for ID3D11Texture2D
// NOTE(review): this hr is never checked before lAcquiredDesktopImage is used.
hr = lDesktopResource.As(&lAcquiredDesktopImage);
// Copy image into a newly created CPU access texture (`desc` was configured
// as a staging texture at the end of Init()).
// NOTE(review): currTexture is never Released in the code shown — leak?
hr = lDevice->CreateTexture2D(&desc, NULL, &currTexture);
if (FAILED(hr))
return false;
if (currTexture == nullptr)
return false;
lImmediateContext->CopyResource(currTexture, lAcquiredDesktopImage.Get());
// Hand the staging texture to the writer thread for the Map + file write.
// NOTE(review): Map on the immediate context from another thread is not
// thread-safe per D3D11 rules — confirm how the real program synchronizes.
writer_thread->Schedule(
FROM_HERE, [this, currTexture]() {
WriteFrameToCaptureFile(currTexture);
});
pending_write_counts_++;
// Release promptly: the OS only accumulates new frames for us while the
// frame is released.
hr = lDeskDupl->ReleaseFrame();
return true;
}**编辑-根据my measurements的说法,在帧实际出现大约10ms之前,您必须调用AcquireNextFrame(),否则windows将无法获取它并获取下一个帧。每当我的录制程序回绕超过7ms时(在获取帧i之后,直到在i+1上调用AcquireNextFrame() ),帧i+1就会丢失。
**编辑 - 这是一张GPUView的屏幕截图,显示了我所说的情况。前6帧都在正常时间内处理完成,然后第7帧花费了119毫秒。"capture_to_argb.exe"旁边的长方形对应于我被卡在AcquireNextFrame()中的时间段。如果你查看上方的硬件队列,可以看到它仍然清晰地以60fps的速度呈现,即使我被卡在AcquireNextFrame()中。至少这是我的解释(我也不确定自己在做什么)。
发布于 2017-06-07 20:14:17
“当前显示模式: 3840 x 2160 (32位) (60 Hz)”指的是显示刷新率,即每秒最多可以显示多少帧。然而,呈现新帧的速率通常要低得多。您可以使用PresentMon或类似的实用程序检查此速率。当我不移动鼠标时,它的报告如下所示:

正如你所看到的,当什么都没有发生时,Windows每秒只显示两次新的帧,甚至更慢。然而,这对于视频编码来说通常是非常好的,因为即使你正在以60fps的速度录制视频,而AcquireNextFrame报告没有新的帧可用,那么这意味着当前帧与之前的帧完全相同。
发布于 2017-06-09 04:17:34
在下次调用AcquireNextFrame之前进行阻塞等待,您会丢失实际的帧。桌面复制API逻辑建议您尝试立即获取下一帧,如果您希望获得合适的帧率。您的休眠调用有效地放弃了剩余的执行超时时间,而没有硬性承诺您将在计划的时间间隔中获得新的切片。
您必须以最大帧速率进行轮询。不要睡眠(即使睡眠时间为零),并立即请求下一帧。您可以选择丢弃过早到来的帧。桌面复制API的设计方式是,获得额外的帧可能不会太昂贵,因为您可以提前识别它们并停止它们的处理。
如果您仍然喜欢在帧之间休眠,则可能需要读取accuracy remark
要提高睡眠间隔的准确性,请调用
timeGetDevCaps函数以确定支持的最小计时器分辨率,并调用timeBeginPeriod函数将计时器分辨率设置为其最小值。在调用timeBeginPeriod时要小心,因为频繁调用会严重影响系统时钟、系统电源使用和调度程序。如果您调用timeBeginPeriod,请在应用程序的早期调用它一次,并确保在应用程序的最后调用timeEndPeriod函数。
发布于 2017-08-31 23:39:20
如其他人所提到的,60 Hz的刷新率仅表示显示内容最多可以以这个频率改变(每秒60次)。这实际上并不意味着它真的会那么频繁地改变。仅当复制的输出上显示的内容发生更改时,AcquireNextFrame才会返回帧。
我的建议是...
这将以所需的速率产生帧序列。如果显示没有更改,您将拥有前一帧的副本以用于保持帧速率。
https://stackoverflow.com/questions/44403173
复制相似问题