前言
之前发布过一篇文章 windows directshow使用lav filter开发h264码流摄像头
,但是存在一个问题就是 预览的延时太大
。因为 Video Mixing Renderer 9
是比较老的呈现器,无法使用 lav filter
的硬件加速,需要将呈现器更换成 Enhanced Video Renderer(EVR)
呈现器才可以使用硬件加速,但两个呈现器的使用方法有所差别,于是就有了这篇文章。
这篇文章主要实现以下三个功能:
- 使用硬件加速降低h264摄像头预览延迟
- 预览实时码率显示
- 预览视频录制
先提供实际效果图和graphedt的调试图流程:
注意 lav filter
框起来的地方要正确显示激活硬件加速,才可以降低预览的延时。
功能开发
对于开发环境,开发流程和技巧在 windows directshow使用lav filter开发h264码流摄像头
有详细的说明,这篇文章主要是对新增功能展开讨论。
一、降低预览延迟
这里要做的主要是将 Video Mixing Renderer 9
呈现器更换成 Enhanced Video Renderer(EVR)
呈现器。两者用法不一样,主要表现在设置呈现窗口句柄
、位图等
…
/*
 * function: GetUnconnectedPin
 * Finds the first pin on pFilter with direction PinDir that is not yet
 * connected to another pin.
 * Returns the pin with one reference held — the caller must Release() it —
 * or NULL if pFilter is NULL, enumeration fails, or no such pin exists.
 */
IPin* GetUnconnectedPin(IBaseFilter* pFilter, PIN_DIRECTION PinDir) {
if (pFilter == NULL) {
return NULL;
}
IEnumPins* pEnumPins = NULL;
// BUGFIX: check EnumPins — on failure pEnumPins stays NULL and the old code crashed
if (FAILED(pFilter->EnumPins(&pEnumPins)) || pEnumPins == NULL) {
return NULL;
}
IPin* pPin = NULL;
while (pEnumPins->Next(1, &pPin, NULL) == S_OK) {
PIN_INFO pinInfo;
// BUGFIX: only trust pinInfo.dir when QueryPinInfo succeeded
if (SUCCEEDED(pPin->QueryPinInfo(&pinInfo))) {
if (pinInfo.pFilter) {
pinInfo.pFilter->Release(); // QueryPinInfo AddRef's the owning filter
}
// Check whether the pin direction matches
if (pinInfo.dir == PinDir) {
IPin* pTmp = NULL;
// ConnectedTo returns VFW_E_NOT_CONNECTED for an unconnected pin
if (pPin->ConnectedTo(&pTmp) != S_OK) {
// Found an unconnected pin; caller owns the reference on pPin
pEnumPins->Release();
return pPin;
}
pTmp->Release();
}
}
pPin->Release();
}
pEnumPins->Release();
return NULL;
}
HRESULT hr;
IGraphBuilder* pGraphBuilder = nullptr;
hr = CoCreateInstance(CLSID_FilterGraph, nullptr, CLSCTX_INPROC_SERVER,
IID_IGraphBuilder, (void **)&pGraphBuilder);
// Create the Enhanced Video Renderer (EVR) instance
IBaseFilter* pEvrFilter = nullptr;
hr = CoCreateInstance(CLSID_EnhancedVideoRenderer, nullptr, CLSCTX_INPROC_SERVER,
IID_IBaseFilter, (void **) &pEvrFilter);
hr = pGraphBuilder->AddFilter(pEvrFilter, L"EVR Filter");
// Connect the LAV decoder output to the EVR renderer input
// (m_pMixingFilter presumably holds pEvrFilter — assigned outside this excerpt)
pGraphBuilder->ConnectDirect(GetUnconnectedPin(m_pLavVideoFilter, PINDIR_OUTPUT), GetUnconnectedPin(m_pMixingFilter, PINDIR_INPUT), nullptr);
// Obtain the window-handle interface and the alpha-bitmap interface from the EVR
IMFGetService* pService = NULL;
IMFVideoDisplayControl* pVideoDisplayCtl = NULL;
IMFVideoMixerBitmap* pBMP = NULL;
hr = m_pMixingFilter->QueryInterface(IID_IMFGetService, (LPVOID *) &pService);
if (SUCCEEDED(hr)) {
hr = pService->GetService(MR_VIDEO_RENDER_SERVICE, IID_IMFVideoDisplayControl, (void**)&pVideoDisplayCtl);
if (SUCCEEDED(hr)) {
// Set the video clipping window
// BUGFIX: use the interface just queried (pVideoDisplayCtl); the original
// called through m_pVideoDisplayCtl, which is never assigned in this snippet
hr = pVideoDisplayCtl->SetVideoWindow((HWND)effectiveWinId());
} else {
std::cout << "query video display control failed, hr = " << hr;
pService->Release();
return false;
}
hr = pService->GetService(MR_VIDEO_MIXER_SERVICE, IID_IMFVideoMixerBitmap, (void**)&pBMP);
if (FAILED(hr)) {
// BUGFIX: the original message said "video display control" here,
// but this query is for the video mixer bitmap service
std::cout << "query video mixer bitmap failed, hr = " << hr;
pService->Release();
return false;
}
pService->Release();
} else {
std::cout << "query IMF Service failed, hr = " << hr;
return false;
}
二、显示预览实时码率
实时码率无法通过接口直接获取,主要是通过重写 SampleCB(double, IMediaSample*)
函数,获取数据大小,通过码率的计算公式计算后得到。这里通过位图实时显示,就顺便提供 EVR呈现器
位图的使用方法(与Video Mixing Renderer 9
不一样)。
int updateMarker( std::vector<std::wstring> qslPrama, float t=0.0f, float b=0.2f, float l=0.0f, float r=0.3f )
{
LONG hr;
HWND hwndApp = (HWND)winId();
const TCHAR *szNewText = reinterpret_cast<const wchar_t *>(qslPrama.first().c_str());
HDC hdc = GetDC(hwndApp);
HDC hdcBmp = CreateCompatibleDC(hdc);
int nLength, nTextBmpWidth, nTextBmpHeight;
SIZE sz={0};
nLength = (int) _tcslen(szNewText);
GetTextExtentPoint32(hdcBmp, szNewText, nLength, &sz);
nTextBmpHeight = sz.cy;
nTextBmpWidth = sz.cx;
HBITMAP hbm = CreateCompatibleBitmap(hdc, nTextBmpWidth, nTextBmpHeight*qslPrama.size());
ReleaseDC(hwndApp, hdc);
HBITMAP hbmOld = (HBITMAP)SelectObject(hdcBmp, hbm);
RECT rcText;
SetRect(&rcText, 0, 0, nTextBmpWidth, nTextBmpHeight*qslPrama.size());
SetBkColor(hdcBmp, RGB(0, 0, 0));
SetTextColor(hdcBmp, RGB(0, 255, 0));
for( int i = 0; i < qslPrama.size(); i++ ) {
szNewText = reinterpret_cast<const wchar_t *>(qslPrama.at(i).c_str());
nLength = (int) _tcslen(szNewText);
TextOut(hdcBmp, 0, nTextBmpHeight*i, szNewText, nLength);
}
// 准备位图信息
MFVideoAlphaBitmap ab;
ZeroMemory(&ab, sizeof(ab));
ab.GetBitmapFromDC = TRUE;
ab.params.dwFlags = VMRBITMAP_HDC;
ab.bitmap.hdc = hdcBmp;
// 设置位图显示的位置和大小(归一化坐标:0.0 - 1.0)
MFVideoNormalizedRect nrcDest = { l, t, r, b };
ab.params.nrcDest = nrcDest;
ab.params.rcSrc = rcText;
ab.params.fAlpha = 0.6f; // 设置透明度 (0.0-1.0)
ab.params.clrSrcKey = RGB(255, 255, 255);
ab.params.dwFlags |= MFVideoAlphaBitmap_SrcColorKey;
ab.params.dwFlags |= MFVideoAlphaBitmap_Alpha;
ab.params.dwFlags |= MFVideoAlphaBitmap_DestRect;
hr = m_pBMP->SetAlphaBitmap(&ab);
if (FAILED(hr))
std::cout << "SetAlphaBitmap FAILED! hr=" << hr;
DeleteObject(SelectObject(hdcBmp, hbmOld));
DeleteObject(hbm);
DeleteDC(hdcBmp);
return hr;
}
//重载的函数回调频率是根据帧率来决定的,根据实际情况控制码率计算
//继承ISampleGrabberCB后重载函数
class CSampleGrabberCB : public ISampleGrabberCB {
int m_u8CallCounts{0};
int m_u16SecSize{0};
int m_u8fps{15};
public:
STDMETHODIMP SampleCB(double SampleTime, IMediaSample *pSample) {
long size = pSample->GetActualDataLength(); // 获取帧的大小(字节)
if(m_u8CallCounts == m_u8fps) {
double bitrate = (double)m_u16SecSize*8/1000;
m_u16SecSize = 0;
m_u8CallCounts = 0;
std::vector<std::wstring> qslPrama;
qslPrama.push_back(L"码率: "+std::to_string(bitrate)+L"/kbps");
updateMarker(qslPrama);
} else {
m_u16SecSize += size;
m_u8CallCounts++;
}
return S_OK;
}
};
// Install our ISampleGrabberCB subclass as the grabber's callback.
ISampleGrabber* pSampleGrabber;
IBaseFilter* pGrabberFilter;
// Create the Sample Grabber, asking directly for its ISampleGrabber interface,
// then QI the same object for IBaseFilter so it can be added to the graph.
hr = CoCreateInstance(CLSID_SampleGrabber, nullptr, CLSCTX_INPROC_SERVER,
IID_ISampleGrabber, (void **)&pSampleGrabber);
hr = pSampleGrabber->QueryInterface(IID_IBaseFilter, (void **)&pGrabberFilter);
hr = pGraphBuilder->AddFilter(pGrabberFilter, L"Sample Grabber");
// Wire the preview branch: capture -> tee -> grabber -> LAV video decoder.
// ConnectDirect return values are not checked here — verify in real code.
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pVideoCaptureFilter, PINDIR_OUTPUT), GetUnconnectedPin(pTeeFilter, PINDIR_INPUT), nullptr);
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pTeeFilter, PINDIR_OUTPUT), GetUnconnectedPin(pGrabberFilter, PINDIR_INPUT), nullptr);
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pGrabberFilter, PINDIR_OUTPUT), GetUnconnectedPin(m_pLavVideoFilter, PINDIR_INPUT), nullptr);
CSampleGrabberCB* pSampleGrabberCB = new CSampleGrabberCB();
// Second argument 0 selects the SampleCB (whole-sample) callback, not BufferCB.
pSampleGrabber->SetCallback(pSampleGrabberCB, 0);
三、预览视频录制
这个很简单直接给代码,如果需要输出mp4格式,需要下载一个 GDCL Mpeg-4 filter
。这里给出网址 https://www.gdcl.co.uk/mpeg4/
。
如果是保存avi格式,直接使用自带的 AVI Mux
就可以了。
IBaseFilter* pWriter;
IBaseFilter* pMp4Mux;
hr = CoCreateInstance(CLSID_FileWriter, nullptr, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pWriter);
hr = CoCreateInstance(CLSID_GDCLMp4Mux, nullptr, CLSCTX_INPROC_SERVER, IID_IBaseFilter, (void**)&pMp4Mux);
//设置输出文件路径
IFileSinkFilter *pFileSink = nullptr;
pWriter->QueryInterface(IID_IFileSinkFilter, (void**)&pFileSink);
std::wstring outFileName = L"out.mp4";
hr = pFileSink->SetFileName(outFileName.c_str(), nullptr);
hr = pGraphBuilder->AddFilter(pMp4Mux, L"GDCL Mp4 Mux");
hr = pGraphBuilder->AddFilter(pWriter, L"File Writer");
//按照graphedt图连接
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pVideoCaptureFilter, PINDIR_OUTPUT), GetUnconnectedPin(pTeeFilter, PINDIR_INPUT), nullptr);
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pTeeFilter, PINDIR_OUTPUT), GetUnconnectedPin(pGrabberFilter, PINDIR_INPUT), nullptr);
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pGrabberFilter, PINDIR_OUTPUT), GetUnconnectedPin(m_pLavVideoFilter, PINDIR_INPUT), nullptr);
pGraphBuilder->ConnectDirect(GetUnconnectedPin(m_pLavVideoFilter, PINDIR_OUTPUT), GetUnconnectedPin(m_pMixingFilter, PINDIR_INPUT), nullptr);
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pTeeFilter, PINDIR_OUTPUT), GetUnconnectedPin(pMp4Mux, PINDIR_INPUT), nullptr);
pGraphBuilder->ConnectDirect(GetUnconnectedPin(pMp4Mux, PINDIR_OUTPUT), GetUnconnectedPin(pWriter, PINDIR_INPUT), nullptr);
代码是节选出来的,可能无法直接运行。