The best explanation of int( (LONG)(LONG_PTR)&(((type *)0)->field))

This article explains how a NULL pointer, ((type *)0), can be used to compute the offset of a structure member in C. The technique avoids instantiating an object and completes the computation at compile time, so it adds no runtime overhead.


Found on CSDN.

The ANSI C standard allows a constant with value 0 to be cast to any pointer type, and the result of the conversion is a null pointer; therefore ((type *)0) yields a NULL pointer of type type *. Using this NULL pointer to actually access a member of type would of course be illegal, but the intent of &( ((type *)0)->field ) is merely to compute the address of the field member. A smart compiler generates no code that accesses type at all; it simply computes this (constant) address at compile time from the memory layout of type and the base address of the structure instance, so accessing memory through a NULL pointer never happens. And because that base address is 0, the value of the computed address is exactly the offset of the field relative to the base of the structure. This approach avoids instantiating a type object, and since the evaluation is done at compile time, it carries no runtime cost.
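The expression in the title is essentially how the Windows SDK defines its FIELD_OFFSET macro, and the standard offsetof macro from <stddef.h> has traditionally been implemented the same way. Below is a minimal, self-contained sketch of the technique. The struct demo_t and the macro name MY_FIELD_OFFSET are hypothetical names introduced here for illustration, and a portable size_t cast stands in for the Windows LONG/LONG_PTR pair; the result can be cross-checked against the standard offsetof macro.

#include <stdio.h>
#include <stddef.h>   /* offsetof, size_t -- used only to cross-check */

/* Hypothetical macro implementing the technique described above:
 * pretend a struct instance lives at address 0, take the address of
 * one of its fields, and read that address back as an integer.
 * Nothing is dereferenced, so no NULL-pointer access ever occurs. */
#define MY_FIELD_OFFSET(type, field) ((size_t)&(((type *)0)->field))

typedef struct {
    char  a;  /* offset 0 */
    int   b;  /* typically offset 4, after alignment padding */
    short c;  /* typically offset 8 */
} demo_t;

int main(void)
{
    /* Both values are compile-time constants; no demo_t object exists. */
    printf("MY_FIELD_OFFSET(demo_t, b) = %zu\n", MY_FIELD_OFFSET(demo_t, b));
    printf("offsetof(demo_t, b)        = %zu\n", offsetof(demo_t, b));
    return 0;
}

Strictly speaking, the C standard does not guarantee this pattern for user code, which is why offsetof is supplied by the implementation, but mainstream compilers fold it to a constant exactly as described above.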
源代码: filter.cpp 源文件: #include "filter.h" #include "ioctl.h" ///global var wddm_filter_t __gbl_wddm_filter; #define DEV_NAME L"\\Device\\WddmFilterCtrlDevice" #define DOS_NAME L"\\DosDevices\\WddmFilterCtrlDevice" ///// static NTSTATUS create_ctrl_device() { NTSTATUS status = STATUS_SUCCESS; PDEVICE_OBJECT devObj; UNICODE_STRING dev_name; UNICODE_STRING dos_name; RtlInitUnicodeString(&dev_name, DEV_NAME); RtlInitUnicodeString(&dos_name, DOS_NAME); status = IoCreateDevice( wf->driver_object, 0, &dev_name, //dev name FILE_DEVICE_VIDEO, FILE_DEVICE_SECURE_OPEN, FALSE, &devObj); if (!NT_SUCCESS(status)) { DPT("IoCreateDevice err=0x%X\n", status ); return status; } status = IoCreateSymbolicLink(&dos_name, &dev_name); if (!NT_SUCCESS(status)) { DPT("IoCreateSymbolicLink err=0x%X\n", status ); IoDeleteDevice(devObj); return status; } // attach wf->dxgkrnl_nextDevice = IoAttachDeviceToDeviceStack(devObj, wf->dxgkrnl_pdoDevice); if (!wf->dxgkrnl_nextDevice) { DPT("IoAttachDeviceToDeviceStack error.\n"); IoDeleteDevice(devObj); IoDeleteSymbolicLink(&dos_name); return STATUS_NOT_FOUND; } devObj->Flags |= DO_POWER_PAGABLE | DO_BUFFERED_IO | DO_DIRECT_IO; wf->ctrl_devobj = devObj; ///// return status; } NTSTATUS create_wddm_filter_ctrl_device(PDRIVER_OBJECT drvObj ) { NTSTATUS status = STATUS_SUCCESS; UNICODE_STRING drvPath; UNICODE_STRING drvName; RtlInitUnicodeString(&drvPath, L"\\REGISTRY\\MACHINE\\SYSTEM\\CURRENTCONTROLSET\\SERVICES\\DXGKrnl"); RtlInitUnicodeString(&drvName, L"\\Device\\Dxgkrnl"); // RtlZeroMemory(wf, sizeof(wddm_filter_t)); wf->driver_object = drvObj; KeInitializeSpinLock(&wf->spin_lock); InitializeListHead(&wf->vidpn_if_head); InitializeListHead(&wf->topology_if_head); //����dxgkrnl.sys���� status = ZwLoadDriver(&drvPath); if (!NT_SUCCESS(status)) { if (status != STATUS_IMAGE_ALREADY_LOADED) { DPT("ZwLoadDriver error st=0x%X\n", status ); return status; } } status = IoGetDeviceObjectPointer(&drvName, FILE_ALL_ACCESS, &wf->dxgkrnl_fileobj, &wf->dxgkrnl_pdoDevice); if (!NT_SUCCESS(status)) { DPT("IoGetDeviceObjectPointer Get DxGkrnl err=0x%X\n", status ); return status; } KEVENT evt; IO_STATUS_BLOCK ioStatus; KeInitializeEvent(&evt, NotificationEvent, FALSE); PIRP pIrp = IoBuildDeviceIoControlRequest( IOCTL_VIDEO_DDI_FUNC_REGISTER, //0x23003F , dxgkrnl.sys ����ע�ắ�� wf->dxgkrnl_pdoDevice, NULL, 0, &wf->dxgkrnl_dpiInit, sizeof(PDXGKRNL_DPIINITIALIZE), TRUE, // IRP_MJ_INTERNAL_DEVICE_CONTROL &evt, &ioStatus); if (!pIrp) { DPT("IoBuildDeviceIoControlRequest return NULL.\n"); ObDereferenceObject(wf->dxgkrnl_fileobj); return STATUS_NO_MEMORY; } status = IoCallDriver(wf->dxgkrnl_pdoDevice, pIrp); if (status == STATUS_PENDING) { KeWaitForSingleObject(&evt, Executive, KernelMode, FALSE, NULL); status = ioStatus.Status; } if (!wf->dxgkrnl_dpiInit) {// DPT("Can not Load PDXGKRNL_DPIINITIALIZE function address. 
st=0x%X\n", status ); ObDereferenceObject(wf->dxgkrnl_fileobj); return STATUS_NOT_FOUND; } ///create filter device status = create_ctrl_device(); if (!NT_SUCCESS(status)) { ObDereferenceObject(wf->dxgkrnl_fileobj); return status; } //// return status; } NTSTATUS log_event(PUNICODE_STRING str) { NTSTATUS status = STATUS_SUCCESS; return status; } filter.h 源文件: #pragma once #include <ntddk.h> #include <wdm.h> #include <ntstrsafe.h> #include <ntddvdeo.h> #include <initguid.h> #include <Dispmprt.h> #include <d3dkmdt.h> //////////////////////////////////////////////////////////// #ifdef DBG #define DPT DbgPrint #else #define DPT // #endif ///����VIDPN�����豸ID�� #define VIDPN_CHILD_UDID 0x667b0099 ///////// ///0x23003F , dxgkrnl.sys ����ע�ắ�� DXGKRNL_DPIINITIALIZE #define IOCTL_VIDEO_DDI_FUNC_REGISTER \ CTL_CODE( FILE_DEVICE_VIDEO, 0xF, METHOD_NEITHER, FILE_ANY_ACCESS ) typedef __checkReturn NTSTATUS DXGKRNL_DPIINITIALIZE( PDRIVER_OBJECT DriverObject, PUNICODE_STRING RegistryPath, DRIVER_INITIALIZATION_DATA* DriverInitData ); typedef DXGKRNL_DPIINITIALIZE* PDXGKRNL_DPIINITIALIZE; /////// struct vidpn_target_id { LONG num; D3DDDI_VIDEO_PRESENT_TARGET_ID ids[1]; }; struct vidpn_paths_t { LONG num_paths; vidpn_target_id* target_paths[1]; }; struct vidpn_intf_t { LIST_ENTRY list; /// D3DKMDT_HVIDPN hVidPn; DXGK_VIDPN_INTERFACE vidpn_if, mod_vidpn_if; //// D3DKMDT_HVIDPNTOPOLOGY hTopology; DXGK_VIDPNTOPOLOGY_INTERFACE topology_if, mod_topology_if; vidpn_paths_t* paths; //// }; struct wddm_filter_t { PDRIVER_OBJECT driver_object; //// PDEVICE_OBJECT ctrl_devobj; //// PFILE_OBJECT dxgkrnl_fileobj; PDEVICE_OBJECT dxgkrnl_pdoDevice; PDEVICE_OBJECT dxgkrnl_nextDevice; /// PDXGKRNL_DPIINITIALIZE dxgkrnl_dpiInit; /// KSPIN_LOCK spin_lock; KIRQL kirql; LIST_ENTRY vidpn_if_head; LIST_ENTRY topology_if_head; //// DRIVER_INITIALIZATION_DATA orgDpiFunc; //ԭʼ��DRIVER_INITIALIZATION_DATA ULONG vidpn_source_count; ULONG vidpn_target_count; DXGKRNL_INTERFACE DxgkInterface; }; extern wddm_filter_t __gbl_wddm_filter; #define wf (&(__gbl_wddm_filter)) #define wf_lock() KeAcquireSpinLock(&wf->spin_lock, &wf->kirql); #define wf_unlock() KeReleaseSpinLock(&wf->spin_lock, wf->kirql); ////////////////function NTSTATUS create_wddm_filter_ctrl_device(PDRIVER_OBJECT drvObj); inline NTSTATUS call_lower_driver(PIRP irp) { IoSkipCurrentIrpStackLocation(irp); return IoCallDriver(wf->dxgkrnl_nextDevice, irp); } NTSTATUS DpiInitialize( PDRIVER_OBJECT DriverObject, PUNICODE_STRING RegistryPath, DRIVER_INITIALIZATION_DATA* DriverInitData); NTSTATUS DxgkDdiEnumVidPnCofuncModality(CONST HANDLE hAdapter, CONST DXGKARG_ENUMVIDPNCOFUNCMODALITY* CONST pEnumCofuncModalityArg); NTSTATUS DxgkDdiIsSupportedVidPn( IN_CONST_HANDLE hAdapter, INOUT_PDXGKARG_ISSUPPORTEDVIDPN pIsSupportedVidPn); NTSTATUS DxgkDdiCommitVidPn( IN_CONST_HANDLE hAdapter, IN_CONST_PDXGKARG_COMMITVIDPN_CONST pCommitVidPn); NTSTATUS DxgkDdiSetVidPnSourceVisibility( IN_CONST_HANDLE hAdapter, IN_CONST_PDXGKARG_SETVIDPNSOURCEVISIBILITY pSetVidPnSourceVisibility); NTSTATUS APIENTRY DxgkDdiSetVidPnSourceAddress( const HANDLE hAdapter, const DXGKARG_SETVIDPNSOURCEADDRESS *pSetVidPnSourceAddress); main.cpp: /// by fanxiushu 2018-08-29 #include "filter.h" static NTSTATUS commonDispatch(PDEVICE_OBJECT devObj, PIRP irp) { PIO_STACK_LOCATION irpStack = IoGetCurrentIrpStackLocation(irp); switch (irpStack->MajorFunction) { case IRP_MJ_CREATE: break; case IRP_MJ_CLEANUP: break; case IRP_MJ_CLOSE: break; case IRP_MJ_INTERNAL_DEVICE_CONTROL: if 
(irpStack->Parameters.DeviceIoControl.IoControlCode == IOCTL_VIDEO_DDI_FUNC_REGISTER) { ///////�Կ�������DxgkInitialize�����е��� IOCTL��ȡdxgkrnl.sys��ע��ص�����������hook�˴�����ȡ���Կ������ṩ������DDI���� irp->IoStatus.Information = 0; irp->IoStatus.Status = STATUS_SUCCESS; ///�����ǵĻص��������ظ��Կ�����. if (irp->UserBuffer) { /// irp->IoStatus.Information = sizeof(PDXGKRNL_DPIINITIALIZE); *((PDXGKRNL_DPIINITIALIZE*)irp->UserBuffer) = DpiInitialize; } ///// IoCompleteRequest(irp, IO_NO_INCREMENT); return STATUS_SUCCESS; /// } break; } //// return call_lower_driver(irp); } extern "C" NTSTATUS DriverEntry( IN PDRIVER_OBJECT DriverObject, IN PUNICODE_STRING RegistryPath) { NTSTATUS status = STATUS_SUCCESS; for (UCHAR i = 0; i < IRP_MJ_MAXIMUM_FUNCTION; ++i) { DriverObject->MajorFunction[i] = commonDispatch; } status = create_wddm_filter_ctrl_device(DriverObject); /// DriverObject->DriverUnload = NULL; ///������ж�� return status; } miniport.cpp 源文件: #include "filter.h" static NTSTATUS DxgkDdiAddDevice( IN_CONST_PDEVICE_OBJECT PhysicalDeviceObject, OUT PVOID *MiniportDeviceContext) { DPT("Hook: DxgkDdiAddDevice. \n"); return wf->orgDpiFunc.DxgkDdiAddDevice(PhysicalDeviceObject, MiniportDeviceContext); } static NTSTATUS DxgkDdiRemoveDevice(IN PVOID MiniportDeviceContext) { DPT("Hook: DxgkDdiRemoveDevice\n"); return wf->orgDpiFunc.DxgkDdiRemoveDevice(MiniportDeviceContext); } ////HOOK DxgkCbQueryVidPnInterface, �ҹ�DxgkCbQueryVidPnInterface��ص����лص�������������ƭԭʼ������Target Source �� Path ��ѯ. //��ѯ����·�������Ұ���SourceId���� static vidpn_paths_t* enum_all_paths(IN_CONST_D3DKMDT_HVIDPNTOPOLOGY topology_handle, const DXGK_VIDPNTOPOLOGY_INTERFACE* topology_if ) { NTSTATUS status = STATUS_SUCCESS; SIZE_T num = 0; status = topology_if->pfnGetNumPaths(topology_handle, &num); if (num <= 0) { return NULL; } LONG sz = sizeof(vidpn_paths_t) + sizeof(vidpn_target_id*)*wf->vidpn_source_count + wf->vidpn_source_count*( sizeof(vidpn_target_id) + num* sizeof(D3DDDI_VIDEO_PRESENT_TARGET_ID) ); vidpn_paths_t* p = (vidpn_paths_t*)ExAllocatePoolWithTag(NonPagedPool, sz, 'FXSD'); if (!p)return NULL; /// RtlZeroMemory(p, sz); //// p->num_paths = num; CHAR* ptr = (CHAR*)p + sizeof(vidpn_paths_t) + sizeof(vidpn_target_id*)*wf->vidpn_source_count; for (INT i = 0; i < wf->vidpn_source_count; ++i) { p->target_paths[i] = (vidpn_target_id*)( ptr + i* ( sizeof(vidpn_target_id) + num * sizeof(D3DDDI_VIDEO_PRESENT_TARGET_ID) ) ); } ////// CONST D3DKMDT_VIDPN_PRESENT_PATH *curr_path_info; CONST D3DKMDT_VIDPN_PRESENT_PATH *next_path_info; status = topology_if->pfnAcquireFirstPathInfo(topology_handle, &curr_path_info); if (status == STATUS_GRAPHICS_DATASET_IS_EMPTY) { ExFreePool(p); return NULL; } else if (!NT_SUCCESS(status)) { ExFreePool(p); return NULL; } ///// INT t_num = 0; do { /// UINT sid = curr_path_info->VidPnSourceId; UINT did = curr_path_info->VidPnTargetId; if ( sid < (UINT)wf->vidpn_source_count) { /// if (did != VIDPN_CHILD_UDID) {// skip my target path /// LONG n = p->target_paths[sid]->num; p->target_paths[sid]->num++; p->target_paths[sid]->ids[n] = did; /// t_num++; } /// } ///next status = topology_if->pfnAcquireNextPathInfo(topology_handle, curr_path_info, &next_path_info); topology_if->pfnReleasePathInfo(topology_handle, curr_path_info); curr_path_info = next_path_info; if (status == STATUS_GRAPHICS_NO_MORE_ELEMENTS_IN_DATASET) { /// end curr_path_info = NULL; // DPT("pfnAcquireNextPathInfo no more data.\n"); break; } else if (!NT_SUCCESS(status)) { curr_path_info = NULL; DPT("pfnAcquireNextPathInfo 
err=0x%X\n", status); break; } //// } while (TRUE); p->num_paths = t_num; /// return p; } NTSTATUS pfnGetNumPaths( IN_CONST_D3DKMDT_HVIDPNTOPOLOGY hVidPnTopology, OUT_PSIZE_T pNumPaths) { NTSTATUS status = STATUS_INVALID_PARAMETER; DXGKDDI_VIDPNTOPOLOGY_GETNUMPATHS ptr_pfnGetNumPaths = NULL; wf_lock(); for (PLIST_ENTRY entry = wf->topology_if_head.Flink; entry != &wf->topology_if_head; entry = entry->Flink) { vidpn_intf_t* intf = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (intf->hTopology == hVidPnTopology) { ptr_pfnGetNumPaths = intf->topology_if.pfnGetNumPaths; if (intf->paths && pNumPaths) { *pNumPaths = intf->paths->num_paths; wf_unlock(); DPT("pfnGetNumPaths Cache called num=%d\n", *pNumPaths); return STATUS_SUCCESS; } break; } } wf_unlock(); ///// if (!ptr_pfnGetNumPaths) { return STATUS_INVALID_PARAMETER; } status = ptr_pfnGetNumPaths(hVidPnTopology, pNumPaths); DPT("pfnGetNumPaths called num=%d\n", *pNumPaths ); return status; } NTSTATUS pfnGetNumPathsFromSource( IN_CONST_D3DKMDT_HVIDPNTOPOLOGY hVidPnTopology, IN_CONST_D3DDDI_VIDEO_PRESENT_SOURCE_ID VidPnSourceId, OUT_PSIZE_T pNumPathsFromSource) { NTSTATUS status = STATUS_SUCCESS; DXGKDDI_VIDPNTOPOLOGY_GETNUMPATHSFROMSOURCE ptr_pfnGetNumPathsFromSource = NULL; wf_lock(); for (PLIST_ENTRY entry = wf->topology_if_head.Flink; entry != &wf->topology_if_head; entry = entry->Flink) { vidpn_intf_t* intf = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (intf->hTopology == hVidPnTopology) { ptr_pfnGetNumPathsFromSource = intf->topology_if.pfnGetNumPathsFromSource; if (intf->paths && pNumPathsFromSource && VidPnSourceId < wf->vidpn_source_count ) { *pNumPathsFromSource = intf->paths->target_paths[VidPnSourceId]->num; wf_unlock(); DPT("pfnGetNumPathsFromSource Cache called. num=%d\n", *pNumPathsFromSource); return STATUS_SUCCESS; } break; } } wf_unlock(); //// if (!ptr_pfnGetNumPathsFromSource) { return STATUS_INVALID_PARAMETER; } status = ptr_pfnGetNumPathsFromSource(hVidPnTopology, VidPnSourceId, pNumPathsFromSource); DPT("pfnGetNumPathsFromSource called. 
num=%d\n", *pNumPathsFromSource); return status; } NTSTATUS pfnEnumPathTargetsFromSource( IN_CONST_D3DKMDT_HVIDPNTOPOLOGY hVidPnTopology, IN_CONST_D3DDDI_VIDEO_PRESENT_SOURCE_ID VidPnSourceId, IN_CONST_D3DKMDT_VIDPN_PRESENT_PATH_INDEX VidPnPresentPathIndex, OUT_PD3DDDI_VIDEO_PRESENT_TARGET_ID pVidPnTargetId) { NTSTATUS status = STATUS_SUCCESS; DXGKDDI_VIDPNTOPOLOGY_ENUMPATHTARGETSFROMSOURCE ptr_pfnEnumPathTargetsFromSource = NULL; wf_lock(); for (PLIST_ENTRY entry = wf->topology_if_head.Flink; entry != &wf->topology_if_head; entry = entry->Flink) { vidpn_intf_t* intf = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (intf->hTopology == hVidPnTopology) { ptr_pfnEnumPathTargetsFromSource = intf->topology_if.pfnEnumPathTargetsFromSource; if (intf->paths && VidPnSourceId < wf->vidpn_source_count && pVidPnTargetId ) { if (VidPnPresentPathIndex >= intf->paths->target_paths[VidPnSourceId]->num) { wf_unlock(); DPT("VidPnPresentPathIndex >= intf->paths->target_path_num[VidPnSourceId]\n"); return STATUS_INVALID_PARAMETER; } *pVidPnTargetId = intf->paths->target_paths[VidPnSourceId]->ids[VidPnPresentPathIndex]; //// wf_unlock(); DPT("pfnEnumPathTargetsFromSource Cache called sourceId=%d, index=%d, targetid=%d, st=0x%X\n", VidPnSourceId, VidPnPresentPathIndex, *pVidPnTargetId, status); return STATUS_SUCCESS; } break; } } wf_unlock(); ///// if (!ptr_pfnEnumPathTargetsFromSource) { return STATUS_INVALID_PARAMETER; } status = ptr_pfnEnumPathTargetsFromSource(hVidPnTopology, VidPnSourceId, VidPnPresentPathIndex, pVidPnTargetId); DPT("pfnEnumPathTargetsFromSource called sourceId=%d, index=%d, targetid=%d, st=0x%X\n", VidPnSourceId, VidPnPresentPathIndex, *pVidPnTargetId, status ); return status; } static NTSTATUS skip_my_target_path( IN_CONST_D3DKMDT_HVIDPNTOPOLOGY hVidPnTopology, IN_CONST_PD3DKMDT_VIDPN_PRESENT_PATH_CONST pVidPnPresentPathInfo, DEREF_OUT_CONST_PPD3DKMDT_VIDPN_PRESENT_PATH ppNextVidPnPresentPathInfo, DXGKDDI_VIDPNTOPOLOGY_ACQUIRENEXTPATHINFO ptr_pfnAcquireNextPathInfo, DXGKDDI_VIDPNTOPOLOGY_RELEASEPATHINFO ptr_pfnReleasePathInfo) { NTSTATUS status = STATUS_SUCCESS; CONST D3DKMDT_VIDPN_PRESENT_PATH* curr_path = pVidPnPresentPathInfo; do { if (curr_path->VidPnTargetId != VIDPN_CHILD_UDID) {//����Ƿ����ǵ�target ID break; } /////skip my target id status = ptr_pfnAcquireNextPathInfo(hVidPnTopology, curr_path, ppNextVidPnPresentPathInfo ); ptr_pfnReleasePathInfo(hVidPnTopology, curr_path); /// release pathinfo /// if (status == STATUS_GRAPHICS_NO_MORE_ELEMENTS_IN_DATASET) { break; } else if (!NT_SUCCESS(status)) { break; } curr_path = *ppNextVidPnPresentPathInfo; //// ///// } while (TRUE); /// return status; } static NTSTATUS pfnAcquireFirstPathInfo( IN_CONST_D3DKMDT_HVIDPNTOPOLOGY hVidPnTopology, DEREF_OUT_CONST_PPD3DKMDT_VIDPN_PRESENT_PATH ppFirstVidPnPresentPathInfo) { NTSTATUS status = STATUS_SUCCESS; DXGKDDI_VIDPNTOPOLOGY_ACQUIREFIRSTPATHINFO ptr_pfnAcquireFirstPathInfo = NULL; DXGKDDI_VIDPNTOPOLOGY_ACQUIRENEXTPATHINFO ptr_pfnAcquireNextPathInfo = NULL; DXGKDDI_VIDPNTOPOLOGY_RELEASEPATHINFO ptr_pfnReleasePathInfo = NULL; wf_lock(); for (PLIST_ENTRY entry = wf->topology_if_head.Flink; entry != &wf->topology_if_head; entry = entry->Flink) { vidpn_intf_t* intf = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (intf->hTopology == hVidPnTopology) { ptr_pfnAcquireFirstPathInfo = intf->topology_if.pfnAcquireFirstPathInfo; ptr_pfnAcquireNextPathInfo = intf->topology_if.pfnAcquireNextPathInfo; ptr_pfnReleasePathInfo = intf->topology_if.pfnReleasePathInfo; break; } } wf_unlock(); /// if 
(!ptr_pfnAcquireFirstPathInfo) { DPT("** pfnAcquireFirstPathInfo NULL.\n"); return STATUS_INVALID_PARAMETER; } status = ptr_pfnAcquireFirstPathInfo(hVidPnTopology, ppFirstVidPnPresentPathInfo); if ( NT_SUCCESS(status) && status != STATUS_GRAPHICS_DATASET_IS_EMPTY ) { CONST D3DKMDT_VIDPN_PRESENT_PATH* curr_path = *ppFirstVidPnPresentPathInfo; status = skip_my_target_path(hVidPnTopology, curr_path, ppFirstVidPnPresentPathInfo, ptr_pfnAcquireNextPathInfo, ptr_pfnReleasePathInfo); //// } // DPT("ppFirstVidPnPresentPathInfo called. st=0x%X\n", status ); ///// return status; } static NTSTATUS pfnAcquireNextPathInfo( IN_CONST_D3DKMDT_HVIDPNTOPOLOGY hVidPnTopology, IN_CONST_PD3DKMDT_VIDPN_PRESENT_PATH_CONST pVidPnPresentPathInfo, DEREF_OUT_CONST_PPD3DKMDT_VIDPN_PRESENT_PATH ppNextVidPnPresentPathInfo) { NTSTATUS status = STATUS_SUCCESS; DXGKDDI_VIDPNTOPOLOGY_ACQUIRENEXTPATHINFO ptr_pfnAcquireNextPathInfo = NULL; DXGKDDI_VIDPNTOPOLOGY_RELEASEPATHINFO ptr_pfnReleasePathInfo = NULL; wf_lock(); for (PLIST_ENTRY entry = wf->topology_if_head.Flink; entry != &wf->topology_if_head; entry = entry->Flink) { vidpn_intf_t* intf = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (intf->hTopology == hVidPnTopology) { ptr_pfnAcquireNextPathInfo = intf->topology_if.pfnAcquireNextPathInfo; ptr_pfnReleasePathInfo = intf->topology_if.pfnReleasePathInfo; break; } } wf_unlock(); ///// if (!ptr_pfnAcquireNextPathInfo) { DPT("** pfnAcquireNextPathInfo NULL.\n"); return STATUS_INVALID_PARAMETER; } status = ptr_pfnAcquireNextPathInfo(hVidPnTopology, pVidPnPresentPathInfo, ppNextVidPnPresentPathInfo ); if (NT_SUCCESS(status) && status != STATUS_GRAPHICS_NO_MORE_ELEMENTS_IN_DATASET ) { CONST D3DKMDT_VIDPN_PRESENT_PATH* curr_path = *ppNextVidPnPresentPathInfo; status = skip_my_target_path(hVidPnTopology, curr_path, ppNextVidPnPresentPathInfo, ptr_pfnAcquireNextPathInfo, ptr_pfnReleasePathInfo); //// } // DPT("pfnAcquireNextPathInfo called. 
st=0x%X\n", status ); return status; } static NTSTATUS pfnGetTopology( IN_CONST_D3DKMDT_HVIDPN hVidPn, OUT_PD3DKMDT_HVIDPNTOPOLOGY phVidPnTopology, DEREF_OUT_CONST_PPDXGK_VIDPNTOPOLOGY_INTERFACE ppVidPnTopologyInterface) { NTSTATUS status = STATUS_SUCCESS; DXGKDDI_VIDPN_GETTOPOLOGY ptr_pfnGetTopology = NULL; wf_lock(); for (PLIST_ENTRY entry = wf->vidpn_if_head.Flink; entry != &wf->vidpn_if_head; entry = entry->Flink) { vidpn_intf_t* intf = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (hVidPn == intf->hVidPn) { ptr_pfnGetTopology = intf->vidpn_if.pfnGetTopology; break; } } wf_unlock(); if (!ptr_pfnGetTopology) { DPT("pfnGetTopology==NULL.\n"); return STATUS_INVALID_PARAMETER; } status = ptr_pfnGetTopology(hVidPn, phVidPnTopology, ppVidPnTopologyInterface); // DPT("pfnGetTopology called.\n"); if (NT_SUCCESS(status) && ppVidPnTopologyInterface && *ppVidPnTopologyInterface && phVidPnTopology ) { ///���¼��㲻���������Լ���target path��·�� vidpn_paths_t* p = enum_all_paths(*phVidPnTopology, *ppVidPnTopologyInterface); /// //// wf_lock(); /// PLIST_ENTRY entry; BOOLEAN find = FALSE; vidpn_intf_t* intf = NULL; for (entry = wf->topology_if_head.Flink; entry != &wf->topology_if_head; entry = entry->Flink) { vidpn_intf_t* it = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (it->hTopology == *phVidPnTopology) { intf = it; if (intf->paths) { ExFreePool(intf->paths); intf->paths = NULL; }/// break; } } if (!intf) { intf = (vidpn_intf_t*)ExAllocatePoolWithTag(NonPagedPool, sizeof(vidpn_intf_t), 'FXSD'); if (intf) { InsertTailList(&wf->topology_if_head, &intf->list); intf->hTopology = *phVidPnTopology; intf->paths = NULL; ///// } } if (intf) { intf->paths = p; /// intf->topology_if = **ppVidPnTopologyInterface; intf->mod_topology_if = intf->topology_if; *ppVidPnTopologyInterface = &intf->mod_topology_if; /// ///�滻���� intf->mod_topology_if.pfnGetNumPaths = pfnGetNumPaths; intf->mod_topology_if.pfnGetNumPathsFromSource = pfnGetNumPathsFromSource; intf->mod_topology_if.pfnEnumPathTargetsFromSource = pfnEnumPathTargetsFromSource; intf->mod_topology_if.pfnAcquireFirstPathInfo = pfnAcquireFirstPathInfo; intf->mod_topology_if.pfnAcquireNextPathInfo = pfnAcquireNextPathInfo; ///// } //// wf_unlock(); } /// return status; } static NTSTATUS DxgkCbQueryVidPnInterface( IN_CONST_D3DKMDT_HVIDPN hVidPn, IN_CONST_DXGK_VIDPN_INTERFACE_VERSION VidPnInterfaceVersion, DEREF_OUT_CONST_PPDXGK_VIDPN_INTERFACE ppVidPnInterface) { NTSTATUS status = STATUS_SUCCESS; status = wf->DxgkInterface.DxgkCbQueryVidPnInterface(hVidPn, VidPnInterfaceVersion, ppVidPnInterface); /// �滻���Լ��Ļص���������������������Hook Driver��ߵ� Target . 
if (NT_SUCCESS(status) && ppVidPnInterface && *ppVidPnInterface ) { /// PLIST_ENTRY entry; BOOLEAN find = FALSE; /// wf_lock(); for (entry = wf->vidpn_if_head.Flink; entry != &wf->vidpn_if_head; entry = entry->Flink) { vidpn_intf_t* intf = CONTAINING_RECORD(entry, vidpn_intf_t, list); if (intf->hVidPn == hVidPn) { intf->vidpn_if = *(*ppVidPnInterface); intf->mod_vidpn_if = intf->vidpn_if; intf->mod_vidpn_if.pfnGetTopology = pfnGetTopology; //// *ppVidPnInterface = &intf->mod_vidpn_if; find = TRUE; break; } } if (!find) { vidpn_intf_t* intf = (vidpn_intf_t*)ExAllocatePoolWithTag(NonPagedPool, sizeof(vidpn_intf_t), 'Fxsd'); if (intf) { intf->hVidPn = hVidPn; intf->vidpn_if = *(*ppVidPnInterface); intf->mod_vidpn_if = intf->vidpn_if; intf->mod_vidpn_if.pfnGetTopology = pfnGetTopology; /// *ppVidPnInterface = &intf->mod_vidpn_if; InsertTailList(&wf->vidpn_if_head, &intf->list); //// //// } } wf_unlock(); //// } //// return status; } /////// static NTSTATUS DxgkDdiStartDevice( IN PVOID MiniportDeviceContext, IN PDXGK_START_INFO DxgkStartInfo, IN PDXGKRNL_INTERFACE DxgkInterface, OUT PULONG NumberOfVideoPresentSources, OUT PULONG NumberOfChildren) { NTSTATUS status = STATUS_SUCCESS; ////WDDM1.1 �� WDDM2.3 ÿ�������в�ͬ���壬������WDK7�±��룬���ֻcopy WDDM1.1�IJ��֡� wf->DxgkInterface = *DxgkInterface; /// save interface function,����VIDPN���� ///////�滻ԭ���Ľӿ� DxgkInterface->DxgkCbQueryVidPnInterface = DxgkCbQueryVidPnInterface; ////// status = wf->orgDpiFunc.DxgkDdiStartDevice(MiniportDeviceContext, DxgkStartInfo, DxgkInterface, NumberOfVideoPresentSources, NumberOfChildren); //// DxgkInterface->DxgkCbQueryVidPnInterface = wf->DxgkInterface.DxgkCbQueryVidPnInterface; /// DPT("Hook: DxgkDdiStartDevice status=0x%X.\n", status ); /// if (NT_SUCCESS(status)) { DPT("org: DxgkDdiStartDevice, NumberOfVideoPresentSources=%d, NumberOfChildren=%d\n", *NumberOfVideoPresentSources, *NumberOfChildren); //// �ֱ����� 1������ source �� target wf->vidpn_source_count = *NumberOfVideoPresentSources; // +1; wf->vidpn_target_count = *NumberOfChildren + 1; ////// *NumberOfVideoPresentSources = wf->vidpn_source_count; *NumberOfChildren = wf->vidpn_target_count; //// } //// return status; } static NTSTATUS DxgkDdiStopDevice(IN PVOID MiniportDeviceContext) { DPT("Hook: DxgkDdiStopDevice.\n"); return wf->orgDpiFunc.DxgkDdiStopDevice(MiniportDeviceContext); } static NTSTATUS DxgkDdiQueryChildRelations(IN PVOID pvMiniportDeviceContext, IN OUT PDXGK_CHILD_DESCRIPTOR pChildRelations, IN ULONG ChildRelationsSize) { NTSTATUS status; status = wf->orgDpiFunc.DxgkDdiQueryChildRelations(pvMiniportDeviceContext, pChildRelations, ChildRelationsSize); DPT("Hook: DxgkDdiQueryChildRelations status=0x%X\n", status); //// if (NT_SUCCESS(status)) { //// LONG reqSize = sizeof(DXGK_CHILD_DESCRIPTOR)*wf->vidpn_target_count; if (reqSize > ChildRelationsSize) { return STATUS_BUFFER_TOO_SMALL; } ///// pChildRelations[wf->vidpn_target_count - 1] = pChildRelations[0]; ///�ѵ�һ�����Ƹ����ǵ�target pChildRelations[wf->vidpn_target_count - 1].ChildUid = VIDPN_CHILD_UDID; //�������ǵ�target vidpn��ID pChildRelations[wf->vidpn_target_count - 1].AcpiUid = VIDPN_CHILD_UDID; //// } return status; } static NTSTATUS DxgkDdiQueryChildStatus(IN PVOID MiniportDeviceContext, IN PDXGK_CHILD_STATUS ChildStatus, IN BOOLEAN NonDestructiveOnly) { DPT("Hook: DxgkDdiQueryChildStatus Uid=0x%X\n", ChildStatus->ChildUid); if (ChildStatus->ChildUid == VIDPN_CHILD_UDID) { ChildStatus->HotPlug.Connected = TRUE; /// /// return STATUS_SUCCESS; } //// return 
wf->orgDpiFunc.DxgkDdiQueryChildStatus(MiniportDeviceContext, ChildStatus, NonDestructiveOnly); } static NTSTATUS DxgkDdiQueryDeviceDescriptor(IN_CONST_PVOID MiniportDeviceContext, IN_ULONG ChildUid, INOUT_PDXGK_DEVICE_DESCRIPTOR DeviceDescriptor) { DPT("Hook: DxgkDdiQueryDeviceDescriptor Uid=0x%X\n", ChildUid); if (ChildUid == VIDPN_CHILD_UDID) { /// return STATUS_MONITOR_NO_MORE_DESCRIPTOR_DATA; } //// return wf->orgDpiFunc.DxgkDdiQueryDeviceDescriptor(MiniportDeviceContext, ChildUid, DeviceDescriptor); } ///// NTSTATUS DpiInitialize( PDRIVER_OBJECT DriverObject, PUNICODE_STRING RegistryPath, DRIVER_INITIALIZATION_DATA* DriverInitData) { NTSTATUS status = STATUS_SUCCESS; static BOOLEAN is_hooked = FALSE; //// UNICODE_STRING vm_str; RtlInitUnicodeString(&vm_str, L"\\Driver\\vm3dmp_loader"); // Vmware 3D UNICODE_STRING igfx_str; RtlInitUnicodeString(&igfx_str, L"\\Driver\\igfx"); // Intel Graphics UNICODE_STRING nv_str; RtlInitUnicodeString(&nv_str, L"\\Driver\\nvlddmkm"); // nvidia Graphics if ( !is_hooked && ( RtlEqualUnicodeString(&vm_str, &DriverObject->DriverName, TRUE) || RtlEqualUnicodeString(&nv_str, &DriverObject->DriverName, TRUE) //vmware��������Կ�����Intel�Կ� ) ) { //����ֻHOOK��һ���Կ� is_hooked = TRUE; /// //���︴����Ҫע�⣺ // DRIVER_INITIALIZATION_DATA�ṹ���壬WDDM1.1 �� WDDM2.3 ÿ�������в�ͬ���壬������WDK7�±��룬���ֻcopy WDDM1.1�IJ��֡� RtlCopyMemory(&wf->orgDpiFunc, DriverInitData, sizeof(DRIVER_INITIALIZATION_DATA)); ////replace some function DriverInitData->DxgkDdiAddDevice = DxgkDdiAddDevice; DriverInitData->DxgkDdiRemoveDevice = DxgkDdiRemoveDevice; DriverInitData->DxgkDdiStartDevice = DxgkDdiStartDevice; DriverInitData->DxgkDdiStopDevice = DxgkDdiStopDevice; DriverInitData->DxgkDdiQueryChildRelations = DxgkDdiQueryChildRelations; DriverInitData->DxgkDdiQueryChildStatus = DxgkDdiQueryChildStatus; DriverInitData->DxgkDdiQueryDeviceDescriptor = DxgkDdiQueryDeviceDescriptor; DriverInitData->DxgkDdiEnumVidPnCofuncModality = DxgkDdiEnumVidPnCofuncModality; //// DriverInitData->DxgkDdiIsSupportedVidPn = DxgkDdiIsSupportedVidPn; DriverInitData->DxgkDdiCommitVidPn = DxgkDdiCommitVidPn; DriverInitData->DxgkDdiSetVidPnSourceVisibility = DxgkDdiSetVidPnSourceVisibility; DriverInitData->DxgkDdiSetVidPnSourceAddress = DxgkDdiSetVidPnSourceAddress; // DriverInitData->DxgkDdiPresent = DxgkDdiPresent; ///// } ///�滻��ijЩ�����󣬽��ŵ��� dxgkrnl.sys �ص�����ע�� return wf->dxgkrnl_dpiInit(DriverObject, RegistryPath, DriverInitData); } vidpn.cpp 源文件: #include "filter.h" static D3DKMDT_2DREGION Modes[]= { {1024, 768}, {1366, 768}, {1920, 1080}, // {6000, 4000} }; static NTSTATUS add_source_mode(D3DKMDT_HVIDPNSOURCEMODESET source_mode_set_hdl, CONST DXGK_VIDPNSOURCEMODESET_INTERFACE *source_mode_set_if, D3DKMDT_2DREGION* mode) { NTSTATUS status = STATUS_SUCCESS; D3DKMDT_VIDPN_SOURCE_MODE *source_mode; D3DKMDT_GRAPHICS_RENDERING_FORMAT *fmt; status = source_mode_set_if->pfnCreateNewModeInfo(source_mode_set_hdl, &source_mode); if (!NT_SUCCESS(status)) { DPT("** pfnCreateNewModeInfo(Source) err=0x%X\n", status ); return status; } /* Let OS assign the ID, set the type.*/ source_mode->Type = D3DKMDT_RMT_GRAPHICS; /* Initialize the rendering format per our constraints and the current mode. 
*/ fmt = &source_mode->Format.Graphics; fmt->PrimSurfSize.cx = mode->cx; fmt->PrimSurfSize.cy = mode->cy; fmt->VisibleRegionSize.cx = mode->cx; fmt->VisibleRegionSize.cy = mode->cy; fmt->Stride = mode->cx*4 ; // RGBA fmt->PixelFormat = D3DDDIFMT_A8R8G8B8; fmt->ColorBasis = D3DKMDT_CB_SRGB; fmt->PixelValueAccessMode = D3DKMDT_PVAM_DIRECT; status = source_mode_set_if->pfnAddMode(source_mode_set_hdl, source_mode); if (!NT_SUCCESS(status)) { DPT("** pfnAddMode(Source) err=0x%X\n", status ); source_mode_set_if->pfnReleaseModeInfo(source_mode_set_hdl, source_mode); } /// return status; } static NTSTATUS update_source_modes( CONST D3DKMDT_HVIDPN vidpn_hdl, CONST D3DKMDT_VIDPN_PRESENT_PATH *curr_path_info, CONST DXGK_VIDPN_INTERFACE* vidpn_if) { NTSTATUS status = STATUS_SUCCESS; D3DKMDT_HVIDPNSOURCEMODESET source_mode_set_hdl = NULL; CONST DXGK_VIDPNSOURCEMODESET_INTERFACE *source_mode_set_if; CONST D3DKMDT_VIDPN_SOURCE_MODE *src_mode_info = NULL; status = vidpn_if->pfnAcquireSourceModeSet(vidpn_hdl, curr_path_info->VidPnSourceId, &source_mode_set_hdl, &source_mode_set_if); if (!NT_SUCCESS(status)) { DPT("** not pfnAcquireSourceModeSet st=0x%X\n", status ); return status; } //// status = source_mode_set_if->pfnAcquirePinnedModeInfo(source_mode_set_hdl, &src_mode_info); if (!NT_SUCCESS(status)) { vidpn_if->pfnReleaseSourceModeSet(vidpn_hdl, source_mode_set_hdl); DPT("pfnAcquirePinnedModeInfo(Source) err=0x%X\n", status ); return status; } //// if (src_mode_info != NULL) { source_mode_set_if->pfnReleaseModeInfo(source_mode_set_hdl, src_mode_info); } vidpn_if->pfnReleaseSourceModeSet(vidpn_hdl, source_mode_set_hdl); source_mode_set_hdl = NULL; /// /// if (status == STATUS_SUCCESS && src_mode_info != NULL) { // pinned mode . /// DPT("Source Mode Pinned Mode: 0x%X -> 0x%X\n", curr_path_info->VidPnSourceId, curr_path_info->VidPnTargetId); return STATUS_SUCCESS;///�Ѿ����ˣ������� } //// status = vidpn_if->pfnCreateNewSourceModeSet(vidpn_hdl, curr_path_info->VidPnSourceId, &source_mode_set_hdl, &source_mode_set_if); if (!NT_SUCCESS(status)) { DPT("** pfnCreateNewSourceModeSet err=0x%X\n", status); return status; } //// for (INT i = 0; i < sizeof(Modes) / sizeof(Modes[0]); ++i) { //// status = add_source_mode(source_mode_set_hdl, source_mode_set_if, &Modes[i]); if (!NT_SUCCESS(status)) { /// vidpn_if->pfnReleaseSourceModeSet(vidpn_hdl, source_mode_set_hdl); DPT("add_source_mode err=0x%X\n", status); return status; } //// } ////// status = vidpn_if->pfnAssignSourceModeSet(vidpn_hdl, curr_path_info->VidPnSourceId, source_mode_set_hdl); if (!NT_SUCCESS(status)) { DPT("** pfnAssignSourceModeSet err=0x%X\n", status); vidpn_if->pfnReleaseSourceModeSet(vidpn_hdl, source_mode_set_hdl); } //// return status; } //// target static NTSTATUS add_target_mode(D3DKMDT_HVIDPNTARGETMODESET tgt_mode_set_hdl, CONST DXGK_VIDPNTARGETMODESET_INTERFACE *target_mode_set_if, D3DKMDT_2DREGION* mode) { D3DKMDT_VIDPN_TARGET_MODE *target_mode; D3DKMDT_VIDEO_SIGNAL_INFO *signal_info; NTSTATUS status; status = target_mode_set_if->pfnCreateNewModeInfo(tgt_mode_set_hdl, &target_mode); if (!NT_SUCCESS(status)) { DPT("** pfnCreateNewModeInfo(Target) err=0x%X\n", status ); return status; } //// /* Let OS assign the ID, set the preferred mode field.*/ target_mode->Preference = D3DKMDT_MP_PREFERRED; //// #define REFRESH_RATE 60 signal_info = &target_mode->VideoSignalInfo; signal_info->VideoStandard = D3DKMDT_VSS_VESA_DMT;// D3DKMDT_VSS_OTHER; signal_info->TotalSize.cx = mode->cx; signal_info->TotalSize.cy = mode->cy; signal_info->ActiveSize.cx = 
mode->cx; signal_info->ActiveSize.cy = mode->cy; signal_info->PixelRate = mode->cx * mode->cy * REFRESH_RATE; signal_info->VSyncFreq.Numerator = REFRESH_RATE * 1000; signal_info->VSyncFreq.Denominator = 1000; signal_info->HSyncFreq.Numerator = (UINT)((signal_info->PixelRate / signal_info->TotalSize.cy) * 1000); signal_info->HSyncFreq.Denominator = 1000; signal_info->ScanLineOrdering = D3DDDI_VSSLO_PROGRESSIVE; status = target_mode_set_if->pfnAddMode(tgt_mode_set_hdl, target_mode); if (!NT_SUCCESS(status)) { DPT("pfnAddMode failed: 0x%x", status); target_mode_set_if->pfnReleaseModeInfo(tgt_mode_set_hdl, target_mode); return status; } return status; } static NTSTATUS update_target_modes( CONST D3DKMDT_HVIDPN vidpn_hdl, CONST D3DKMDT_VIDPN_PRESENT_PATH *curr_path_info, CONST DXGK_VIDPN_INTERFACE* vidpn_if) { NTSTATUS status = STATUS_SUCCESS; D3DKMDT_HVIDPNTARGETMODESET tgt_mode_set_hdl = NULL; CONST DXGK_VIDPNTARGETMODESET_INTERFACE *target_mode_set_if; CONST D3DKMDT_VIDPN_TARGET_MODE *tgt_mode_info = NULL; status = vidpn_if->pfnAcquireTargetModeSet(vidpn_hdl, curr_path_info->VidPnTargetId, &tgt_mode_set_hdl, &target_mode_set_if); if (!NT_SUCCESS(status)) { DPT("** pfnAcquireTargetModeSet err=0x%X\n", status ); return status; } status = target_mode_set_if->pfnAcquirePinnedModeInfo(tgt_mode_set_hdl, &tgt_mode_info); if (!NT_SUCCESS(status)) { vidpn_if->pfnReleaseTargetModeSet(vidpn_hdl, tgt_mode_set_hdl); DPT("** pfnAcquirePinnedModeInfo(Source) err=0x%X\n", status ); return status; } //// if (tgt_mode_info) { target_mode_set_if->pfnReleaseModeInfo(tgt_mode_set_hdl, tgt_mode_info); } vidpn_if->pfnReleaseTargetModeSet(vidpn_hdl, tgt_mode_set_hdl); tgt_mode_set_hdl = NULL; if (status == STATUS_SUCCESS && tgt_mode_info != NULL) { DPT("Target Mode Pinned Mode: 0x%X -> 0x%X\n", curr_path_info->VidPnSourceId, curr_path_info->VidPnTargetId); return STATUS_SUCCESS;///�Ѿ����ˣ������� /// } ///// status = vidpn_if->pfnCreateNewTargetModeSet(vidpn_hdl, curr_path_info->VidPnTargetId, &tgt_mode_set_hdl, &target_mode_set_if); if (!NT_SUCCESS(status)) { DPT("** pfnCreateNewTargetModeSet err=0x%X\n", status ); return status; } ///add target for (INT i = 0; i < sizeof(Modes) / sizeof(Modes[0]); ++i) { status = add_target_mode(tgt_mode_set_hdl, target_mode_set_if, &Modes[i]); if (!NT_SUCCESS(status)) { /// vidpn_if->pfnReleaseTargetModeSet(vidpn_hdl, tgt_mode_set_hdl); DPT("add_target_mode err=0x%X\n", status); return status; } /// } ////// status = vidpn_if->pfnAssignTargetModeSet(vidpn_hdl, curr_path_info->VidPnTargetId, tgt_mode_set_hdl); if (!NT_SUCCESS(status)) { DPT("** pfnAssignTargetModeSet err=0x%x\n", status ); vidpn_if->pfnReleaseTargetModeSet(vidpn_hdl, tgt_mode_set_hdl); return status; } return status; } static NTSTATUS DxgkDdiEnumVidPnCofuncModality_modify(CONST DXGKARG_ENUMVIDPNCOFUNCMODALITY* CONST arg) { NTSTATUS status = STATUS_SUCCESS; D3DKMDT_HVIDPN hConstrainingVidPn = arg->hConstrainingVidPn; CONST DXGK_VIDPN_INTERFACE* vidpn_if; status = wf->DxgkInterface.DxgkCbQueryVidPnInterface( hConstrainingVidPn, DXGK_VIDPN_INTERFACE_VERSION_V1, &vidpn_if); if (!NT_SUCCESS(status)) { return status; } //// D3DKMDT_HVIDPNTOPOLOGY topology_handle = NULL; CONST DXGK_VIDPNTOPOLOGY_INTERFACE* topology_if = NULL; CONST D3DKMDT_VIDPN_PRESENT_PATH *curr_path_info; CONST D3DKMDT_VIDPN_PRESENT_PATH *next_path_info; status = vidpn_if->pfnGetTopology(hConstrainingVidPn, &topology_handle, &topology_if); if (!NT_SUCCESS(status)) { return status; } //// status = topology_if->pfnAcquireFirstPathInfo(topology_handle, 
&curr_path_info); if (status == STATUS_GRAPHICS_DATASET_IS_EMPTY) { // Empty topology, nothing to do. DPT("pfnAcquireFirstPathInfo: Empty topology.\n"); return STATUS_SUCCESS; } else if (!NT_SUCCESS(status)) { DPT("pfnAcquireFirstPathInfo failed: 0x%x", status); return STATUS_NO_MEMORY; //// } //// do { ////����ÿ��·�� DPT("0x%X --> 0x%X\n", curr_path_info->VidPnSourceId, curr_path_info->VidPnTargetId); if (curr_path_info->VidPnTargetId == VIDPN_CHILD_UDID) {//·��Ŀ���������Լ��� /// if ((arg->EnumPivotType != D3DKMDT_EPT_VIDPNSOURCE) || (arg->EnumPivot.VidPnSourceId != curr_path_info->VidPnSourceId)) { ///// status = update_source_modes(arg->hConstrainingVidPn, curr_path_info, vidpn_if); DPT("update_source_modes st=0x%X\n",status ); if (!NT_SUCCESS(status)) { DPT("** update_source_modes err=0x%X\n", status ); } ////// } ///// if ((arg->EnumPivotType != D3DKMDT_EPT_VIDPNTARGET) || (arg->EnumPivot.VidPnTargetId != curr_path_info->VidPnTargetId)) { status = update_target_modes(arg->hConstrainingVidPn, curr_path_info, vidpn_if); DPT("update_target_modes st=0x%X\n", status); if (!NT_SUCCESS(status)) { DPT("** update_target_modes err=0x%X\n", status); } } //////// } ///next status = topology_if->pfnAcquireNextPathInfo(topology_handle, curr_path_info, &next_path_info); topology_if->pfnReleasePathInfo(topology_handle, curr_path_info); curr_path_info = next_path_info; if (status == STATUS_GRAPHICS_NO_MORE_ELEMENTS_IN_DATASET) { /// end curr_path_info = NULL; // DPT("pfnAcquireNextPathInfo no more data.\n"); break; } else if (!NT_SUCCESS(status)) { curr_path_info = NULL; DPT("pfnAcquireNextPathInfo err=0x%X\n", status ); break; } ///// } while (TRUE); return status; } NTSTATUS DxgkDdiEnumVidPnCofuncModality(CONST HANDLE hAdapter, CONST DXGKARG_ENUMVIDPNCOFUNCMODALITY* CONST pEnumCofuncModalityArg) { NTSTATUS status = STATUS_SUCCESS; DPT("DxgkDdiEnumVidPnCofuncModality: type=%d, 0x%X -> 0x%X, [%d, %d]\n", pEnumCofuncModalityArg->EnumPivotType, pEnumCofuncModalityArg->EnumPivot.VidPnSourceId, pEnumCofuncModalityArg->EnumPivot.VidPnTargetId, wf->vidpn_source_count, wf->vidpn_target_count ); /// DxgkDdiEnumVidPnCofuncModality_modify(pEnumCofuncModalityArg); //// //// status = wf->orgDpiFunc.DxgkDdiEnumVidPnCofuncModality(hAdapter, pEnumCofuncModalityArg); if (!NT_SUCCESS(status)) { DPT("** DxgkDdiEnumVidPnCofuncModality err=0x%X\n", status ); } return status; } NTSTATUS DxgkDdiIsSupportedVidPn( IN_CONST_HANDLE hAdapter, INOUT_PDXGKARG_ISSUPPORTEDVIDPN pIsSupportedVidPn) { NTSTATUS status; status = wf->orgDpiFunc.DxgkDdiIsSupportedVidPn(hAdapter, pIsSupportedVidPn); if (NT_SUCCESS(status)) { DPT("DxgkDdiIsSupportedVidPn handle=%p, supported=%d, \n", pIsSupportedVidPn->hDesiredVidPn, pIsSupportedVidPn->IsVidPnSupported ); } else { DPT("** DxgkDdiIsSupportedVidPn err=0x%X, handle=%p, supported=%d\n", status , pIsSupportedVidPn->hDesiredVidPn, pIsSupportedVidPn->IsVidPnSupported ); } return status; } NTSTATUS DxgkDdiCommitVidPn( IN_CONST_HANDLE hAdapter, IN_CONST_PDXGKARG_COMMITVIDPN_CONST pCommitVidPn) { NTSTATUS status; status = wf->orgDpiFunc.DxgkDdiCommitVidPn(hAdapter, pCommitVidPn ); // if (!NT_SUCCESS(status)) { /// DPT(" DxgkDdiCommitVidPn st=0x%X\n", status ); // } //// return status; } NTSTATUS DxgkDdiSetVidPnSourceVisibility( IN_CONST_HANDLE hAdapter, IN_CONST_PDXGKARG_SETVIDPNSOURCEVISIBILITY pSetVidPnSourceVisibility) { NTSTATUS status; status = wf->orgDpiFunc.DxgkDdiSetVidPnSourceVisibility(hAdapter, pSetVidPnSourceVisibility); DPT(" DxgkDdiSetVidPnSourceVisibility sourceId=0x%X, visible=0x%X, 
st=0x%X\n", pSetVidPnSourceVisibility->VidPnSourceId, pSetVidPnSourceVisibility->Visible ,status ); return status; } NTSTATUS APIENTRY DxgkDdiSetVidPnSourceAddress( const HANDLE hAdapter, const DXGKARG_SETVIDPNSOURCEADDRESS *pSetVidPnSourceAddress) { NTSTATUS status; status = wf->orgDpiFunc.DxgkDdiSetVidPnSourceAddress(hAdapter, pSetVidPnSourceAddress ); DPT("DxgkDdiSetVidPnSourceAddress sourceId=0x%X, paddr=%llu, st=0x%X\n", pSetVidPnSourceAddress->VidPnSourceId, pSetVidPnSourceAddress->PrimaryAddress.QuadPart, status ); return status; } 修改以上代码,添加虚拟显示器管理代码
07-10
#include "stm32f10x.h" // Device header #include "Delay.h" #include "Key.h" #include "Led.h" #include "serial.h" #include "OLED.h" #include "Time.h" #include "Stack.h" static unsigned char Clear_Index=0; //清零检索 static unsigned char Count_Index=0; //计算检索 static uint32_t Index=0; static unsigned char Firmula[100]; //存储算数式子 static unsigned int Result; //存储结果 void USART1_IRQHandler(void); unsigned char a=19; int main(void) { Key_Init(); OLED_Init(); Serial_Init(); while(1) { if(Key_GetNum1()) //计算算数式 { Result=Deposit(Firmula); Count_Index=1; } if(Count_Index) //发送结果 { if(Key_GetNum2()) { printf("结果=%d",Result); OLED_ShowNum(1,1,Result,4); Index=0; Clear_Index=1; Count_Index=0; } } if(Clear_Index) //清零 { if(Key_GetNum3()) { Clear_Index=0; Init(); } } else if(!Clear_Index) { if(Key_GetNum3()) { printf("请输入运算式"); } } } } void USART1_IRQHandler(void) //串口中断函数 { Firmula[Index]=Serial_Getbyte(); printf("%c",Firmula[Index]); Index++; }#include "stm32f10x.h"// Device header #include "Stack.h" #include<ctype.h> #include "Serial.h" Stack_char Stack_CHAR; Stack_num Stack_NUM; uint8_t Push_char(Stack_char *stack,uint8_t CH); uint8_t Pop_char(Stack_char *stack,uint8_t *c); uint8_t Push_num(Stack_num *stack,unsigned int NUM); uint8_t Pop_num(Stack_num *stack,unsigned int *n); void Eval(void); uint16_t Priority(uint8_t ch); void Init(void) { Stack_NUM.top=0; Stack_CHAR.top=0; } uint16_t Priority(uint8_t ch) { switch(ch) { case '(' : case ')' : return 3; case '*' : case '/' : return 2; case '+' : case '-' : return 1; default : return 0; //分化优先级 } } uint32_t Deposit(uint8_t *String) { unsigned int i,j,index=0; uint8_t C; Init(); for(i = 0;String[i]!='\0'&&i < Stack_Size ;i++) { if(isdigit(String[i])) //判断是否 '0'<=string<='9' { index=0; j=i; for(;isdigit(String[j])&&j< Stack_Size;j++) { index=index*10+(String[j]-'0'); } Push_num(&Stack_NUM,index); i=j-1; //因为for循环多加了1,所以减去1 } else if(String[i]=='(') { Push_char(&Stack_CHAR,String[i]); } else if(String[i]==')') { while(Stack_CHAR.ch[Stack_CHAR.top] != '(') {Eval();} //直到遇到左括号,并且计算 if(Stack_CHAR.top != 0 && Stack_CHAR.ch[Stack_CHAR.top] == '(') { Pop_char(&Stack_CHAR,&C); //弹出左括号 } } else { while(Stack_CHAR.top!=0&&Stack_CHAR.ch[Stack_CHAR.top]!='('&&Priority(Stack_CHAR.ch[Stack_CHAR.top])>=Priority(String[i])) { Eval(); } Push_char(&Stack_CHAR,String[i]); } } while(Stack_CHAR.top) { Eval(); } //循环直至操作符为空 return Stack_NUM.num[Stack_NUM.top]; //此时数栈顶元素即为表达式值 } void Eval(void) { uint32_t a,x,b; uint8_t cha; Pop_num(&Stack_NUM,&b); Pop_num(&Stack_NUM,&a); //由于栈是陷进后出,与队列有区别(先进出) Pop_char(&Stack_CHAR,&cha); switch(cha) { case '*' : x=a*b;break; //计算 case '/' : { if(b==0) {printf("除数不能为0");x=0;} else {x=a/b;} break; } case '+' : {x=a+b;break;} case '-' : {x=a-b;break;} default :break; } Push_num(&Stack_NUM,x); } uint8_t Push_char(Stack_char *stack,uint8_t CH) { if(stack->top>=Stack_Size) { return 0; } stack->top++; stack->ch[stack->top]=CH; return 1; } uint8_t Push_num(Stack_num *stack,unsigned int NUM) { if(stack->top>=Stack_Size) { return 0; } stack->top++; stack->num[stack->top]=NUM; return 1; } uint8_t Pop_char(Stack_char *stack,uint8_t *c) { if(stack->top<=0) { return 0; } *c=stack->ch[stack->top]; stack->top--; return 1; } uint8_t Pop_num(Stack_num *stack,unsigned int *n) { if(stack->top<=0) { return 0; } *n=stack->num[stack->top]; stack->top--; return 1; } /* Copyright (C) ARM Ltd., 1999,2014 */ /* All rights reserved */ /* * RCS $Revision$ * Checkin $Date$ * Revising $Author: agrant $ */ #ifndef __stdint_h #define __stdint_h #define __ARMCLIB_VERSION 5060034 
#ifdef __INT64_TYPE__ /* armclang predefines '__INT64_TYPE__' and '__INT64_C_SUFFIX__' */ #define __INT64 __INT64_TYPE__ #else /* armcc has builtin '__int64' which can be used in --strict mode */ #define __INT64 __int64 #define __INT64_C_SUFFIX__ ll #endif #define __PASTE2(x, y) x ## y #define __PASTE(x, y) __PASTE2(x, y) #define __INT64_C(x) __ESCAPE__(__PASTE(x, __INT64_C_SUFFIX__)) #define __UINT64_C(x) __ESCAPE__(__PASTE(x ## u, __INT64_C_SUFFIX__)) #if defined(__clang__) || (defined(__ARMCC_VERSION) && !defined(__STRICT_ANSI__)) /* armclang and non-strict armcc allow 'long long' in system headers */ #define __LONGLONG long long #else /* strict armcc has '__int64' */ #define __LONGLONG __int64 #endif #ifndef __STDINT_DECLS #define __STDINT_DECLS #undef __CLIBNS #ifdef __cplusplus namespace std { #define __CLIBNS std:: extern "C" { #else #define __CLIBNS #endif /* __cplusplus */ /* * 'signed' is redundant below, except for 'signed char' and if * the typedef is used to declare a bitfield. */ /* 7.18.1.1 */ /* exact-width signed integer types */ typedef signed char int8_t; typedef signed short int int16_t; typedef signed int int32_t; typedef signed __INT64 int64_t; /* exact-width unsigned integer types */ typedef unsigned char uint8_t; typedef unsigned short int uint16_t; typedef unsigned int uint32_t; typedef unsigned __INT64 uint64_t; /* 7.18.1.2 */ /* smallest type of at least n bits */ /* minimum-width signed integer types */ typedef signed char int_least8_t; typedef signed short int int_least16_t; typedef signed int int_least32_t; typedef signed __INT64 int_least64_t; /* minimum-width unsigned integer types */ typedef unsigned char uint_least8_t; typedef unsigned short int uint_least16_t; typedef unsigned int uint_least32_t; typedef unsigned __INT64 uint_least64_t; /* 7.18.1.3 */ /* fastest minimum-width signed integer types */ typedef signed int int_fast8_t; typedef signed int int_fast16_t; typedef signed int int_fast32_t; typedef signed __INT64 int_fast64_t; /* fastest minimum-width unsigned integer types */ typedef unsigned int uint_fast8_t; typedef unsigned int uint_fast16_t; typedef unsigned int uint_fast32_t; typedef unsigned __INT64 uint_fast64_t; /* 7.18.1.4 integer types capable of holding object pointers */ #if __sizeof_ptr == 8 typedef signed __INT64 intptr_t; typedef unsigned __INT64 uintptr_t; #else typedef signed int intptr_t; typedef unsigned int uintptr_t; #endif /* 7.18.1.5 greatest-width integer types */ typedef signed __LONGLONG intmax_t; typedef unsigned __LONGLONG uintmax_t; #if !defined(__cplusplus) || defined(__STDC_LIMIT_MACROS) /* 7.18.2.1 */ /* minimum values of exact-width signed integer types */ #define INT8_MIN -128 #define INT16_MIN -32768 #define INT32_MIN (~0x7fffffff) /* -2147483648 is unsigned */ #define INT64_MIN __INT64_C(~0x7fffffffffffffff) /* -9223372036854775808 is unsigned */ /* maximum values of exact-width signed integer types */ #define INT8_MAX 127 #define INT16_MAX 32767 #define INT32_MAX 2147483647 #define INT64_MAX __INT64_C(9223372036854775807) /* maximum values of exact-width unsigned integer types */ #define UINT8_MAX 255 #define UINT16_MAX 65535 #define UINT32_MAX 4294967295u #define UINT64_MAX __UINT64_C(18446744073709551615) /* 7.18.2.2 */ /* minimum values of minimum-width signed integer types */ #define INT_LEAST8_MIN -128 #define INT_LEAST16_MIN -32768 #define INT_LEAST32_MIN (~0x7fffffff) #define INT_LEAST64_MIN __INT64_C(~0x7fffffffffffffff) /* maximum values of minimum-width signed integer types */ #define INT_LEAST8_MAX 127 
#define INT_LEAST16_MAX 32767 #define INT_LEAST32_MAX 2147483647 #define INT_LEAST64_MAX __INT64_C(9223372036854775807) /* maximum values of minimum-width unsigned integer types */ #define UINT_LEAST8_MAX 255 #define UINT_LEAST16_MAX 65535 #define UINT_LEAST32_MAX 4294967295u #define UINT_LEAST64_MAX __UINT64_C(18446744073709551615) /* 7.18.2.3 */ /* minimum values of fastest minimum-width signed integer types */ #define INT_FAST8_MIN (~0x7fffffff) #define INT_FAST16_MIN (~0x7fffffff) #define INT_FAST32_MIN (~0x7fffffff) #define INT_FAST64_MIN __INT64_C(~0x7fffffffffffffff) /* maximum values of fastest minimum-width signed integer types */ #define INT_FAST8_MAX 2147483647 #define INT_FAST16_MAX 2147483647 #define INT_FAST32_MAX 2147483647 #define INT_FAST64_MAX __INT64_C(9223372036854775807) /* maximum values of fastest minimum-width unsigned integer types */ #define UINT_FAST8_MAX 4294967295u #define UINT_FAST16_MAX 4294967295u #define UINT_FAST32_MAX 4294967295u #define UINT_FAST64_MAX __UINT64_C(18446744073709551615) /* 7.18.2.4 */ /* minimum value of pointer-holding signed integer type */ #if __sizeof_ptr == 8 #define INTPTR_MIN INT64_MIN #else #define INTPTR_MIN INT32_MIN #endif /* maximum value of pointer-holding signed integer type */ #if __sizeof_ptr == 8 #define INTPTR_MAX INT64_MAX #else #define INTPTR_MAX INT32_MAX #endif /* maximum value of pointer-holding unsigned integer type */ #if __sizeof_ptr == 8 #define UINTPTR_MAX UINT64_MAX #else #define UINTPTR_MAX UINT32_MAX #endif /* 7.18.2.5 */ /* minimum value of greatest-width signed integer type */ #define INTMAX_MIN __ESCAPE__(~0x7fffffffffffffffll) /* maximum value of greatest-width signed integer type */ #define INTMAX_MAX __ESCAPE__(9223372036854775807ll) /* maximum value of greatest-width unsigned integer type */ #define UINTMAX_MAX __ESCAPE__(18446744073709551615ull) /* 7.18.3 */ /* limits of ptrdiff_t */ #if __sizeof_ptr == 8 #define PTRDIFF_MIN INT64_MIN #define PTRDIFF_MAX INT64_MAX #else #define PTRDIFF_MIN INT32_MIN #define PTRDIFF_MAX INT32_MAX #endif /* limits of sig_atomic_t */ #define SIG_ATOMIC_MIN (~0x7fffffff) #define SIG_ATOMIC_MAX 2147483647 /* limit of size_t */ #if __sizeof_ptr == 8 #define SIZE_MAX UINT64_MAX #else #define SIZE_MAX UINT32_MAX #endif /* limits of wchar_t */ /* NB we have to undef and redef because they're defined in both * stdint.h and wchar.h */ #undef WCHAR_MIN #undef WCHAR_MAX #if defined(__WCHAR32) || (defined(__ARM_SIZEOF_WCHAR_T) && __ARM_SIZEOF_WCHAR_T == 4) #define WCHAR_MIN 0 #define WCHAR_MAX 0xffffffffU #else #define WCHAR_MIN 0 #define WCHAR_MAX 65535 #endif /* limits of wint_t */ #define WINT_MIN (~0x7fffffff) #define WINT_MAX 2147483647 #endif /* __STDC_LIMIT_MACROS */ #if !defined(__cplusplus) || defined(__STDC_CONSTANT_MACROS) /* 7.18.4.1 macros for minimum-width integer constants */ #define INT8_C(x) (x) #define INT16_C(x) (x) #define INT32_C(x) (x) #define INT64_C(x) __INT64_C(x) #define UINT8_C(x) (x ## u) #define UINT16_C(x) (x ## u) #define UINT32_C(x) (x ## u) #define UINT64_C(x) __UINT64_C(x) /* 7.18.4.2 macros for greatest-width integer constants */ #define INTMAX_C(x) __ESCAPE__(x ## ll) #define UINTMAX_C(x) __ESCAPE__(x ## ull) #endif /* __STDC_CONSTANT_MACROS */ #ifdef __cplusplus } /* extern "C" */ } /* namespace std */ #endif /* __cplusplus */ #endif /* __STDINT_DECLS */ #ifdef __cplusplus #ifndef __STDINT_NO_EXPORTS using ::std::int8_t; using ::std::int16_t; using ::std::int32_t; using ::std::int64_t; using ::std::uint8_t; using ::std::uint16_t; using 
::std::uint32_t; using ::std::uint64_t; using ::std::int_least8_t; using ::std::int_least16_t; using ::std::int_least32_t; using ::std::int_least64_t; using ::std::uint_least8_t; using ::std::uint_least16_t; using ::std::uint_least32_t; using ::std::uint_least64_t; using ::std::int_fast8_t; using ::std::int_fast16_t; using ::std::int_fast32_t; using ::std::int_fast64_t; using ::std::uint_fast8_t; using ::std::uint_fast16_t; using ::std::uint_fast32_t; using ::std::uint_fast64_t; using ::std::intptr_t; using ::std::uintptr_t; using ::std::intmax_t; using ::std::uintmax_t; #endif #endif /* __cplusplus */ #undef __INT64 #undef __LONGLONG #endif /* __stdint_h */ /* end of stdint.h */ 在不改变原代码变量名情况下,实现小数点预算
07-16
typedef struct { unsigned int rec_count; unsigned char is_valid; } RecStatus; typedef struct measure_info_t { /*导航信息*/ double longitude; //bit 7:status bit 6 - 0:rec times RecStatus longitude_rec_status; double latitude; RecStatus latitude_rec_status; //航迹向 double track_angle; RecStatus track_angle_rec_status; //航迹速 double ground_speed; RecStatus ground_speed_rec_status; double magnetic_declination; RecStatus magnetic_declination_rec_status; double pitch; RecStatus pitch_rec_status; double roll; RecStatus roll_rec_status; double course; RecStatus course_rec_status; double pitch_rate; RecStatus pitch_rate_rec_status; double roll_rate; RecStatus roll_rate_rec_status; double course_rate; RecStatus course_rate_rec_status; int satellite_num; RecStatus satellite_num_rec_status; //对地速度 double speed_over_ground; RecStatus speed_over_ground_rec_status; //对水速度 double speed_through_water; RecStatus speed_through_water_rec_status; int utc_year; unsigned char utc_month; unsigned char utc_day; unsigned char utc_hour; unsigned char utc_minute; unsigned char utc_second; char time_zone; RecStatus utc_rec_status; int beijing_time_year; unsigned char beijing_time_month; unsigned char beijing_time_day; unsigned char beijing_time_hour; unsigned char beijing_time_minute; unsigned char beijing_time_second; /*气象水文信息*/ double air_pressure; RecStatus air_pressure_rec_status; double air_temperature; RecStatus air_temperature_rec_status; double humidity; RecStatus humidity_rec_status; double wind_direction; RecStatus wind_direction_rec_status; double wind_speed; RecStatus wind_speed_rec_status; int measure_time; RecStatus measure_time_rec_status; //波导高度 double duct_height; RecStatus duct_height_rec_status; //波导强度 double duct_strength; RecStatus duct_strength_rec_status; //穿透角 double penetration_angle; RecStatus penetration_angle_rec_status; //截止频率 double cutoff_frequency; RecStatus cutoff_frequency_rec_status; double water_depth; RecStatus water_depth_rec_status; //吃水深度 double draft_depth; RecStatus draft_depth_rec_status; //气温 double temperature; RecStatus temperature_rec_status; //空温 double ambient_temperature; RecStatus ambient_temperature_rec_status; //海温 double sea_temperature; RecStatus sea_temperature_rec_status; }MEASURE_INFO; MEASURE_INFO net_measure; MEASURE_INFO uart_measure; MEASURE_INFO can0_measure; MEASURE_INFO can1_measure; 这是我的数据结构及定义。$VMWCV,vv.vv,s*CC<CR><LF> $---引导符 VM---计程仪标志 WCV---数据内容标志 vv.vv---航速,单位:节 vv---如为个数,前面加0 .vv---航速精度为二位小数 s ---数据方向: P表示船向前行进,N表示船向后退 *---校验和标志 CC---校验和(“$”与“*”之间的字符代码异或,不含“$”和“*” <CR><LF>---数据终止标志. 
,---数据间隔标志 数据实例: $VMWCV,00.98,N*38 这是我的处理函数: #define MAX_FIELDS 32 #define MAX_FIELD_LEN 32 #define TYPE_INT 0 #define TYPE_DOUBLE 1 #define TYPE_STRING 2 // 解析结果结构体 typedef struct { int type; // 0=int, 1=double, 2=string union { int i_value; double d_value; char str_value[MAX_FIELD_LEN]; } data; int valid; // 是否有效 } NmeaField; // 检查报头是否以指定后缀结尾 static int header_matches(const char *sentence, const char *suffix) { // 找到第一个逗号的位置 const char *comma = strchr(sentence, ','); if (!comma) return 0; // 计算报头长度(从$后到逗号前) size_t header_len = comma - (sentence + 1); size_t suffix_len = strlen(suffix); // 报头必须至少和suffix一样长 if (header_len < suffix_len) return 0; // 比较报头最后几个字符是否匹配suffix const char *header_end = comma - suffix_len; return (strncmp(header_end, suffix, suffix_len) == 0); } // NMEA解析函数 int parse_nmea(const char *sentence, const char *suffix, const int *field_pos, const int *field_types, NmeaField *results, int num_fields) { // 检查输入有效性 if (!sentence || !suffix || !field_pos || !field_types || !results || num_fields <= 0) { return -1; } // 验证基本格式 if (sentence[0] != '$') { return 0; // 无效报文格式 } // 验证报头后缀 if (!header_matches(sentence, suffix)) { return 0; // 不是目标报文 } // 查找字段结束位置 const char *asterisk = strchr(sentence, '*'); const char *end = asterisk ? asterisk : sentence + strlen(sentence); // 分割字段 char fields[MAX_FIELDS][MAX_FIELD_LEN]; int field_count = 0; // 找到第一个逗号后的位置 const char *ptr = strchr(sentence, ','); if (!ptr) { return 0; // 没有字段 } ptr++; // 跳过第一个逗号 while (field_count < MAX_FIELDS && ptr < end) { const char *comma = strchr(ptr, ','); size_t len = comma ? (size_t)(comma - ptr) : (size_t)(end - ptr); // 防止字段过长 len = len < MAX_FIELD_LEN - 1 ? len : MAX_FIELD_LEN - 1; strncpy(fields[field_count], ptr, len); fields[field_count][len] = '\0'; field_count++; ptr = comma ? comma + 1 : end; } // 解析请求的字段 int valid_count = 0; for (int i = 0; i < num_fields; i++) { results[i].valid = 0; // 默认无效 int pos = field_pos[i] - 1; // 转换为0-based索引 if (pos < 0 || pos >= field_count) { continue; // 位置无效 } char *field = fields[pos]; // 空字段处理 if (field[0] == '\0') { continue; } // 根据类型解析 switch (field_types[i]) { case TYPE_INT: // int { char *end; long val = strtol(field, &end, 10); // 检查转换是否成功且没有溢出 if (end != field && *end == '\0' && val >= INT_MIN && val <= INT_MAX) { results[i].data.i_value = (int)val; results[i].type = TYPE_INT; results[i].valid = 1; valid_count++; } break; } case TYPE_DOUBLE: // double { char *end; double val = strtod(field, &end); if (end != field && *end == '\0' && !isinf(val) && !isnan(val)) { results[i].data.d_value = val; results[i].type = TYPE_DOUBLE; results[i].valid = 1; valid_count++; } break; } case TYPE_STRING: // string strncpy(results[i].data.str_value, field, MAX_FIELD_LEN - 1); results[i].data.str_value[MAX_FIELD_LEN - 1] = '\0'; results[i].type = TYPE_STRING; results[i].valid = 1; valid_count++; break; default: // 未知类型 break; } } return valid_count; } 帮我提取数据结构中的对水速度,前进为正数,后退为负数,别忘了 rec_count自增
06-07
/* * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include "api/neteq/neteq.h" #include <math.h> #include <stdlib.h> #include <string.h> // memset #include <algorithm> #include <memory> #include <set> #include <string> #include <vector> #include "absl/flags/flag.h" #include "api/audio/audio_frame.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "modules/audio_coding/codecs/pcm16b/pcm16b.h" #include "modules/audio_coding/neteq/test/neteq_decoding_test.h" #include "modules/audio_coding/neteq/tools/audio_loop.h" #include "modules/audio_coding/neteq/tools/neteq_rtp_dump_input.h" #include "modules/audio_coding/neteq/tools/neteq_test.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtcp_statistics.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "rtc_base/ignore_wundef.h" #include "rtc_base/message_digest.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/arch.h" #include "test/field_trial.h" #include "test/gtest.h" #include "test/testsupport/file_utils.h" ABSL_FLAG(bool, gen_ref, false, "Generate reference files."); namespace webrtc { #if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && \ defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && \ (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)) && \ defined(WEBRTC_CODEC_ILBC) #define MAYBE_TestBitExactness TestBitExactness #else #define MAYBE_TestBitExactness DISABLED_TestBitExactness #endif TEST_F(NetEqDecodingTest, MAYBE_TestBitExactness) { const std::string input_rtp_file = webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp"); const std::string output_checksum = "dee7a10ab92526876a70a85bc48a4906901af3df"; const std::string network_stats_checksum = "911dbf5fd97f48d25b8f0967286eb73c9d6f6158"; DecodeAndCompare(input_rtp_file, output_checksum, network_stats_checksum, absl::GetFlag(FLAGS_gen_ref)); } #if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && \ defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && defined(WEBRTC_CODEC_OPUS) #define MAYBE_TestOpusBitExactness TestOpusBitExactness #else #define MAYBE_TestOpusBitExactness DISABLED_TestOpusBitExactness #endif TEST_F(NetEqDecodingTest, MAYBE_TestOpusBitExactness) { const std::string input_rtp_file = webrtc::test::ResourcePath("audio_coding/neteq_opus", "rtp"); const std::string output_checksum = "fec6827bb9ee0b21770bbbb4a3a6f8823bf537dc|" "3610cc7be4b3407b9c273b1299ab7f8f47cca96b"; const std::string network_stats_checksum = "3d043e47e5f4bb81d37e7bce8c44bf802965c853|" "076662525572dba753b11578330bd491923f7f5e"; DecodeAndCompare(input_rtp_file, output_checksum, network_stats_checksum, absl::GetFlag(FLAGS_gen_ref)); } #if defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && \ defined(WEBRTC_NETEQ_UNITTEST_BITEXACT) && defined(WEBRTC_CODEC_OPUS) #define MAYBE_TestOpusDtxBitExactness TestOpusDtxBitExactness #else #define MAYBE_TestOpusDtxBitExactness DISABLED_TestOpusDtxBitExactness #endif TEST_F(NetEqDecodingTest, MAYBE_TestOpusDtxBitExactness) { const std::string input_rtp_file = webrtc::test::ResourcePath("audio_coding/neteq_opus_dtx", "rtp"); const std::string output_checksum 
= "b3c4899eab5378ef5e54f2302948872149f6ad5e|" "589e975ec31ea13f302457fea1425be9380ffb96"; const std::string network_stats_checksum = "dc8447b9fee1a21fd5d1f4045d62b982a3fb0215"; DecodeAndCompare(input_rtp_file, output_checksum, network_stats_checksum, absl::GetFlag(FLAGS_gen_ref)); } // Use fax mode to avoid time-scaling. This is to simplify the testing of // packet waiting times in the packet buffer. class NetEqDecodingTestFaxMode : public NetEqDecodingTest { protected: NetEqDecodingTestFaxMode() : NetEqDecodingTest() { config_.for_test_no_time_stretching = true; } void TestJitterBufferDelay(bool apply_packet_loss); }; TEST_F(NetEqDecodingTestFaxMode, TestFrameWaitingTimeStatistics) { // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio. size_t num_frames = 30; const size_t kSamples = 10 * 16; const size_t kPayloadBytes = kSamples * 2; for (size_t i = 0; i < num_frames; ++i) { const uint8_t payload[kPayloadBytes] = {0}; RTPHeader rtp_info; rtp_info.sequenceNumber = rtc::checked_cast<uint16_t>(i); rtp_info.timestamp = rtc::checked_cast<uint32_t>(i * kSamples); rtp_info.ssrc = 0x1234; // Just an arbitrary SSRC. rtp_info.payloadType = 94; // PCM16b WB codec. rtp_info.markerBit = 0; ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload)); } // Pull out all data. for (size_t i = 0; i < num_frames; ++i) { bool muted; ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted)); ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_); } NetEqNetworkStatistics stats; EXPECT_EQ(0, neteq_->NetworkStatistics(&stats)); // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms // spacing (per definition), we expect the delay to increase with 10 ms for // each packet. Thus, we are calculating the statistics for a series from 10 // to 300, in steps of 10 ms. EXPECT_EQ(155, stats.mean_waiting_time_ms); EXPECT_EQ(155, stats.median_waiting_time_ms); EXPECT_EQ(10, stats.min_waiting_time_ms); EXPECT_EQ(300, stats.max_waiting_time_ms); // Check statistics again and make sure it's been reset. EXPECT_EQ(0, neteq_->NetworkStatistics(&stats)); EXPECT_EQ(-1, stats.mean_waiting_time_ms); EXPECT_EQ(-1, stats.median_waiting_time_ms); EXPECT_EQ(-1, stats.min_waiting_time_ms); EXPECT_EQ(-1, stats.max_waiting_time_ms); } TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDrift) { // Apply a clock drift of -25 ms / s (sender faster than receiver). const double kDriftFactor = 1000.0 / (1000.0 + 25.0); const double kNetworkFreezeTimeMs = 0.0; const bool kGetAudioDuringFreezeRecovery = false; const int kDelayToleranceMs = 20; const int kMaxTimeToSpeechMs = 100; LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs, kGetAudioDuringFreezeRecovery, kDelayToleranceMs, kMaxTimeToSpeechMs); } TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDrift) { // Apply a clock drift of +25 ms / s (sender slower than receiver). const double kDriftFactor = 1000.0 / (1000.0 - 25.0); const double kNetworkFreezeTimeMs = 0.0; const bool kGetAudioDuringFreezeRecovery = false; const int kDelayToleranceMs = 40; const int kMaxTimeToSpeechMs = 100; LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs, kGetAudioDuringFreezeRecovery, kDelayToleranceMs, kMaxTimeToSpeechMs); } TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDriftNetworkFreeze) { // Apply a clock drift of -25 ms / s (sender faster than receiver). 
TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDrift) {
  // Apply a clock drift of -25 ms / s (sender faster than receiver).
  const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
  const double kNetworkFreezeTimeMs = 0.0;
  const bool kGetAudioDuringFreezeRecovery = false;
  const int kDelayToleranceMs = 20;
  const int kMaxTimeToSpeechMs = 100;
  LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs,
                        kGetAudioDuringFreezeRecovery, kDelayToleranceMs,
                        kMaxTimeToSpeechMs);
}

TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDrift) {
  // Apply a clock drift of +25 ms / s (sender slower than receiver).
  const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
  const double kNetworkFreezeTimeMs = 0.0;
  const bool kGetAudioDuringFreezeRecovery = false;
  const int kDelayToleranceMs = 40;
  const int kMaxTimeToSpeechMs = 100;
  LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs,
                        kGetAudioDuringFreezeRecovery, kDelayToleranceMs,
                        kMaxTimeToSpeechMs);
}

TEST_F(NetEqDecodingTest, LongCngWithNegativeClockDriftNetworkFreeze) {
  // Apply a clock drift of -25 ms / s (sender faster than receiver).
  const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
  const double kNetworkFreezeTimeMs = 5000.0;
  const bool kGetAudioDuringFreezeRecovery = false;
  const int kDelayToleranceMs = 60;
  const int kMaxTimeToSpeechMs = 200;
  LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs,
                        kGetAudioDuringFreezeRecovery, kDelayToleranceMs,
                        kMaxTimeToSpeechMs);
}

TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDriftNetworkFreeze) {
  // Apply a clock drift of +25 ms / s (sender slower than receiver).
  const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
  const double kNetworkFreezeTimeMs = 5000.0;
  const bool kGetAudioDuringFreezeRecovery = false;
  const int kDelayToleranceMs = 40;
  const int kMaxTimeToSpeechMs = 100;
  LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs,
                        kGetAudioDuringFreezeRecovery, kDelayToleranceMs,
                        kMaxTimeToSpeechMs);
}

TEST_F(NetEqDecodingTest, LongCngWithPositiveClockDriftNetworkFreezeExtraPull) {
  // Apply a clock drift of +25 ms / s (sender slower than receiver).
  const double kDriftFactor = 1000.0 / (1000.0 - 25.0);
  const double kNetworkFreezeTimeMs = 5000.0;
  const bool kGetAudioDuringFreezeRecovery = true;
  const int kDelayToleranceMs = 40;
  const int kMaxTimeToSpeechMs = 100;
  LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs,
                        kGetAudioDuringFreezeRecovery, kDelayToleranceMs,
                        kMaxTimeToSpeechMs);
}

TEST_F(NetEqDecodingTest, LongCngWithoutClockDrift) {
  const double kDriftFactor = 1.0;  // No drift.
  const double kNetworkFreezeTimeMs = 0.0;
  const bool kGetAudioDuringFreezeRecovery = false;
  const int kDelayToleranceMs = 10;
  const int kMaxTimeToSpeechMs = 50;
  LongCngWithClockDrift(kDriftFactor, kNetworkFreezeTimeMs,
                        kGetAudioDuringFreezeRecovery, kDelayToleranceMs,
                        kMaxTimeToSpeechMs);
}

TEST_F(NetEqDecodingTest, UnknownPayloadType) {
  const size_t kPayloadBytes = 100;
  uint8_t payload[kPayloadBytes] = {0};
  RTPHeader rtp_info;
  PopulateRtpInfo(0, 0, &rtp_info);
  rtp_info.payloadType = 1;  // Not registered as a decoder.
  EXPECT_EQ(NetEq::kFail, neteq_->InsertPacket(rtp_info, payload));
}

#if defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX)
#define MAYBE_DecoderError DecoderError
#else
#define MAYBE_DecoderError DISABLED_DecoderError
#endif
TEST_F(NetEqDecodingTest, MAYBE_DecoderError) {
  const size_t kPayloadBytes = 100;
  uint8_t payload[kPayloadBytes] = {0};
  RTPHeader rtp_info;
  PopulateRtpInfo(0, 0, &rtp_info);
  rtp_info.payloadType = 103;  // iSAC, but the payload is invalid.
  EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
  // Set all of `out_data_` to 1, and verify that it was set to 0 by the call
  // to GetAudio.
  int16_t* out_frame_data = out_frame_.mutable_data();
  for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; ++i) {
    out_frame_data[i] = 1;
  }
  bool muted;
  EXPECT_EQ(NetEq::kFail, neteq_->GetAudio(&out_frame_, &muted));
  ASSERT_FALSE(muted);
  // Verify that the first 160 samples are set to 0.
  static const int kExpectedOutputLength = 160;  // 10 ms at 16 kHz sample rate.
  const int16_t* const_out_frame_data = out_frame_.data();
  for (int i = 0; i < kExpectedOutputLength; ++i) {
    rtc::StringBuilder ss;
    ss << "i = " << i;
    SCOPED_TRACE(ss.str());  // Print out the parameter values on failure.
    EXPECT_EQ(0, const_out_frame_data[i]);
  }
}
TEST_F(NetEqDecodingTest, GetAudioBeforeInsertPacket) {
  // Set all of `out_data_` to 1, and verify that it was set to 0 by the call
  // to GetAudio.
  int16_t* out_frame_data = out_frame_.mutable_data();
  for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; ++i) {
    out_frame_data[i] = 1;
  }
  bool muted;
  EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
  ASSERT_FALSE(muted);
  // Verify that the first block of samples is set to 0.
  static const int kExpectedOutputLength =
      kInitSampleRateHz / 100;  // 10 ms at initial sample rate.
  const int16_t* const_out_frame_data = out_frame_.data();
  for (int i = 0; i < kExpectedOutputLength; ++i) {
    rtc::StringBuilder ss;
    ss << "i = " << i;
    SCOPED_TRACE(ss.str());  // Print out the parameter values on failure.
    EXPECT_EQ(0, const_out_frame_data[i]);
  }
  // Verify that the sample rate did not change from the initial configuration.
  EXPECT_EQ(config_.sample_rate_hz, neteq_->last_output_sample_rate_hz());
}

class NetEqBgnTest : public NetEqDecodingTest {
 protected:
  void CheckBgn(int sampling_rate_hz) {
    size_t expected_samples_per_channel = 0;
    uint8_t payload_type = 0xFF;  // Invalid.
    if (sampling_rate_hz == 8000) {
      expected_samples_per_channel = kBlockSize8kHz;
      payload_type = 93;  // PCM 16, 8 kHz.
    } else if (sampling_rate_hz == 16000) {
      expected_samples_per_channel = kBlockSize16kHz;
      payload_type = 94;  // PCM 16, 16 kHz.
    } else if (sampling_rate_hz == 32000) {
      expected_samples_per_channel = kBlockSize32kHz;
      payload_type = 95;  // PCM 16, 32 kHz.
    } else {
      ASSERT_TRUE(false);  // Unsupported test case.
    }

    AudioFrame output;
    test::AudioLoop input;
    // We are using the same 32 kHz input file for all tests, regardless of
    // `sampling_rate_hz`. The output may sound weird, but the test is still
    // valid.
    ASSERT_TRUE(input.Init(
        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm"),
        10 * sampling_rate_hz,  // Max 10 seconds loop length.
        expected_samples_per_channel));

    // Payload of 10 ms of PCM16 32 kHz.
    uint8_t payload[kBlockSize32kHz * sizeof(int16_t)];
    RTPHeader rtp_info;
    PopulateRtpInfo(0, 0, &rtp_info);
    rtp_info.payloadType = payload_type;

    bool muted;
    for (int n = 0; n < 10; ++n) {  // Insert a few packets and get audio.
      auto block = input.GetNextBlock();
      ASSERT_EQ(expected_samples_per_channel, block.size());
      size_t enc_len_bytes =
          WebRtcPcm16b_Encode(block.data(), block.size(), payload);
      ASSERT_EQ(enc_len_bytes, expected_samples_per_channel * 2);

      ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView<const uint8_t>(
                                                      payload, enc_len_bytes)));
      output.Reset();
      ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
      ASSERT_EQ(1u, output.num_channels_);
      ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
      ASSERT_EQ(AudioFrame::kNormalSpeech, output.speech_type_);

      // Next packet.
      rtp_info.timestamp +=
          rtc::checked_cast<uint32_t>(expected_samples_per_channel);
      rtp_info.sequenceNumber++;
    }

    output.Reset();

    // Get audio without inserting packets, expecting PLC and PLC-to-CNG. Pull
    // one frame without checking speech-type. This is the first frame pulled
    // without inserting any packet, and might not be labeled as PLC.
    ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
    ASSERT_EQ(1u, output.num_channels_);
    ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);

    // To be able to test the fading of background noise we need to pull at
    // least 611 frames.
    const int kFadingThreshold = 611;
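    // (611 frames of 10 ms each is roughly 6.1 s of audio; the exact
    // threshold presumably corresponds to the point at which NetEq's
    // background-noise fade-out is guaranteed to have completed, which is
    // internal behavior not visible in this file.)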
    // Test several CNG-to-PLC packets for the expected behavior. The number 20
    // is arbitrary, but large enough to cover a sufficient number of frames.
    const int kNumPlcToCngTestFrames = 20;
    bool plc_to_cng = false;
    for (int n = 0; n < kFadingThreshold + kNumPlcToCngTestFrames; ++n) {
      output.Reset();
      // Set to non-zero.
      memset(output.mutable_data(), 1, AudioFrame::kMaxDataSizeBytes);
      ASSERT_EQ(0, neteq_->GetAudio(&output, &muted));
      ASSERT_FALSE(muted);
      ASSERT_EQ(1u, output.num_channels_);
      ASSERT_EQ(expected_samples_per_channel, output.samples_per_channel_);
      if (output.speech_type_ == AudioFrame::kPLCCNG) {
        plc_to_cng = true;
        double sum_squared = 0;
        const int16_t* output_data = output.data();
        for (size_t k = 0;
             k < output.num_channels_ * output.samples_per_channel_; ++k)
          sum_squared += output_data[k] * output_data[k];
        EXPECT_EQ(0, sum_squared);
      } else {
        EXPECT_EQ(AudioFrame::kPLC, output.speech_type_);
      }
    }
    EXPECT_TRUE(plc_to_cng);  // Just to be sure that PLC-to-CNG has occurred.
  }
};

TEST_F(NetEqBgnTest, RunTest) {
  CheckBgn(8000);
  CheckBgn(16000);
  CheckBgn(32000);
}

TEST_F(NetEqDecodingTest, SequenceNumberWrap) {
  // Start with a sequence number that will soon wrap.
  std::set<uint16_t> drop_seq_numbers;  // Don't drop any packets.
  WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false);
}

TEST_F(NetEqDecodingTest, SequenceNumberWrapAndDrop) {
  // Start with a sequence number that will soon wrap.
  std::set<uint16_t> drop_seq_numbers;
  drop_seq_numbers.insert(0xFFFF);
  drop_seq_numbers.insert(0x0);
  WrapTest(0xFFFF - 10, 0, drop_seq_numbers, true, false);
}

TEST_F(NetEqDecodingTest, TimestampWrap) {
  // Start with a timestamp that will soon wrap.
  std::set<uint16_t> drop_seq_numbers;
  WrapTest(0, 0xFFFFFFFF - 3000, drop_seq_numbers, false, true);
}

TEST_F(NetEqDecodingTest, TimestampAndSequenceNumberWrap) {
  // Start with a timestamp and a sequence number that will wrap at the same
  // time.
  std::set<uint16_t> drop_seq_numbers;
  WrapTest(0xFFFF - 10, 0xFFFFFFFF - 5000, drop_seq_numbers, true, true);
}
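// Note the starting points used above: at 0xFFFF - 10 the 16-bit RTP sequence
// number wraps back to 0 after roughly a dozen packets, and 0xFFFFFFFF - 3000
// (or - 5000) puts the 32-bit timestamp a few thousand samples short of its
// own wrap, so both counters roll over early in each test run.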
TEST_F(NetEqDecodingTest, DiscardDuplicateCng) {
  uint16_t seq_no = 0;
  uint32_t timestamp = 0;
  const int kFrameSizeMs = 10;
  const int kSampleRateKhz = 16;
  const int kSamples = kFrameSizeMs * kSampleRateKhz;
  const size_t kPayloadBytes = kSamples * 2;

  const int algorithmic_delay_samples =
      std::max(algorithmic_delay_ms_ * kSampleRateKhz, 5 * kSampleRateKhz / 8);
  // Insert three speech packets. Three are needed to get the frame length
  // correct.
  uint8_t payload[kPayloadBytes] = {0};
  RTPHeader rtp_info;
  bool muted;
  for (int i = 0; i < 3; ++i) {
    PopulateRtpInfo(seq_no, timestamp, &rtp_info);
    ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
    ++seq_no;
    timestamp += kSamples;

    // Pull audio once.
    ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
    ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
  }
  // Verify speech output.
  EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);

  // Insert same CNG packet twice.
  const int kCngPeriodMs = 100;
  const int kCngPeriodSamples = kCngPeriodMs * kSampleRateKhz;
  size_t payload_len;
  PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
  // This is the first time this CNG packet is inserted.
  ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView<const uint8_t>(
                                                  payload, payload_len)));

  // Pull audio once and make sure CNG is played.
  ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
  ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
  EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
  EXPECT_FALSE(
      neteq_->GetPlayoutTimestamp());  // Returns empty value during CNG.
  EXPECT_EQ(timestamp - algorithmic_delay_samples,
            out_frame_.timestamp_ + out_frame_.samples_per_channel_);

  // Insert the same CNG packet again. Note that at this point it is old, since
  // we have already decoded the first copy of it.
  ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, rtc::ArrayView<const uint8_t>(
                                                  payload, payload_len)));

  // Pull audio until we have played `kCngPeriodMs` of CNG. Start at 10 ms
  // since we have already pulled out CNG once.
  for (int cng_time_ms = 10; cng_time_ms < kCngPeriodMs; cng_time_ms += 10) {
    ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
    ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
    EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
    EXPECT_FALSE(
        neteq_->GetPlayoutTimestamp());  // Returns empty value during CNG.
    EXPECT_EQ(timestamp - algorithmic_delay_samples,
              out_frame_.timestamp_ + out_frame_.samples_per_channel_);
  }

  ++seq_no;
  timestamp += kCngPeriodSamples;
  uint32_t first_speech_timestamp = timestamp;
  // Insert speech again.
  for (int i = 0; i < 3; ++i) {
    PopulateRtpInfo(seq_no, timestamp, &rtp_info);
    ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
    ++seq_no;
    timestamp += kSamples;
  }

  // Pull audio once and verify that the output is speech again.
  ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
  ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
  EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
  absl::optional<uint32_t> playout_timestamp = neteq_->GetPlayoutTimestamp();
  ASSERT_TRUE(playout_timestamp);
  EXPECT_EQ(first_speech_timestamp + kSamples - algorithmic_delay_samples,
            *playout_timestamp);
}
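// The timestamp assertions above all encode the same relationship: the
// playout position lags the most recently inserted timestamp by NetEq's
// algorithmic delay, so (inserted timestamp) - (algorithmic delay in samples)
// should equal the end of the frame that was just played out.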
TEST_F(NetEqDecodingTest, CngFirst) {
  uint16_t seq_no = 0;
  uint32_t timestamp = 0;
  const int kFrameSizeMs = 10;
  const int kSampleRateKhz = 16;
  const int kSamples = kFrameSizeMs * kSampleRateKhz;
  const int kPayloadBytes = kSamples * 2;
  const int kCngPeriodMs = 100;
  const int kCngPeriodSamples = kCngPeriodMs * kSampleRateKhz;
  size_t payload_len;

  uint8_t payload[kPayloadBytes] = {0};
  RTPHeader rtp_info;

  PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
  ASSERT_EQ(NetEq::kOK,
            neteq_->InsertPacket(
                rtp_info, rtc::ArrayView<const uint8_t>(payload, payload_len)));
  ++seq_no;
  timestamp += kCngPeriodSamples;

  // Pull audio once and make sure CNG is played.
  bool muted;
  ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
  ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
  EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);

  // Insert some speech packets.
  const uint32_t first_speech_timestamp = timestamp;
  int timeout_counter = 0;
  do {
    ASSERT_LT(timeout_counter++, 20) << "Test timed out";
    PopulateRtpInfo(seq_no, timestamp, &rtp_info);
    ASSERT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
    ++seq_no;
    timestamp += kSamples;

    // Pull audio once.
    ASSERT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
    ASSERT_EQ(kBlockSize16kHz, out_frame_.samples_per_channel_);
  } while (!IsNewerTimestamp(out_frame_.timestamp_, first_speech_timestamp));
  // Verify speech output.
  EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
}

class NetEqDecodingTestWithMutedState : public NetEqDecodingTest {
 public:
  NetEqDecodingTestWithMutedState() : NetEqDecodingTest() {
    config_.enable_muted_state = true;
  }

 protected:
  static constexpr size_t kSamples = 10 * 16;
  static constexpr size_t kPayloadBytes = kSamples * 2;

  void InsertPacket(uint32_t rtp_timestamp) {
    uint8_t payload[kPayloadBytes] = {0};
    RTPHeader rtp_info;
    PopulateRtpInfo(0, rtp_timestamp, &rtp_info);
    EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
  }

  void InsertCngPacket(uint32_t rtp_timestamp) {
    uint8_t payload[kPayloadBytes] = {0};
    RTPHeader rtp_info;
    size_t payload_len;
    PopulateCng(0, rtp_timestamp, &rtp_info, payload, &payload_len);
    EXPECT_EQ(NetEq::kOK,
              neteq_->InsertPacket(rtp_info, rtc::ArrayView<const uint8_t>(
                                                 payload, payload_len)));
  }

  bool GetAudioReturnMuted() {
    bool muted;
    EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
    return muted;
  }

  void GetAudioUntilMuted() {
    while (!GetAudioReturnMuted()) {
      ASSERT_LT(counter_++, 1000) << "Test timed out";
    }
  }

  void GetAudioUntilNormal() {
    bool muted = false;
    while (out_frame_.speech_type_ != AudioFrame::kNormalSpeech) {
      EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
      ASSERT_LT(counter_++, 1000) << "Test timed out";
    }
    EXPECT_FALSE(muted);
  }

  int counter_ = 0;
};

// Verifies that NetEq goes in and out of muted state as expected.
TEST_F(NetEqDecodingTestWithMutedState, MutedState) {
  // Insert one speech packet.
  InsertPacket(0);
  // Pull out audio once and expect it not to be muted.
  EXPECT_FALSE(GetAudioReturnMuted());
  // Pull data until faded out.
  GetAudioUntilMuted();
  EXPECT_TRUE(out_frame_.muted());

  // Verify that output audio is not written during muted mode. Other
  // parameters should be correct, though.
  AudioFrame new_frame;
  int16_t* frame_data = new_frame.mutable_data();
  for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; i++) {
    frame_data[i] = 17;
  }
  bool muted;
  EXPECT_EQ(0, neteq_->GetAudio(&new_frame, &muted));
  EXPECT_TRUE(muted);
  EXPECT_TRUE(out_frame_.muted());
  for (size_t i = 0; i < AudioFrame::kMaxDataSizeSamples; i++) {
    EXPECT_EQ(17, frame_data[i]);
  }
  EXPECT_EQ(out_frame_.timestamp_ + out_frame_.samples_per_channel_,
            new_frame.timestamp_);
  EXPECT_EQ(out_frame_.samples_per_channel_, new_frame.samples_per_channel_);
  EXPECT_EQ(out_frame_.sample_rate_hz_, new_frame.sample_rate_hz_);
  EXPECT_EQ(out_frame_.num_channels_, new_frame.num_channels_);
  EXPECT_EQ(out_frame_.speech_type_, new_frame.speech_type_);
  EXPECT_EQ(out_frame_.vad_activity_, new_frame.vad_activity_);

  // Insert new data. Timestamp is corrected for the time elapsed since the
  // last packet. Verify that normal operation resumes.
  InsertPacket(kSamples * counter_);
  GetAudioUntilNormal();
  EXPECT_FALSE(out_frame_.muted());

  NetEqNetworkStatistics stats;
  EXPECT_EQ(0, neteq_->NetworkStatistics(&stats));
  // NetEqNetworkStatistics::expand_rate tells the fraction of samples that
  // were concealment samples, in Q14 (16384 = 100%). The vast majority should
  // be concealment samples in this test.
  EXPECT_GT(stats.expand_rate, 14000);
  // And, it should be greater than the speech_expand_rate.
  EXPECT_GT(stats.expand_rate, stats.speech_expand_rate);
}
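// (In Q14, 14000 / 16384 is roughly 85%, so the assertion above requires at
// least about 85% of the emitted samples to be concealment samples.)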
// Verifies that NetEq goes out of muted state when given a delayed packet.
TEST_F(NetEqDecodingTestWithMutedState, MutedStateDelayedPacket) {
  // Insert one speech packet.
  InsertPacket(0);
  // Pull out audio once and expect it not to be muted.
  EXPECT_FALSE(GetAudioReturnMuted());
  // Pull data until faded out.
  GetAudioUntilMuted();

  // Insert new data. Timestamp is only corrected for half of the time elapsed
  // since the last packet. That is, the new packet is delayed. Verify that
  // normal operation resumes.
  InsertPacket(kSamples * counter_ / 2);
  GetAudioUntilNormal();
}

// Verifies that NetEq goes out of muted state when given a future packet.
TEST_F(NetEqDecodingTestWithMutedState, MutedStateFuturePacket) {
  // Insert one speech packet.
  InsertPacket(0);
  // Pull out audio once and expect it not to be muted.
  EXPECT_FALSE(GetAudioReturnMuted());
  // Pull data until faded out.
  GetAudioUntilMuted();

  // Insert new data. Timestamp is over-corrected for the time elapsed since
  // the last packet. That is, the new packet is too early. Verify that normal
  // operation resumes.
  InsertPacket(kSamples * counter_ * 2);
  GetAudioUntilNormal();
}

// Verifies that NetEq goes out of muted state when given an old packet.
TEST_F(NetEqDecodingTestWithMutedState, MutedStateOldPacket) {
  // Insert one speech packet.
  InsertPacket(0);
  // Pull out audio once and expect it not to be muted.
  EXPECT_FALSE(GetAudioReturnMuted());
  // Pull data until faded out.
  GetAudioUntilMuted();

  EXPECT_NE(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
  // Insert a few packets which are older than the first packet.
  for (int i = 0; i < 5; ++i) {
    InsertPacket(kSamples * (i - 1000));
  }
  EXPECT_FALSE(GetAudioReturnMuted());
  EXPECT_EQ(AudioFrame::kNormalSpeech, out_frame_.speech_type_);
}

// Verifies that NetEq doesn't enter muted state when CNG mode is active and
// the packet stream is suspended for a long time.
TEST_F(NetEqDecodingTestWithMutedState, DoNotMuteExtendedCngWithoutPackets) {
  // Insert one CNG packet.
  InsertCngPacket(0);

  // Pull 10 seconds of audio (10 ms audio generated per lap).
  for (int i = 0; i < 1000; ++i) {
    bool muted;
    EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
    ASSERT_FALSE(muted);
  }
  EXPECT_EQ(AudioFrame::kCNG, out_frame_.speech_type_);
}

// Verifies that NetEq goes back to normal after a long CNG period with the
// packet stream suspended.
TEST_F(NetEqDecodingTestWithMutedState, RecoverAfterExtendedCngWithoutPackets) {
  // Insert one CNG packet.
  InsertCngPacket(0);

  // Pull 10 seconds of audio (10 ms audio generated per lap).
  for (int i = 0; i < 1000; ++i) {
    bool muted;
    EXPECT_EQ(0, neteq_->GetAudio(&out_frame_, &muted));
  }

  // Insert new data. Timestamp is corrected for the time elapsed since the
  // last packet. Verify that normal operation resumes.
  InsertPacket(kSamples * counter_);
  GetAudioUntilNormal();
}

namespace {
::testing::AssertionResult AudioFramesEqualExceptData(const AudioFrame& a,
                                                      const AudioFrame& b) {
  if (a.timestamp_ != b.timestamp_)
    return ::testing::AssertionFailure()
           << "timestamp_ diff (" << a.timestamp_ << " != " << b.timestamp_
           << ")";
  if (a.sample_rate_hz_ != b.sample_rate_hz_)
    return ::testing::AssertionFailure()
           << "sample_rate_hz_ diff (" << a.sample_rate_hz_
           << " != " << b.sample_rate_hz_ << ")";
  if (a.samples_per_channel_ != b.samples_per_channel_)
    return ::testing::AssertionFailure()
           << "samples_per_channel_ diff (" << a.samples_per_channel_
           << " != " << b.samples_per_channel_ << ")";
  if (a.num_channels_ != b.num_channels_)
    return ::testing::AssertionFailure()
           << "num_channels_ diff (" << a.num_channels_
           << " != " << b.num_channels_ << ")";
  if (a.speech_type_ != b.speech_type_)
    return ::testing::AssertionFailure()
           << "speech_type_ diff (" << a.speech_type_
           << " != " << b.speech_type_ << ")";
  if (a.vad_activity_ != b.vad_activity_)
    return ::testing::AssertionFailure()
           << "vad_activity_ diff (" << a.vad_activity_
           << " != " << b.vad_activity_ << ")";
  return ::testing::AssertionSuccess();
}

::testing::AssertionResult AudioFramesEqual(const AudioFrame& a,
                                            const AudioFrame& b) {
  ::testing::AssertionResult res = AudioFramesEqualExceptData(a, b);
  if (!res)
    return res;
  if (memcmp(a.data(), b.data(),
             a.samples_per_channel_ * a.num_channels_ * sizeof(*a.data())) !=
      0) {
    return ::testing::AssertionFailure() << "data_ diff";
  }
  return ::testing::AssertionSuccess();
}
}  // namespace

TEST_F(NetEqDecodingTestTwoInstances, CompareMutedStateOnOff) {
  ASSERT_FALSE(config_.enable_muted_state);
  config2_.enable_muted_state = true;
  CreateSecondInstance();

  // Insert one speech packet into both NetEqs.
  const size_t kSamples = 10 * 16;
  const size_t kPayloadBytes = kSamples * 2;
  uint8_t payload[kPayloadBytes] = {0};
  RTPHeader rtp_info;
  PopulateRtpInfo(0, 0, &rtp_info);
  EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
  EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload));

  AudioFrame out_frame1, out_frame2;
  bool muted;
  for (int i = 0; i < 1000; ++i) {
    rtc::StringBuilder ss;
    ss << "i = " << i;
    SCOPED_TRACE(ss.str());  // Print out the loop iterator on failure.
    EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted));
    EXPECT_FALSE(muted);
    EXPECT_EQ(0, neteq2_->GetAudio(&out_frame2, &muted));
    if (muted) {
      EXPECT_TRUE(AudioFramesEqualExceptData(out_frame1, out_frame2));
    } else {
      EXPECT_TRUE(AudioFramesEqual(out_frame1, out_frame2));
    }
  }
  EXPECT_TRUE(muted);

  // Insert new data. Timestamp is corrected for the time elapsed since the
  // last packet.
  for (int i = 0; i < 5; ++i) {
    PopulateRtpInfo(0, kSamples * 1000 + kSamples * i, &rtp_info);
    EXPECT_EQ(0, neteq_->InsertPacket(rtp_info, payload));
    EXPECT_EQ(0, neteq2_->InsertPacket(rtp_info, payload));
  }

  int counter = 0;
  while (out_frame1.speech_type_ != AudioFrame::kNormalSpeech) {
    ASSERT_LT(counter++, 1000) << "Test timed out";
    rtc::StringBuilder ss;
    ss << "counter = " << counter;
    SCOPED_TRACE(ss.str());  // Print out the loop iterator on failure.
    EXPECT_EQ(0, neteq_->GetAudio(&out_frame1, &muted));
    EXPECT_FALSE(muted);
    EXPECT_EQ(0, neteq2_->GetAudio(&out_frame2, &muted));
    if (muted) {
      EXPECT_TRUE(AudioFramesEqualExceptData(out_frame1, out_frame2));
    } else {
      EXPECT_TRUE(AudioFramesEqual(out_frame1, out_frame2));
    }
  }
  EXPECT_FALSE(muted);
}

TEST_F(NetEqDecodingTest, TestConcealmentEvents) {
  const int kNumConcealmentEvents = 19;
  const size_t kSamples = 10 * 16;
  const size_t kPayloadBytes = kSamples * 2;
  int seq_no = 0;
  RTPHeader rtp_info;
  rtp_info.ssrc = 0x1234;     // Just an arbitrary SSRC.
  rtp_info.payloadType = 94;  // PCM16b WB codec.
  rtp_info.markerBit = 0;
  const uint8_t payload[kPayloadBytes] = {0};
  bool muted;

  for (int i = 0; i < kNumConcealmentEvents; i++) {
    // Insert some packets of 10 ms size.
    for (int j = 0; j < 10; j++) {
      rtp_info.sequenceNumber = seq_no++;
      rtp_info.timestamp = rtp_info.sequenceNumber * kSamples;
      neteq_->InsertPacket(rtp_info, payload);
      neteq_->GetAudio(&out_frame_, &muted);
    }

    // Lose a number of packets.
    int num_lost = 1 + i;
    for (int j = 0; j < num_lost; j++) {
      seq_no++;
      neteq_->GetAudio(&out_frame_, &muted);
    }
  }

  // Check number of concealment events.
  NetEqLifetimeStatistics stats = neteq_->GetLifetimeStatistics();
  EXPECT_EQ(kNumConcealmentEvents, static_cast<int>(stats.concealment_events));
}

// Test that the jitter buffer delay stat is computed correctly.
void NetEqDecodingTestFaxMode::TestJitterBufferDelay(bool apply_packet_loss) {
  const int kNumPackets = 10;
  const int kDelayInNumPackets = 2;
  const int kPacketLenMs = 10;  // All packets are of 10 ms size.
  const size_t kSamples = kPacketLenMs * 16;
  const size_t kPayloadBytes = kSamples * 2;
  RTPHeader rtp_info;
  rtp_info.ssrc = 0x1234;     // Just an arbitrary SSRC.
  rtp_info.payloadType = 94;  // PCM16b WB codec.
  rtp_info.markerBit = 0;
  const uint8_t payload[kPayloadBytes] = {0};
  bool muted;
  int packets_sent = 0;
  int packets_received = 0;
  int expected_delay = 0;
  int expected_target_delay = 0;
  uint64_t expected_emitted_count = 0;
  while (packets_received < kNumPackets) {
    // Insert packet.
    if (packets_sent < kNumPackets) {
      rtp_info.sequenceNumber = packets_sent++;
      rtp_info.timestamp = rtp_info.sequenceNumber * kSamples;
      neteq_->InsertPacket(rtp_info, payload);
    }

    // Get packet.
    if (packets_sent > kDelayInNumPackets) {
      neteq_->GetAudio(&out_frame_, &muted);
      packets_received++;

      // The delay reported by the jitter buffer never exceeds
      // the number of samples previously fetched with GetAudio
      // (hence the min()).
      int packets_delay = std::min(packets_received, kDelayInNumPackets + 1);

      // The increase of the expected delay is the product of
      // the current delay of the jitter buffer in ms * the
      // number of samples that are sent for play out.
      int current_delay_ms = packets_delay * kPacketLenMs;
      expected_delay += current_delay_ms * kSamples;
      expected_target_delay += neteq_->TargetDelayMs() * kSamples;
      expected_emitted_count += kSamples;
    }
  }

  if (apply_packet_loss) {
    // Extra call to GetAudio to cause concealment.
    neteq_->GetAudio(&out_frame_, &muted);
  }

  // Check jitter buffer delay.
  NetEqLifetimeStatistics stats = neteq_->GetLifetimeStatistics();
  EXPECT_EQ(expected_delay,
            rtc::checked_cast<int>(stats.jitter_buffer_delay_ms));
  EXPECT_EQ(expected_emitted_count, stats.jitter_buffer_emitted_count);
  EXPECT_EQ(expected_target_delay,
            rtc::checked_cast<int>(stats.jitter_buffer_target_delay_ms));
}
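// Note the unit convention in the accumulators above: jitter_buffer_delay_ms
// is expected to be a running sum of (per-frame delay in ms) * (samples
// emitted), so every emitted sample contributes its own delay to the total
// rather than the stat being a simple per-frame average.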
TEST_F(NetEqDecodingTestFaxMode, TestJitterBufferDelayWithoutLoss) {
  TestJitterBufferDelay(false);
}

TEST_F(NetEqDecodingTestFaxMode, TestJitterBufferDelayWithLoss) {
  TestJitterBufferDelay(true);
}

TEST_F(NetEqDecodingTestFaxMode, TestJitterBufferDelayWithAcceleration) {
  const int kPacketLenMs = 10;  // All packets are of 10 ms size.
  const size_t kSamples = kPacketLenMs * 16;
  const size_t kPayloadBytes = kSamples * 2;
  RTPHeader rtp_info;
  rtp_info.ssrc = 0x1234;     // Just an arbitrary SSRC.
  rtp_info.payloadType = 94;  // PCM16b WB codec.
  rtp_info.markerBit = 0;
  const uint8_t payload[kPayloadBytes] = {0};

  int expected_target_delay = neteq_->TargetDelayMs() * kSamples;
  neteq_->InsertPacket(rtp_info, payload);

  bool muted;
  neteq_->GetAudio(&out_frame_, &muted);

  rtp_info.sequenceNumber += 1;
  rtp_info.timestamp += kSamples;
  neteq_->InsertPacket(rtp_info, payload);
  rtp_info.sequenceNumber += 1;
  rtp_info.timestamp += kSamples;
  neteq_->InsertPacket(rtp_info, payload);

  expected_target_delay += neteq_->TargetDelayMs() * 2 * kSamples;
  // We have two packets in the buffer and kAccelerate operation will
  // extract 20 ms of data.
  neteq_->GetAudio(&out_frame_, &muted, nullptr,
                   NetEq::Operation::kAccelerate);

  // Check jitter buffer delay.
  NetEqLifetimeStatistics stats = neteq_->GetLifetimeStatistics();
  EXPECT_EQ(10 * kSamples * 3, stats.jitter_buffer_delay_ms);
  EXPECT_EQ(kSamples * 3, stats.jitter_buffer_emitted_count);
  EXPECT_EQ(expected_target_delay,
            rtc::checked_cast<int>(stats.jitter_buffer_target_delay_ms));
}

namespace test {
TEST(NetEqNoTimeStretchingMode, RunTest) {
  NetEq::Config config;
  config.for_test_no_time_stretching = true;
  auto codecs = NetEqTest::StandardDecoderMap();
  std::map<int, RTPExtensionType> rtp_ext_map = {
      {1, kRtpExtensionAudioLevel},
      {3, kRtpExtensionAbsoluteSendTime},
      {5, kRtpExtensionTransportSequenceNumber},
      {7, kRtpExtensionVideoContentType},
      {8, kRtpExtensionVideoTiming}};
  std::unique_ptr<NetEqInput> input = CreateNetEqRtpDumpInput(
      webrtc::test::ResourcePath("audio_coding/neteq_universal_new", "rtp"),
      rtp_ext_map, absl::nullopt /* No SSRC filter */);
  std::unique_ptr<TimeLimitedNetEqInput> input_time_limit(
      new TimeLimitedNetEqInput(std::move(input), 20000));
  std::unique_ptr<AudioSink> output(new VoidAudioSink);
  NetEqTest::Callbacks callbacks;
  NetEqTest test(config, CreateBuiltinAudioDecoderFactory(), codecs,
                 /*text_log=*/nullptr, /*neteq_factory=*/nullptr,
                 /*input=*/std::move(input_time_limit), std::move(output),
                 callbacks);
  test.Run();
  const auto stats = test.SimulationStats();
  EXPECT_EQ(0, stats.accelerate_rate);
  EXPECT_EQ(0, stats.preemptive_rate);
}
}  // namespace test
}  // namespace webrtc
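One idiom worth calling out from the listing above is the MAYBE_ macro pattern used to gate the bit-exactness tests. Google Test treats any test whose name begins with DISABLED_ as registered but skipped by default, so defining MAYBE_Foo as either Foo or DISABLED_Foo lets the same TEST body compile on every platform while only running where its prerequisites are met. A minimal self-contained sketch of the same pattern follows; the HAS_REFERENCE_DATA flag is a hypothetical stand-in for the WEBRTC_* conditions used above:

#include <gtest/gtest.h>

// Hypothetical feature flag standing in for the WEBRTC_* build conditions.
#if defined(HAS_REFERENCE_DATA)
#define MAYBE_BitExactness BitExactness
#else
// gtest registers but skips tests whose names begin with DISABLED_; they can
// still be forced to run with --gtest_also_run_disabled_tests.
#define MAYBE_BitExactness DISABLED_BitExactness
#endif

TEST(GatingExample, MAYBE_BitExactness) {
  // Placeholder for a real reference-data comparison.
  EXPECT_EQ(4, 2 + 2);
}

Because the disabled test is still compiled, this approach catches build breaks in the gated code on every platform, which a plain #ifdef around the whole test body would not.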