#include <streams.h> error

This article looks at the C1083 error encountered while compiling an MFC DirectShow program and lists six possible causes in detail, including header file path problems, compiler compatibility, and files having been moved.
    Today, while compiling an MFC DirectShow program, I got a C1083 error in CFilterGraph.cpp: fatal error C1083: Cannot open include file: 'streams.h': No such file or directory. I assumed it was caused by not having a #pragma comment(lib,"") at the top of the file, but the error remained after importing the lib. I then checked the lib settings under Tools->Options->Directories and found nothing wrong there. Because the system had just been reinstalled, I could not remember whether the DirectShow base classes had ever been compiled, so I checked and found they already had been. Only after searching online did I discover that C1083 actually has many possible causes...
 
1 Wrong #include form. When the file name after #include is wrapped in angle brackets "<>", the compiler searches the system include directories for a header of that name; when double quotes "" are used, the compiler first searches the directory of the current source file, and only if the header is not found there does it fall back to the system include directories. Check where the #include'd file actually lives.
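As an illustration (assuming the DirectShow BaseClasses directory has been added to the compiler's include path), the two forms search in a different order:

// <Angle-bracket> form: the compiler searches only the configured
// system/additional include directories for the header.
#include <streams.h>

// "Quoted" form: the compiler looks in the directory of the current
// source file first, then falls back to the system include directories.
#include "streams.h"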
2 It may be a compiler compatibility issue. For example, in some cases #include "streams.h" compiles under VS2003 but fails under VS.NET; changing it to #include "streams" (dropping the .h) makes it build.
 
3 The files may have been reorganized and moved, changing the directories of the precompiled header and the .cpp files. Open the .dsp file with Notepad and locate the source file that builds the precompiled header; if the precompile switch is missing, the precompiled header (.pch) cannot be generated.
Add the switch: # ADD CPP /Yc"<header>.h". For example, the entry originally reads:
SOURCE=.\StdAfx.cpp
# End Source File
# Begin Source File
The precompile switch is missing, so the precompiled header (.pch) cannot be generated.
Add the switch # ADD CPP /Yc"stdafx.h":
SOURCE=.\StdAfx.cpp
# ADD CPP /Yc"stdafx.h"
# End Source File
# Begin Source File
 
4 When compiling the DirectShow sample code, remember to build the files under C:\DXSDK\Samples\C++\DirectShow\BaseClasses first.
 
5 The lib settings under Tools->Options->Directories may be wrong.
 
6 Sometimes you need a #pragma comment(lib,"") directive to pull in a .lib file, so that the linker can find the definitions for what the included header declares.
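As a rough sketch of what this looks like for the DirectShow base classes (assuming strmbase.lib, the library produced by building the BaseClasses project, is on the linker's library path; the debug build is usually named strmbasd.lib):

// The BaseClasses directory must be on the include path for this to resolve.
#include <streams.h>

// Link the library built from BaseClasses so the linker can find the
// definitions of the classes declared in streams.h.
#pragma comment(lib, "strmbase.lib")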
 
My overall impression is that a C1083 error simply means a required header file cannot be found. What frustrates me is that my program still cannot find its header, so I will have to keep hunting for the mistake; I really do not know why it just cannot find the .h file. Frustrating... C1083!