Fixing the "avformat_alloc_context cannot be identified" problem

This post explains the "avformat_alloc_context cannot be identified" error encountered when building against FFmpeg-full-SDK-3.2 and gives the fix: change oc = avformat_alloc_context(); to oc = av_alloc_format_context();. It is aimed at readers who program and develop with FFmpeg.
Because the library has recently been updated, building the original test case now runs into the problem that avformat_alloc_context cannot be identified (the compiler reports the identifier as unrecognized).

The fix is to change

oc = avformat_alloc_context();

to

oc = av_alloc_format_context();

and everything builds again.

Note: the library I am using is FFmpeg-full-SDK-3.2.


av_alloc_format_context() is the old interface; the function has since been renamed to avformat_alloc_context(). FFmpeg-full-SDK-3.2 predates the rename and only exports the old name, which is why code written against the newer API is not recognized.
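If the same source needs to build against both this old SDK and a current FFmpeg, a small wrapper keeps the rename in one place. This is only a sketch: OLD_FFMPEG_SDK is a hypothetical build flag you would define yourself (for example with -DOLD_FFMPEG_SDK) when compiling against FFmpeg-full-SDK-3.2; neither the SDK nor FFmpeg defines it.

extern "C" {
#include <libavformat/avformat.h>
}

// Compatibility sketch: OLD_FFMPEG_SDK is an assumed, user-defined macro
// that selects the pre-rename API shipped with FFmpeg-full-SDK-3.2.
static AVFormatContext *alloc_format_context_compat()
{
#ifdef OLD_FFMPEG_SDK
    return av_alloc_format_context();   // old name (FFmpeg-full-SDK-3.2)
#else
    return avformat_alloc_context();    // current FFmpeg name
#endif
}

// Usage:  AVFormatContext *oc = alloc_format_context_compat();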

#include "rtspthread.h" #include <QDebug> RTSPThread::RTSPThread(const QString &url, QObject *parent) : QThread(parent), rtspUrl(url) { // 初始化FFmpeg网络库 avformat_network_init(); } RTSPThread::~RTSPThread() { mutex.lock(); abort = true; mutex.unlock(); wait(); if (formatContext) { avformat_close_input(&formatContext); } avformat_network_deinit(); } void RTSPThread::run() { // 打开RTSP流 formatContext = avformat_alloc_context(); if (!formatContext) { qWarning() << "Failed to allocate format context"; return; } // 强制使用TCP协议 AVDictionary *options = nullptr; av_dict_set(&options, "rtsp_transport", "tcp", 0); if (avformat_open_input(&formatContext, rtspUrl.toUtf8().constData(), nullptr, &options) < 0) { qWarning() << "Failed to open RTSP stream"; return; } if (avformat_find_stream_info(formatContext, nullptr) < 0) { qWarning() << "Failed to find stream info"; return; } // 查找视频流 int videoStreamIndex = -1; for (unsigned int i = 0; i < formatContext->nb_streams; i++) { if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { videoStreamIndex = i; break; } } if (videoStreamIndex == -1) { qWarning() << "No video stream found"; return; } // 读取数据包 AVPacket *packet = av_packet_alloc(); while (!abort) { if (av_read_frame(formatContext, packet) < 0) { qWarning() << "Failed to read frame"; break; } if (packet->stream_index == videoStreamIndex) { // 发送数据包到解码线程 emit packetReady(packet); } av_packet_unref(packet); } av_packet_free(&packet); } 这是我的代码
#include "ffrtmp_read.h" #include "ADTSHeader.h" #include "comm_interface.h" #include <sys/socket.h> #include <netdb.h> #include <arpa/inet.h> #include <sys/types.h> #include <netinet/in.h> #include <unistd.h> CFFRtmpRead::CFFRtmpRead() { } CFFRtmpRead::~CFFRtmpRead() { } int CFFRtmpRead::interrupt_callback(void* ctx) { CFFRtmpRead* pThis = static_cast<CFFRtmpRead*>(ctx); // 处理超时时间为0的情况(无限等待) if (pThis->m_time_out_ms <= 0) { return 0; // 永不超时 } const int64_t timeout_us = static_cast<int64_t>(pThis->m_time_out_ms) * 1000; // 转为微秒 const int64_t elapsed = av_gettime() - pThis->m_open_start_time; // 超时返回1(中断),否则返回0(继续) return (elapsed >= timeout_us) ? 1 : 0; } int CFFRtmpRead::Start(const char *url, int time_out_ms) { // 添加必要的头文件后,使用现代网络API printf("\n=== Network Diagnostics (Modern API) ===\n"); struct addrinfo hints = {0}, *res = nullptr; hints.ai_family = AF_UNSPEC; hints.ai_socktype = SOCK_STREAM; int dns_ret = getaddrinfo("sgiothubms.tracksolidpro.com", "1936", &hints, &res); if (dns_ret != 0) { printf("DNS resolution failed! Error: %s\n", gai_strerror(dns_ret)); } else { char ipstr[INET6_ADDRSTRLEN]; void *addr; const char *ipver; // 获取第一个IP地址 if (res->ai_family == AF_INET) { // IPv4 struct sockaddr_in *ipv4 = (struct sockaddr_in *)res->ai_addr; addr = &(ipv4->sin_addr); ipver = "IPv4"; } else { // IPv6 struct sockaddr_in6 *ipv6 = (struct sockaddr_in6 *)res->ai_addr; addr = &(ipv6->sin6_addr); ipver = "IPv6"; } inet_ntop(res->ai_family, addr, ipstr, sizeof(ipstr)); printf("Resolved %s: %s\n", ipver, ipstr); freeaddrinfo(res); } printf("==========================\n\n"); AVDictionary *options = NULL; int ret = 0; int sockfd; struct sockaddr_in serv_addr; // 1. 添加详细的协议识别日志 printf("Opening URL: %s\n", url); printf("Protocol: %s\n", avio_find_protocol_name(url)); // 1. 移除无效的原始socket测试代码 printf("Skipping raw socket test for production\n"); m_IfmtCtx = avformat_alloc_context(); if (m_IfmtCtx == nullptr) { printf("[%s: %s: %d]\n", __FILE__, __FUNCTION__, __LINE__); goto fail; } m_time_out_ms = time_out_ms; // 保存超时时间 // 设置超时中断回调 m_IfmtCtx->interrupt_callback.callback = &CFFRtmpRead::interrupt_callback; m_IfmtCtx->interrupt_callback.opaque = this; // 传递当前对象指针 m_open_start_time = av_gettime(); // 记录开始时间 if (time_out_ms > 0) { char tmp_str[256] = {0}; snprintf(tmp_str, sizeof(tmp_str), "%d", time_out_ms * 1000); // 微秒 // 核心超时选项 av_dict_set(&options, "timeout", tmp_str, 0); av_dict_set(&options, "rw_timeout", tmp_str, 0); // 2. 修复RTMP关键参数 av_dict_set(&options, "rtmp_live", "any", 0); // 支持所有直播模式 av_dict_set(&options, "rtmp_buffer", "1000", 0); // 增加缓冲区 // 5. 添加TCP连接参数 av_dict_set(&options, "rtsp_transport", "tcp", 0); av_dict_set(&options, "tcp_nodelay", "1", 0); } // 6. 强制使用TCP协议 av_dict_set(&options, "rtmp_proto", "tcp", 0); // 7. 设置用户代理(部分服务器需要) av_dict_set(&options, "user_agent", "FFmpeg/LibRtmp", 0); // 4. 添加调试级别 av_log_set_level(AV_LOG_DEBUG); m_IfmtCtx->probesize = 100 * 1024; m_IfmtCtx->max_analyze_duration = AV_TIME_BASE; // if (avformat_open_input(&m_IfmtCtx, url, NULL, &options) != 0) // { // perror("avformat_open_input"); // goto fail; // } // 6. 
带详细错误处理的avformat_open_input printf("Calling avformat_open_input...\n"); avformat_network_init(); printf("Testing raw socket connection...\n"); sockfd = socket(AF_INET, SOCK_STREAM, 0); if (sockfd < 0) { perror("socket creation failed"); } else { memset(&serv_addr, 0, sizeof(serv_addr)); serv_addr.sin_family = AF_INET; serv_addr.sin_port = htons(1936); // 使用之前解析的IP或直接使用域名 if (inet_pton(AF_INET, "解析的IP", &serv_addr.sin_addr) <= 0) { printf("Invalid address\n"); } else { if (connect(sockfd, (struct sockaddr*)&serv_addr, sizeof(serv_addr))) { perror("connect failed"); } else { printf("Raw socket connected successfully!\n"); } } close(sockfd); } ret = avformat_open_input(&m_IfmtCtx, url, NULL, &options); if (ret != 0) { char errbuf[256]; av_strerror(ret, errbuf, sizeof(errbuf)); // 获取更多系统级错误信息 const char* sys_err = strerror(errno); printf("avformat_open_input failed (%d): %s\n", ret, errbuf); printf("System error: %d - %s\n", errno, sys_err); // 检查是否超时触发 if (ret == AVERROR_EXIT) { printf("Operation aborted by interrupt callback\n"); } goto fail; } if (options != nullptr) { av_dict_free(&options); } if (avformat_find_stream_info(m_IfmtCtx, NULL) < 0) { perror("avformat_find_stream_info"); fprintf(stdout, "Couldn't find stream information \n"); goto fail; } if (m_IfmtCtx->nb_streams == 0) { printf("nb_streams is 0,exit \n"); goto fail; } m_VideoStreamIndex = av_find_best_stream(m_IfmtCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0); if (m_VideoStreamIndex < 0) { printf("Didn't find a video stream\n"); } else { // m_VideoType = m_IfmtCtx->streams[m_VideoStreamIndex]->codecpar->codec_id; } printf("m_VideoStreamIndex = %d\n", m_VideoStreamIndex); m_AudioStreamIndex = av_find_best_stream(m_IfmtCtx, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0); if (m_AudioStreamIndex < 0) { printf("Didn't find a audio stream \n"); } else { m_AudioType = m_IfmtCtx->streams[m_AudioStreamIndex]->codecpar->codec_id; } printf("m_AudioStreamIndex = %d\n", m_AudioStreamIndex); m_AvPacket = av_packet_alloc(); if (m_AvPacket == nullptr) { printf("[%s: %s: %d]\n", __FILE__, __FUNCTION__, __LINE__); goto fail; } av_dump_format(m_IfmtCtx, 0, url, 0); return 0; fail: printf("[%s: %s: %d] open %s fail\n", __FILE__, __FUNCTION__, __LINE__, url); Stop(); return -1; } void CFFRtmpRead::Stop() { if (m_IfmtCtx != nullptr) { avformat_close_input(&m_IfmtCtx); avformat_free_context(m_IfmtCtx); m_IfmtCtx = nullptr; } if (m_AvPacket != nullptr) { av_free(m_AvPacket); m_AvPacket = nullptr; } m_VideoStreamIndex = -1; m_AudioStreamIndex = -1; m_VideoType = AV_CODEC_ID_NONE; m_AudioType = AV_CODEC_ID_NONE; m_Buffer.Drain(); } uint8_t *CFFRtmpRead::GetStream(FrameInfo_t &FrameInfo) { m_Buffer.Drain(); int ret = av_read_frame(m_IfmtCtx, m_AvPacket); if (ret >= 0) { if (m_AvPacket->stream_index == m_VideoStreamIndex) { AVRational time_base = m_IfmtCtx->streams[m_VideoStreamIndex]->time_base; AVRational time_base_q = {1, AV_TIME_BASE}; FrameInfo.frame_pts = av_rescale_q(m_AvPacket->dts, time_base, time_base_q); FrameInfo.frame_mode = m_VideoType == AV_CODEC_ID_H265 ? 1 : 0; FrameInfo.frame_type = m_AvPacket->flags ? 
1 : 0; m_Buffer.Add(m_AvPacket->data, m_AvPacket->size); FrameInfo.frame_len = m_Buffer.Length(); av_packet_unref(m_AvPacket); } else if (m_AvPacket->stream_index == m_AudioStreamIndex) { ADTS_Header header(m_IfmtCtx->streams[m_AudioStreamIndex]->codecpar->sample_rate, m_IfmtCtx->streams[m_AudioStreamIndex]->codecpar->ch_layout.nb_channels, m_AvPacket->size + 7); m_Buffer.Add(&header, sizeof(ADTS_Header)); m_Buffer.Add(m_AvPacket->data, m_AvPacket->size); FrameInfo.frame_len = m_Buffer.Length(); FrameInfo.frame_type = 3; FrameInfo.frame_mode = m_AudioType == AV_CODEC_ID_AAC ? 5 : 3; AVRational time_base = m_IfmtCtx->streams[m_AudioStreamIndex]->time_base; AVRational time_base_q = {1, AV_TIME_BASE}; FrameInfo.frame_pts = av_rescale_q(m_AvPacket->dts, time_base, time_base_q); av_packet_unref(m_AvPacket); } if (m_Buffer.Length() > 0) { return m_Buffer.Buffer(); } } return nullptr; } 更改后代码如上,测试依旧失败,报错信息如下 === Network Diagnostics (Modern API) === Resolved IPv4: 124.243.184.237 ========================== Opening URL: rtmp://sgiothubms.tracksolidpro.com:1936/16/865478070000569 Protocol: rtmp Skipping raw socket test for production Calling avformat_open_input... Testing raw socket connection... Invalid address [NULL @ 0xb28005a0] Opening 'rtmp://sgiothubms.tracksolidpro.com:1936/16/865478070000569' for reading [rtmp @ 0xb2800a00] No default whitelist set [tcp @ 0xb2800b30] No default whitelist set [rtmp @ 0xb2800a00] Cannot open connection tcp://sgiothubms.tracksolidpro.com:1936?listen&listen_timeout=2000000000&tcp_nodelay=1 avformat_open_input failed (-99): Cannot assign requested address System error: 99 - Cannot assign requested address [ffrtmp_read.cpp: Start: 222] open rtmp://sgiothubms.tracksolidpro.com:1936/16/865478070000569 fail
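A note on the log above (my reading, not stated in the original post): the failing line shows the RTMP handshake being attempted as tcp://sgiothubms.tracksolidpro.com:1936?listen&listen_timeout=2000000000&tcp_nodelay=1, i.e. FFmpeg has switched into listen (server) mode and is trying to bind the remote host's address, which is what "Cannot assign requested address" reports. With FFmpeg's native RTMP protocol, the "timeout" option is the accept timeout for incoming connections and implies listen mode, so passing it to avformat_open_input() for an outgoing rtmp:// URL has this side effect. Below is a minimal sketch of opening with only the generic read/write timeout instead, reusing the names from the code above (m_IfmtCtx, url, time_out_ms):

// Sketch: outgoing RTMP open with a timeout, without the rtmp "timeout"
// option (which switches the protocol into listen mode).
AVDictionary *options = nullptr;
char timeout_us[32];
snprintf(timeout_us, sizeof(timeout_us), "%lld",
         static_cast<long long>(time_out_ms) * 1000);  // microseconds
av_dict_set(&options, "rw_timeout", timeout_us, 0);    // generic I/O timeout
// The interrupt callback already installed on m_IfmtCtx also covers the
// connection phase, so no protocol-specific timeout option is required.
int ret = avformat_open_input(&m_IfmtCtx, url, nullptr, &options);
av_dict_free(&options);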