get out of black background

本文指导如何在 Adobe Premiere Pro 中使用 Color Key 功能移除视频背景,包括设置透明、应用 mask 以及处理不支持透明的视频格式。同时提到导出时动画 GIF 的大小问题及分辨率和比特率的调整技巧。

文章目录

  • 基础
    • Sequence settings (after selected a Sequence)
      在这里插入图片描述
  • 看见 ( 让Pr表示透明 )
    在这里插入图片描述
  • Effects-> Color Key, drag into your Sequence
    >.如果看不到 Effects 面板, 可以在 Window 菜单中打开
    在 Effect Controls 面板中你可以调整 Color Key 的效果了, 先吸取黑色
    还可以使用 mask, 保留你不想去掉黑色的区域 (mask 可 inverted)
    在这里插入图片描述
    我已经去掉了黑色背景,所以Pr用方格表示透明
    在这里插入图片描述

注意: 有些视频格式不支持透明
导出时选Animated GIF
Pr 导出的 GIF 文件非常大, 可在 Sequence Settings 中把分辨率和比特率都调低; 即使调得很低文件仍然很大, 所以尽管调低

package com.dnf.main; import java.awt.image.BufferedImage; import java.awt.image.DataBufferByte; import java.nio.ByteBuffer; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import javax.swing.SwingUtilities; import org.opencv.core.Mat; import org.opencv.imgproc.Imgproc; import org.opencv.videoio.VideoCapture; import org.opencv.videoio.Videoio; import org.opencv.videoio.VideoWriter; import javafx.application.Platform; import javafx.embed.swing.JFXPanel; import javafx.scene.Scene; import javafx.scene.image.ImageView; import javafx.scene.image.PixelFormat; import javafx.scene.image.WritableImage; import javafx.scene.layout.Background; import javafx.scene.layout.BackgroundFill; import javafx.scene.layout.CornerRadii; import javafx.scene.layout.StackPane; import javafx.scene.paint.Color; public class MS2130FXCapture { // 核心资源 private static ImageView imageView; private static VideoCapture videoCapture; // 状态控制 private static final AtomicBoolean isRunning = new AtomicBoolean(false); private static final AtomicBoolean isFXInitialized = new AtomicBoolean(false); // 线程管理 private static Thread captureThread; private static ExecutorService frameProcessingPool; // 资源池控制 private static final int MAT_POOL_SIZE = 4; private static final BlockingQueue<Mat> matPool = new ArrayBlockingQueue<>(MAT_POOL_SIZE); // 显示资源 private static final AtomicReference<BufferedImage> bufImgRef = new AtomicReference<>(); private static final AtomicReference<WritableImage> fxImgRef = new AtomicReference<>(); private static volatile Mat displayMat; // 帧率控制 private static final int TARGET_FPS = 30; private static final long FRAME_INTERVAL_MS = 1000 / TARGET_FPS; private static long lastFrameTime = System.currentTimeMillis(); // 设备参数 private 
static int deviceWidth = 1920; private static int deviceHeight = 1080; /** * 启动视频采集 */ public static void startCapture(JFXPanel fxPanel) { System.out.println("[自动采集] 开始初始化采集"); // 确保停止之前的采集 stopCapture(); // 重置状态 isRunning.set(true); System.out.println("[状态] isRunning 设置为 true"); int deviceIndex = findValidDeviceIndex(); if (deviceIndex == -1) { System.err.println("[错误] 未找到MS2130设备"); return; } // 初始化Mat池并预热 initMatPool(); // 初始化JavaFX环境 if (!isFXInitialized.get()) { System.out.println("[初始化] 首次初始化JavaFX"); SwingUtilities.invokeLater(() -> { Platform.runLater(() -> { System.out.println("[JavaFX] 在JavaFX线程中初始化"); initFXCapture(fxPanel, deviceIndex); isFXInitialized.set(true); }); }); } else { System.out.println("[初始化] JavaFX已初始化,直接启动采集"); Platform.runLater(() -> initFXCapture(fxPanel, deviceIndex)); } // 初始化线程池 frameProcessingPool = Executors.newFixedThreadPool(2); System.out.println("[线程池] 帧处理线程池已创建"); } /** * 初始化Mat对象池 */ private static void initMatPool() { synchronized (matPool) { matPool.clear(); for (int i = 0; i < MAT_POOL_SIZE; i++) { matPool.offer(new Mat()); } System.out.println("[资源池] Mat对象池已初始化,大小: " + MAT_POOL_SIZE); } } /** * 初始化JavaFX UI组件 */ private static void initFXCapture(JFXPanel fxPanel, int deviceIndex) { System.out.println("[UI] 初始化JavaFX UI组件"); imageView = new ImageView(); imageView.setPreserveRatio(true); imageView.setSmooth(true); imageView.setCache(false); StackPane root = new StackPane(imageView); root.setBackground(new Background( new BackgroundFill(Color.BLACK, CornerRadii.EMPTY, javafx.geometry.Insets.EMPTY) )); fxPanel.setScene(new Scene(root)); System.out.println("[UI] JavaFX场景已设置"); openDeviceAndStartCapture(deviceIndex); } /** * 枚举有效设备 */ private static int findValidDeviceIndex() { System.out.println("[设备检测] 开始检测可用设备..."); for (int i = 0; i < 4; i++) { VideoCapture testCapture = null; try { System.out.println("[设备检测] 尝试索引: " + i); // 尝试使用DSHOW驱动 testCapture = new VideoCapture(i, Videoio.CAP_DSHOW); if (!testCapture.isOpened()) { 
System.out.println("[设备检测] DSHOW驱动打开失败,尝试MSMF"); testCapture.release(); testCapture = new VideoCapture(i, Videoio.CAP_MSMF); } if (!testCapture.isOpened()) { System.out.println("[设备检测] 设备 " + i + " 未打开"); continue; } Mat testFrame = new Mat(); if (testCapture.read(testFrame) && !testFrame.empty()) { // deviceWidth = (int) testCapture.get(Videoio.CAP_PROP_FRAME_WIDTH); // deviceHeight = (int) testCapture.get(Videoio.CAP_PROP_FRAME_HEIGHT); // System.out.printf("[设备检测] 发现设备 %d: %dx%d%n", i, deviceWidth, deviceHeight); testFrame.release(); testCapture.release(); return i; } else { System.out.println("[设备检测] 设备 " + i + " 读取帧失败"); } testFrame.release(); } catch (Exception e) { System.err.println("设备检测错误: " + e.getMessage()); } finally { if (testCapture != null && testCapture.isOpened()) { testCapture.release(); } } } System.out.println("[设备检测] 未找到可用设备"); return -1; } /** * 打开设备并配置 */ private static void openDeviceAndStartCapture(int deviceIndex) { System.out.println("[设备] 尝试打开设备: " + deviceIndex); // 尝试多种驱动 videoCapture = new VideoCapture(deviceIndex, Videoio.CAP_DSHOW); if (!videoCapture.isOpened()) { System.out.println("[设备] DSHOW驱动打开失败,尝试MSMF"); videoCapture.open(deviceIndex, Videoio.CAP_MSMF); } if (!videoCapture.isOpened()) { System.err.println("[错误] 无法打开设备"); return; } // 设置设备参数 videoCapture.set(Videoio.CAP_PROP_FRAME_WIDTH, deviceWidth); videoCapture.set(Videoio.CAP_PROP_FRAME_HEIGHT, deviceHeight); videoCapture.set(Videoio.CAP_PROP_FOURCC, VideoWriter.fourcc('M', 'J', 'P', 'G')); videoCapture.set(Videoio.CAP_PROP_BUFFERSIZE, 2); videoCapture.set(Videoio.CAP_PROP_FPS, TARGET_FPS); System.out.printf("[设备] 设备已打开: %dx%d@%dFPS%n", deviceWidth, deviceHeight, TARGET_FPS); // 创建显示用的Mat displayMat = new Mat(deviceHeight, deviceWidth, org.opencv.core.CvType.CV_8UC3); System.out.println("[资源] 显示Mat已创建"); // 启动采集线程 captureThread = new Thread(() -> captureLoop()); captureThread.setName("VideoCapture-Thread"); captureThread.setDaemon(true); 
captureThread.setPriority(Thread.MAX_PRIORITY - 1); // 适度优先级 captureThread.start(); System.out.println("[线程] 采集线程已启动"); } /** * 优化的采集循环 */ private static void captureLoop() { System.out.println("[采集线程] 开始运行"); System.out.println("[状态] isRunning = " + isRunning.get()); long frameCount = 0; final long startTime = System.currentTimeMillis(); int consecutiveFailures = 0; while (isRunning.get()) { try { // 帧率控制 long currentTime = System.currentTimeMillis(); long elapsed = currentTime - lastFrameTime; if (elapsed < FRAME_INTERVAL_MS) { Thread.sleep(1); continue; } // 获取空闲Mat Mat frameMat = matPool.poll(); if (frameMat == null) { System.out.println("[资源] Mat池为空,创建新Mat"); frameMat = new Mat(); } // 读取帧 boolean readSuccess = videoCapture.read(frameMat); if (!readSuccess || frameMat.empty()) { System.out.println("[警告] 读取帧失败: " + (frameMat.empty() ? "空帧" : "读取失败")); consecutiveFailures++; if (consecutiveFailures > 10) { System.err.println("[错误] 连续10次读取帧失败,停止采集"); break; } // 归还Mat并等待 if (frameMat != null && frameMat.cols() > 0) { matPool.offer(frameMat); } Thread.sleep(10); continue; } consecutiveFailures = 0; // 重置失败计数 // 更新计时器 lastFrameTime = currentTime; frameCount++; // 提交处理任务 final Mat finalFrameMat = frameMat; frameProcessingPool.execute(() -> processFrame(finalFrameMat)); // 性能日志(每秒输出一次) if (currentTime - startTime > 1000) { double fps = frameCount / ((currentTime - startTime) / 1000.0); System.out.printf("[性能] FPS: %.1f%n", fps); frameCount = 0; } } catch (InterruptedException e) { System.out.println("[线程] 采集线程被中断"); Thread.currentThread().interrupt(); break; } catch (Exception e) { System.err.println("采集线程错误: " + e.getMessage()); e.printStackTrace(); } } System.out.println("[采集线程] 安全退出"); } /** * 优化的帧处理 */ private static void processFrame(Mat frameMat) { try { // 转换颜色空间 if (displayMat == null) { System.out.println("[警告] displayMat为空,创建新的displayMat"); displayMat = new Mat(frameMat.rows(), frameMat.cols(), frameMat.type()); } Imgproc.cvtColor(frameMat, displayMat, 
Imgproc.COLOR_BGR2RGB); // 获取帧参数 final int width = displayMat.cols(); final int height = displayMat.rows(); // 初始化或复用BufferedImage BufferedImage bufImg = bufImgRef.get(); if (bufImg == null || bufImg.getWidth() != width || bufImg.getHeight() != height) { System.out.printf("[资源] 创建新BufferedImage: %dx%d%n", width, height); bufImg = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR); bufImgRef.set(bufImg); } // 直接像素复制 byte[] imgPixels = ((DataBufferByte) bufImg.getRaster().getDataBuffer()).getData(); displayMat.get(0, 0, imgPixels); // JavaFX线程安全更新 Platform.runLater(() -> { if (!isRunning.get() || imageView == null) { System.out.println("[更新] 跳过UI更新: isRunning=" + isRunning.get() + ", imageView=" + (imageView != null)); return; } // 获取或创建JavaFX图像 WritableImage fxImg = fxImgRef.get(); if (fxImg == null || (int)fxImg.getWidth() != width || (int)fxImg.getHeight() != height) { System.out.printf("[资源] 创建新WritableImage: %dx%d%n", width, height); fxImg = new WritableImage(width, height); fxImgRef.set(fxImg); } // 直接写入像素数据 PixelFormat<ByteBuffer> pixelFormat = PixelFormat.getByteRgbInstance(); fxImg.getPixelWriter().setPixels( 0, 0, width, height, pixelFormat, imgPixels, 0, width * 3 ); imageView.setImage(fxImg); }); } catch (Exception e) { System.err.println("帧处理错误: " + e.getMessage()); e.printStackTrace(); } finally { // 释放资源 if (frameMat != null) { frameMat.release(); } // 补充新的Mat到池中 if (matPool.remainingCapacity() > 0) { matPool.offer(new Mat()); } } } /** * 全局资源清理 */ private static synchronized void cleanupResources() { System.out.println("[资源清理] 开始清理资源"); isRunning.set(false); System.out.println("[状态] isRunning 设置为 false"); // 关闭视频捕获 if (videoCapture != null) { if (videoCapture.isOpened()) { videoCapture.release(); System.out.println("[资源] VideoCapture已释放"); } videoCapture = null; } // 释放Mat对象池 synchronized (matPool) { Mat mat; while ((mat = matPool.poll()) != null) { if (!mat.empty()) { mat.release(); } } System.out.println("[资源] Mat对象池已清空"); } // 释放显示Mat 
if (displayMat != null) { displayMat.release(); displayMat = null; System.out.println("[资源] displayMat已释放"); } // 关闭线程池 if (frameProcessingPool != null) { try { frameProcessingPool.shutdown(); if (!frameProcessingPool.awaitTermination(500, TimeUnit.MILLISECONDS)) { frameProcessingPool.shutdownNow(); } System.out.println("[线程池] 帧处理线程池已关闭"); } catch (InterruptedException e) { frameProcessingPool.shutdownNow(); Thread.currentThread().interrupt(); } } // 清空图像引用 Platform.runLater(() -> { if (imageView != null) { imageView.setImage(null); } }); bufImgRef.set(null); fxImgRef.set(null); System.out.println("[资源清理] 资源清理完成"); } /** * 停止采集 */ public static synchronized void stopCapture() { if (!isRunning.get()) { System.out.println("[停止采集] 采集未运行,无需停止"); return; } System.out.println("[停止采集] 请求停止..."); isRunning.set(false); if (captureThread != null && captureThread.isAlive()) { captureThread.interrupt(); try { captureThread.join(1000); System.out.println("[线程] 采集线程已停止"); } catch (InterruptedException e) { System.out.println("[线程] 停止采集线程时被中断"); Thread.currentThread().interrupt(); } } cleanupResources(); System.out.println("[停止采集] 资源已释放"); } } 1.设备的分辨率是1920*1080这个采集的数据要固定下来,之后我需要对这种分辨率的图像进行处理; 2.采集画面的显示可以不用1920*1080的显示,可以保持画面比例16:9的比例低一点的像素去显示默认按照800*450的比例去显示吧;应该怎么修改
10-20
/** @brief Header file of video process module.\n This file contains the functions which is related to video process in the chip. @file hd_videoprocess.h @ingroup mhdal @note Nothing. Copyright Novatek Microelectronics Corp. 2024. All rights reserved. */ #ifndef _HD_VIDEOPROCESS_H_ #define _HD_VIDEOPROCESS_H_ #ifdef __cplusplus extern "C" { #endif /******************************************************************** INCLUDE FILES ********************************************************************/ #include "hd_type.h" #include "hd_util.h" /******************************************************************** MACRO CONSTANT DEFINITIONS ********************************************************************/ #define HD_DAL_VIDEOPROC_CHIP_DEV_COUNT 512 ///< total number of device per chip /******************************************************************** MACRO FUNCTION DEFINITIONS ********************************************************************/ /* Get device id by chip */ #define HD_DAL_VIDEOPROC_CHIP1(did) (HD_DAL_VIDEOPROC((HD_DAL_VIDEOPROC_CHIP_DEV_COUNT * 1) + did)) #define HD_VIDEOPROC_CHIP1_CTRL(dev_id) ((HD_DAL_VIDEOPROC_CHIP1(dev_id) << 16) | HD_CTRL) #define HD_VIDEOPROC_CHIP1_IN(dev_id, in_id) ((HD_DAL_VIDEOPROC_CHIP1(dev_id) << 16) | ((HD_IN(in_id) & 0x00ff) << 8)) #define HD_VIDEOPROC_CHIP1_OUT(dev_id, out_id) ((HD_DAL_VIDEOPROC_CHIP1(dev_id) << 16) | (HD_OUT(out_id) & 0x00ff)) #define HD_VIDEOPROC_CHIP(chip, did) (HD_DAL_VIDEOPROC((HD_DAL_VIDEOPROC_CHIP_DEV_COUNT * (chip)) + (did))) #define HD_VIDEOPROC_CHIP_CTRL(chip, dev_id) ((HD_VIDEOPROC_CHIP(chip, dev_id) << 16) | HD_CTRL) #define HD_VIDEOPROC_CHIP_IN(chip, dev_id, in_id) ((HD_VIDEOPROC_CHIP(chip, dev_id) << 16) | ((HD_IN(in_id) & 0x00ff) << 8)) #define HD_VIDEOPROC_CHIP_OUT(chip, dev_id, out_id) ((HD_VIDEOPROC_CHIP(chip, dev_id) << 16) | (HD_OUT(out_id) & 0x00ff)) /* Set buffer count of memory pool */ #define HD_VIDEOPROC_SET_COUNT(a, b) ((a)*10)+(b) ///< ex: use HD_VIDEOPROC_SET_COUNT(1, 5) 
for setting 1.5 /******************************************************************** TYPE DEFINITION ********************************************************************/ #define HD_VP_MAX_IN 1 ///< max count of input of this device (interface) #define HD_VP_MAX_OUT 16 ///< max count of output of this device (interface) #define HD_VP_MAX_DATA_TYPE 4 ///< max count of output pool of this device (interface) /** @name capability of device (extend from common HD_DEVICE_CAPS) */ typedef enum _HD_VIDEOPROC_DEVCAPS { HD_VIDEOPROC_DEVCAPS_3DNR = 0x00000100, ///< caps of HD_VIDEOPROC_FUNC_3DNR and HD_VIDEOPROC_FUNC_3DNR_STA HD_VIDEOPROC_DEVCAPS_WDR = 0x00000200, ///< caps of HD_VIDEOPROC_FUNC_WDR HD_VIDEOPROC_DEVCAPS_SHDR = 0x00000400, ///< caps of HD_VIDEOPROC_FUNC_SHDR HD_VIDEOPROC_DEVCAPS_DEFOG = 0x00000800, ///< caps of HD_VIDEOPROC_FUNC_DEFOG HD_VIDEOPROC_DEVCAPS_DI = 0x00001000, ///< caps of HD_VIDEOPROC_FUNC_DI HD_VIDEOPROC_DEVCAPS_SHARP = 0x00002000, ///< caps of HD_VIDEOPROC_FUNC_SHARP HD_VIDEOPROC_DEVCAPS_PATTERN = 0x00008000, ///< caps of user pattern function HD_VIDEOPROC_DEVCAPS_MOSAIC = 0x00010000, ///< caps of HD_VIDEOPROC_FUNC_MOSAIC HD_VIDEOPROC_DEVCAPS_COLORNR = 0x00020000, ///< caps of HD_VIDEOPROC_FUNC_COLORNR HD_VIDEOPROC_DEVCAPS_AF = 0x00040000, ///< caps of HD_VIDEOPROC_FUNC_AF ENUM_DUMMY4WORD(HD_VIDEOPROC_DEVCAPS) } HD_VIDEOPROC_DEVCAPS; /** @name capability of input (extend from common HD_VIDEO_CAPS) */ typedef enum HD_VIDEOPROC_INCAPS { HD_VIDEOPROC_INCAPS_DIRECT = 0x01000000, ///< caps of HD_VIDEOPROC_INFUNC_DIRECT HD_VIDEOPROC_INCAPS_ONEBUF = 0x02000000, ///< caps of HD_VIDEOPROC_INFUNC_ONEBUF ENUM_DUMMY4WORD(HD_VIDEOPROC_INCAPS) } HD_VIDEOPROC_INCAPS; /** @name capability of output (extend from common HD_VIDEO_CAPS) */ typedef enum HD_VIDEOPROC_OUTCAPS { HD_VIDEOPROC_OUTCAPS_MD = 0x01000000, ///< caps of HD_VIDEOPROC_OUTFUNC_MD HD_VIDEOPROC_OUTCAPS_DIS = 0x02000000, ///< caps of HD_VIDEOPROC_OUTFUNC_DIS HD_VIDEOPROC_OUTCAPS_LOWLATENCY = 
0x04000000, ///< caps of HD_VIDEOPROC_OUTFUNC_LOWLATENCY HD_VIDEOPROC_OUTCAPS_ONEBUF = 0x08000000, ///< caps of HD_VIDEOPROC_OUTFUNC_ONEBUF ENUM_DUMMY4WORD(HD_VIDEOPROC_OUTCAPS) } HD_VIDEOPROC_OUTCAPS; /** @name system capability */ typedef struct _HD_VIDEOPROC_SYSCAPS { HD_DAL dev_id; ///< device id UINT32 chip_id; ///< chip id of this device UINT32 max_in_count; ///< max count of input of this device UINT32 max_out_count; ///< max count of output of this device HD_DEVICE_CAPS dev_caps; ///< capability of device, combine caps of HD_DEVICE_CAPS and HD_VIDEOPROC_DEVCAPS HD_VIDEO_CAPS in_caps[HD_VP_MAX_IN];///< capability of input, combine caps of HD_VIDEO_CAPS and HD_VIDEOPROC_INCAPS HD_VIDEO_CAPS out_caps[HD_VP_MAX_OUT]; ///< capability of output, combine caps of HD_VIDEO_CAPS and HD_VIDEOPROC_OUTCAPS UINT32 max_w_scaleup_ratio; ///< max scaling up ratio UINT32 max_w_scaledown_ratio; ///< max scaling up ratio UINT32 max_h_scaleup_ratio; ///< max scaling down ratio UINT32 max_h_scaledown_ratio; ///< max scaling down ratio UINT32 max_in_stamp; ///< max input stamp UINT32 max_in_stamp_ex; ///< max input stamp_ex UINT32 max_in_mask; ///< max input mask UINT32 max_in_mask_ex; ///< max input mask_ex } HD_VIDEOPROC_SYSCAPS; /** @name system information */ typedef struct _HD_VIDEOPROC_SYSINFO { HD_DAL dev_id; ///< device id UINT32 cur_in_fps[HD_VP_MAX_IN]; ///< current input fps UINT32 cur_out_fps[HD_VP_MAX_OUT]; ///< current output fps } HD_VIDEOPROC_SYSINFO; /** @name option of input function */ typedef enum _HD_VIDEOPROC_INFUNC { HD_VIDEOPROC_INFUNC_DIRECT = 0x00000001, ///< enable input direct from vdocap (zero-buffer) (require bind before start) HD_VIDEOPROC_INFUNC_ONEBUF = 0x00000002, ///< enable one-buffer from vdocap ENUM_DUMMY4WORD(HD_VIDEOPROC_INFUNC) } HD_VIDEOPROC_INFUNC; /** @name input crop or output crop */ typedef struct _HD_VIDEOPROC_CROP { HD_CROP_MODE mode; ///< crop mode HD_VIDEO_CROP win; ///< crop window x,y,w,h } HD_VIDEOPROC_CROP; /** @name input 
frc or output frc */ typedef struct _HD_VIDEOPROC_FRC { HD_VIDEO_FRC frc; ///< frame rate control } HD_VIDEOPROC_FRC; /** @name input frame */ typedef struct _HD_VIDEOPROC_IN { UINT32 func; ///< (reserved) HD_DIM dim; ///< input dimension. set when first unit HD_VIDEO_PXLFMT pxlfmt; ///< input pixel format. set when first unit HD_VIDEO_DIR dir; ///< output direction like mirror/flip HD_VIDEO_FRC frc; ///< input frame-control } HD_VIDEOPROC_IN; /** @name input frame */ typedef struct _HD_VIDEOPROC_IN3 { UINT32 func; ///< reserved HD_DIM dim; ///< input dimension. set when first unit HD_VIDEO_PXLFMT pxlfmt; ///< input pixel format. set when first unit HD_VIDEO_DIR dir; ///< output direction like mirror/flip HD_VIDEO_FRC frc; ///< input frame-control UINT32 depth; ///< input queue depth, set larger than 0 to adjust (default 2) } HD_VIDEOPROC_IN3; /** @name option of output function */ typedef enum _HD_VIDEOPROC_OUTFUNC { HD_VIDEOPROC_OUTFUNC_MD = 0x00000100, ///< enable motion detection HD_VIDEOPROC_OUTFUNC_DIS = 0x00000200, ///< enable digital image stabilization HD_VIDEOPROC_OUTFUNC_LOWLATENCY = 0x00000400, ///< enable low-latency to vdoenc HD_VIDEOPROC_OUTFUNC_ONEBUF = 0x00000800, ///< enable one-buffer to vdoenc/vdoout ENUM_DUMMY4WORD(HD_VIDEOPROC_OUTFUNC) } HD_VIDEOPROC_OUTFUNC; /** @name physical output frame */ typedef struct _HD_VIDEOPROC_OUT { UINT32 func; ///< (reserved) HD_DIM dim; ///< output dimension HD_VIDEO_PXLFMT pxlfmt; ///< output pixel format HD_VIDEO_DIR dir; ///< output direction like mirror/flip/rotate HD_VIDEO_FRC frc; ///< output frame rate control UINT32 depth; ///< output queue depth, set larger than 0 to allow pull_out HD_URECT rect; ///< output window x,y,w,h HD_DIM bg; ///< output backgrond dimension } HD_VIDEOPROC_OUT; /** @name extened output frame */ typedef struct _HD_VIDEOPROC_OUT_EX { HD_PATH_ID src_path; ///< select a physical out as source of this extend out HD_DIM dim; ///< output dim w,h HD_VIDEO_PXLFMT pxlfmt; ///< output pixel 
format HD_VIDEO_DIR dir; ///< output direction like mirror/flip/rotate HD_VIDEO_FRC frc; ///< output frame-control UINT32 depth; ///< output queue depth, set larger than 0 to allow pull_out } HD_VIDEOPROC_OUT_EX; /** @name pattern function */ typedef enum _HD_VIDEOPROC_PALETTE { HD_VIDEOPROC_PALETTE_COLOR_BLACK, HD_VIDEOPROC_PALETTE_COLOR_WHITE, HD_VIDEOPROC_PALETTE_COLOR_RED, HD_VIDEOPROC_PALETTE_COLOR_BLUE, HD_VIDEOPROC_PALETTE_COLOR_YELLOW, HD_VIDEOPROC_PALETTE_COLOR_GREEN, HD_VIDEOPROC_PALETTE_COLOR_BROWN, HD_VIDEOPROC_PALETTE_COLOR_DODGERBLUE, HD_VIDEOPROC_PALETTE_COLOR_MAX, ENUM_DUMMY4WORD(HD_VIDEOPROC_PALETTE) } HD_VIDEOPROC_PALETTE; /** @name pattern image setting */ typedef struct _HD_VIDEOPROC_PATTERN_IMG { UINT32 index; ///< pattern index HD_VIDEO_FRAME image; ///< pattern image } HD_VIDEOPROC_PATTERN_IMG; /** @name pattern select */ #define HD_VIDEOPROC_PATTERN_DISABLE 0xffffffff #define HD_VIDEOPROC_PATTERN_CROP_ENABLE 0xEFEF0001 typedef struct _HD_VIDEOPROC_PATTERN_SELECT { UINT32 index; ///< pattern index select, set VPE_PATTERN_SEL_DIABLE to disable HD_URECT rect; ///< destination window ratio (0 ~ 100) HD_VIDEOPROC_PALETTE bg_color_sel; ///< background color select, using HD_VIDEOPROC_PALETTE UINT32 img_crop_enable; ///< set HD_VIDEOPROC_PATTERN_CROP_ENABLE to enable img_crop HD_URECT img_crop; ///< select pattern region to display } HD_VIDEOPROC_PATTERN_SELECT; /** @name vpe mask setting */ typedef struct _HD_VIDEOPROC_VPEMASK_ONEINFO { UINT32 index; ///< pattern index, set 0 to disable UINT32 mask_idx; ///< index = priority 0>1>2>3>4>5>6>7 UINT32 mask_area; ///< 0:inside, 1:outside, 2:line HD_IPOINT point[4]; ///< position of 4 point UINT32 mosaic_en; ///< use original image or mosaic image in mask area UINT32 alpha; ///< alpha blending 0~256, only effect at bitmap = 0,1 } HD_VIDEOPROC_VPEMASK_ONEINFO; /** @name scale working buffer, set when scale up/down over 8x */ typedef struct _HD_VIDEOPROC_SCA_BUF_INFO{ UINT32 ddr_id; ///< DDR ID UINTPTR 
pbuf_addr; ///< working buffer address, set -1 to disable UINT32 pbuf_size; ///< working buffer size } HD_VIDEOPROC_SCA_BUF_INFO; /** @name option of ctrl function (whole device) */ typedef enum _HD_VIDEOPROC_CTRLFUNC { HD_VIDEOPROC_FUNC_3DNR = 0x00010000, ///< enable 3DNR effect (DN) HD_VIDEOPROC_FUNC_WDR = 0x00020000, ///< enable WDR effect (single frame) HD_VIDEOPROC_FUNC_SHDR = 0x00040000, ///< enable Sensor HDR effect (multi frame) HD_VIDEOPROC_FUNC_DEFOG = 0x00080000, ///< enable Defog effect HD_VIDEOPROC_FUNC_DI = 0x00100000, ///< enable De-Interlace effect (DI) HD_VIDEOPROC_FUNC_SHARP = 0x00200000, ///< enable Sharpness filter effect HD_VIDEOPROC_FUNC_MOSAIC = 0x00400000, ///< enable Mosaic effect of Mask HD_VIDEOPROC_FUNC_COLORNR = 0x00800000, ///< enable Color NR effect HD_VIDEOPROC_FUNC_3DNR_STA = 0x01000000, ///< enable 3DNR statistic for ISP tools HD_VIDEOPROC_FUNC_AF = 0x02000000, ///< enable AF HD_VIDEOPROC_FUNC_BNR = 0x20000000, ///< enable Bayer 3DNR effect HD_VIDEOPROC_FUNC_BNR_STA = 0x40000000, ///< enable Bayer 3DNR statistic for ISP tools ENUM_DUMMY4WORD(HD_VIDEOPROC_CTRLFUNC) } HD_VIDEOPROC_CTRLFUNC; /** @name ctrl function (whole device) */ typedef struct _HD_VIDEOPROC_CTRL { HD_VIDEOPROC_CTRLFUNC func; ///< additional function of HD_CTRL (whole device) (bit-wise mask) HD_PATH_ID ref_path_3dnr; ///< select one of physical out as 3DNR reference path } HD_VIDEOPROC_CTRL; /** @name low-latency config (output) */ typedef struct _HD_VIDEOPROC_LL_CONFIG { UINT32 delay_trig_lowlatency; ///< set delay trigger time of LOWLATENCY output path } HD_VIDEOPROC_LL_CONFIG; /** @name options of device pipeline */ typedef enum _HD_VIDEOPROC_PIPE { HD_VIDEOPROC_PIPE_OFF = 0, HD_VIDEOPROC_PIPE_RAWALL = 0x000000FE, ///< 1 RAW frame to 5 YUV frame, support in-crop, in-direct, WDR/SHDR, NR, GDC, DEFOG, color and gamma tuning, out-scaling, out-crop HD_VIDEOPROC_PIPE_RAWCAP = 0x000000FF, ///< RAWALL with capture quality. 
HD_VIDEOPROC_PIPE_YUVALL = 0x000000E0, ///< 1 YUV frame to 5 YUV frame, support color and gamma tuning, out-scaling, out-crop HD_VIDEOPROC_PIPE_YUVCAP = 0x000000E1, ///< YUVALL with capture quality. HD_VIDEOPROC_PIPE_YUVAUX = 0x000000C2, ///< 1 YUV frame to 5 YUV frame, support in-crop, out-scaling, out-crop. (an auxiliary device running with RAWALL+INFUNC_DIRECT device) HD_VIDEOPROC_PIPE_DEWARP = 0x00000010, ///< 1 YUV frame to 1 YUV frame, support GDC effect only. HD_VIDEOPROC_PIPE_COLOR = 0x00000020, ///< 1 YUV frame to 1 YUV frame, support color and gamma tuning only. HD_VIDEOPROC_PIPE_SCALE = 0x00000040, ///< 1 YUV frame to 5 YUV frame, support out-scaling only. HD_VIDEOPROC_PIPE_RAWCOLOR = 0x0000003E, ///< 1 RAW frame to 1 YUV frame, support in-crop, WDR/SHDR, NR, DEFOG, color and gamma tuning. HD_VIDEOPROC_PIPE_BNR_RAWALL = 0x000600F8, ///< 1 RAW frame to 5 YUV frame, do RAW-domain pre-process and YUV-domain process, support in-crop, in-direct, BNR, WDR/SHDR, NR, DEFOG, color and gamma tuning, out-scaling, out-crop HD_VIDEOPROC_PIPE_BNR_RAWCOLOR = 0x00060038, ///< 1 RAW frame to 1 YUV frame, do RAW-domain pre-process and YUV-domain process, support in-crop, BNR, WDR/SHDR, NR, DEFOG, color and gamma tuning. HD_VIDEOPROC_PIPE_VPE = 0x000000F2, ///< do YUV-domain scale process by VPE engine HD_VIDEOPROC_PIPE_VSP = 0x000000F4, ///< Video Stitching Process HD_VIDEOPROC_PIPE_PANO360 = 0x000001FE, ///< RAWALL with panorama 360 effect. HD_VIDEOPROC_PIPE_PANO360_4V = 0x000002FE, ///< RAWALL with panorama 360 quad view effect. 
ENUM_DUMMY4WORD(HD_VIDEOPROC_PIPE) } HD_VIDEOPROC_PIPE; /** @name pool mode */ typedef enum _HD_VIDEOPROC_POOL_MODE { HD_VIDEOPROC_POOL_AUTO = 0, HD_VIDEOPROC_POOL_ENABLE = 1, HD_VIDEOPROC_POOL_DISABLE = 2, ENUM_DUMMY4WORD(HD_VIDEOPROC_POOL_MODE), } HD_VIDEOPROC_POOL_MODE; /** @name pool config */ typedef struct _HD_VIDEOPROC_POOL { INT ddr_id; ///< DDR ID UINT32 counts; ///< count of buffer, use HD_VIDEOPROC_SET_COUNT to set INT mode; ///< pool mode, 0: auto, 1:enable, 2:disable } HD_VIDEOPROC_POOL; /** @name device config */ typedef struct _HD_VIDEOPROC_DEV_CONFIG { HD_VIDEOPROC_PIPE pipe; ///< pipeline setting (for physical out) UINT32 isp_id; ///< ISP id HD_VIDEOPROC_CTRL ctrl_max; ///< maximum control settings HD_VIDEOPROC_IN in_max; ///< maximum input settings HD_VIDEOPROC_POOL data_pool[HD_VP_MAX_DATA_TYPE]; ///< pool memory information } HD_VIDEOPROC_DEV_CONFIG; /** @name func config */ typedef struct _HD_VIDEOPROC_FUNC_CONFIG { UINT32 ddr_id; ///< DDR ID HD_VIDEOPROC_INFUNC in_func; ///< additional function of in (bit-wise mask) HD_VIDEOPROC_OUTFUNC out_func; ///< additional function of out (bit-wise mask) UINT32 out_order; ///< output order (0 ~ n) } HD_VIDEOPROC_FUNC_CONFIG; /** @name poll return */ typedef struct { BOOL event; ///< poll status } HD_PROC_RET_EVENT; /** @name poll event list */ typedef struct _HD_VIDEOPROC_POLL_LIST { HD_PATH_ID path_id; ///< path ID HD_PROC_RET_EVENT revent; ///< the returned event value } HD_VIDEOPROC_POLL_LIST; /** @name time align mode */ typedef enum _HD_VIDEOPROC_ALIGN { HD_VIDEOPROC_TIME_ALIGN_ENABLE = 0xFEFE01FE, ///< (default) playback time align by LCD period (ex. 
60HZ is 33333us) HD_VIDEOPROC_TIME_ALIGN_DISABLE = 0xFEFE07FE, ///< play timestamp by gm_send_multi_bitstreams called HD_VIDEOPROC_TIME_ALIGN_USER = 0xFEFE09FE, ///< start to play at previous play point + time_diff(us) ENUM_DUMMY4WORD(HD_VIDEOPROC_ALIGN) } HD_VIDEOPROC_ALIGN; /** @name yuv buffer for input */ typedef struct _HD_VIDEOPROC_USER_BS { UINT32 sign; ///< signature = MAKEFOURCC('V','S','T','M') HD_METADATA *p_next; ///< pointer to next meta CHAR *p_bs_buf; ///< bitstream buffer address pointer UINT32 bs_buf_size; ///< bitstream buffer size INT32 retval; ///< less than 0: send bistream fail. HD_VIDEOPROC_ALIGN time_align; ///< timestamp alignment UINT32 time_diff; ///< time_diff(us): playback interval time by micro-second UINT64 timestamp; ///< Decode bs timestamp (unit: microsecond) to encode for transcode UINT32 user_flag; ///< Special flag to control } HD_VIDEOPROC_USER_BS; /** @name send bitstream list */ typedef struct _HD_VIDEOPROC_SEND_LIST { HD_PATH_ID path_id; ///< path id HD_VIDEOPROC_USER_BS user_bs; ///< video decode user bitstream INT32 retval; ///< less than 0: send bistream fail. } HD_VIDEOPROC_SEND_LIST; /** @name status */ typedef struct _HD_VIDEOPROC_STATUS { UINT32 left_recv_frame; ///< number of frames to be received. 
} HD_VIDEOPROC_STATUS; /** @name parameter id */ typedef enum _HD_VIDEOPROC_PARAM_ID { HD_VIDEOPROC_PARAM_DEVCOUNT, ///< support get with ctrl path, using HD_DEVCOUNT struct (device id max count) HD_VIDEOPROC_PARAM_SYSCAPS, ///< support get with ctrl path, using HD_VIDEOPROC_SYSCAPS struct (system capabilitiy) HD_VIDEOPROC_PARAM_SYSINFO, ///< support get with ctrl path, using HD_VIDEOPROC_SYSINFO struct (system infomation) HD_VIDEOPROC_PARAM_DEV_CONFIG, ///< support set with ctrl path, using HD_VIDEOPROC_DEV_CONFIG struct (device device config) HD_VIDEOPROC_PARAM_CTRL, ///< support get/set with ctrl path, using HD_VIDEOPROC_CTRL struct (effect of whole device) HD_VIDEOPROC_PARAM_IN, ///< support get/set with i/o path, using HD_VIDEOPROC_IN struct (input frame paramter) HD_VIDEOPROC_PARAM_IN_FRC, ///< support get/set with i/o path, using HD_VIDEOPROC_FRC struct (input frc parameter) HD_VIDEOPROC_PARAM_IN_CROP, ///< support get/set with i/o path, using HD_VIDEOPROC_CROP struct (input crop parameter) ///< note: 1. the coord attr in HD_VIDEOPROC_CROP must be set. ///< 2. if the mode attr in HD_VIDEOPROC_CROP changed, AP must do hd_videoproc_start. 
HD_VIDEOPROC_PARAM_IN_CROP_PSR, ///< support get/set with i/o path, using HD_VIDEOPROC_CROP struct (output crop parameter) HD_VIDEOPROC_PARAM_OUT, ///< support get/set with i/o path, using HD_VIDEOPROC_OUT struct (output frame paramter) HD_VIDEOPROC_PARAM_OUT_FRC, ///< support get/set with i/o path, using HD_VIDEOPROC_FRC struct (output frc parameter) HD_VIDEOPROC_PARAM_OUT_CROP, ///< support get/set with i/o path, using HD_VIDEOPROC_CROP struct (output crop parameter) HD_VIDEOPROC_PARAM_OUT_CROP_PSR, ///< support get/set with i/o path, using HD_VIDEOPROC_CROP struct (output crop parameter) HD_VIDEOPROC_PARAM_OUT_EX, ///< support get/set with i/o path, using HD_VIDEOPROC_OUT_EX struct (output frame paramter) HD_VIDEOPROC_PARAM_OUT_EX_CROP, ///< support get/set with i/o path, using HD_VIDEOPROC_CROP struct (output crop parameter) HD_VIDEOPROC_PARAM_IN_STAMP_BUF, ///< support get/set with i/stamp path, using HD_OSG_STAMP_BUF struct (stamp buffer parameter) HD_VIDEOPROC_PARAM_IN_STAMP_IMG, ///< support get/set with i/stamp path, using HD_OSG_STAMP_IMG struct (stamp image parameter) HD_VIDEOPROC_PARAM_IN_STAMP_ATTR, ///< support get/set with i/stamp path, using HD_OSG_STAMP_ATTR struct (stamp display attribute) HD_VIDEOPROC_PARAM_IN_MASK_ATTR, ///< support get/set with i/mask path, using HD_OSG_MASK_ATTR struct (mask display attribute) HD_VIDEOPROC_PARAM_IN_MOSAIC_ATTR, ///< support get/set with i/mask path, using HD_OSG_MOSAIC_ATTR struct (mosaic display attribute) HD_VIDEOPROC_PARAM_PATTERN_IMG, ///< support get/set with ctrl path, using HD_VIDEOPROC_PATTERN_IMG struct (pattern parameter) HD_VIDEOPROC_PARAM_PATTERN_SELECT, ///< support get/set with ctrl path, using HD_VIDEOPROC_PATTERN_SELECT struct (pattern parameter) HD_VIDEOPROC_PARAM_VPEMASK_ATTR, ///< support get/set with i/mask path, using HD_VIDEOPROC_VPEMASK_ONEINFO struct (vpe mask attribute) HD_VIDEOPROC_PARAM_SCA_WK_BUF, ///< support get/set with i/o path, using HD_VIDEOPROC_SCA_BUF_INFO struct (scale 
working buffer) HD_VIDEOPROC_PARAM_FUNC_CONFIG, ///< support get/set with i/o path, using HD_VIDEOPROC_FUNC_CONFIG struct (path func config) HD_VIDEOPROC_PARAM_LL_CONFIG, ///< support get/set with i/o path, using HD_VIDEOPROC_LL_CONFIG struct (output low-latency parameter) HD_VIDEOPROC_PARAM_IN_PALETTE_TABLE,///< support get/set with i path, using HD_OSG_PALETTE_TBL struct or HD_PALETTE_TBL struct HD_VIDEOPROC_PARAM_IN3, ///< support get/set with i/o path, using HD_VIDEOPROC_IN3 struct (input frame paramter) HD_VIDEOPROC_PARAM_STATUS, ///< support get with i/o path, using HD_VIDEOPROC_STATUS struct HD_VIDEOPROC_PARAM_MAX, ENUM_DUMMY4WORD(HD_VIDEOPROC_PARAM_ID) } HD_VIDEOPROC_PARAM_ID; /******************************************************************** EXTERN VARIABLES & FUNCTION PROTOTYPES DECLARATIONS ********************************************************************/ HD_RESULT hd_videoproc_init(VOID); HD_RESULT hd_videoproc_bind(HD_OUT_ID out_id, HD_IN_ID dest_in_id); HD_RESULT hd_videoproc_unbind(HD_OUT_ID out_id); HD_RESULT hd_videoproc_open(HD_IN_ID in_id, HD_OUT_ID out_id, HD_PATH_ID *p_path_id); HD_RESULT hd_videoproc_start(HD_PATH_ID path_id); HD_RESULT hd_videoproc_stop(HD_PATH_ID path_id); HD_RESULT hd_videoproc_start_list(HD_PATH_ID *path_id, UINT num); HD_RESULT hd_videoproc_stop_list(HD_PATH_ID *path_id, UINT num); HD_RESULT hd_videoproc_get(HD_PATH_ID path_id, HD_VIDEOPROC_PARAM_ID id, VOID *p_param); HD_RESULT hd_videoproc_set(HD_PATH_ID path_id, HD_VIDEOPROC_PARAM_ID id, VOID *p_param); HD_RESULT hd_videoproc_push_in_buf(HD_PATH_ID path_id, HD_VIDEO_FRAME *p_in_video_frame, HD_VIDEO_FRAME *p_user_out_video_frame, INT32 wait_ms); HD_RESULT hd_videoproc_pull_out_buf(HD_PATH_ID path_id, HD_VIDEO_FRAME *p_video_frame, INT32 wait_ms); HD_RESULT hd_videoproc_release_out_buf(HD_PATH_ID path_id, HD_VIDEO_FRAME *p_video_frame); HD_RESULT hd_videoproc_poll_list(HD_VIDEOPROC_POLL_LIST *p_poll, UINT32 num, INT32 wait_ms); HD_RESULT 
hd_videoproc_send_list(HD_VIDEOPROC_SEND_LIST *p_videoproc_list, UINT32 num, INT32 wait_ms); HD_RESULT hd_videoproc_close(HD_PATH_ID path_id); HD_RESULT hd_videoproc_uninit(VOID); #ifdef __cplusplus } #endif #endif 有什么函数能从HD_OUT_ID中得到到path_id
10-12
【四轴飞行器】非线性三自由度四轴飞行器模拟器研究(Matlab代码实现)内容概要:本文围绕非线性三自由度四轴飞行器模拟器的研究展开,重点介绍基于Matlab代码实现的四轴飞行器动力学建模与仿真方法。研究构建了考虑非线性特性的飞行器数学模型,涵盖姿态动力学与运动学方程,实现了三自由度(滚转、俯仰、偏航)的精确模拟。文中详细阐述了系统建模过程、控制算法设计思路及仿真结果分析,帮助读者深入理解四轴飞行器的飞行动力学特性与控制机制;同时,该模拟器可用于算法验证、控制器设计与教学实验。; 适合人群:具备一定自动控制理论基础和Matlab编程能力的高校学生、科研人员及无人机相关领域的工程技术人员,尤其适合从事飞行器建模、控制算法开发的研究生和初级研究人员。; 使用场景及目标:①用于四轴飞行器非线性动力学特性的学习与仿真验证;②作为控制器(如PID、LQR、MPC等)设计与测试的仿真平台;③支持无人机控制系统教学与科研项目开发,提升对姿态控制与系统仿真的理解。; 阅读建议:建议读者结合Matlab代码逐模块分析,重点关注动力学方程的推导与实现方式,动手运行并调试仿真程序,以加深对飞行器姿态控制过程的理解。同时可扩展为六自由度模型或加入外部干扰以增强仿真真实性。
基于分布式模型预测控制DMPC的多智能体点对点过渡轨迹生成研究(Matlab代码实现)内容概要:本文围绕“基于分布式模型预测控制(DMPC)的多智能体点对点过渡轨迹生成研究”展开,重点介绍如何利用DMPC方法实现多智能体系统在复杂环境下的协同轨迹规划与控制。文中结合Matlab代码实现,详细阐述了DMPC的基本原理、数学建模过程以及在多智能体系统中的具体应用,涵盖点对点转移、避障处理、状态约束与通信拓扑等关键技术环节。研究强调算法的分布式特性,提升系统的可扩展性与鲁棒性,适用于多无人机、无人车编队等场景。同时,文档列举了大量相关科研方向与代码资源,展示了DMPC在路径规划、协同控制、电力系统、信号处理等多领域的广泛应用。; 适合人群:具备一定自动化、控制理论或机器人学基础的研究生、科研人员及从事智能系统开发的工程技术人员;熟悉Matlab/Simulink仿真环境,对多智能体协同控制、优化算法有一定兴趣或研究需求的人员。; 使用场景及目标:①用于多智能体系统的轨迹生成与协同控制研究,如无人机集群、无人驾驶车队等;②作为DMPC算法学习与仿真实践的参考资料,帮助理解分布式优化与模型预测控制的结合机制;③支撑科研论文复现、毕业设计或项目开发中的算法验证与性能对比。; 阅读建议:建议读者结合提供的Matlab代码进行实践操作,重点关注DMPC的优化建模、约束处理与信息交互机制;按文档结构逐步学习,同时参考文中提及的路径规划、协同控制等相关案例,加深对分布式控制系统的整体理解。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值