Capturing Snapshots with the Dahua Java SDK

This article presents a Java class named DHController that exposes a RESTful API for grabbing a snapshot from a Dahua device at a specified IP address. It walks through the whole flow: logging in to the device through the NetSDK, issuing the snapshot command, and receiving the picture data in an SDK callback, with a semaphore used so the request thread waits until the callback has written the image to disk.
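The controller shown below reads the connection parameters (ip, port, user, password, channel, filename) from a JSON request body. Assuming the application runs as an ordinary Spring Boot service, the endpoint can be exercised with any HTTP client; the following sketch uses Python's requests, with placeholder host, credential, and channel values:

```python
import requests

# Placeholder URL: adjust host/port to wherever the Spring application is deployed
url = "http://localhost:8080/capture"

payload = {
    "ip": "192.168.1.108",      # device IP address
    "port": "37777",            # Dahua SDK port (37777 is the usual default)
    "user": "admin",
    "password": "admin123",
    "channel": "0",             # channel index passed to CLIENT_SnapPictureEx
    "filename": "snapshot.jpg"  # file name written under /static/
}

resp = requests.post(url, json=payload, timeout=15)
print(resp.status_code, resp.text)  # on success the body contains the URL of the saved picture
```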


DHController.java

```java
@RestController
public class DHController {

    Logger logger = LoggerFactory.getLogger(DHController.class);

    @Autowired
    HttpServletRequest request;

    @RequestMapping(value = "/capture", method = RequestMethod.POST)
    public JsonResponse capture(@RequestBody Map<String, String> params) {
        JsonResponse jsonResponse;
        String key = null;
        NetSDKLib.LLong loginhandle = new NetSDKLib.LLong(0);
        try {
            String ip = params.get("ip");
            String port = params.get("port");
            String user = params.get("user");
            String password = params.get("password");
            String channel = params.get("channel");
            String filename = params.get("filename");
            boolean init = this.initClient();
            if (!init) {
                throw new Exception("客户端初始化失败");
            }
            CaptureReceiveCB captureReceiveCB = new CaptureReceiveCB(ip, filename);
            loginhandle = this.login(ip, Integer.parseInt(port), user, password, captureReceiveCB);
            if(loginhandle.longValue() == 0) {
                throw new Exception("登录失败,用户名或密码不正确");
            }
            boolean resultCapture = this.capture(loginhandle, Integer.parseInt(channel), captureReceiveCB);
            if(!resultCapture) {
                throw new Exception("抓拍图片失败");
            }
            key = ip + loginhandle.longValue();
            Semaphore semaphore = Cache.getSemaphore(key);
            // Wait up to 5 seconds for the snapshot callback to write the image to disk
            boolean received = semaphore.tryAcquire(1, 5, TimeUnit.SECONDS);
            if (!received) {
                throw new Exception("等待抓拍回调超时");
            }
            String filepath = request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort()+ request.getContextPath() + "/static/" + filename;
            jsonResponse = JsonResponse.success("抓拍图片成功", filepath);
        } catch (Exception e) {
            jsonResponse = JsonResponse.error("抓拍图片失败:" + e.getMessage());
            logger.error(e.getMessage(), e);
        }finally {
            if(key != null) {
                Cache.removeSemaphore(key);
            }
            if(loginhandle.longValue() != 0) {
                NetSDKLib.NETSDK_INSTANCE.CLIENT_Logout(loginhandle);
            }
            NetSDKLib.NETSDK_INSTANCE.CLIENT_Cleanup();
        }
        return jsonResponse;
    }

    private boolean initClient() {
        DisConnect disConnect = new DisConnect();
        return NetSDKLib.NETSDK_INSTANCE.CLIENT_Init(disConnect, null);
    }

    private NetSDKLib.LLong login(String ip, int port, String user, String password, CaptureReceiveCB captureReceiveCB) throws Exception {
        Native.setCallbackThreadInitializer(captureReceiveCB,
                new CallbackThreadInitializer(false, false, "snapPicture callback thread"));
        NetSDKLib.NET_IN_LOGIN_WITH_HIGHLEVEL_SECURITY pstInParam = new NetSDKLib.NET_IN_LOGIN_WITH_HIGHLEVEL_SECURITY();
        pstInParam.nPort = port;
        pstInParam.szIP = ip.getBytes();
        pstInParam.szPassword = password.getBytes();
        pstInParam.szUserName = user.getBytes();
        // Output parameters
        NetSDKLib.NET_DEVICEINFO_Ex m_stDeviceInfo = new NetSDKLib.NET_DEVICEINFO_Ex();
        NetSDKLib.NET_OUT_LOGIN_WITH_HIGHLEVEL_SECURITY pstOutParam = new NetSDKLib.NET_OUT_LOGIN_WITH_HIGHLEVEL_SECURITY();
        pstOutParam.stuDeviceInfo = m_stDeviceInfo;
        return NetSDKLib.NETSDK_INSTANCE.CLIENT_LoginWithHighLevelSecurity(pstInParam, pstOutParam);
    }

    private boolean capture(NetSDKLib.LLong loginhandle, int channel, CaptureReceiveCB captureReceiveCB) throws Exception {
        NetSDKLib.NETSDK_INSTANCE.CLIENT_SetSnapRevCallBack(captureReceiveCB, null);
        // Send the capture-picture command to the device
        NetSDKLib.SNAP_PARAMS stuSnapParams = new NetSDKLib.SNAP_PARAMS();
        stuSnapParams.Channel = channel;   // channel to snapshot
        stuSnapParams.mode = 0;            // snapshot mode: 0 = capture a single frame
        stuSnapParams.Quality = 3;         // picture quality
        stuSnapParams.InterSnap = 0;       // interval (seconds) for timed snapshots; unused in single-frame mode
        stuSnapParams.CmdSerial = 0;       // request serial number, echoed back in the callback
        IntByReference reserved = new IntByReference(0);
        return NetSDKLib.NETSDK_INSTANCE.CLIENT_SnapPictureEx(loginhandle, stuSnapParams, reserved);
    }
}
```

CaptureReceiveCB.java

```java
public class CaptureReceiveCB implements NetSDKLib.fSnapRev {

    Logger logger = LoggerFactory.getLogger(CaptureReceiveCB.class);

    String ip;

    private String filename;

    public CaptureReceiveCB(String ip, String filename) {
        this.ip = ip;
        this.filename = filename;
    }

    @Override
    public void invoke(NetSDKLib.LLong lLoginID, Pointer pBuf, int RevLen, int EncodeType, int CmdSerial, Pointer dwUser) {
        if(pBuf != null && RevLen > 0) {
            try {
                byte[] buf = pBuf.getByteArray(0, RevLen);
                ByteArrayInputStream byteArrInput = new ByteArrayInputStream(buf);
                BufferedImage bufferedImage = ImageIO.read(byteArrInput);
                if(bufferedImage == null) {
                    return;
                }
                // Write the picture under the classpath /static directory so it can be served over HTTP
                File out = new File(this.getClass().getResource("/").getPath(), "static/" + filename);
                out.getParentFile().mkdirs();
                ImageIO.write(bufferedImage, "jpg", out);
            } catch (IOException e) {
                logger.error("写文件失败:" + e.getMessage(), e);
            }finally {
                String key = ip + lLoginID.longValue();
                Semaphore semaphore = Cache.getSemaphore(key);
                semaphore.release();
            }
        }
    }
}
```

Cache.java

```java
public class Cache {

    private static ConcurrentHashMap<String, Semaphore> semaphores = new ConcurrentHashMap<String, Semaphore>();

    /**
     * Returns the semaphore bound to the given key, creating it atomically if absent,
     * so the request thread and the SDK callback thread always share the same instance.
     */
    public static Semaphore getSemaphore(String key) {
        Semaphore newSemaphore = new Semaphore(0);
        Semaphore semaphore = semaphores.putIfAbsent(key, newSemaphore);
        return semaphore == null ? newSemaphore : semaphore;
    }

    public static void removeSemaphore(String key) {
        semaphores.remove(key);
    }
}
```

DisConnect.java

```java
public class DisConnect implements NetSDKLib.fDisConnect {

    Logger logger = LoggerFactory.getLogger(DisConnect.class);

    @Override
    public void invoke(NetSDKLib.LLong lLoginID, String pchDVRIP, int nDVRPort, Pointer dwUser) {
        // Called by the SDK when the connection to a device is lost
        logger.error("客户端失去连接: {}:{}", pchDVRIP, nDVRPort);
    }
}
```
### Configuring a Dahua camera in a Python environment

#### Installing the required libraries

To work with a Dahua camera from Python, a few libraries need to be installed first. For ONVIF-based control, the `onvif-zeep` package simplifies the interaction with the camera:

```bash
pip install onvif-zeep==0.3.3
```

If you also plan to grab snapshots or perform other operations over HTTP, install a library for handling HTTP requests as well:

```bash
pip install requests
```

These commands give the development environment the basic tools needed to access and control a Dahua camera.

#### Initializing the ONVIF client and connecting to the camera

Once the dependencies are installed, a small Python script can initialize the ONVIF service and connect to a specific Dahua camera. A basic example:

```python
from onvif import ONVIFCamera

# Create the camera instance; replace with the actual device information
mycam = ONVIFCamera('CAMERA_IP', 80, 'USERNAME', 'PASSWORD')

# Obtain the media service object
media_service = mycam.create_media_service()

print("Connected to the camera")
```

This snippet builds a camera object with the `ONVIFCamera` class, passing the target camera's IP address, port number, and credentials, and then calls `create_media_service()` to obtain the media service interface used for further control.

#### Setting image parameters (resolution, brightness, etc.)

Adjusting image properties such as resolution or brightness usually means fetching the list of profiles, picking a suitable ProfileToken as the reference, and then applying the change. A concrete example:

```python
profiles = media_service.GetProfiles()
token = profiles[0]._token

# Adjust the video source mode (for example, to change the resolution)
video_source_modes = media_service.GetVideoSourceModes(token=token)
for mode in video_source_modes:
    print(f"Mode Name: {mode.Name}, Width: {mode.Resolution.Width}, Height: {mode.Resolution.Height}")

new_mode_token = input("Enter the token of the mode to switch to: ")
media_service.SetVideoSourceMode(VideoSourceModeToken=new_mode_token)

# Change image quality parameters (for example, brightness)
imaging_service = mycam.create_imaging_service()
request = imaging_service.create_type('SetImagingSettings')
request.VideoSourceToken = token
request.ImagingSettings.Brightness = float(input("Enter the new brightness value (usually 0~1): "))
response = imaging_service.SetImagingSettings(request)
print(response.Status.String)
```

This code first lists the available video source modes so that a new resolution can be chosen, then lets the user set a brightness value interactively. Note that before changing any parameter you should check the official documentation to confirm the supported feature set and the valid value ranges.
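As mentioned above, `requests` can also be used to actually fetch a snapshot over HTTP. The following sketch is one possible way to combine it with the ONVIF media service, assuming the camera publishes a snapshot URI and protects it with digest authentication; `GetSnapshotUri` is a standard ONVIF media operation, but the exact token attribute (`_token` vs. `token`) depends on the onvif library version:

```python
from onvif import ONVIFCamera
import requests
from requests.auth import HTTPDigestAuth

# Placeholder device information
mycam = ONVIFCamera('CAMERA_IP', 80, 'USERNAME', 'PASSWORD')
media_service = mycam.create_media_service()

profiles = media_service.GetProfiles()
token = profiles[0]._token  # may be profiles[0].token depending on the library version

# Ask the camera for its snapshot URI, then download the JPEG over HTTP
req = media_service.create_type('GetSnapshotUri')
req.ProfileToken = token
snapshot_uri = media_service.GetSnapshotUri(req).Uri

resp = requests.get(snapshot_uri, auth=HTTPDigestAuth('USERNAME', 'PASSWORD'), timeout=10)
resp.raise_for_status()
with open('snapshot.jpg', 'wb') as f:
    f.write(resp.content)
print("Snapshot saved to snapshot.jpg")
```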