1. Preface
- OpenVINO can be thought of as a driver: once you find the right headers and libraries, you can use the corresponding device, such as the CPU, GPU, or the Neural Compute Stick.
- Of course, you first need a machine with OpenVINO installed; see here.
- Direct download link; the earlier you download, the less you spend.
2. Headers and Libraries
- Headers
- /opt/intel/openvino/deployment_tools/inference_engine/include
- /opt/intel/openvino/deployment_tools/ngraph/include
- /opt/intel/openvino/deployment_tools/inference_engine/samples/cpp/common/samples
- Libraries
- /opt/intel/openvino/deployment_tools/inference_engine/lib/intel64
- /opt/intel/openvino/deployment_tools/inference_engine/external/tbb/lib
- /opt/intel/openvino/deployment_tools/ngraph/lib
- From the last two directories I only copied libtbb.so.2 and libngraph.so.
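- With these paths, the program in the next section can be built directly; a sketch of the compile line is below. This is not an official recipe: main.cpp and demo are my own placeholder names, the pkg-config name for OpenCV varies by install, and at runtime the loader still has to find the copied .so files (e.g. via LD_LIBRARY_PATH).

g++ main.cpp -o demo \
    -I/opt/intel/openvino/deployment_tools/inference_engine/include \
    -I/opt/intel/openvino/deployment_tools/ngraph/include \
    -I/opt/intel/openvino/deployment_tools/inference_engine/samples/cpp/common/samples \
    -L/opt/intel/openvino/deployment_tools/inference_engine/lib/intel64 \
    -L/opt/intel/openvino/deployment_tools/inference_engine/external/tbb/lib \
    -L/opt/intel/openvino/deployment_tools/ngraph/lib \
    -linference_engine -lngraph -ltbb \
    $(pkg-config --cflags --libs opencv4)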
3. C++ Code
#include <string>
#include <iostream>
#include <time.h>
#include <sys/time.h>

#include <inference_engine.hpp>
#include <ngraph/ngraph.hpp>
#include <samples/ocv_common.hpp>  // sample helpers: matU8ToBlob, etc.

using namespace InferenceEngine;
// Copy an OpenCV frame into the request's input blob. matU8ToBlob comes
// from the samples' ocv_common.hpp; it resizes the image to the blob's
// dimensions and rearranges the interleaved bytes into NCHW order.
void frameToBlob(const cv::Mat& frame, InferRequest::Ptr& inferRequest, const std::string& inputName) {
    Blob::Ptr frameBlob = inferRequest->GetBlob(inputName);
    matU8ToBlob<uint8_t>(frame, frameBlob);
}
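
// Aside (my own sketch, not from the original post): the same copy can be
// done without the samples helper. This assumes a U8 blob in NCHW layout and
// a 3-channel BGR frame already resized to the network input resolution; the
// name frameToBlobManual is hypothetical.
void frameToBlobManual(const cv::Mat& frame, InferRequest::Ptr& inferRequest, const std::string& inputName) {
    Blob::Ptr blob = inferRequest->GetBlob(inputName);
    const SizeVector dims = blob->getTensorDesc().getDims();  // {N, C, H, W}
    const size_t channels = dims[1], height = dims[2], width = dims[3];
    uint8_t* blobData = blob->buffer().as<uint8_t*>();
    // OpenCV stores pixels interleaved (HWC); the blob wants planar (CHW).
    for (size_t c = 0; c < channels; ++c)
        for (size_t h = 0; h < height; ++h)
            for (size_t w = 0; w < width; ++w)
                blobData[c * height * width + h * width + w] =
                    frame.at<cv::Vec3b>(static_cast<int>(h), static_cast<int>(w))[c];
}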
// Adjust these to your own environment; "MYRIAD" targets the Neural Compute
// Stick, and "CPU" or "GPU" work the same way.
std::string DEVICE = "MYRIAD";
std::string IR_FileNameNoExt = "/home/lwd/openvino/models/32/mobilenet-ssd";
std::string imageFile = "/home/lwd/openvino/mmgg.jpg";
float confidence_threshold = 0.07;
int main(void) {
    Core ie;
    std::cout << "1.Load Plugin..." << std::endl;
    std::cout << ie.GetVersions(DEVICE) << std::endl;
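    // As far as I can tell, the operator<< that prints this version map is
    // defined in the samples' common.hpp (pulled in via ocv_common.hpp);
    // without the samples headers this line would not compile.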
std::cout << "2.Read IR File..." << std::endl;
CNNNetwork network = ie.ReadNetwork(IR_FileNameNoExt+".xml");
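    // ReadNetwork deduces the weights file by swapping the extension, so this
    // also loads the mobilenet-ssd.bin sitting next to the .xml; the .bin
    // path can equally be passed explicitly as a second argument.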
std::cout << "3.Prepare Input and Output..." << std::endl;
InputsDataMap inputsInfo(network.getInputsInfo()