基于前面的内容,本期继续沿着开发过程展开。下面给出三套完整、可直接使用的源码,全部基于 OpenWRT + QCS6490 + SNPE 环境:
- 自动化模型转换脚本(主机端,ONNX → DLC + 量化 + 校验)
- LuCI 推理控制面板(Web 上传模型、选择运行时、实时推理)
- OTA 模型更新服务(设备端自动下载、校验、替换、通知)
1. 自动化模型转换脚本(主机端)
文件:
convert_model.sh
路径:~/snpe-tools/convert_model.sh
功能:一键 ONNX → DLC → 量化 → 校验 → 打包 OTA 包
#!/bin/bash
# convert_model.sh
# One-shot pipeline: ONNX -> DLC -> (optional) INT8 quantization -> validation -> OTA package.
#
# Usage: ./convert_model.sh yolov8n.onnx yolov8n [input_size] [quantize]
#   $1 - path to the ONNX model file
#   $2 - model name used to derive output file names
#   $3 - square input resolution (default: 640)
#   $4 - "true" to run INT8 quantization (default: true)
#
# pipefail is intentionally omitted: the `snpe-dlc-info | head` stage would
# otherwise abort the script on the benign SIGPIPE from head.
set -eu

if [ $# -lt 2 ]; then
  echo "Usage: $0 <model.onnx> <model_name> [input_size] [quantize]" >&2
  exit 2
fi

MODEL_ONNX="$1"
MODEL_NAME="$2"
INPUT_SIZE="${3:-640}"   # default 640x640
QUANTIZE="${4:-true}"    # whether to quantize

SNPE_ROOT="$HOME/snpe-sdk"
OUTPUT_DIR="./ota_models"

# 1. Check the SDK environment is present before doing any work.
if [ ! -d "$SNPE_ROOT" ]; then
  echo "SNPE_ROOT not found: $SNPE_ROOT" >&2
  exit 1
fi
source "$SNPE_ROOT/bin/envsetup.sh"

mkdir -p "$OUTPUT_DIR"
DLC_FILE="${OUTPUT_DIR}/${MODEL_NAME}.dlc"
DLC_QUANT="${OUTPUT_DIR}/${MODEL_NAME}_quant.dlc"
INPUT_LIST="${OUTPUT_DIR}/input_list.txt"
OTA_PKG="${OUTPUT_DIR}/${MODEL_NAME}_ota.tar.gz"

# 2. Generate the calibration input list.
# NOTE(review): flat gray images give poor quantization statistics — swap in
# representative dataset samples for production calibration.
echo "Generating input list..."
mkdir -p images
: > "$INPUT_LIST"   # truncate so reruns do not accumulate stale entries
for i in {1..5}; do
  convert -size "${INPUT_SIZE}x${INPUT_SIZE}" xc:gray "images/calib_$i.jpg"
  echo "images/calib_$i.jpg" >> "$INPUT_LIST"
done

# 3. ONNX -> DLC
echo "Converting ONNX to DLC..."
snpe-onnx-to-dlc \
  --input_model "$MODEL_ONNX" \
  --output_model "$DLC_FILE" \
  --input_dim input_0 "1,3,${INPUT_SIZE},${INPUT_SIZE}"

# 4. Optional INT8 quantization using the calibration list above.
if [ "$QUANTIZE" = "true" ]; then
  echo "Quantizing to INT8..."
  snpe-dlc-quantize \
    --input_dlc "$DLC_FILE" \
    --input_list "$INPUT_LIST" \
    --output_dlc "$DLC_QUANT" \
    --use_enhanced_quantizer \
    --use_adjusted_weights_quantizer
  FINAL_DLC="$DLC_QUANT"
else
  FINAL_DLC="$DLC_FILE"
fi

# 5. Sanity-check the resulting model (first 20 lines of metadata).
echo "Validating model..."
snpe-dlc-info --input_dlc "$FINAL_DLC" | head -20

# 6. Build the OTA package: manifest with size + sha256 for on-device
#    integrity verification, plus the final DLC.
echo "Packaging OTA update..."
cat > "${OUTPUT_DIR}/model.json" << EOF
{
"name": "$MODEL_NAME",
"version": "$(date +%Y%m%d%H%M)",
"size": $(stat -c%s "$FINAL_DLC"),
"sha256": "$(sha256sum "$FINAL_DLC" | cut -d' ' -f1)",
"input_size": ${INPUT_SIZE},
"quantized": $QUANTIZE
}
EOF
tar -czf "$OTA_PKG" -C "$OUTPUT_DIR" \
  "$(basename "$FINAL_DLC")" model.json

echo "OTA package ready: $OTA_PKG"
echo "Upload to: http://your-server.com/ota/${MODEL_NAME}_ota.tar.gz"
使用示例:
./convert_model.sh yolov8n.onnx yolov8n 640 true
2. LuCI 推理控制面板(Web 界面)
路径:
package/luci-app-snpe/
目录结构
luci-app-snpe/
├── root/
│ └── usr/share/luci/menu.d/snpe.json
├── htdocs/
│ └── luci-static/resources/view/snpe/
│ ├── dashboard.htm
│ └── inference.js
└── src/
└── Makefile
src/Makefile
# OpenWrt package Makefile for the LuCI SNPE inference panel.
# Installs the menu entry and the client-side view; no compilation step.
include $(TOPDIR)/rules.mk
PKG_NAME:=luci-app-snpe
PKG_VERSION:=1.0
PKG_RELEASE:=1
include $(INCLUDE_DIR)/package.mk
# Package metadata: depends on the on-device SNPE runtime, Python/OpenCV for
# the inference backend, and luci-base for the web UI framework.
define Package/luci-app-snpe
SECTION:=luci
CATEGORY:=LuCI
SUBMENU:=3. Applications
TITLE:=SNPE AI Inference Panel
DEPENDS:=+snpe +python3 +python3-opencv +luci-base
endef
# Nothing to compile — the package ships only static web assets and JSON.
define Build/Compile
endef
# NOTE(review): views are installed under /usr/share/luci/static/... here,
# but modern LuCI (JS-based) serves client views from
# /www/luci-static/resources/view/ — confirm against the target LuCI version.
define Package/luci-app-snpe/install
$(INSTALL_DIR) $(1)/usr/share/luci/menu.d
$(CP) ./root/usr/share/luci/menu.d/snpe.json $(1)/usr/share/luci/menu.d/
$(INSTALL_DIR) $(1)/usr/share/luci/static/resources/view/snpe
$(CP) ./htdocs/luci-static/resources/view/snpe/* $(1)/usr/share/luci/static/resources/view/snpe/
endef
$(eval $(call BuildPackage,luci-app-snpe))
root/usr/share/luci/menu.d/snpe.json
{
"admin/ai": {
"snpe": {
"title": "SNPE Inference",
"order": 10,
"action": {
"type": "view",
"path": "snpe/dashboard"
}
}
}
}
htdocs/luci-static/resources/view/snpe/dashboard.htm
<%+header%>
<h2>SNPE AI Inference Panel</h2>
<div class="cbi-section">
<div class="cbi-section-node">
<label class="cbi-value-title">Model</label>
<div class="cbi-value-field">
<select id="model_select" class="cbi-input-select"></select>
<button onclick="uploadModel()">Upload New</button>
<input type="file" id="model_file" style="display:none" accept=".dlc">
</div>
</div>
<div class="cbi-section-node">
<label>Runtime</label>
<select id="runtime">
<option value="DSP">DSP (Hexagon)</option>
<option value="HTP">HTP (NPU)</option>
</select>
</div>
<div class="cbi-section-node

最低0.47元/天 解锁文章
1857

被折叠的 条评论
为什么被折叠?



