MainActivity.kt
package com.example.moth
import android.content.Intent
import android.content.res.AssetManager
import android.graphics.Bitmap
import android.os.Bundle
import android.util.Log
import android.view.Menu
import android.view.MenuItem
import android.widget.Button
import android.widget.TextView
import android.widget.Toast
import com.google.android.material.snackbar.Snackbar
import com.google.android.material.navigation.NavigationView
import androidx.navigation.findNavController
import androidx.navigation.ui.AppBarConfiguration
import androidx.navigation.ui.navigateUp
import androidx.navigation.ui.setupActionBarWithNavController
import androidx.navigation.ui.setupWithNavController
import androidx.drawerlayout.widget.DrawerLayout
import androidx.appcompat.app.AppCompatActivity
import com.example.moth.databinding.ActivityMainBinding
import java.io.ByteArrayOutputStream
import okhttp3.MediaType
import okhttp3.MultipartBody
import retrofit2.Call
import retrofit2.Callback
import retrofit2.Response
import com.example.moth.httpInterface.RecognizeClient.recognizeService
import com.example.moth.httpInterface.PredictResponse
import com.example.moth.httpInterface.PredictResult
class MainActivity : AppCompatActivity() {
private lateinit var appBarConfiguration: AppBarConfiguration
private lateinit var binding: ActivityMainBinding
private var selectedBitMap: Bitmap? = null
private lateinit var recognizeResultTextView: TextView
private lateinit var assetManager: AssetManager
companion object {
// Load the native library when the class is first loaded
init {
System.loadLibrary("native-lib")
}
}
// Native method implemented in native-lib.cpp (declared on the instance so the JNI code can read assetManager)
private external fun staticProcessImage(bitmap: Bitmap): String
private val insectMapping = mapOf(
"0" to "稻纵卷叶螟",
"1" to "亚洲稻螟虫",
"2" to "白蛾",
"3" to "鬼脸天蛾",
"4" to "玉米螟",
"5" to "夜盗蛾",
"6" to "桃蛀螟",
"7" to "甘蓝夜蛾",
"8" to "草地贪夜蛾",
"9" to "斜带葡萄天蛾",
"10" to "棉斜纹夜蛾",
"11" to "枯叶夜蛾",
"12" to "横线尾夜蛾"
)
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// Check for a saved token; redirect to the login screen if there is none
val sharedPreferences = getSharedPreferences("AppPreferences", MODE_PRIVATE)
val token = sharedPreferences.getString("token", null)
if(token == null){
val intent = Intent(this, LoginActivity::class.java)
startActivity(intent)
finish()
return
}
assetManager = assets
binding = ActivityMainBinding.inflate(layoutInflater)
setContentView(binding.root)
setSupportActionBar(binding.appBarMain.toolbar)
binding.appBarMain.fab.setOnClickListener { view ->
Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null)
.setAnchorView(R.id.fab).show()
}
val drawerLayout: DrawerLayout = binding.drawerLayout
val navView: NavigationView = binding.navView
val navController = findNavController(R.id.nav_host_fragment_content_main)
// Pass each menu ID as a set of IDs because each menu item
// should be considered a top-level destination.
appBarConfiguration = AppBarConfiguration(
setOf(
R.id.nav_home, R.id.nav_gallery, R.id.nav_slideshow
), drawerLayout
)
setupActionBarWithNavController(navController, appBarConfiguration)
navView.setupWithNavController(navController)
recognizeResultTextView = findViewById(R.id.recognizeResult)
val recognizeButton: Button = findViewById(R.id.recognizeButton)
val uploadButton: Button = findViewById(R.id.recognizeOnlineButton)
// Click handler for the offline (on-device) recognition button
recognizeButton.setOnClickListener {
selectedBitMap?.let {bitmap ->
val inferenceResult = staticProcessImage(bitmap)
val inferenceTop3 = inferenceResult.trim().split(" ")
val mappedResult = inferenceTop3.map { insectMapping[it] ?: " "}
val returnText = "离线识别的结果是:$mappedResult"
recognizeResultTextView.text = returnText
} ?: run {
recognizeResultTextView.text = "请先选择图片"
}
}
// Click handler for the online (upload) recognition button
uploadButton.setOnClickListener {
selectedBitMap?.let {bitmap ->
val imageStream = bitmap2ByteArray(bitmap)
uploadImage(imageStream)
} ?: run{
recognizeResultTextView.text = "请先选择图片"
}
}
}
private fun bitmap2ByteArray(bitmap: Bitmap): ByteArray {
val outputStream = ByteArrayOutputStream()
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, outputStream)
return outputStream.toByteArray()
}
private fun uploadImage(byteArray: ByteArray){
val requestBody = okhttp3.RequestBody.create(MediaType.parse("image/jpeg"), byteArray)
val filePart = MultipartBody.Part.createFormData("image", "image.jpg", requestBody)
recognizeService.predict(filePart).enqueue(object : Callback<PredictResponse> {
override fun onResponse(
call: Call<PredictResponse>,
response: Response<PredictResponse>
) {
if(response.isSuccessful && response.body()?.success == true) {
val result = response.body()?.predictions
if(result == null){
Toast.makeText(this@MainActivity, "上传图片成功,但未返回识别结果", Toast.LENGTH_SHORT).show()
}else {
val labels: List<String> = result.map { it.label }
val mappedLabels = labels.map { insectMapping[it] ?: " "}
val returnText = "上传识别的结果是:$mappedLabels"
recognizeResultTextView.text = returnText
}
}else {
Toast.makeText(this@MainActivity, "上传图片失败", Toast.LENGTH_SHORT).show()
}
}
override fun onFailure(call: Call<PredictResponse>, t: Throwable) {
Toast.makeText(this@MainActivity, "请求失败", Toast.LENGTH_SHORT).show()
Log.e("MainActivity", "上传图片失败: ${t.message}")
}
})
}
override fun onCreateOptionsMenu(menu: Menu): Boolean {
// Inflate the menu; this adds items to the action bar if it is present.
menuInflater.inflate(R.menu.main, menu)
return true
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
when(item.itemId){
R.id.logout -> {
val sharedPreferences = getSharedPreferences("AppPreferences", MODE_PRIVATE)
val editor = sharedPreferences.edit()
editor.remove("token")
editor.apply()
val intent = Intent(this, LoginActivity::class.java)
startActivity(intent)
finish()
return true
}
}
return super.onOptionsItemSelected(item)
}
override fun onSupportNavigateUp(): Boolean {
val navController = findNavController(R.id.nav_host_fragment_content_main)
return navController.navigateUp(appBarConfiguration) || super.onSupportNavigateUp()
}
fun setSelectedBitMap(bitmap: Bitmap){
selectedBitMap = bitmap
}
}
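MainActivity depends on recognizeService, PredictResponse, and PredictResult from com.example.moth.httpInterface, which are not shown in this post. Below is a minimal sketch of what those types could look like, based only on how the activity uses them; the endpoint path, the placeholder base URL, the confidence field, and the Gson converter are assumptions, not the project's actual definitions.

package com.example.moth.httpInterface

import okhttp3.MultipartBody
import retrofit2.Call
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import retrofit2.http.Multipart
import retrofit2.http.POST
import retrofit2.http.Part

// Field names inferred from how MainActivity reads the response; `confidence` is a guess
data class PredictResult(val label: String, val confidence: Float = 0f)
data class PredictResponse(val success: Boolean, val predictions: List<PredictResult>?)

interface RecognizeService {
    @Multipart
    @POST("predict")   // endpoint path is an assumption
    fun predict(@Part image: MultipartBody.Part): Call<PredictResponse>
}

object RecognizeClient {
    private val retrofit = Retrofit.Builder()
        .baseUrl("http://10.0.2.2:5000/")   // placeholder base URL, replace with the real server
        .addConverterFactory(GsonConverterFactory.create())
        .build()

    val recognizeService: RecognizeService = retrofit.create(RecognizeService::class.java)
}

Retrofit, the Gson converter, and OkHttp would also need to be declared as module dependencies for this sketch to compile.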
CMakeLists.txt
cmake_minimum_required(VERSION 3.10.0)

# Android cross-compilation settings (the NDK toolchain file, when used, already provides these)
set(CMAKE_SYSTEM_NAME Android)
set(CMAKE_ANDROID_NDK $ENV{ANDROID_NDK_HOME})
# Keep the STL choice consistent with the explicit c++_shared link below
set(CMAKE_ANDROID_STL_TYPE c++_shared)

project(moth)

# C++ standard (must be set before the targets are created to take effect)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Path to the prebuilt ncnn library for the current ABI
set(ncnn_DIR ${CMAKE_SOURCE_DIR}/ncnn/${ANDROID_ABI}/lib)
# Path to the OpenCV Android SDK
set(opencv_DIR ${CMAKE_SOURCE_DIR}/opencv/native)

# Locate libandroid.so (asset manager APIs)
find_library(android-lib android)

# Import the prebuilt ncnn shared library
add_library(ncnn SHARED IMPORTED)
set_target_properties(ncnn PROPERTIES IMPORTED_LOCATION ${ncnn_DIR}/libncnn.so)

# Import the prebuilt OpenCV shared library and its headers
add_library(opencv SHARED IMPORTED)
set_target_properties(opencv PROPERTIES IMPORTED_LOCATION ${opencv_DIR}/libs/${ANDROID_ABI}/libopencv_java4.so)
include_directories(${opencv_DIR}/jni/include)

# The JNI source for this module
add_library(native-lib SHARED native-lib.cpp)

# Link the Android, ncnn, OpenCV and bitmap (jnigraphics) libraries
target_link_libraries(native-lib ${android-lib} ncnn opencv jnigraphics c++_shared)
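This CMake script is only picked up if the module's Gradle file points at it. The snippet below is a sketch of the relevant externalNativeBuild wiring in build.gradle.kts, assuming the script lives at src/main/cpp/CMakeLists.txt and that only arm64-v8a is shipped (the ncnn include path in native-lib.cpp is arm64-v8a-specific).

android {
    defaultConfig {
        ndk {
            // native-lib.cpp pulls its ncnn headers from the arm64-v8a package, so build only that ABI
            abiFilters += "arm64-v8a"
        }
    }
    externalNativeBuild {
        cmake {
            path = file("src/main/cpp/CMakeLists.txt")   // assumed location of the script above
            version = "3.22.1"                           // any SDK-bundled CMake >= 3.10 works
        }
    }
}

Restricting abiFilters keeps the packaged ncnn and OpenCV .so files consistent with the hard-coded header path.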
native-lib.cpp
//
// Created by Dell on 2025/2/20.
//
// stb_image is bundled with the project (see the stb_image.h section at the end); it is not used in this file
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#include <jni.h>
#include <string>
#include <vector>
#include <algorithm>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>
#include <android/bitmap.h>
#include <opencv2/opencv.hpp>
#include "ncnn/arm64-v8a/include/ncnn/net.h"
// Resize the image and normalize it with the ImageNet mean/std
// (currently unused; staticProcessImage below feeds raw 0-255 pixels to the network)
cv::Mat preprocessImage(const cv::Mat& inputImage, int targetWidth, int targetHeight) {
    cv::Mat resizedImage;
    cv::resize(inputImage, resizedImage, cv::Size(targetWidth, targetHeight));
    // Convert to float and scale to [0, 1]
    cv::Mat normalizedImage;
    resizedImage.convertTo(normalizedImage, CV_32F, 1.0 / 255.0);
    // Normalize each channel exactly once; the constants are in R, G, B order,
    // so reverse them if the input Mat is BGR
    cv::Mat channels[3];
    cv::split(normalizedImage, channels);
    const float mean[3] = {0.485f, 0.456f, 0.406f};
    const float stdev[3] = {0.229f, 0.224f, 0.225f};
    for (int i = 0; i < 3; ++i) {
        channels[i] = (channels[i] - mean[i]) / stdev[i];
    }
    // Merge the channels back
    cv::merge(channels, 3, normalizedImage);
    return normalizedImage;
}
extern "C"
JNIEXPORT jstring JNICALL
JNICALL
Java_com_example_moth_MainActivity_staticProcessImage(JNIEnv* env, jobject thiz, jobject bitmap) {
    // Read the activity's `assetManager` field so the model can be loaded from assets
    jclass clazz = env->GetObjectClass(thiz);
    jfieldID assetManagerFieldID = env->GetFieldID(clazz, "assetManager", "Landroid/content/res/AssetManager;");
    jobject assetManagerObj = env->GetObjectField(thiz, assetManagerFieldID);
    AAssetManager* assetManager = AAssetManager_fromJava(env, assetManagerObj);
    // Load the ncnn model directly from assets; load_param()/load_model() read the asset
    // files themselves, so there is no need to copy them into buffers first
    ncnn::Net net;
    if (net.load_param(assetManager, "effivit.param") != 0) {
        return env->NewStringUTF("Failed to load effivit.param");
    }
    if (net.load_model(assetManager, "effivit.bin") != 0) {
        return env->NewStringUTF("Failed to load effivit.bin");
    }
    // Query the Bitmap and make sure it is an RGBA_8888 software bitmap
    AndroidBitmapInfo info;
    void* pixels;
    if (AndroidBitmap_getInfo(env, bitmap, &info) < 0) {
        return env->NewStringUTF("AndroidBitmap_getInfo() failed!");
    }
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888) {
        return env->NewStringUTF("Bitmap format is not RGBA_8888!");
    }
    if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) {
        return env->NewStringUTF("AndroidBitmap_lockPixels() failed!");
    }
    // Wrap the Bitmap pixels in an OpenCV Mat (honouring the row stride) and drop the alpha channel
    cv::Mat mat(info.height, info.width, CV_8UC4, pixels, info.stride);
    cv::Mat mat3;
    cv::cvtColor(mat, mat3, cv::COLOR_RGBA2BGR);
    // Resize to the network's 224x224 input
    int targetWidth = 224;
    int targetHeight = 224;
    cv::Mat resizedMat; // = preprocessImage(mat3, targetWidth, targetHeight); // mean/std normalization currently disabled
    cv::resize(mat3, resizedMat, cv::Size(targetWidth, targetHeight));
    // Convert the Mat to ncnn::Mat (pixel values are left in the 0-255 range)
    ncnn::Mat in = ncnn::Mat::from_pixels(resizedMat.data, ncnn::Mat::PIXEL_BGR, targetWidth, targetHeight);
    // Run inference
    ncnn::Extractor ex = net.create_extractor();
    ex.input("input.1", in);
    ncnn::Mat out;
    ex.extract("1270", out);
    // Release the Bitmap pixels
    AndroidBitmap_unlockPixels(env, bitmap);
    // Post-process the output: the tensor has shape (1, 13), i.e. one score per class
    float* outputData = (float*)out.data;
    int numClasses = out.w;
    // Pair each score with its class index
    std::vector<std::pair<float, int>> probIndexPairs;
    for (int i = 0; i < numClasses; i++) {
        probIndexPairs.push_back(std::make_pair(outputData[i], i));
    }
    // Sort by score, highest first
    std::sort(probIndexPairs.begin(), probIndexPairs.end(), [](const std::pair<float, int>& a, const std::pair<float, int>& b) {
        return a.first > b.first;
    });
    // Build a space-separated string of the top-3 class indices
    std::string result;
    for (int i = 0; i < 3 && i < (int)probIndexPairs.size(); i++) {
        if (i > 0) result += " ";
        result += std::to_string(probIndexPairs[i].second);
    }
    // Return the indices to the Kotlin caller
    return env->NewStringUTF(result.c_str());
}
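The post never shows where selectedBitMap comes from. Since the native function above rejects anything that is not a software RGBA_8888 bitmap, the picker has to hand MainActivity an ARGB_8888 software copy. Below is a sketch of how a fragment could do that; HomeFragment, its package, and the button hookup are hypothetical, and only setSelectedBitMap comes from the code above.

package com.example.moth.ui.home   // hypothetical location

import android.graphics.Bitmap
import android.graphics.ImageDecoder
import android.net.Uri
import android.os.Build
import android.provider.MediaStore
import androidx.activity.result.contract.ActivityResultContracts
import androidx.fragment.app.Fragment
import com.example.moth.MainActivity

class HomeFragment : Fragment() {

    // Launches the system image picker and hands the decoded bitmap to MainActivity
    private val pickImage = registerForActivityResult(ActivityResultContracts.GetContent()) { uri: Uri? ->
        uri ?: return@registerForActivityResult
        val resolver = requireContext().contentResolver
        val bitmap: Bitmap = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
            // Force a software allocation; a hardware bitmap would fail AndroidBitmap_lockPixels()
            ImageDecoder.decodeBitmap(ImageDecoder.createSource(resolver, uri)) { decoder, _, _ ->
                decoder.allocator = ImageDecoder.ALLOCATOR_SOFTWARE
            }
        } else {
            @Suppress("DEPRECATION")
            MediaStore.Images.Media.getBitmap(resolver, uri)
        }
        // ARGB_8888 on the Kotlin side is what the JNI code sees as RGBA_8888
        (requireActivity() as MainActivity).setSelectedBitMap(bitmap.copy(Bitmap.Config.ARGB_8888, false))
    }

    // Call this from a "choose image" button's click listener
    fun chooseImage() {
        pickImage.launch("image/*")
    }
}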
stb_image.h
stb_image.h is the stock single-header image loader from https://github.com/nothings/stb; place it unchanged in the same directory as native-lib.cpp so the #include above resolves.