
import os
import cv2
from rknnlite.api import RKNNLite
import numpy as np

RKNN_MODEL = "./yolov8.rknn"
IMG_FOLDER = "dataset-1"
RESULT_PATH = './dataset-2'

CLASSES = ['car']

OBJ_THRESH = 0.45
NMS_THRESH = 0.45

MODEL_SIZE = (640, 640)

color_palette = np.random.uniform(0, 255, size=(len(CLASSES), 3))


def sigmoid(x):
    return 1 / (1 + np.exp(-x))


def letter_box(im, new_shape, pad_color=(0, 0, 0), info_need=False):
    # Resize and pad image while meeting stride-multiple constraints
    shape = im.shape[:2]  # current shape [height, width]
    if isinstance(new_shape, int):
        new_shape = (new_shape, new_shape)

    # Scale ratio
    r = min(new_shape[0] / shape[0], new_shape[1] / shape[1])

    # Compute padding
    ratio = r  # single scale ratio (identical for width and height)
    new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r))
    dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1]  # wh padding

    dw /= 2  # divide padding into 2 sides
    dh /= 2

    if shape[::-1] != new_unpad:  # resize
        im = cv2.resize(im, new_unpad, interpolation=cv2.INTER_LINEAR)
    top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1))
    left, right = int(round(dw - 0.1)), int(round(dw + 0.1))
    im = cv2.copyMakeBorder(im, top, bottom, left, right, cv2.BORDER_CONSTANT, value=pad_color)  # add border

    if info_need:
        return im, ratio, (dw, dh)
    else:
        return im
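
# Note: with info_need=True, letter_box also returns ratio and (dw, dh), which
# are needed to map model-space boxes back to the source image exactly:
#   x_src = (x_model - dw) / ratio
#   y_src = (y_model - dh) / ratio
# (illustrative only; draw() below uses a simpler whole-image rescale)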


def filter_boxes(boxes, box_confidences, box_class_probs):
    """Filter boxes with object threshold.
    """
    box_confidences = box_confidences.reshape(-1)
    candidate, class_num = box_class_probs.shape

    class_max_score = np.max(box_class_probs, axis=-1)
    classes = np.argmax(box_class_probs, axis=-1)

    _class_pos = np.where(class_max_score * box_confidences >= OBJ_THRESH)
    scores = (class_max_score * box_confidences)[_class_pos]

    boxes = boxes[_class_pos]
    classes = classes[_class_pos]

    return boxes, classes, scores


def nms_boxes(boxes, scores):
    """Suppress non-maximal boxes.
    # Returns
        keep: ndarray, index of effective boxes.
    """
    x = boxes[:, 0]
    y = boxes[:, 1]
    w = boxes[:, 2] - boxes[:, 0]
    h = boxes[:, 3] - boxes[:, 1]

    areas = w * h
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(i)

        xx1 = np.maximum(x[i], x[order[1:]])
        yy1 = np.maximum(y[i], y[order[1:]])
        xx2 = np.minimum(x[i] + w[i], x[order[1:]] + w[order[1:]])
        yy2 = np.minimum(y[i] + h[i], y[order[1:]] + h[order[1:]])

        w1 = np.maximum(0.0, xx2 - xx1 + 0.00001)
        h1 = np.maximum(0.0, yy2 - yy1 + 0.00001)
        inter = w1 * h1

        ovr = inter / (areas[i] + areas[order[1:]] - inter)
        inds = np.where(ovr <= NMS_THRESH)[0]
        order = order[inds + 1]
    keep = np.array(keep)
    return keep


def softmax(x, axis=None):
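    # Subtract the per-axis max first for numerical stability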
    x = x - x.max(axis=axis, keepdims=True)
    y = np.exp(x)
    return y / y.sum(axis=axis, keepdims=True)


def dfl(position):
    # Distribution Focal Loss (DFL)
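    # Each box side is predicted as a discrete distribution over mc bins;
    # softmax turns the logits into probabilities and the weighted sum below
    # takes the expectation, yielding one continuous distance per side.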
    n, c, h, w = position.shape
    p_num = 4
    mc = c // p_num
    y = position.reshape(n, p_num, mc, h, w)
    y = softmax(y, 2)
    acc_matrix = np.arange(mc, dtype=float).reshape(1, 1, mc, 1, 1)
    y = (y * acc_matrix).sum(2)
    return y


def box_process(position):
    grid_h, grid_w = position.shape[2:4]
    col, row = np.meshgrid(np.arange(0, grid_w), np.arange(0, grid_h))
    col = col.reshape(1, 1, grid_h, grid_w)
    row = row.reshape(1, 1, grid_h, grid_w)
    grid = np.concatenate((col, row), axis=1)
    # stride order must match the grid channels (x, y): width/grid_w first
    stride = np.array([MODEL_SIZE[0] // grid_w, MODEL_SIZE[1] // grid_h]).reshape(1, 2, 1, 1)

    position = dfl(position)
    box_xy = grid + 0.5 - position[:, 0:2, :, :]
    box_xy2 = grid + 0.5 + position[:, 2:4, :, :]
    xyxy = np.concatenate((box_xy * stride, box_xy2 * stride), axis=1)

    return xyxy


def post_process(input_data):
    boxes, scores, classes_conf = [], [], []
    default_branch = 3
    pair_per_branch = len(input_data) // default_branch
    # The Python post-processing ignores the score_sum output; each branch
    # contributes a box tensor and a class-probability tensor.
    for i in range(default_branch):
        boxes.append(box_process(input_data[pair_per_branch * i]))
        classes_conf.append(input_data[pair_per_branch * i + 1])
        # The exported model has no separate objectness output, so use a constant 1.0
        scores.append(np.ones_like(input_data[pair_per_branch * i + 1][:, :1, :, :], dtype=np.float32))

    def sp_flatten(_in):
        ch = _in.shape[1]
        _in = _in.transpose(0, 2, 3, 1)
        return _in.reshape(-1, ch)

    boxes = [sp_flatten(_v) for _v in boxes]
    classes_conf = [sp_flatten(_v) for _v in classes_conf]
    scores = [sp_flatten(_v) for _v in scores]

    boxes = np.concatenate(boxes)
    classes_conf = np.concatenate(classes_conf)
    scores = np.concatenate(scores)

    # filter according to threshold
    boxes, classes, scores = filter_boxes(boxes, scores, classes_conf)

    # nms
    nboxes, nclasses, nscores = [], [], []
    for c in set(classes):
        inds = np.where(classes == c)
        b = boxes[inds]
        cl = classes[inds]
        s = scores[inds]
        keep = nms_boxes(b, s)

        if len(keep) != 0:
            nboxes.append(b[keep])
            nclasses.append(cl[keep])
            nscores.append(s[keep])

    if not nclasses and not nscores:
        return None, None, None

    boxes = np.concatenate(nboxes)
    classes = np.concatenate(nclasses)
    scores = np.concatenate(nscores)

    return boxes, classes, scores


def draw_detections(img, left, top, right, bottom, score, class_id):
    """
    Draws a bounding box and label on the input image for one detected object.
    Args:
        img: The input image to draw detections on.
        left, top, right, bottom: Bounding box corners in image coordinates.
        score: Corresponding detection score.
        class_id: Class ID for the detected object.
    Returns:
        None
    """

    # Retrieve the color for the class ID
    color = color_palette[class_id]

    # Draw the bounding box on the image
    cv2.rectangle(img, (int(left), int(top)), (int(right), int(bottom)), color, 2)

    # Create the label text with class name and score
    label = f"{CLASSES[class_id]}: {score:.2f}"

    # Calculate the dimensions of the label text
    (label_width, label_height), _ = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1)

    # Calculate the position of the label text
    label_x = left
    label_y = top - 10 if top - 10 > label_height else top + 10

    # Draw a filled rectangle as the background for the label text
    cv2.rectangle(img, (label_x, label_y - label_height), (label_x + label_width, label_y + label_height), color,
                  cv2.FILLED)

    # Draw the label text on the image
    cv2.putText(img, label, (label_x, label_y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)


def draw(image, boxes, scores, classes):
    img_h, img_w = image.shape[:2]
    # Simple rescale from model-input size back to the source image. This is
    # exact only when the source aspect ratio matches MODEL_SIZE; otherwise the
    # (dw, dh) padding must be removed first (see the note after letter_box).
    x_factor = img_w / MODEL_SIZE[0]
    y_factor = img_h / MODEL_SIZE[1]

    for box, score, cl in zip(boxes, scores, classes):
        x1, y1, x2, y2 = [int(_b) for _b in box]

        left = int(x1 * x_factor)
        top = int(y1 * y_factor)
        right = int(x2 * x_factor)
        bottom = int(y2 * y_factor)

        print('class: {}, score: {}'.format(CLASSES[cl], score))
        print('box coordinates (left, top, right, bottom): [{}, {}, {}, {}]'.format(left, top, right, bottom))

        draw_detections(image, left, top, right, bottom, score, cl)

        # cv2.rectangle(image, (left, top), (right, bottom), color, 2)
        # cv2.putText(image, '{0} {1:.2f}'.format(CLASSES[cl], score),
        #             (left, top - 6),
        #             cv2.FONT_HERSHEY_SIMPLEX,
        #             0.6, (0, 0, 255), 2)


if __name__ == '__main__':

    # Create the RKNNLite object
    rknn_lite = RKNNLite()

    # Load the RKNN model
    print('--> Load RKNN model')
    ret = rknn_lite.load_rknn(RKNN_MODEL)
    if ret != 0:
        print('Load RKNN model failed')
        exit(ret)
    print('done')

    # Initialize the runtime environment
    print('--> Init runtime environment')
    # On RK356x/RK3588 running Debian, there is no need to specify a target.
    ret = rknn_lite.init_runtime()
    if ret != 0:
        print('Init runtime environment failed!')
        exit(ret)
    print('done')

    # Process the images
    img_list = os.listdir(IMG_FOLDER)
    for img_name in img_list:
        img_path = os.path.join(IMG_FOLDER, img_name)
        if not os.path.exists(img_path):
            print('{} is not found'.format(img_name))
            continue
        img_src = cv2.imread(img_path)
        if img_src is None:
            print('Failed to read {}'.format(img_path))
            continue

        # Because RGA initializes buffers with (0, 0, 0), use pad_color (0, 0, 0) instead of (114, 114, 114)
        pad_color = (0, 0, 0)
        img = letter_box(im=img_src.copy(), new_shape=(MODEL_SIZE[1], MODEL_SIZE[0]), pad_color=pad_color)
        # img = cv2.resize(img_src, (640, 512), interpolation=cv2.INTER_LINEAR) # direct resize
        input_tensor = np.expand_dims(img, axis=0)

        outputs = rknn_lite.inference([input_tensor])

        boxes, classes, scores = post_process(outputs)

        img_p = img_src.copy()

        if boxes is not None:
            draw(img_p, boxes, scores, classes)

        # Save the result
        os.makedirs(RESULT_PATH, exist_ok=True)

        result_path = os.path.join(RESULT_PATH, img_name)
        cv2.imwrite(result_path, img_p)
        print('Detection result save to {}'.format(result_path))


    # cv2.imshow("full post process result", img_p)

    rknn_lite.release()

#pragma once
#include <opencv2/core.hpp>
#include <vector>
#include "common.h"

class TrackingSession {
public:
    virtual ~TrackingSession() = default;
    virtual std::vector<TrackingBox> Update(const std::vector<DetectionBox> &dets) = 0;
};

#ifdef __cplusplus
extern "C" {
#endif
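// Factory/teardown pair: CreateSession allocates a concrete Sort tracker behind
// the abstract interface; ReleaseSession deletes it and nulls the caller's pointer.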

TrackingSession *CreateSession(int max_age, int min_hits, float iou_threshold);
void ReleaseSession(TrackingSession **session_ptr);

#ifdef __cplusplus
}
#endif

#include "sort.h"
#include "Hungarian.h"
#include "KalmanTracker.h"

#include <atomic>
#include <string>
#include <vector>
#include <cfloat>   // for DBL_EPSILON

class Sort : public TrackingSession
{
public:
    Sort(int max_age, int min_hits, float iou_threshold) : m_max_age(max_age), m_min_hits(min_hits), m_iou_threshold(iou_threshold)
    {
        m_frame_count = 0;
        m_trackers = {};
        ms_num_session++;
    }
    std::vector<TrackingBox> Update(const std::vector<DetectionBox> &dets) override;
    ~Sort() { ms_num_session--; }

private:
    float m_iou_threshold;
    int m_max_age, m_min_hits, m_frame_count;
    std::vector<KalmanTracker> m_trackers;

    static std::atomic<int> ms_num_session;
};

std::atomic<int> Sort::ms_num_session(0);

TrackingSession *CreateSession(int max_age, int min_hits, float iou_threshold)
{
    return new Sort(max_age, min_hits, iou_threshold);
}

void ReleaseSession(TrackingSession **session_ptr)
{
    if (session_ptr && *session_ptr)
    {
        delete *session_ptr;
        *session_ptr = nullptr;  // null the caller's pointer, not the local copy
    }
}

static double compute_iou(cv::Rect_<float> bb_test, cv::Rect_<float> bb_gt)
{
    float intersection_area = (bb_test & bb_gt).area();
    float union_area = bb_test.area() + bb_gt.area() - intersection_area;

    if (union_area < DBL_EPSILON)
        return 0;
    return (double)(intersection_area / union_area);
}

static void AssociateDetectionsToTrackers(const std::vector<DetectionBox> &dets, const std::vector<TrackingBox> &trks, float iou_threshold,
                                          std::vector<std::vector<int>> &matches, std::vector<int> &unmatched_detections, std::vector<int> &unmatched_trackers)
{
    int det_num = dets.size();
    int trk_num = trks.size();

    std::vector<std::vector<double>> iou_matrix;
    iou_matrix.resize(det_num, std::vector<double>(trk_num, 0));

    if (trk_num == 0)
    {
        for (int i = 0; i < det_num; i++)
            unmatched_detections.push_back(i);
        return;
    }

    for (int i = 0; i < det_num; i++)
        for (int j = 0; j < trk_num; j++)
            // use 1-iou because the hungarian algorithm computes a minimum-cost assignment.
            iou_matrix[i][j] = 1 - compute_iou(dets[i].box, trks[j].box);

    // solve the assignment problem using hungarian algorithm.
    HungarianAlgorithm hungalgo;
    std::vector<int> assignment;

    // the resulting assignment maps detection index -> tracker index (length = det_num)
    hungalgo.Solve(iou_matrix, assignment);

    for (int i = 0; i < det_num; i++)
    {
        int j = assignment[i];
        // unassigned label will be set as -1 in the assignment algorithm
        if ((j != -1) && (1 - iou_matrix[i][j] >= iou_threshold))
        {
            std::vector<int> match = {i, j};
            matches.push_back(match);
        }
        else
            unmatched_detections.push_back(i);
    }

    // a tracker is unmatched only if it appears in no match at all
    for (int i = 0; i < trk_num; i++)
    {
        bool matched = false;
        for (size_t j = 0; j < matches.size(); j++)
            if (i == matches[j][1]) { matched = true; break; }
        if (!matched)
            unmatched_trackers.push_back(i);
    }
}

std::vector<TrackingBox> Sort::Update(const std::vector<DetectionBox> &dets)
{
    m_frame_count += 1;
    std::vector<TrackingBox> trks;

    for (auto it = m_trackers.begin(); it != m_trackers.end();)
    {
        TrackingBox trk;
        trk.box = it->Predict();

        if (trk.box.x >= 0 && trk.box.y >= 0)
        {
            trks.push_back(trk);
            it++;
        }
        else
        {
            it = m_trackers.erase(it);
        }
    }

    if (dets.empty())
    {
        // No detections this frame: return the predicted boxes
        printf("no detections!\n");
        return trks;
    }

    std::vector<std::vector<int>> matches;
    std::vector<int> unmatched_detections, unmatched_trackers;

    AssociateDetectionsToTrackers(dets, trks, m_iou_threshold, matches, unmatched_detections, unmatched_trackers);

    // update matched trackers with assigned detections.
    for (auto &m : matches)
    {
        m_trackers[m[1]].Update(dets[m[0]].box);
        m_trackers[m[1]].m_det_name = std::string(dets[m[0]].det_name);
    }

    // create and initialise new trackers for unmatched detections
    for (auto &d : unmatched_detections)
    {
        KalmanTracker tracker = KalmanTracker(dets[d].box);
        tracker.m_det_name = std::string(dets[d].det_name);
        m_trackers.push_back(tracker);
    }

    trks.clear();
    // get trackers' output and prune dead tracklets
    for (auto it = m_trackers.begin(); it != m_trackers.end();)
    {
        // if ((it->m_time_since_update < 1) && (it->m_hit_streak >= m_min_hits || m_frame_count <= m_min_hits))
        /*
        Only trackers that were updated recently and are stable (hit in several
        consecutive frames), or that were created within the first few frames of
        the video, are considered valid and have their boxes emitted.
        */
        if ((it->m_time_since_update < m_max_age) && (it->m_hit_streak >= m_min_hits || m_frame_count <= m_min_hits))
        {
            TrackingBox trk;
            trk.box = it->GetState();
            trk.id = it->m_id + 1;
            trk.det_name = it->m_det_name;
            trks.push_back(trk);
        }

        // remove dead tracklets; erase() returns the next iterator, which also
        // avoids the undefined `it--` when the first element is erased
        if (it->m_time_since_update > m_max_age)
            it = m_trackers.erase(it);
        else
            it++;
    }
    return trks;
}
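
A minimal usage sketch, assuming DetectionBox and TrackingBox from common.h carry a cv::Rect_<float> box, a det_name string, and (for TrackingBox) an int id, as the code above implies:

#include "sort.h"
#include <cstdio>

int main()
{
    // max_age=3: drop a track after 3 missed frames; min_hits=3: require 3
    // consecutive hits before reporting; iou_threshold=0.3 for association
    TrackingSession *session = CreateSession(3, 3, 0.3f);

    std::vector<DetectionBox> dets;  // fill from the YOLOv8 post-processing above

    std::vector<TrackingBox> tracks = session->Update(dets);
    for (const auto &t : tracks)
        printf("id=%d box=(%.0f, %.0f, %.0f, %.0f)\n",
               t.id, t.box.x, t.box.y, t.box.width, t.box.height);

    ReleaseSession(&session);  // deletes the tracker and nulls session
    return 0;
}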

make[2]: *** [deepsort/CMakeFiles/deepsort.dir/build.make:160: deepsort/CMakeFiles/deepsort.dir/src/track.cpp.o] Error 1
make[1]: *** [CMakeFiles/Makefile2:126: deepsort/CMakeFiles/deepsort.dir/all] Error 2
 
