Gene similarity computation

1.similar_gen.cpp

#include <cstring>
#include <iostream>
#include <string>
#include <algorithm>
#include "boost/noncopyable.hpp"
using namespace std;
class similar_gen : public boost::noncopyable {
public:
    inline static similar_gen &get() {
        static similar_gen obj;
        return obj;
    }
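    // Scores the optimal global alignment of the two gene strings against the
    // scoring table below. Returns -1 on invalid input; note that -1 is also a
    // reachable alignment score, so validate separately if that distinction matters.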
    int compute_similarity(const string &gen_str0, const string &gen_str1) {
        if (!check_gen_str(gen_str0) || !check_gen_str(gen_str1)) {
            cerr << "invalid gene string!" << endl;
            return -1;
        }
        make_status_equation(gen_str0, gen_str1);
        int size0 = gen_str0.size();
        int size1 = gen_str1.size();
        for (int i = 1; i <= size0; i++) {
            for (int j = 1; j <= size1; j++) {
                // Take the best of: aligning the two bases, or aligning either base against a gap.
                int match = status_equation_[i - 1][j - 1] + get_similarity(gen_str0[i - 1], gen_str1[j - 1]);
                int gap1  = status_equation_[i][j - 1] + get_similarity(gen_str1[j - 1], '-');
                int gap0  = status_equation_[i - 1][j] + get_similarity(gen_str0[i - 1], '-');
                status_equation_[i][j] = max({match, gap1, gap0});
            }
        }
        return status_equation_[size0][size1];
    }
private:
    bool check_gen_str(const string &gen_str) {
        if (gen_str.size() > max_gen_len) {
            return false;
        }
        for (auto &ch : gen_str) {
            if (ch != 'A' && ch != 'C' && ch != 'G' && ch != 'T') {
                return false;
            }
        }
        return true;
    }
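    // Fills the pairwise scoring table for the bases A/C/G/T and the gap symbol '-'.
    // Only these 25 entries are initialized; check_gen_str() guarantees that no
    // other index of similarity_array_ is ever read.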
    void make_similarity() {
        similarity_array_['A']['A'] = 5;
        similarity_array_['A']['C'] = -1;
        similarity_array_['A']['G'] = -2;
        similarity_array_['A']['T'] = -1;
        similarity_array_['A']['-'] = -3;

        similarity_array_['C']['A'] = -1;
        similarity_array_['C']['C'] = 5;
        similarity_array_['C']['G'] = -3;
        similarity_array_['C']['T'] = -2;
        similarity_array_['C']['-'] = -4;

        similarity_array_['G']['A'] = -2;
        similarity_array_['G']['C'] = -3;
        similarity_array_['G']['G'] = 5;
        similarity_array_['G']['T'] = -2;
        similarity_array_['G']['-'] = -2;

        similarity_array_['T']['A'] = -1;
        similarity_array_['T']['C'] = -2;
        similarity_array_['T']['G'] = -2;
        similarity_array_['T']['T'] = 5;
        similarity_array_['T']['-'] = -1;

        similarity_array_['-']['A'] = -3;
        similarity_array_['-']['C'] = -4;
        similarity_array_['-']['G'] = -2;
        similarity_array_['-']['T'] = -1;
        similarity_array_['-']['-'] = -10;
    }
    inline int get_similarity(char ch0, char ch1) {
        return similarity_array_[ch0][ch1];
    }
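    // Resets the DP table and fills row 0 / column 0: aligning a non-empty prefix
    // against the empty string costs the sum of its per-character gap penalties.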
    void make_status_equation(const string &gen_str0, const string &gen_str1) {
        memset(status_equation_, 0, sizeof(status_equation_));
        int size = gen_str0.size();
        for (int i = 1;i <= size;i++) {
            status_equation_[i][0] = status_equation_[i - 1][0] + get_similarity(gen_str0[i - 1], '-');
        }
        size = gen_str1.size();
        for (int i = 1;i <= size;i++) {
            status_equation_[0][i] = status_equation_[0][i - 1] + get_similarity(gen_str1[i - 1], '-');
        }
    }
private:
    similar_gen() {
        make_similarity();
    }
    virtual ~similar_gen() = default;
private:
    static const int max_gen_len = 100;
private:
    int similarity_array_[256][256];
    int status_equation_[max_gen_len + 1][max_gen_len + 1];

};
int main() {
    string str0 = "AGTGATG";
    string str1 = "GTTAG";
    cout << similar_gen::get().compute_similarity(str0, str1) << endl;

    return 0;
}
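
For reference, compute_similarity fills a standard global-alignment (Needleman-Wunsch style) dynamic-programming table. Writing $w(x, y)$ for the scoring table built in make_similarity, and $a_{1..m}$, $b_{1..n}$ for the two input sequences, the double loop implements

$$S_{i,j} = \max\bigl(S_{i-1,j-1} + w(a_i, b_j),\ \ S_{i,j-1} + w(b_j, \text{'-'}),\ \ S_{i-1,j} + w(a_i, \text{'-'})\bigr)$$

with boundary values $S_{0,0} = 0$, $S_{i,0} = S_{i-1,0} + w(a_i, \text{'-'})$ and $S_{0,j} = S_{0,j-1} + w(b_j, \text{'-'})$; the reported similarity is $S_{m,n}$. The loops run in $O(m \cdot n)$ time, and the statically sized DP table is what imposes the max_gen_len limit of 100 characters per sequence.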

2.make.sh

g++ -std=c++14 -g -o Test similar_gen.cpp -I ../boost_1_69_0/ -L ../boost_1_69_0/libs/
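
Boost is only used here for boost::noncopyable, which is a header-only utility, so the -L flag is not strictly required; the -I include path is enough. If you would rather drop the Boost dependency altogether, the same non-copyable singleton pattern can be expressed with deleted copy operations, which is essentially what boost::noncopyable provides on C++11 and later. A minimal self-contained sketch (singleton_demo is a stand-in name; in similar_gen.cpp the two deleted members would simply replace the boost::noncopyable base class):

#include <iostream>

// Stand-in class showing the non-Boost way to forbid copies.
class singleton_demo {
public:
    singleton_demo(const singleton_demo &) = delete;             // forbid copy construction,
    singleton_demo &operator=(const singleton_demo &) = delete;  // and copy assignment
    static singleton_demo &get() {
        static singleton_demo obj;  // constructed once on first use, thread-safe since C++11
        return obj;
    }
    void hello() const { std::cout << "singleton alive" << std::endl; }
private:
    singleton_demo() = default;
    ~singleton_demo() = default;
};

int main() {
    singleton_demo::get().hello();
    // singleton_demo copy = singleton_demo::get();  // does not compile: copy constructor is deleted
    return 0;
}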
