Using the Model
LSTM architectures come in many variants, but they are all much the same: each is built around an input gate, an output gate, and a forget gate.

The model used here is structured as shown in the figure above.
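For reference, the standard LSTM cell is usually written as follows. These equations are the textbook formulation rather than something taken from the code below, and the symbols need not map one-to-one onto the state variables in the listing (_hState, _rState, _zState):

\begin{aligned}
f_t &= \sigma(W_f x_t + U_f h_{t-1} + b_f) && \text{(forget gate)} \\
i_t &= \sigma(W_i x_t + U_i h_{t-1} + b_i) && \text{(input gate)} \\
o_t &= \sigma(W_o x_t + U_o h_{t-1} + b_o) && \text{(output gate)} \\
\tilde{c}_t &= \tanh(W_c x_t + U_c h_{t-1} + b_c) \\
c_t &= f_t \odot c_{t-1} + i_t \odot \tilde{c}_t \\
h_t &= o_t \odot \tanh(c_t)
\end{aligned}

Here \sigma is the logistic sigmoid, \tanh the hyperbolic tangent, and \odot element-wise multiplication; the sigmoid: and tanh: class methods in the listing implement exactly these two activations.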
Part of the code:
//
// MLLstm.m
// LSTM
//
//
#import "MLLstm.h"
#import <Accelerate/Accelerate.h>   // vDSP_vfillD
#import <math.h>                    // exp, log, sqrt, fabs
@implementation MLLstm
#pragma mark - Inner Method
// Draws one sample from a normal distribution with the given mean and
// standard deviation (Marsaglia polar method, caching the spare deviate),
// rejecting anything more than two standard deviations from the mean.
+ (double)truncated_normal:(double)mean dev:(double)stddev
{
    double outP = 0.0;
    do {
        static int hasSpare = 0;
        static double spare;
        if (hasSpare) {
            // Reuse the second deviate produced on the previous call.
            hasSpare = 0;
            outP = mean + stddev * spare;
            continue;
        }
        hasSpare = 1;
        double u, v, s;
        do {
            u = (rand() / ((double)RAND_MAX)) * 2.0 - 1.0;
            v = (rand() / ((double)RAND_MAX)) * 2.0 - 1.0;
            s = u * u + v * v;
        } while ((s >= 1.0) || (s == 0.0));
        s = sqrt(-2.0 * log(s) / s);
        spare = v * s;
        outP = mean + stddev * u * s;
    } while (fabs(outP - mean) > 2 * stddev);
    return outP;
}
// Returns a newly allocated vector of `size` doubles, each set to `num`.
+ (double *)fillVector:(double)num size:(int)size
{
    double *outP = malloc(sizeof(double) * size);
    vDSP_vfillD(&num, outP, 1, (vDSP_Length)size);
    return outP;
}
// Weights are drawn from a truncated normal with mean 0 and stddev 0.1.
+ (double *)weight_init:(int)size
{
    double *outP = malloc(sizeof(double) * size);
    for (int i = 0; i < size; i++) {
        outP[i] = [MLLstm truncated_normal:0 dev:0.1];
    }
    return outP;
}
// Biases start from a small positive constant.
+ (double *)bias_init:(int)size
{
    return [MLLstm fillVector:0.1 size:size];
}
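// Note: a small truncated normal for weights plus a small positive constant
// bias is a common initialization convention; the specific values here
// (stddev 0.1, bias 0.1) are simply the choices made in this implementation.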
// Applies tanh element-wise, in place. Inputs with large magnitude are
// clamped directly to ±1 to avoid overflow in exp().
+ (double *)tanh:(double *)input size:(int)size
{
    for (int i = 0; i < size; i++) {
        double num = input[i];
        if (num > 20) {
            input[i] = 1;
        } else if (num < -20) {
            input[i] = -1;
        } else {
            input[i] = (exp(num) - exp(-num)) / (exp(num) + exp(-num));
        }
    }
    return input;
}
// Applies the logistic sigmoid element-wise, in place, with the same
// clamping of extreme inputs as tanh: above.
+ (double *)sigmoid:(double *)input size:(int)size
{
    for (int i = 0; i < size; i++) {
        double num = input[i];
        if (num > 20) {
            input[i] = 1;
        } else if (num < -20) {
            input[i] = 0;
        } else {
            input[i] = exp(num) / (exp(num) + 1);
        }
    }
    return input;
}
#pragma mark - Init
- (id)initWithNodeNum:(int)num layerSize:(int)size dataDim:(int)dim
{
    self = [super init];
    if (self) {
        _nodeNum = num;
        _layerSize = size;
        _dataDim = dim;
        [self setupNet];
    }
    return self;
}
- (id)init
{
    self = [super init];
    if (self) {
        [self setupNet];
    }
    return self;
}
- (void)setupNet
{
    // Per-node, per-layer state buffers, zero-initialized.
    _hState = calloc(_layerSize * _nodeNum, sizeof(double));
    _rState = calloc(_layerSize * _nodeNum, sizeof(double));
    _zState = calloc(_layerSize * _nodeNum, sizeof(double));
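A rough usage sketch follows. It assumes the class and instance methods shown above are also declared in MLLstm.h, and the sizes are arbitrary examples rather than values taken from the article:

#import <Foundation/Foundation.h>
#import "MLLstm.h"

int main(int argc, const char *argv[]) {
    @autoreleasepool {
        // Example sizes only: 28 nodes (time steps), 128 hidden units,
        // 28-dimensional input per step.
        MLLstm *lstm = [[MLLstm alloc] initWithNodeNum:28 layerSize:128 dataDim:28];
        NSLog(@"created %@", lstm);

        // The helper class methods can also be exercised on their own:
        int size = 128 * 28;
        double *w = [MLLstm weight_init:size];   // truncated-normal weights
        [MLLstm sigmoid:w size:size];            // squash them in place
        NSLog(@"w[0] after sigmoid = %f", w[0]);
        free(w);
    }
    return 0;
}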
