Notes on common function conversions when porting TensorFlow code to PyTorch:
Not directly interchangeable
tf.transpose(input, [1, 0, 2])
->input.permute([1, 0, 2])
Cannot simply be replaced with torch.transpose, because torch.transpose only swaps two dimensions and cannot express an arbitrary multi-dimensional permutation.
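A minimal sketch of the difference (x is just a placeholder tensor):
import torch

x = torch.randn(2, 3, 4)
y = x.permute(1, 0, 2)    # reorders all dims at once -> shape (3, 2, 4), like tf.transpose(x, [1, 0, 2])
z = x.transpose(0, 1)     # torch.transpose only swaps two dims -> shape (3, 2, 4)
w = x.permute(2, 0, 1)    # shape (4, 2, 3); with torch.transpose this would need chained calls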
tf.expand_dims(input, axis=1)
->input.unsqueeze(1)
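For example (x is a placeholder):
import torch

x = torch.randn(5, 8)
y = x.unsqueeze(1)   # shape (5, 1, 8), same effect as tf.expand_dims(x, axis=1)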
tf.concat([content1, content2], axis=1)
->torch.cat((content1, content2), dim=1)
Remember to change axis to dim.
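A quick sketch (a and b are placeholders):
import torch

a = torch.randn(2, 3)
b = torch.randn(2, 5)
c = torch.cat((a, b), dim=1)   # shape (2, 8); dim plays the role of TensorFlow's axis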
tf.tile(input, [2, 1])
->input.repeat([2, 1])
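For example, the tiling below should match tf.tile(x, [2, 1]):
import torch

x = torch.tensor([[1, 2], [3, 4]])
y = x.repeat(2, 1)   # shape (4, 2): tiled twice along dim 0, once along dim 1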
tf.range(10)
->torch.arange(10)
tf.reduce_sum(x, axis=1, keep_dims=True)
->torch.sum(x, dim=1, keepdim=True)
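A small sketch of the keepdim behaviour (x is a placeholder):
import torch

x = torch.ones(2, 3)
s = torch.sum(x, dim=1, keepdim=True)   # shape (2, 1); with keepdim=False the result would be shape (2,)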
tf.clip_by_value(x, min, max)
->torch.clamp(x, min, max)
tf.multinomial(logits=a, num_samples=1)
->torch.multinomial(input=a, num_samples=1, replacement=False)
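One caveat: tf.multinomial interprets its argument as unnormalized log-probabilities, while torch.multinomial expects non-negative weights, so a softmax (or exp) over the logits is usually needed first. A sketch, with a as a placeholder logits tensor:
import torch
import torch.nn.functional as F

a = torch.randn(4, 10)                   # batch of 4 distributions over 10 classes
probs = F.softmax(a, dim=-1)             # turn logits into non-negative weights
samples = torch.multinomial(probs, num_samples=1, replacement=False)   # shape (4, 1)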
tf.equal(x, y)
->torch.eq(x, y)
tf.nn.embedding_lookup(W_fe, Feature_input + 1)
->torch.index_select(W_fe, 0, Feature_input + 1)
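A sketch with placeholder names; note torch.index_select expects a 1-D index tensor, so for higher-rank id tensors plain indexing (W_fe[ids]) or torch.nn.functional.embedding may be the closer match:
import torch

W_fe = torch.randn(100, 16)               # placeholder embedding table
Feature_input = torch.tensor([0, 4, 7])   # placeholder feature ids
rows = torch.index_select(W_fe, 0, Feature_input + 1)   # shape (3, 16), like tf.nn.embedding_lookup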
tf.one_hot()
->torch.nn.functional.one_hot()
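For example:
import torch
import torch.nn.functional as F

labels = torch.tensor([0, 2, 1])
onehot = F.one_hot(labels, num_classes=3)   # shape (3, 3), int64, like tf.one_hot(labels, 3)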
Converting tf.gather_nd(x, y):
import torch

def gather_nd(params, indices):
    '''
    4D example:
      params:  tensor shaped [n_1, n_2, n_3, n_4]    --> 4-dimensional tensor
      indices: tensor shaped [m_1, m_2, m_3, m_4, 4] --> multidimensional list of 4D indices
      returns: tensor shaped [m_1, m_2, m_3, m_4]
    ND example:
      params:  tensor shaped [n_1, ..., n_d]         --> d-dimensional tensor
      indices: tensor shaped [m_1, ..., m_i, d]      --> multidimensional list of d-dimensional indices
      returns: tensor shaped [m_1, ..., m_i]
    '''
    out_shape = indices.shape[:-1]
    indices = indices.unsqueeze(0).transpose(0, -1)  # roll the last axis to the front
    ndim = indices.shape[0]
    indices = indices.long()
    idx = torch.zeros_like(indices[0], device=indices.device).long()
    m = 1
    # accumulate a flat (row-major) index into params
    for i in range(ndim - 1, -1, -1):
        idx += indices[i] * m
        m *= params.size(i)
    out = torch.take(params, idx)
    return out.view(out_shape)
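A small usage sketch of the helper above (values chosen arbitrarily):
params = torch.arange(24).reshape(2, 3, 4)
indices = torch.tensor([[0, 1, 2], [1, 2, 3]])   # two full 3-D indices
out = gather_nd(params, indices)                 # tensor([ 6, 23]), same as tf.gather_nd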
Directly interchangeable
tf.reshape()
->torch.reshape()
tf.log()
->torch.log()
tf.squeeze()
->torch.squeeze()
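A minimal sketch of these one-to-one replacements (x is a placeholder):
import torch

x = torch.rand(1, 3, 1)
y = torch.reshape(x, (3,))     # same as tf.reshape(x, [3])
z = torch.log(torch.rand(3))   # same as tf.log
s = torch.squeeze(x)           # drops size-1 dims -> shape (3,), same as tf.squeeze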