1. Randomly initialized embedding
import torch
import torch.nn as nn

# nn.Embedding(5, 4): a lookup table with 5 rows (one per id) and
# embedding dimension 4; the weights are randomly initialized by default.
user_embedding = nn.Embedding(5, 4)
weight = user_embedding.weight.data.numpy()
print(weight)

# Look up the embedding rows for ids 1 and 2.
indices = torch.tensor([1, 2])
print(user_embedding(indices))

# Output:
[[-1.2546824   0.15995328 -1.3933309   0.78180116]
 [-1.160267    0.15576266 -1.2538452   0.3032509 ]
 [-0.2289746  -0.86487883 -2.103254   -0.12548219]
 [ 0.26746908 -0.60910237 -0.2590135   0.97958463]
 [ 0.44616193  0.7447642   0.98776644  1.2315478 ]]
tensor([[-1.1603,  0.1558, -1.2538,  0.3033],
        [-0.2290, -0.8649, -2.1033, -0.1255]], grad_fn=<EmbeddingBackward>)
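
The table above is trainable by default: nn.Embedding registers its weight as a Parameter drawn from a standard normal distribution, and a lookup is differentiable, so gradients flow back only to the rows that were selected. A minimal sketch to confirm this (the seed and variable names are illustrative, so the printed values differ from the run above):

import torch
import torch.nn as nn

torch.manual_seed(0)              # illustrative seed, not from the run above
emb = nn.Embedding(5, 4)

print(emb.weight.requires_grad)   # True: the table is a trainable Parameter
print(emb.weight.shape)           # torch.Size([5, 4])

# Backprop through a lookup touches only the selected rows.
emb(torch.tensor([1, 2])).sum().backward()
print(emb.weight.grad)            # nonzero (all ones) only in rows 1 and 2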
2. Custom embedding weights
Method 1:
import torch
import torch.nn as nn

# Hand-crafted 5 x 3 weight matrix.
t = torch.tensor([[0.8245, 0.1198, 0.6111],
                  [0.8917, 0.0699, 0.3550],
                  [0.0691, 0.2732, 0.7184],
                  [0.0359, 0.9635, 0.2277],
                  [0.8692, 0.9012, 0.7116]])

# Pass the matrix in through the internal _weight argument.
user_embedding = nn.Embedding(5, 3, _weight=nn.Parameter(t))
weight = user_embedding.weight.data.numpy()
print(weight)

indices = torch.tensor([1, 2])
print(user_embedding(indices))

# Output:
[[0.8245 0.1198 0.6111]
 [0.8917 0.0699 0.355 ]
 [0.0691 0.2732 0.7184]
 [0.0359 0.9635 0.2277]
 [0.8692 0.9012 0.7116]]
tensor([[0.8917, 0.0699, 0.3550],
        [0.0691, 0.2732, 0.7184]], grad_fn=<EmbeddingBackward>)
Note: if you only want the embedding lookup and do not need the parameters to be updated during training, construct the layer as:

nn.Embedding(5, 3, _weight=nn.Parameter(t, requires_grad=False))
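
A more idiomatic way to build a frozen embedding layer from an existing matrix is the nn.Embedding.from_pretrained classmethod, whose freeze argument (True by default) handles the requires_grad bookkeeping for you. A minimal sketch reusing the matrix t from Method 1:

import torch
import torch.nn as nn

t = torch.tensor([[0.8245, 0.1198, 0.6111],
                  [0.8917, 0.0699, 0.3550],
                  [0.0691, 0.2732, 0.7184],
                  [0.0359, 0.9635, 0.2277],
                  [0.8692, 0.9012, 0.7116]])

# freeze=True (the default) gives weight.requires_grad == False.
frozen = nn.Embedding.from_pretrained(t, freeze=True)
print(frozen.weight.requires_grad)    # False
print(frozen(torch.tensor([1, 2])))   # same rows as before, but no grad_fn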
Method 2:
import torch
import torch.nn.functional as F

# Two sequences of ids, each indexing a 10-row table.
indices = torch.tensor([[1, 2, 4, 5],
                        [4, 3, 2, 9]])
embedding_matrix = torch.rand(10, 3)
print(embedding_matrix)

# Functional, stateless lookup.
print(F.embedding(indices, embedding_matrix))

# Output:
tensor([[0.8245, 0.1198, 0.6111],
        [0.8917, 0.0699, 0.3550],
        [0.0691, 0.2732, 0.7184],
        [0.0359, 0.9635, 0.2277],
        [0.8692, 0.9012, 0.7116],
        [0.4018, 0.3034, 0.3883],
        [0.2971, 0.2333, 0.3194],
        [0.1596, 0.5504, 0.8969],
        [0.4099, 0.5024, 0.0058],
        [0.1426, 0.4245, 0.7905]])
tensor([[[0.8917, 0.0699, 0.3550],
         [0.0691, 0.2732, 0.7184],
         [0.8692, 0.9012, 0.7116],
         [0.4018, 0.3034, 0.3883]],

        [[0.8692, 0.9012, 0.7116],
         [0.0359, 0.9635, 0.2277],
         [0.0691, 0.2732, 0.7184],
         [0.1426, 0.4245, 0.7905]]])
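
Note the shape: F.embedding replaces every id in the (2, 4) input with the corresponding 3-dimensional row of the matrix, producing a (2, 4, 3) tensor, which is exactly plain row indexing. A small sketch to verify the equivalence (variable names are illustrative):

import torch
import torch.nn.functional as F

indices = torch.tensor([[1, 2, 4, 5],
                        [4, 3, 2, 9]])
embedding_matrix = torch.rand(10, 3)

out = F.embedding(indices, embedding_matrix)
print(out.shape)                                    # torch.Size([2, 4, 3])
assert torch.equal(out, embedding_matrix[indices])  # same as fancy indexing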