import torch
import torch.nn.functional as F
import numpy as np

# A 1-D tensor of three raw scores (logits)
data = torch.tensor([1.0, 2.0, 3.0])
# log(softmax(x)), computed by PyTorch in a numerically stable way
log_softmax = F.log_softmax(data, dim=0)
print("log_softmax", log_softmax)

# softmax normalizes the scores into a probability distribution
softmax = F.softmax(data, dim=0)
print("softmax", softmax)

# Taking the log of the softmax values in NumPy gives the same result
np_softmax = softmax.numpy()
log_np_softmax = np.log(np_softmax)
print("log_np_softmax", log_np_softmax)
'''
Hand computation:

x             1              2              3              sum
exp(x)        2.718281828    7.389056099    20.08553692    30.19287485
softmax       0.090030573    0.244728471    0.665240956
log_softmax  -2.407605964   -1.407605964   -0.407605964

0.090030573  = 2.718281828 / 30.19287485    (exp(x) divided by the sum of exp)
-2.407605964 = ln(0.090030573)              (natural log of the softmax value)

Program output:

log_softmax tensor([-2.4076, -1.4076, -0.4076])
softmax tensor([0.0900, 0.2447, 0.6652])
log_np_softmax [-2.407606  -1.4076059 -0.40760598]
'''
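As a side note, log_softmax is usually described via the identity log_softmax(x) = x - logsumexp(x), which is numerically more stable than computing log(softmax(x)) directly. A minimal sketch of that equivalence, using the same three scores as above (variable names here are illustrative, not from the original post):

import torch
import torch.nn.functional as F

data = torch.tensor([1.0, 2.0, 3.0])

# log_softmax(x) = x - logsumexp(x); this should match F.log_softmax
manual_log_softmax = data - torch.logsumexp(data, dim=0)
print(torch.allclose(manual_log_softmax, F.log_softmax(data, dim=0)))  # True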
Source: https://www.cnblogs.com/hapyygril/p/11593143.html