1. Common loss functions
1.1 L1Loss
Example:
import torch
from torch import nn

x = torch.Tensor([1, 2, 3])   # predictions
y = torch.Tensor([1, 0, 6])   # targets
L1loss = nn.L1Loss()          # mean absolute error (reduction='mean' by default)
loss1 = L1loss(x, y)
print(loss1)
Output:
tensor(1.6667)
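By default nn.L1Loss returns the mean absolute error: (|1-1| + |2-0| + |3-6|) / 3 = 5 / 3 ≈ 1.6667. A minimal sketch, assuming the same x and y as above, showing the reduction='sum' variant, which adds the absolute differences instead of averaging them:
L1loss_sum = nn.L1Loss(reduction='sum')
print(L1loss_sum(x, y))  # tensor(5.), i.e. |1-1| + |2-0| + |3-6|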
1.2 MSELoss
Example:
x = torch.Tensor([1, 2, 3])
y = torch.Tensor([1, 0, 6])
mseloss = nn.MSELoss()
loss2 = mseloss(x, y)
print(loss2)
Output:
tensor(4.3333)
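nn.MSELoss returns the mean squared error: ((1-1)² + (2-0)² + (3-6)²) / 3 = 13 / 3 ≈ 4.3333.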
1.3 Cross-entropy (CrossEntropyLoss)
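nn.CrossEntropyLoss expects raw, unnormalized class scores (logits) of shape (N, C) and class-index targets of shape (N); internally it applies LogSoftmax followed by NLLLoss. A minimal sketch in the same style as the examples above, assuming a single sample with three classes and arbitrary logit values:
x = torch.Tensor([[0.1, 0.2, 0.3]])  # logits for one sample over 3 classes
y = torch.tensor([1])                # the true class index
celoss = nn.CrossEntropyLoss()
loss3 = celoss(x, y)
print(loss3)  # -x[0][1] + log(exp(0.1) + exp(0.2) + exp(0.3)) ≈ tensor(1.1019)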
2. The role of the loss function: computing gradients
import torch
import torchvision
from torch import nn
from torch.utils.data import DataLoader
from torchvision import transforms

dataset_transform_compose = transforms.Compose([transforms.ToTensor()])
train_set = torchvision.datasets.CIFAR10(root="./dataset", train=True, transform=dataset_transform_compose, download=True)
test_set = torchvision.datasets.CIFAR10(root="./dataset", train=False, transform=dataset_transform_compose, download=True)
dataloader = DataLoader(test_set, batch_size=64, shuffle=True)

class model(nn.Module):
    def __init__(self):
        super(model, self).__init__()
        # CIFAR-10 classifier: three conv + max-pool stages, then two linear layers
        self.model1 = nn.Sequential(
            nn.Conv2d(3, 32, kernel_size=(5, 5), stride=(1, 1), padding=2),
            nn.MaxPool2d(2),
            nn.Conv2d(32, 32, (5, 5), padding=2),
            nn.MaxPool2d(2),
            nn.Conv2d(32, 64, (5, 5), padding=2),
            nn.MaxPool2d(2),
            nn.Flatten(),          # 64 x 4 x 4 = 1024 features per image
            nn.Linear(1024, 64),
            nn.Linear(64, 10)      # 10 class scores (logits)
        )

    def forward(self, input):
        input = self.model1(input)
        return input

# instantiate the network and the loss function
model1 = model()
bloss = nn.CrossEntropyLoss()
for data in dataloader:
    imgs, targets = data
    output = model1(imgs)
    loss1 = bloss(output, targets)
    loss1.backward()  # computes the gradients
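backward() only fills in the .grad attribute of each parameter; by itself it does not change any weights. A minimal sketch of how those gradients are typically consumed in a training step, assuming the model1, bloss and dataloader defined above (the learning rate 0.01 is an arbitrary choice):
optimizer = torch.optim.SGD(model1.parameters(), lr=0.01)
for data in dataloader:
    imgs, targets = data
    output = model1(imgs)
    loss1 = bloss(output, targets)
    optimizer.zero_grad()  # clear gradients left over from the previous step
    loss1.backward()       # compute gradients of the loss w.r.t. the parameters
    optimizer.step()       # update the parameters using those gradients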