import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
from torch.utils.tensorboard import SummaryWriter
import numpy as np
import os

# Work around the "duplicate OpenMP runtime" error seen on some Windows/conda setups
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"

# A simple fully connected network for 784-dimensional inputs (e.g. flattened 28x28 images)
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(784, 256)
        self.fc2 = nn.Linear(256, 64)
        self.fc3 = nn.Linear(64, 10)
        self.relu = nn.ReLU()

    def forward(self, x):
        x = x.view(-1, 784)          # flatten to (batch, 784)
        x = self.relu(self.fc1(x))
        x = self.relu(self.fc2(x))
        x = self.fc3(x)              # raw logits; CrossEntropyLoss applies log-softmax internally
        return x

# Random dummy data standing in for a real dataset
X = torch.randn(1000, 784)
y = torch.randint(0, 10, (1000,))
dataset = TensorDataset(X, y)
dataloader = DataLoader(dataset, batch_size=32, shuffle=True)

model = Net()
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=0.001)

# TensorBoard writer; event files are written to runs/experiment_1
writer = SummaryWriter('runs/experiment_1')

# Log the model graph once, traced with a dummy input of the expected shape
sample_input = torch.randn(1, 784)
writer.add_graph(model, sample_input)

for epoch in range(10):
    running_loss = 0.0
    for i, (inputs, labels) in enumerate(dataloader):
        optimizer.zero_grad()
        outputs = model(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()

        running_loss += loss.item()

        # Every 10 batches, log the average loss and the fc1 weight distribution
        if i % 10 == 9:
            step = epoch * len(dataloader) + i
            avg_loss = running_loss / 10
            writer.add_scalar('training loss', avg_loss, step)
            writer.add_histogram('fc1 weights', model.fc1.weight, step)
            running_loss = 0.0

print('Training complete')
writer.close()
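After the script finishes, the logged loss curve, fc1 weight histograms, and model graph can be inspected by pointing TensorBoard at the log directory used above, e.g. running tensorboard --logdir=runs from the project directory and opening http://localhost:6006 in a browser (the default port; pass --port to change it).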