# 06_1_loss_and_optimizer.py
# 1) Design model (input, output, forward pass with different layers)
# 2) Construct loss and optimizer
# 3) Training loop
# - Forward = compute prediction and loss
# - Backward = compute gradients
# - Update weights
import torch
import torch.nn as nn
# Linear regression
# f = w * x
# here : f = 2 * x
# 0) Training samples
X = torch.tensor([1, 2, 3, 4], dtype=torch.float32)
Y = torch.tensor([2, 4, 6, 8], dtype=torch.float32)
# 1) Design Model: Weights to optimize and forward function
w = torch.tensor(0.0, dtype=torch.float32, requires_grad=True)
def forward(x):
    return w * x
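
# Because w was created with requires_grad=True, autograd records the
# multiplication in forward(), so l.backward() in the training loop below
# can compute dl/dw automatically.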
print(f'Prediction before training: f(5) = {forward(5).item():.3f}')
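# (w starts at 0.0, so this prints f(5) = 0.000)
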
# 2) Define loss and optimizer
learning_rate = 0.01
n_iters = 100
# callable function
loss = nn.MSELoss()
optimizer = torch.optim.SGD([w], lr=learning_rate)
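
# For reference (a rough sketch, not how PyTorch implements these internally):
#   nn.MSELoss with the default reduction='mean' computes
#       l = ((y_predicted - Y) ** 2).mean()
#   and a plain SGD step (no momentum) is equivalent to
#       with torch.no_grad(): w -= learning_rate * w.grad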
# 3) Training loop
for epoch in range(n_iters):
    # predict = forward pass
    y_predicted = forward(X)

    # loss (nn.MSELoss takes (input, target), i.e. (prediction, ground truth))
    l = loss(y_predicted, Y)

    # calculate gradients = backward pass
    l.backward()

    # update weights
    optimizer.step()

    # zero the gradients after updating (they would otherwise accumulate)
    optimizer.zero_grad()

    if epoch % 10 == 0:
        print(f'epoch {epoch + 1}: w = {w.item():.3f}, loss = {l.item():.8f}')

print(f'Prediction after training: f(5) = {forward(5).item():.3f}')
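
# ---------------------------------------------------------------------------
# Optional sketch (not part of the original tutorial): the same fit with
# nn.Linear as the model instead of a hand-written forward(). nn.Linear
# expects 2D input of shape (n_samples, n_features), so the data is reshaped
# into column vectors; the names here (X2, model, optimizer2) are illustrative.
X2 = torch.tensor([[1.0], [2.0], [3.0], [4.0]])
Y2 = torch.tensor([[2.0], [4.0], [6.0], [8.0]])

model = nn.Linear(in_features=1, out_features=1)
optimizer2 = torch.optim.SGD(model.parameters(), lr=learning_rate)

for epoch in range(n_iters):
    y_pred = model(X2)        # forward pass through the linear layer
    l2 = loss(y_pred, Y2)     # reuse the MSELoss object defined above
    l2.backward()             # backward pass
    optimizer2.step()         # update weight and bias
    optimizer2.zero_grad()    # reset gradients for the next iteration

print(f'nn.Linear prediction after training: f(5) = {model(torch.tensor([[5.0]])).item():.3f}')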