# main.py — Mytorch iris-classification demo (66 lines, 51 loc).
# (Web-scrape page chrome and copied line-number gutter removed.)
import sys
sys.path.append(".")
sys.path.append("..")
import numpy as np
from Mytorch.utils.data import CsvDataset,DataLoader
from Mytorch.nn import Sequential,Linear,ReLU,Residual
from Mytorch.nn import Parameter,Module,CrossEntropyLoss
from Mytorch.optim import Adam, CosineDecayWithWarmRestarts
from Mytorch import Tensor
import Mytorch
class LinearReluBlock(Module):
    """A Linear layer followed by a ReLU activation.

    Serves as the inner transform of the Residual wrappers in the model
    built below.
    """

    def __init__(self, in_f, out_f):
        super().__init__()
        # Keep the dimensions around for introspection/debugging.
        self.in_f = in_f
        self.out_f = out_f
        self.linear = Linear(in_f, out_f)
        self.relu = ReLU()

    def forward(self, x):
        # Affine transform, then non-linearity.
        return self.relu(self.linear(x))
# Classifier: 9 input features -> two residual Linear+ReLU stages -> 3 classes.
model = Sequential(
    Linear(9, 16),
    Residual(LinearReluBlock(16, 16)),
    Residual(LinearReluBlock(16, 16)),
    Linear(16, 3),
)

# Iris dataset served as shuffled mini-batches of 16.
D = CsvDataset("./data/iris.csv")
DL = DataLoader(D, batch_size=16, shuffle=True)

# Adam with weight decay; cosine decay schedule with a short warmup.
optimizer = Adam(model.parameters(), lr=0.01, weight_decay=0.1)
scheduler = CosineDecayWithWarmRestarts(
    optimizer, max_lr=optimizer.lr, warmup_steps=20, T_max=200
)
criterion = CrossEntropyLoss()

model.train()
# Train for 100 epochs, logging the mean batch loss every 10th epoch.
for epoch in range(100):
    epoch_losses = []
    for features, labels in DL:
        logits = model(features)
        loss = criterion(logits, labels)
        epoch_losses.append(loss.data)
        # Seed backprop with an all-ones upstream gradient (loss may be
        # non-scalar in this framework).
        loss.backward(Tensor(np.ones(loss.shape)))
        # NOTE(review): gradients are never explicitly zeroed between steps —
        # confirm Mytorch's backward/step overwrite rather than accumulate.
        scheduler.step()
        optimizer.step()
    if epoch % 10 == 0:
        avg_loss = np.sum(epoch_losses) / len(epoch_losses)
        print("Epoch {} Avg loss {}".format(epoch, avg_loss))
def test(model, dataloader):
    """Evaluate *model* on *dataloader* and report classification accuracy.

    Args:
        model: callable mapping a batch of inputs to a prediction object
            whose ``.data`` is an (N, C) array of per-class scores.
        dataloader: iterable of ``(x, y)`` batches where ``y.data`` holds
            the integer class labels for the batch.

    Returns:
        float: fraction of correctly classified samples (0.0 for an empty
        dataloader). Also printed to stdout. (Original printed but
        returned None; returning is backward-compatible.)
    """
    corrects = 0
    total = 0
    for x, y in dataloader:
        pred_y = model(x)
        # Hard prediction = arg-max over the class dimension.
        hard_y = np.argmax(pred_y.data, axis=1)
        corrects += np.sum(hard_y == y.data)
        total += len(y.data)
    # Guard the division: the original raised ZeroDivisionError when the
    # dataloader yielded no batches.
    accuracy = corrects / total if total else 0.0
    print("Accuracy:", accuracy)
    return accuracy
# Switch to inference: disable gradient tracking and put layers in eval mode.
# NOTE(review): Mytorch.no_grad() is invoked as a plain call, not as a
# context manager (`with Mytorch.no_grad():`). If it mirrors torch.no_grad()
# and returns a context object, this line is a no-op — confirm it instead
# flips a global flag in Mytorch.
Mytorch.no_grad()
model.eval()
# Accuracy is measured on the training loader — no held-out split exists
# in this script.
test(model,DL)