In [76]:
# The x values 10..19 as a 1-D float tensor (interpreted as miles below).
# torch.arange builds the sequence directly — the list comprehension is
# unnecessary, and the factory functions (torch.tensor / torch.arange) are
# preferred over the legacy torch.Tensor constructor.
x = torch.arange(10, 20, dtype=torch.float32)
x
Out[76]:
In [0]:
# Target values: convert miles to kilometres elementwise (1 mile = 1.609 km).
y = x*1.609
In [78]:
# Scatter plot of miles vs kilometres — a perfect line with slope 1.609.
# NOTE(review): relies on `import matplotlib.pyplot as plt` from an earlier,
# unseen cell; confirm the import exists before this runs on a fresh kernel.
plt.scatter(x,y)
Out[78]:
In [0]:
import torch.nn as nn
In [0]:
class LinearRegression(nn.Module):
    """A single linear unit y = w * x (1 input -> 1 output, no bias)."""

    def __init__(self):
        super().__init__()
        # bias=False: the miles->km mapping passes through the origin,
        # so a lone weight is sufficient.
        self.linear = nn.Linear(1, 1, bias=False)

    def forward(self, x):
        """Apply the linear layer to an (N, 1) input batch."""
        return self.linear(x)
In [92]:
# Instantiate the model and inspect its single, randomly initialised weight.
model = LinearRegression()
w1= model.parameters()  # lazy generator over the model's parameter tensors
print(list(w1))
In [0]:
# Plain SGD with a small learning rate; MSE is the standard regression loss.
optimizer = torch.optim.SGD(model.parameters(), lr = 0.001)
criterion = nn.MSELoss()
In [94]:
# Full-batch training loop: 20 epochs of gradient descent on the miles->km data.
loss_list = []
for epoch in range(20):
    # Call the module itself rather than model.forward(); __call__ is the
    # supported entry point and runs any registered nn.Module hooks.
    y_pred = model(x.reshape(-1, 1))
    loss = criterion(y_pred, y.reshape(-1, 1))
    loss.backward()
    print(f"Epoch={epoch} --> Loss = {loss.item()}")
    loss_list.append(loss.item())
    optimizer.step()       # apply the gradient update
    optimizer.zero_grad()  # clear gradients for the next iteration
In [95]:
# After training, the lone weight should approach 1.609 (the miles->km factor).
list(model.parameters())
Out[95]:
In [0]:
In [104]:
# Re-create the model from scratch to repeat the experiment with Adam.
model = LinearRegression()
w1= model.parameters()  # fresh random initial weight, printed for comparison
print(list(w1))
In [0]:
# Same loss, but Adam this time, with a 10x larger learning rate than the SGD run.
optimizer = torch.optim.Adam(model.parameters(), lr = 0.01)
criterion = nn.MSELoss()
In [106]:
# Same 20-epoch full-batch loop as the SGD experiment, now driven by Adam.
loss_list = []
for epoch in range(20):
    # model(...) instead of model.forward(...): __call__ runs nn.Module hooks.
    y_pred = model(x.reshape(-1, 1))
    loss = criterion(y_pred, y.reshape(-1, 1))
    loss.backward()
    print(f"Epoch={epoch} --> Loss = {loss.item()}")
    loss_list.append(loss.item())
    optimizer.step()
    optimizer.zero_grad()
In [0]:
In [107]:
# Confirm the dataset file is present in the current working directory.
import os

os.listdir()
Out[107]:
In [0]:
import pandas as pd
In [0]:
# Load the gzipped diabetes CSV; every column is parsed as float32 so that
# torch.from_numpy() below yields float tensors.
# Fix: the original passed dtype=np.float32, but numpy is never imported in
# this notebook (NameError on a fresh kernel); the 'float32' string alias is
# equivalent and self-contained.
data = pd.read_csv('diabetes.csv.gz', compression='gzip', header=None,
                   dtype='float32')
In [110]:
# Preview the first five rows of the diabetes dataset.
data.head()
Out[110]:
In [111]:
# (rows, columns) of the loaded dataset.
data.shape
Out[111]:
In [0]:
# Features = every column except the last; target = the last column.
X = data.iloc[:,:-1].values
y = data.iloc[:,-1].values  # 1-D array of binary labels
In [0]:
# Wrap the numpy arrays as torch tensors (from_numpy shares the same memory).
X = torch.from_numpy(X)
y = torch.from_numpy(y)
In [0]:
class LinearRegression(nn.Module):
    """8-feature binary classifier: one linear layer followed by a sigmoid.

    NOTE(review): despite the name this is logistic regression — the sigmoid
    output feeds nn.BCELoss; the class name is kept so later cells still work.
    """

    def __init__(self):
        super().__init__()
        self.l1 = nn.Linear(8, 1)

    def forward(self, x):
        # Squash the linear score into (0, 1) as a probability.
        return torch.sigmoid(self.l1(x))
In [130]:
# Train the logistic-regression classifier with binary cross-entropy.
model = LinearRegression()
optimizer = torch.optim.SGD(model.parameters(), lr = 0.01)
criterion = nn.BCELoss()
for epoch in range(150):
    y_pred = model(X)
    # Fix: y is 1-D (from data.iloc[:, -1].values) while y_pred is (N, 1);
    # nn.BCELoss requires matching input/target shapes, so reshape the target
    # — consistent with the earlier regression loops.
    loss = criterion(y_pred, y.reshape(-1, 1))
    loss.backward()
    print(loss.item())
    optimizer.step()
    optimizer.zero_grad()
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
In [0]:
No comments :
Post a Comment