Skip to content

Commit

Permalink
Misc
Browse files Browse the repository at this point in the history
  • Loading branch information
vinhkhuc committed Feb 13, 2017
1 parent f6e6934 commit 77e5f71
Show file tree
Hide file tree
Showing 4 changed files with 26 additions and 26 deletions.
12 changes: 6 additions & 6 deletions 2_logistic_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,9 @@ def build_model(input_dim, output_dim):
return model


def train(model, loss, optimizer, x, y):
x = Variable(x, requires_grad=False)
y = Variable(y, requires_grad=False)
def train(model, loss, optimizer, x_val, y_val):
x = Variable(x_val, requires_grad=False)
y = Variable(y_val, requires_grad=False)

# Reset gradient
optimizer.zero_grad()
Expand All @@ -34,9 +34,9 @@ def train(model, loss, optimizer, x, y):
return output.data[0]


def predict(model, x):
var_x = Variable(x, requires_grad=False)
output = model.forward(var_x)
def predict(model, x_val):
    """Return the predicted class index for each sample in `x_val`.

    Args:
        model: torch module mapping a Variable of shape
            (n_samples, n_features) to per-class scores.
        x_val: input tensor, one sample per row.

    Returns:
        numpy array of shape (n_samples,) with the argmax class per row.
    """
    # No gradients are needed at inference time.
    x = Variable(x_val, requires_grad=False)
    # Call the model, not model.forward(x): __call__ also runs any
    # registered hooks and is the supported nn.Module entry point.
    output = model(x)
    return output.data.numpy().argmax(axis=1)


Expand Down
12 changes: 6 additions & 6 deletions 3_neural_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@ def build_model(input_dim, output_dim):
return model


def train(model, loss, optimizer, x, y):
x = Variable(x, requires_grad=False)
y = Variable(y, requires_grad=False)
def train(model, loss, optimizer, x_val, y_val):
x = Variable(x_val, requires_grad=False)
y = Variable(y_val, requires_grad=False)

# Reset gradient
optimizer.zero_grad()
Expand All @@ -36,9 +36,9 @@ def train(model, loss, optimizer, x, y):
return output.data[0]


def predict(model, x):
var_x = Variable(x, requires_grad=False)
output = model.forward(var_x)
def predict(model, x_val):
    """Return the predicted class index for each sample in `x_val`.

    Args:
        model: torch module mapping a Variable of shape
            (n_samples, n_features) to per-class scores.
        x_val: input tensor, one sample per row.

    Returns:
        numpy array of shape (n_samples,) with the argmax class per row.
    """
    # No gradients are needed at inference time.
    x = Variable(x_val, requires_grad=False)
    # Call the model, not model.forward(x): __call__ also runs any
    # registered hooks and is the supported nn.Module entry point.
    output = model(x)
    return output.data.numpy().argmax(axis=1)


Expand Down
14 changes: 7 additions & 7 deletions 4_modern_neural_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,9 @@ def build_model(input_dim, output_dim):
return model


def train(model, loss, optimizer, x, y):
x = Variable(x, requires_grad=False)
y = Variable(y, requires_grad=False)
def train(model, loss, optimizer, x_val, y_val):
x = Variable(x_val, requires_grad=False)
y = Variable(y_val, requires_grad=False)

# Reset gradient
optimizer.zero_grad()
Expand All @@ -40,9 +40,9 @@ def train(model, loss, optimizer, x, y):
return output.data[0]


def predict(model, x):
var_x = Variable(x, requires_grad=False)
output = model.forward(var_x)
def predict(model, x_val):
    """Return the predicted class index for each sample in `x_val`.

    Args:
        model: torch module mapping a Variable of shape
            (n_samples, n_features) to per-class scores.
        x_val: input tensor, one sample per row.

    Returns:
        numpy array of shape (n_samples,) with the argmax class per row.
    """
    # No gradients are needed at inference time.
    x = Variable(x_val, requires_grad=False)
    # Call the model, not model.forward(x): __call__ also runs any
    # registered hooks and is the supported nn.Module entry point.
    output = model(x)
    return output.data.numpy().argmax(axis=1)


Expand All @@ -57,7 +57,7 @@ def main():
n_classes = 10
model = build_model(n_features, n_classes)
loss = torch.nn.CrossEntropyLoss(size_average=True)
optimizer = optim.RMSprop(model.parameters(), lr=0.01)
optimizer = optim.Adam(model.parameters())
batch_size = 100

for i in range(100):
Expand Down
14 changes: 7 additions & 7 deletions 5_convolutional_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,9 @@ def forward(self, x):
return self.fc.forward(x)


def train(model, loss, optimizer, x, y):
x = Variable(x, requires_grad=False)
y = Variable(y, requires_grad=False)
def train(model, loss, optimizer, x_val, y_val):
x = Variable(x_val, requires_grad=False)
y = Variable(y_val, requires_grad=False)

# Reset gradient
optimizer.zero_grad()
Expand All @@ -55,9 +55,9 @@ def train(model, loss, optimizer, x, y):
return output.data[0]


def predict(model, x):
var_x = Variable(x, requires_grad=False)
output = model.forward(var_x)
def predict(model, x_val):
    """Return the predicted class index for each sample in `x_val`.

    Args:
        model: torch module mapping a Variable of the input batch
            to per-class scores of shape (n_samples, n_classes).
        x_val: input tensor, one sample per row (first dimension).

    Returns:
        numpy array of shape (n_samples,) with the argmax class per row.
    """
    # No gradients are needed at inference time.
    x = Variable(x_val, requires_grad=False)
    # Call the model, not model.forward(x): __call__ also runs any
    # registered hooks and is the supported nn.Module entry point.
    output = model(x)
    return output.data.numpy().argmax(axis=1)


Expand All @@ -75,7 +75,7 @@ def main():
n_classes = 10
model = ConvNet(output_dim=n_classes)
loss = torch.nn.CrossEntropyLoss(size_average=True)
optimizer = optim.RMSprop(model.parameters(), lr=0.001)
optimizer = optim.SGD(model.parameters())
batch_size = 100

for i in range(100):
Expand Down

0 comments on commit 77e5f71

Please sign in to comment.