Skip to content

Commit

Permalink
Merge pull request #4 from esvhd/py36_fix
Browse files Browse the repository at this point in the history
Py36 fix
  • Loading branch information
vinhkhuc authored Jul 2, 2017
2 parents 39902df + 28725ff commit cbf48ed
Show file tree
Hide file tree
Showing 6 changed files with 31 additions and 12 deletions.
2 changes: 1 addition & 1 deletion 1_linear_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def main():

for i in range(100):
cost = 0.
num_batches = len(X) / batch_size
num_batches = len(X) // batch_size
for k in range(num_batches):
start, end = k * batch_size, (k + 1) * batch_size
cost += train(model, loss, optimizer, X[start:end], Y[start:end])
Expand Down
11 changes: 7 additions & 4 deletions 2_logistic_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,11 @@


def build_model(input_dim, output_dim):
    """Build a single-layer linear classifier.

    Args:
        input_dim: number of input features.
        output_dim: number of output classes (logits).

    Returns:
        A torch.nn.Sequential containing one bias-free Linear layer
        named "linear".
    """
    # We don't need the softmax layer here since CrossEntropyLoss already
    # uses it internally (it expects raw logits).
    model = torch.nn.Sequential()
    model.add_module("linear",
                     torch.nn.Linear(input_dim, output_dim, bias=False))
    return model


Expand Down Expand Up @@ -56,10 +58,11 @@ def main():

for i in range(100):
cost = 0.
num_batches = n_examples / batch_size
num_batches = n_examples // batch_size
for k in range(num_batches):
start, end = k * batch_size, (k + 1) * batch_size
cost += train(model, loss, optimizer, trX[start:end], trY[start:end])
cost += train(model, loss, optimizer,
trX[start:end], trY[start:end])
predY = predict(model, teX)
print("Epoch %d, cost = %f, acc = %.2f%%"
% (i + 1, cost / num_batches, 100. * np.mean(predY == teY)))
Expand Down
2 changes: 1 addition & 1 deletion 3_neural_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def main():

for i in range(100):
cost = 0.
num_batches = n_examples / batch_size
num_batches = n_examples // batch_size
for k in range(num_batches):
start, end = k * batch_size, (k + 1) * batch_size
cost += train(model, loss, optimizer, trX[start:end], trY[start:end])
Expand Down
2 changes: 1 addition & 1 deletion 4_modern_neural_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def main():

for i in range(100):
cost = 0.
num_batches = n_examples / batch_size
num_batches = n_examples // batch_size
for k in range(num_batches):
start, end = k * batch_size, (k + 1) * batch_size
cost += train(model, loss, optimizer, trX[start:end], trY[start:end])
Expand Down
2 changes: 1 addition & 1 deletion 5_convolutional_net.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ def main():

for i in range(20):
cost = 0.
num_batches = n_examples / batch_size
num_batches = n_examples // batch_size
for k in range(num_batches):
start, end = k * batch_size, (k + 1) * batch_size
cost += train(model, loss, optimizer, trX[start:end], trY[start:end])
Expand Down
24 changes: 20 additions & 4 deletions data_util.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,20 @@
import gzip
import os
from os import path
import urllib
import numpy as np

import sys
if sys.version_info.major < 3:
import urllib
else:
import urllib.request as request


DATASET_DIR = 'datasets/'

MNIST_FILES = ["train-images-idx3-ubyte.gz", "train-labels-idx1-ubyte.gz",
"t10k-images-idx3-ubyte.gz", "t10k-labels-idx1-ubyte.gz"]


def download_file(url, local_path):
dir_path = path.dirname(local_path)
Expand All @@ -14,13 +23,15 @@ def download_file(url, local_path):
os.makedirs(dir_path)

print("Downloading from '%s' ..." % url)
urllib.URLopener().retrieve(url, local_path)
if sys.version_info.major < 3:
urllib.URLopener().retrieve(url, local_path)
else:
request.urlretrieve(url, local_path)


def download_mnist(local_path):
    """Download any missing MNIST archive files into *local_path*.

    Iterates over the module-level MNIST_FILES list and fetches each
    gzip archive from the canonical MNIST URL, skipping files that
    already exist on disk.

    Args:
        local_path: directory in which the .gz files are stored.
    """
    url_root = "http://yann.lecun.com/exdb/mnist/"
    for f_name in MNIST_FILES:
        f_path = os.path.join(local_path, f_name)
        # Only fetch files that are not already present locally.
        if not path.exists(f_path):
            download_file(url_root + f_name, f_path)
Expand All @@ -39,6 +50,11 @@ def load_mnist(ntrain=60000, ntest=10000, onehot=True):
data_dir = os.path.join(DATASET_DIR, 'mnist/')
if not path.exists(data_dir):
download_mnist(data_dir)
else:
# check all files
checks = [path.exists(os.path.join(data_dir, f)) for f in MNIST_FILES]
if not np.all(checks):
download_mnist(data_dir)

with gzip.open(os.path.join(data_dir, 'train-images-idx3-ubyte.gz')) as fd:
buf = fd.read()
Expand Down

0 comments on commit cbf48ed

Please sign in to comment.