In [1]:
import numpy as np
import math
import scipy.io as sio
import os

%matplotlib inline
import matplotlib.pylab as pylab
import matplotlib.pyplot as plt
import mpld3
mpld3.enable_notebook()
pylab.rcParams['figure.figsize'] = 12, 8

import sys
sys.path.append('/Users/pi/Downloads/HintonCoursera/Assignment Answers -- Sinclair (Python)')

from utility.utils import loadmat, logistic
from assignment3 import A3Run
/Users/pi/anaconda/lib/python3.5/site-packages/scipy/_lib/decorator.py:205: DeprecationWarning: inspect.getargspec() is deprecated, use inspect.signature() instead
  first = inspect.getargspec(caller)[0][0]  # first arg
In [2]:
n_hid = 7
NUM_INPUT_UNITS = 256
NUM_CLASSES = 10
In [3]:
a3 = A3Run()
In [4]:
#Q1
a3.a3_main(0, n_hid=0, n_iterations=0, lr_net=0, train_momentum=0, early_stopping=False, mini_batch_size=0)
The loss on the training data is 2.3025850929940463
The classification error rate on the training data is 0.9
The loss on the test data is 2.302585092994046
The classification error rate on the test data is 0.9
The loss on the validation data is 2.3025850929940463
The classification error rate on the validation data is 0.9
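These Q1 numbers are the chance-level baseline: with the network producing effectively uniform class probabilities, the cross-entropy is ln(10) ≈ 2.3026, and with the ten digit classes balanced the error rate is 90%. A quick check of that baseline using the constants defined above:
In [ ]:
# Chance-level baseline for a 10-way softmax classifier:
# uniform probabilities give cross-entropy log(K), and the error rate is 1 - 1/K on balanced classes.
baseline_loss = math.log(NUM_CLASSES)     # 2.302585092994046, matching the Q1 output above
baseline_error = 1 - 1 / NUM_CLASSES      # 0.9
print(baseline_loss, baseline_error)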
In [10]:
#Q2
a3.a3_main(wd_coeff=0, n_hid=10, n_iterations=70, lr_net=0.005, train_momentum=0.0, early_stopping=False, 
           mini_batch_size=4)
Now testing the gradient on the whole training set... 
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
/Users/pi/anaconda/lib/python3.5/site-packages/numpy/core/fromnumeric.py in size(a, axis)
   2688         try:
-> 2689             return a.shape[axis]
   2690         except AttributeError:

AttributeError: 'dict_keys' object has no attribute 'shape'

During handling of the above exception, another exception occurred:

IndexError                                Traceback (most recent call last)
<ipython-input-10-9f7390d76041> in <module>()
      1 #Q2
      2 a3.a3_main(wd_coeff=0, n_hid=10, n_iterations=70, lr_net=0.005, train_momentum=0.0, early_stopping=False, 
----> 3            mini_batch_size=4)

/Users/pi/Downloads/HintonCoursera/Assignment Answers -- Sinclair (Python)/assignment3/assignment3.py in a3_main(self, wd_coeff, n_hid, n_iterations, lr_net, train_momentum, early_stopping, mini_batch_size)
    369         if n_iterations != 0:
    370             print ('Now testing the gradient on the whole training set... ')
--> 371             print("x")
    372             nn.test_gradient(self.data_sets['training'])
    373             nn.train(self.data_sets['training'])

/Users/pi/Downloads/HintonCoursera/Assignment Answers -- Sinclair (Python)/assignment3/assignment3.py in test_gradient(self, data)
    279         self._d_loss_by_d_model(data['inputs'], data['targets'])
    280         analytic_gradient_struct = self.theta_to_model(self.gradient)
--> 281         if np.size(analytic_gradient_struct.keys(), 0) != 2:
    282             raise Exception('The object returned by def d_loss_by_d_model should have exactly two field names: '
    283                             '.input_to_hid and .hid_to_class')

/Users/pi/anaconda/lib/python3.5/site-packages/numpy/core/fromnumeric.py in size(a, axis)
   2689             return a.shape[axis]
   2690         except AttributeError:
-> 2691             return asarray(a).shape[axis]
   2692 
   2693 

IndexError: tuple index out of range
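The traceback points at a Python 2-ism in the ported test_gradient: np.size(analytic_gradient_struct.keys(), 0) works on a Python 2 list of keys, but under Python 3 dict.keys() returns a view with no shape, so NumPy coerces it to a 0-d array and the axis-0 lookup raises IndexError. Counting the fields with len() avoids the problem; a small demonstration (the dict here is only a stand-in for what theta_to_model returns):
In [ ]:
# Stand-in for the dict returned by theta_to_model: two weight matrices keyed by layer name.
grad = {'input_to_hid': np.zeros((7, 256)), 'hid_to_class': np.zeros((10, 7))}
len(grad)                       # 2 under both Python 2 and 3
# np.size(grad.keys(), 0)       # raises IndexError on Python 3: dict_keys has no shape
# so in test_gradient, `if np.size(analytic_gradient_struct.keys(), 0) != 2:`
# can be replaced by `if len(analytic_gradient_struct) != 2:`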
In [ ]:
# Question 3-4
learning_rates = [0.002, 0.01, 0.05, 0.2, 1.0, 5.0, 20.0]
momentums = [0.0, 0.9]
for momentum in momentums:
    for learning_rate in learning_rates:
        print "Momentum and learning rate are ({0}, {1})".format(momentum, learning_rate)
        a3.a3_main(0, n_hid=10, n_iterations=70, lr_net=learning_rate, train_momentum=momentum, 
                   early_stopping=False, mini_batch_size=4)
        print
In [ ]:
## Question 5
a3.a3_main(0, n_hid=200, n_iterations=1000, lr_net=0.35, train_momentum=0.9, 
           early_stopping=False, mini_batch_size=100)
In [ ]:
## Question 6
a3.a3_main(0, n_hid=200, n_iterations=1000, lr_net=0.35, train_momentum=0.9, 
           early_stopping=True, mini_batch_size=100)
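Question 6 turns on early_stopping. The idea is to monitor the validation loss during training and report the weights from the iteration where it was lowest rather than the final ones; a toy illustration of that bookkeeping (made-up loss values, not from this run):
In [ ]:
# Toy early-stopping bookkeeping: keep the iteration with the lowest validation loss.
val_losses = [1.20, 0.90, 0.70, 0.65, 0.68, 0.74]   # pretend per-iteration validation losses
best_iter = int(np.argmin(val_losses))
print('lowest validation loss {0} at iteration {1}'.format(val_losses[best_iter], best_iter))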
In [ ]:
## Question 7
for decay in [0, 0.0001, 0.001, 0.01, 1., 5]:
    print(decay)
    a3.a3_main(decay, n_hid=200, n_iterations=1000, lr_net=0.35, train_momentum=0.9, 
               early_stopping=False, mini_batch_size=100)
    print()
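Question 7 sweeps the weight-decay coefficient. Assuming the standard formulation from the assignment, wd_coeff adds an L2 penalty of wd_coeff/2 times the squared norm of all weights to the optimized loss. A standalone sketch of how that penalty scales over the coefficients tried above (random toy weights, not the trained net):
In [ ]:
# Toy illustration of the L2 weight-decay penalty, wd_coeff / 2 * ||theta||^2,
# over the coefficients swept in Question 7 (theta here is random, not the trained model).
rng = np.random.RandomState(0)
theta = rng.randn(200 * NUM_INPUT_UNITS + NUM_CLASSES * 200) * 0.01
for wd_coeff in [0, 0.0001, 0.001, 0.01, 1., 5]:
    print(wd_coeff, wd_coeff / 2.0 * np.dot(theta, theta))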
In [ ]:
## Question 8
for size in [10, 30, 100, 130, 170]:
    print(size)
    a3.a3_main(0, n_hid=size, n_iterations=1000, lr_net=0.35, train_momentum=0.9, 
               early_stopping=False, mini_batch_size=100)
    print()
In [ ]:
## Question 9/10
for size in [18, 37, 83, 113, 189]:
    print(size)
    a3.a3_main(0, n_hid=size, n_iterations=1000, lr_net=0.35, train_momentum=0.9, 
               early_stopping=True, mini_batch_size=100)
    print()