path: root/Assignment 6/ex62.py
author     Camil Staps    2016-01-08 23:15:14 +0100
committer  Camil Staps    2016-01-08 23:15:14 +0100
commit     087f0526345ed45593295fdafcaeed496a621c68 (patch)
tree       7804cac6319b4cc9e2037717ec6773bb3e464791 /Assignment 6/ex62.py
parent     Fix assignment 3.2 (diff)
Assignment 6
Diffstat (limited to 'Assignment 6/ex62.py')
-rw-r--r--    Assignment 6/ex62.py    101
1 file changed, 101 insertions, 0 deletions
diff --git a/Assignment 6/ex62.py b/Assignment 6/ex62.py
new file mode 100644
index 0000000..4f8c8fc
--- /dev/null
+++ b/Assignment 6/ex62.py
@@ -0,0 +1,101 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Fri Jan 8 17:43:24 2016
+
+@author: camil
+"""
+
+import numpy as np
+import scipy.io as sciio
+from sklearn import cross_validation
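+# (sklearn's cross_validation module was current at the time; later sklearn
+# versions renamed it to model_selection.)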
+
+import matplotlib.pyplot as plt
+from matplotlib import cm
+
+import neurolab as nl
+
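+# Load the XOR dataset (feature matrix X and label vector y) from the MATLAB file.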
+def load_data():
+    data = sciio.loadmat('./data/xor.mat')
+    X, y = data['X'], data['y']
+    return X, y
+
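+# Plot the network's decision boundary on a grid over [-1, 2] x [-1, 2],
+# optionally overlaying the data points passed as data=(X, y).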
+def plot_boundaries(nw, data=None):
+    plt.figure(1)
+    plt.hold(True)
+    delta = 0.05
+    levels = 100
+    a = np.arange(-1, 2, delta)
+    b = np.arange(-1, 2, delta)
+    A, B = np.meshgrid(a, b)
+    values = np.zeros(A.shape)
+
+    # Evaluate the network on every grid point; values[j, i] follows the
+    # meshgrid convention (rows index b, columns index a).
+    for i in range(len(a)):
+        for j in range(len(b)):
+            values[j, i] = nw.sim(np.mat([a[i], b[j]]))[0, 0]
+    plt.contour(A, B, values, levels=[.5], colors=['k'], linestyles='dashed')
+    plt.contourf(A, B, values, cmap=cm.RdBu,
+                 levels=np.linspace(values.min(), values.max(), levels))
+
+    if data is not None:
+        plt.scatter(data[0][:,0], data[0][:,1], c=data[1], cmap=cm.RdBu, s=40)
+
+# 6.2.1
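+# Scatter plot of the XOR data, coloured by class label.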
+def ex621(X, y):
+    colors = ['green' if x == 0 else 'red' for x in y]
+    plt.scatter(X[:,0], X[:,1], c=colors, s=40, alpha=0.6)
+    plt.ylim([-1, 2])
+    plt.xlim([-1, 2])
+    plt.show()
+
+# 6.2.2
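+# Train a feed-forward network with the given number of hidden units using
+# 10-fold cross-validation; returns the mean and standard deviation of the
+# test error over the folds.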
+def run_exercise(X, y, hidden_units=1, plot=True):
+    cv = cross_validation.KFold(len(X), 10)
+    errors = []
+    c = 0
+
+    if plot:
+        plt.figure(figsize=(15,6))
+
+    for train, test in cv:
+        c += 1
+        X_train, X_test, y_train, y_test = X[train], X[test], y[train], y[test]
+
+        # One hidden layer of hidden_units TanSig units plus a TanSig output
+        # unit; with hidden_units == 0, only the output unit remains.
+        shape = [hidden_units, 1] if hidden_units > 0 else [1]
+        net = nl.net.newff([[0,1], [0,1]], shape,
+                           np.repeat(nl.trans.TanSig(), len(shape)))
+        net.train(X_train, y_train, show=501)
+
+        if plot:
+            plt.subplot(2, 5, c)
+            plot_boundaries(net, data=(X_train, y_train))
+
+        # Fraction of misclassified test points for this fold.
+        predictions = [1 if p > 0.5 else 0 for p in net.sim(X_test)]
+        correct = len([i for i, p in zip(y_test, predictions) if i == p])
+        error = 1 - correct / float(len(X_test))
+        errors.append(error)
+
+    if plot:
+        plt.suptitle('XOR with ' + str(hidden_units) + ' hidden unit' +\
+                     ('s' if hidden_units != 1 else ''),
+                     fontsize=18)
+        plt.show()
+
+    return np.mean(errors), np.std(errors)
+
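+# Plot the data, then sweep the number of hidden units from 0 to 10 and plot
+# the mean cross-validated error with a one-standard-deviation band.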
+def main():
+    X, y = load_data()
+
+    ex621(X, y)
+
+    unit_counts = range(11)
+    means, stds = [], []
+    for i in unit_counts:
+        mean, std = run_exercise(X, y, hidden_units=i, plot=True)
+        means.append(mean)
+        stds.append(std)
+
+    means, stds = np.array(means), np.array(stds)
+    plt.plot(unit_counts, means, c='red')
+    plt.fill_between(unit_counts, means - stds, means + stds, alpha=0.1, color="r")
+    plt.show()  # make sure the final error plot is displayed when run as a script
+
+if __name__ == '__main__':
+    main()