# Module-wide PRNG used for the weight/bias initialization below; the fixed
# seed makes every run start from the same random network.
7 # use a constant seed to keep things reproducible
8 rg = np.random.default_rng(1)
# NOTE(review): the "def sigmoid(x):" header (original line 14) and the
# derivative function's header (original lines 16-18) are missing from this
# extract; the two return statements below are fragments of those functions.
12 # https://en.wikipedia.org/wiki/Sigmoid_function
13 # classic, differentiable, apparently worse for training
# Logistic sigmoid: maps any real x into (0, 1); vectorized through np.exp.
15 return 1 / (1 + np.exp(-x))
# Sigmoid derivative, s(x) * (1 - s(x)).
# NOTE(review): evaluates sigmoid(x) twice per call; a cached value would
# halve the work -- confirm against the full file before changing.
19 return sigmoid(x) * (1 - sigmoid(x))
# NOTE(review): the "def" headers for ReLU and its derivative (original
# lines 24 and 26-28) are missing from this extract.
22 # https://en.wikipedia.org/wiki/Rectifier_(neural_networks)
23 # mostly preferred these days, not differentiable at 0, but slope can be defined arbitrarily as 0 or 1 at 0
# ReLU: elementwise max(x, 0).
25 return np.maximum(x, 0)
# ReLU derivative: step function; np.heaviside's second argument makes the
# slope at exactly x == 0 equal to 1, matching the comment above.
29 return np.heaviside(x, 1)
# Load the MNIST training and test sets. mnist.load is project-local and
# appears to return (images, labels, image_rows, image_cols); images are
# indexed column-wise below -- TODO confirm against the mnist module.
32 train_images, train_labels, rows, cols = mnist.load('train-images-idx3-ubyte', 'train-labels-idx1-ubyte')
# rows2/cols2 presumably duplicate rows/cols for same-sized images; verify.
33 test_images, test_labels, rows2, cols2 = mnist.load('t10k-images-idx3-ubyte', 't10k-labels-idx1-ubyte')
37 # neural network structure: two hidden layers, one output layer
# Layer widths: input = one neuron per pixel, hidden layers of 20 and 16,
# output = 10 neurons (one per digit class).
38 SIZES = (rows * cols, 20, 16, 10)
39 NUM_LAYERS = len(SIZES)
41 # initialize weight matrices and bias vectors with random numbers
# NOTE(review): the "weights = []" / "biases = []" initializers (original
# lines 42-43) are missing from this extract; the loop appends to them.
44 for i in range(1, NUM_LAYERS):
# Weight matrix shape (fan_out, fan_in): maps layer i-1 activations to layer i.
45 weights.append(rg.normal(size=(SIZES[i], SIZES[i-1])))
# NOTE(review): biases are drawn with scale=10 (stddev 10) while weights use
# the default stddev 1 -- unusually large for an init; confirm intentional.
46 biases.append(rg.normal(scale=10, size=SIZES[i]))
49 def feed_forward(x, transfer=sigmoid):
50 '''Compute all z and output vectors for given input vector'''
# NOTE(review): original lines 51-53, 55-56 and 58-61 (the z computation,
# the z_s/a_s accumulator setup, and the return statement) are missing from
# this extract; only a fragment of the loop body is visible. Call sites
# (original lines 73, 78-79) index the result as res[1][-1], so the function
# presumably returns (z_s, a_s) -- TODO confirm.
54 for w, b in zip(weights, biases):
# Record the layer's activation; x is presumably rebound to the
# pre-activation w @ x + b on one of the missing lines.
57 a_s.append(transfer(x))
# NOTE(review): the enclosing "def classify(y):" header is missing from this
# extract (see call sites at original lines 73 and 80).
62 # the recognized digit is the index of the highest-valued output neuron
# Returns (digit, confidence): the argmax index and the winning activation.
63 return np.argmax(y), np.max(y)
# NOTE(review): the "def test():" header and the "good = 0" counter
# initializer (original lines ~64-69) are missing from this extract.
67 """Count percentage of test inputs which are being recognized correctly"""
# Images are stored column-wise: one image per column of test_images.
70 num_images = test_images.shape[1]
71 for i in range(num_images):
72 # the recognized digit is the index of the highest-valued output neuron
# feed_forward(...)[1] is the activation list, [-1] the output layer,
# classify(...)[0] the predicted digit.
73 y = classify(feed_forward(test_images[:, i])[1][-1])[0]
# int(...) turns the boolean match into 0/1 so hits accumulate.
74 good += int(y == test_labels[i])
# Percentage of correctly classified test images.
75 return 100 * (good / num_images)
# Smoke-test the freshly initialized (untrained) network: push the first
# test image through it, show the raw output vector and its classification,
# then report overall accuracy (expected to be near chance, ~10%).
first_result = feed_forward(test_images[:, 0])
final_activation = first_result[1][-1]
print(f'output vector of first image: {final_activation}')
digit, conf = classify(final_activation)
print(f'classification of first image: {digit} with confidence {conf}; real label {test_labels[0]}')
print(f'correctly recognized images after initialization: {test()}%')