diff --git a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb
index 8d70ee2b..d5a65861 100644
--- a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb
+++ b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb
@@ -93,7 +93,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": null,
    "metadata": {
     "collapsed": true
    },
@@ -142,9 +142,28 @@
     "    # Apply Dropout\n",
     "    norm3 = tf.nn.dropout(norm3, _dropout)\n",
     "\n",
+    "    # Add a 4th Convolution Layer\n",
+    "    conv4 = conv2d('conv4', norm3, _weights['wc4'], _biases['bc4'])\n",
+    "    # Apply Max Pooling\n",
+    "    pool4 = max_pool('pool4', conv4, k=2)\n",
+    "    # Apply Normalization\n",
+    "    norm4 = norm('norm4', pool4, lsize=4)\n",
+    "    # Apply Dropout\n",
+    "    norm4 = tf.nn.dropout(norm4, _dropout)\n",
+    "\n",
+    "    # Add a 5th Convolution Layer\n",
+    "    conv5 = conv2d('conv5', norm4, _weights['wc5'], _biases['bc5'])\n",
+    "    # Apply Max Pooling\n",
+    "    pool5 = max_pool('pool5', conv5, k=2)\n",
+    "    # Apply Normalization\n",
+    "    norm5 = norm('norm5', pool5, lsize=4)\n",
+    "    # Apply Dropout\n",
+    "    norm5 = tf.nn.dropout(norm5, _dropout)\n",
+    "\n",
     "    # Fully connected layer\n",
-    "    # Reshape conv3 output to fit dense layer input\n",
-    "    dense1 = tf.reshape(norm3, [-1, _weights['wd1'].get_shape().as_list()[0]]) \n",
+    "    # Update the reshape input for the fully connected layer to reflect the output of norm5\n",
+    "    dense1 = tf.reshape(norm5, [-1, _weights['wd1'].get_shape().as_list()[0]]) \n",
+    "\n",
     "    # Relu activation\n",
     "    dense1 = tf.nn.relu(tf.matmul(dense1, _weights['wd1']) + _biases['bd1'], name='fc1')\n",
     "    \n",
@@ -158,7 +177,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": null,
    "metadata": {
     "collapsed": true
    },
@@ -169,6 +188,8 @@
     "    'wc1': tf.Variable(tf.random_normal([3, 3, 1, 64])),\n",
     "    'wc2': tf.Variable(tf.random_normal([3, 3, 64, 128])),\n",
     "    'wc3': tf.Variable(tf.random_normal([3, 3, 128, 256])),\n",
+    "    'wc4': tf.Variable(tf.random_normal([3, 3, 256, 384])),\n",
+    "    'wc5': tf.Variable(tf.random_normal([3, 3, 384, 256])),\n",
     "    'wd1': tf.Variable(tf.random_normal([4*4*256, 1024])),\n",
     "    'wd2': tf.Variable(tf.random_normal([1024, 1024])),\n",
     "    'out': tf.Variable(tf.random_normal([1024, 10]))\n",
@@ -177,6 +198,8 @@
     "    'bc1': tf.Variable(tf.random_normal([64])),\n",
     "    'bc2': tf.Variable(tf.random_normal([128])),\n",
     "    'bc3': tf.Variable(tf.random_normal([256])),\n",
+    "    'bc4': tf.Variable(tf.random_normal([384])),\n",
+    "    'bc5': tf.Variable(tf.random_normal([256])),\n",
     "    'bd1': tf.Variable(tf.random_normal([1024])),\n",
     "    'bd2': tf.Variable(tf.random_normal([1024])),\n",
     "    'out': tf.Variable(tf.random_normal([n_classes]))\n",
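
The diff adds two more conv/pool/norm/dropout stages while leaving 'wd1' at [4*4*256, 1024]. As a sanity-check sketch (not part of the diff), assuming the notebook's usual 28x28 MNIST input and SAME-padded max pooling with k=2 at every pooling layer, the flattened feature map feeding the first dense layer shrinks to 1*1*256, so 'wd1' would likely need to become [1*1*256, 1024] for the tf.reshape(norm5, ...) call to line up:

    # Sketch only: spatial-size bookkeeping for the extended network, assuming a
    # 28x28 MNIST input and SAME-padded max pooling with stride k=2 at every
    # pooling layer (each pool halves the spatial size, rounding up).
    size = 28
    for layer in ('pool1', 'pool2', 'pool3', 'pool4', 'pool5'):
        size = (size + 1) // 2    # ceil(size / 2)
        print(layer, size)        # 14, 7, 4, 2, 1

    # After pool5 the feature map is 1x1x256 under these assumptions, so the
    # first dense weight matrix would need to be [1*1*256, 1024] rather than
    # [4*4*256, 1024] for tf.reshape(norm5, [-1, ...]) to match.
    print(size * size * 256)      # 256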