From a4522fa0d82a6721f64d57f678f9890cad2f3da9 Mon Sep 17 00:00:00 2001 From: Taha EZ-ZOURY Date: Fri, 15 Nov 2024 09:50:58 +0100 Subject: [PATCH 1/4] Added a 4th convolutional layer (conv4) to the AlexNet architecture to align with standard model design. --- .../notebooks/3_neural_networks/alexnet.ipynb | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb index 8d70ee2b..4539a470 100644 --- a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb +++ b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb @@ -93,7 +93,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": { "collapsed": true }, @@ -142,6 +142,15 @@ " # Apply Dropout\n", " norm3 = tf.nn.dropout(norm3, _dropout)\n", "\n", + " # Add a 4th Convolution Layer\n", + " conv4 = conv2d('conv4', norm3, _weights['wc4'], _biases['bc4'])\n", + " # Apply Max Pooling\n", + " pool4 = max_pool('pool4', conv4, k=2)\n", + " # Apply Normalization\n", + " norm4 = norm('norm4', pool4, lsize=4)\n", + " # Apply Dropout\n", + " norm4 = tf.nn.dropout(norm4, _dropout)\n", + "\n", " # Fully connected layer\n", " # Reshape conv3 output to fit dense layer input\n", " dense1 = tf.reshape(norm3, [-1, _weights['wd1'].get_shape().as_list()[0]]) \n", From 4b8143541de8aa2b37b705e270e31840860b2367 Mon Sep 17 00:00:00 2001 From: Taha EZ-ZOURY Date: Fri, 15 Nov 2024 09:57:32 +0100 Subject: [PATCH 2/4] Added a 5th convolutional layer (conv5) to extend the architecture and align with AlexNet's deeper structure.
--- .../notebooks/3_neural_networks/alexnet.ipynb | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb index 4539a470..450fb63d 100644 --- a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb +++ b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb @@ -151,6 +151,15 @@ " # Apply Dropout\n", " norm4 = tf.nn.dropout(norm4, _dropout)\n", "\n", + " # Add a 5th Convolution Layer\n", + " conv5 = conv2d('conv5', norm4, _weights['wc5'], _biases['bc5'])\n", + " # Apply Max Pooling\n", + " pool5 = max_pool('pool5', conv5, k=2)\n", + " # Apply Normalization\n", + " norm5 = norm('norm5', pool5, lsize=4)\n", + " # Apply Dropout\n", + " norm5 = tf.nn.dropout(norm5, _dropout)\n", + "\n", " # Fully connected layer\n", " # Reshape conv3 output to fit dense layer input\n", " dense1 = tf.reshape(norm3, [-1, _weights['wd1'].get_shape().as_list()[0]]) \n", From 20ef87ef26dccc5246cf77524800e3239f89cd21 Mon Sep 17 00:00:00 2001 From: Taha EZ-ZOURY Date: Fri, 15 Nov 2024 09:59:09 +0100 Subject: [PATCH 3/4] Added weights and biases for conv4 and conv5 to the model's parameter dictionaries.
--- .../notebooks/3_neural_networks/alexnet.ipynb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb index 450fb63d..1bdc43f0 100644 --- a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb +++ b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb @@ -176,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": { "collapsed": true }, @@ -187,6 +187,8 @@ " 'wc1': tf.Variable(tf.random_normal([3, 3, 1, 64])),\n", " 'wc2': tf.Variable(tf.random_normal([3, 3, 64, 128])),\n", " 'wc3': tf.Variable(tf.random_normal([3, 3, 128, 256])),\n", + " 'wc4': tf.Variable(tf.random_normal([3, 3, 256, 384])),\n", + " 'wc5': tf.Variable(tf.random_normal([3, 3, 384, 256])),\n", " 'wd1': tf.Variable(tf.random_normal([4*4*256, 1024])),\n", " 'wd2': tf.Variable(tf.random_normal([1024, 1024])),\n", " 'out': tf.Variable(tf.random_normal([1024, 10]))\n", @@ -195,6 +197,8 @@ " 'bc1': tf.Variable(tf.random_normal([64])),\n", " 'bc2': tf.Variable(tf.random_normal([128])),\n", " 'bc3': tf.Variable(tf.random_normal([256])),\n", + " 'bc4': tf.Variable(tf.random_normal([384])),\n", + " 'bc5': tf.Variable(tf.random_normal([256])),\n", " 'bd1': tf.Variable(tf.random_normal([1024])),\n", " 'bd2': tf.Variable(tf.random_normal([1024])),\n", " 'out': tf.Variable(tf.random_normal([n_classes]))\n", From 1a7b16ae8747d5aeb6ef5bc3f2fbeb72d546a818 Mon Sep 17 00:00:00 2001 From: Taha EZ-ZOURY Date: Fri, 15 Nov 2024 10:01:14 +0100 Subject: [PATCH 4/4] Updated the input to the first fully connected layer to align with the output shape of the added conv5 layer.
--- .../notebooks/3_neural_networks/alexnet.ipynb | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb index 1bdc43f0..d5a65861 100644 --- a/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb +++ b/deep-learning/tensor-flow-examples/notebooks/3_neural_networks/alexnet.ipynb @@ -161,8 +161,9 @@ " norm5 = tf.nn.dropout(norm5, _dropout)\n", "\n", " # Fully connected layer\n", - " # Reshape conv3 output to fit dense layer input\n", - " dense1 = tf.reshape(norm3, [-1, _weights['wd1'].get_shape().as_list()[0]]) \n", + " # Update the reshape input for the fully connected layer to reflect the output of norm5\n", + " dense1 = tf.reshape(norm5, [-1, _weights['wd1'].get_shape().as_list()[0]]) \n", + "\n", " # Relu activation\n", " dense1 = tf.nn.relu(tf.matmul(dense1, _weights['wd1']) + _biases['bd1'], name='fc1')\n", " \n",