
Commit 190400a

docs(notebook): fresh run
1 parent 4ca69b3 · commit 190400a

File tree

8 files changed: +181 -173 lines


examples/models-usages/cnn-classification/cnn_classification_mnist.ipynb

Lines changed: 11 additions & 11 deletions
@@ -141,7 +141,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Sequential(gradient_clip_threshold=5.0, enable_padding=False, padding_size=32, random_state=1732410251825704700)\n",
+"Sequential(gradient_clip_threshold=5.0, enable_padding=False, padding_size=32, random_state=1733508031486758100)\n",
 "-------------------------------------------------\n",
 "Layer 1: Input(input_shape=(28, 28, 1))\n",
 "Layer 2: Conv2D(num_filters=8, kernel_size=(3, 3), strides=(1, 1), padding=valid)\n",
@@ -190,16 +190,16 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"[==============================] 100% Epoch 1/10 - loss: 0.3256 - accuracy: 0.8995 - 29.60s - val_accuracy: 0.9630\n",
-"[==============================] 100% Epoch 2/10 - loss: 0.1166 - accuracy: 0.9651 - 29.43s - val_accuracy: 0.9739\n",
-"[==============================] 100% Epoch 3/10 - loss: 0.0918 - accuracy: 0.9727 - 30.73s - val_accuracy: 0.9782\n",
-"[==============================] 100% Epoch 4/10 - loss: 0.0792 - accuracy: 0.9768 - 31.23s - val_accuracy: 0.9804\n",
-"[==============================] 100% Epoch 5/10 - loss: 0.0711 - accuracy: 0.9798 - 30.94s - val_accuracy: 0.9812\n",
-"[==============================] 100% Epoch 6/10 - loss: 0.0646 - accuracy: 0.9821 - 29.67s - val_accuracy: 0.9820\n",
-"[==============================] 100% Epoch 7/10 - loss: 0.0600 - accuracy: 0.9845 - 29.63s - val_accuracy: 0.9816\n",
-"[==============================] 100% Epoch 8/10 - loss: 0.0563 - accuracy: 0.9861 - 30.65s - val_accuracy: 0.9815\n",
-"[==============================] 100% Epoch 9/10 - loss: 0.0536 - accuracy: 0.9871 - 30.41s - val_accuracy: 0.9806\n",
-"[==============================] 100% Epoch 10/10 - loss: 0.0518 - accuracy: 0.9874 - 30.63s - val_accuracy: 0.9831\n",
+"[==============================] 100% Epoch 1/10 - 35.50s - loss: 0.3256 - accuracy: 0.8995 - val_loss: 0.1242 - val_accuracy: 0.9630\n",
+"[==============================] 100% Epoch 2/10 - 33.76s - loss: 0.1166 - accuracy: 0.9651 - val_loss: 0.0835 - val_accuracy: 0.9739\n",
+"[==============================] 100% Epoch 3/10 - 32.98s - loss: 0.0918 - accuracy: 0.9727 - val_loss: 0.0693 - val_accuracy: 0.9782\n",
+"[==============================] 100% Epoch 4/10 - 34.40s - loss: 0.0792 - accuracy: 0.9768 - val_loss: 0.0653 - val_accuracy: 0.9804\n",
+"[==============================] 100% Epoch 5/10 - 34.27s - loss: 0.0711 - accuracy: 0.9798 - val_loss: 0.0666 - val_accuracy: 0.9812\n",
+"[==============================] 100% Epoch 6/10 - 35.14s - loss: 0.0646 - accuracy: 0.9821 - val_loss: 0.0653 - val_accuracy: 0.9820\n",
+"[==============================] 100% Epoch 7/10 - 36.32s - loss: 0.0600 - accuracy: 0.9845 - val_loss: 0.0693 - val_accuracy: 0.9816\n",
+"[==============================] 100% Epoch 8/10 - 34.42s - loss: 0.0563 - accuracy: 0.9861 - val_loss: 0.0740 - val_accuracy: 0.9815\n",
+"[==============================] 100% Epoch 9/10 - 34.39s - loss: 0.0536 - accuracy: 0.9871 - val_loss: 0.0734 - val_accuracy: 0.9806\n",
+"[==============================] 100% Epoch 10/10 - 34.89s - loss: 0.0518 - accuracy: 0.9874 - val_loss: 0.0738 - val_accuracy: 0.9831\n",
 "\n"
 ]
 },
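
Shape check for the summary above: with padding=valid and strides of (1, 1), a convolution shrinks each spatial dimension by kernel_size - 1 in the usual convention, so the 28x28x1 MNIST input leaves Layer 2 as a 26x26x8 feature map. A minimal sketch of that arithmetic (plain Python, independent of the notebook's library):

    # Output spatial size of a "valid" convolution:
    # out = floor((in - kernel) / stride) + 1 per dimension.
    def conv2d_valid_output_shape(input_hw, kernel_hw, strides=(1, 1)):
        h = (input_hw[0] - kernel_hw[0]) // strides[0] + 1
        w = (input_hw[1] - kernel_hw[1]) // strides[1] + 1
        return h, w

    print(conv2d_valid_output_shape((28, 28), (3, 3)))  # (26, 26) for the MNIST input above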

examples/models-usages/compression/autoencoder_fashonized_mnist_basic.ipynb

Lines changed: 41 additions & 27 deletions
Large diffs are not rendered by default.

examples/models-usages/compression/autoencoder_fashonized_mnist_convolution.ipynb

Lines changed: 30 additions & 28 deletions
Large diffs are not rendered by default.

examples/models-usages/mlp-classification-regression/cancer_binary.ipynb

Lines changed: 53 additions & 51 deletions
@@ -144,29 +144,30 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Sequential(temperature=1.0, gradient_clip_threshold=5.0, enable_padding=False, padding_size=32, random_state=1731610971086591500)\n",
+"Sequential(gradient_clip_threshold=5.0, enable_padding=False, padding_size=32, random_state=1733514982124278900)\n",
 "-------------------------------------------------\n",
 "Layer 1: Input(input_shape=(30,))\n",
 "Layer 2: Dense(units=100)\n",
 "Layer 3: Activation(ReLU)\n",
 "Layer 4: Dense(units=100)\n",
-"Layer 5: BatchNormalization(momentum=0.99, epsilon=1e-08)\n",
+"Layer 5: BatchNormalization(momentum=0.9, epsilon=1e-05)\n",
 "Layer 6: Activation(ReLU)\n",
 "Layer 7: Dense(units=100)\n",
-"Layer 8: BatchNormalization(momentum=0.99, epsilon=1e-08)\n",
+"Layer 8: BatchNormalization(momentum=0.9, epsilon=1e-05)\n",
 "Layer 9: Activation(ReLU)\n",
 "Layer 10: Dense(units=100)\n",
-"Layer 11: BatchNormalization(momentum=0.99, epsilon=1e-08)\n",
+"Layer 11: BatchNormalization(momentum=0.9, epsilon=1e-05)\n",
 "Layer 12: Activation(ReLU)\n",
 "Layer 13: Dense(units=100)\n",
-"Layer 14: BatchNormalization(momentum=0.99, epsilon=1e-08)\n",
+"Layer 14: BatchNormalization(momentum=0.9, epsilon=1e-05)\n",
 "Layer 15: Activation(ReLU)\n",
 "Layer 16: Dense(units=1)\n",
 "Layer 17: Activation(Sigmoid)\n",
 "-------------------------------------------------\n",
 "Loss function: BinaryCrossentropy\n",
 "Optimizer: Adam(learning_rate=0.0001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clip_norm=None, clip_value=None)\n",
-"-------------------------------------------------\n"
+"-------------------------------------------------\n",
+"\n"
 ]
 }
 ],
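
The BatchNormalization rows above now report momentum=0.9 and epsilon=1e-05 instead of 0.99 and 1e-08. In the usual formulation, momentum weights the running mean/variance update and epsilon guards the division during normalization; a rough NumPy sketch of that standard computation (an illustration of the general technique, not this library's internals):

    import numpy as np

    # Standard batch-norm training step, assuming the common convention
    # running_stat = momentum * running_stat + (1 - momentum) * batch_stat.
    def batchnorm_train_step(x, running_mean, running_var, gamma, beta,
                             momentum=0.9, epsilon=1e-05):
        batch_mean = x.mean(axis=0)
        batch_var = x.var(axis=0)
        # Running statistics are what inference uses later.
        running_mean = momentum * running_mean + (1 - momentum) * batch_mean
        running_var = momentum * running_var + (1 - momentum) * batch_var
        # Normalize, then scale and shift with the learned gamma/beta.
        x_hat = (x - batch_mean) / np.sqrt(batch_var + epsilon)
        return gamma * x_hat + beta, running_mean, running_var

A lower momentum lets the running statistics track recent batches more quickly, and a larger epsilon trades a little bias for numerical stability.
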
@@ -197,46 +198,47 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"[==============================] 100% Epoch 1/40 - loss: 0.7434 - accuracy: 0.5099 - 0.06s\n",
-"[==============================] 100% Epoch 2/40 - loss: 0.6053 - accuracy: 0.6440 - 0.08s\n",
-"[==============================] 100% Epoch 3/40 - loss: 0.4674 - accuracy: 0.7824 - 0.08s\n",
-"[==============================] 100% Epoch 4/40 - loss: 0.3663 - accuracy: 0.8681 - 0.08s\n",
-"[==============================] 100% Epoch 5/40 - loss: 0.3026 - accuracy: 0.8945 - 0.07s\n",
-"[==============================] 100% Epoch 6/40 - loss: 0.2609 - accuracy: 0.9121 - 0.08s\n",
-"[==============================] 100% Epoch 7/40 - loss: 0.2319 - accuracy: 0.9165 - 0.07s\n",
-"[==============================] 100% Epoch 8/40 - loss: 0.2113 - accuracy: 0.9231 - 0.07s\n",
-"[==============================] 100% Epoch 9/40 - loss: 0.1968 - accuracy: 0.9253 - 0.08s\n",
-"[==============================] 100% Epoch 10/40 - loss: 0.1869 - accuracy: 0.9319 - 0.07s\n",
-"[==============================] 100% Epoch 11/40 - loss: 0.1801 - accuracy: 0.9319 - 0.08s\n",
-"[==============================] 100% Epoch 12/40 - loss: 0.1756 - accuracy: 0.9297 - 0.08s\n",
-"[==============================] 100% Epoch 13/40 - loss: 0.1735 - accuracy: 0.9297 - 0.08s\n",
-"[==============================] 100% Epoch 14/40 - loss: 0.1733 - accuracy: 0.9275 - 0.08s\n",
-"[==============================] 100% Epoch 15/40 - loss: 0.1743 - accuracy: 0.9275 - 0.11s\n",
-"[==============================] 100% Epoch 16/40 - loss: 0.1765 - accuracy: 0.9297 - 0.08s\n",
-"[==============================] 100% Epoch 17/40 - loss: 0.1798 - accuracy: 0.9319 - 0.07s\n",
-"[==============================] 100% Epoch 18/40 - loss: 0.1829 - accuracy: 0.9297 - 0.08s\n",
-"[==============================] 100% Epoch 19/40 - loss: 0.1858 - accuracy: 0.9297 - 0.07s\n",
-"[==============================] 100% Epoch 20/40 - loss: 0.1883 - accuracy: 0.9275 - 0.08s\n",
-"[==============================] 100% Epoch 21/40 - loss: 0.1900 - accuracy: 0.9253 - 0.07s\n",
-"[==============================] 100% Epoch 22/40 - loss: 0.1910 - accuracy: 0.9253 - 0.07s\n",
-"[==============================] 100% Epoch 23/40 - loss: 0.1916 - accuracy: 0.9253 - 0.07s\n",
-"[==============================] 100% Epoch 24/40 - loss: 0.1920 - accuracy: 0.9231 - 0.07s\n",
-"[==============================] 100% Epoch 25/40 - loss: 0.1919 - accuracy: 0.9231 - 0.07s\n",
-"[==============================] 100% Epoch 26/40 - loss: 0.1911 - accuracy: 0.9209 - 0.07s\n",
-"[==============================] 100% Epoch 27/40 - loss: 0.1906 - accuracy: 0.9165 - 0.07s\n",
-"[==============================] 100% Epoch 28/40 - loss: 0.1904 - accuracy: 0.9143 - 0.07s\n",
-"[==============================] 100% Epoch 29/40 - loss: 0.1908 - accuracy: 0.9099 - 0.07s\n",
-"[==============================] 100% Epoch 30/40 - loss: 0.1919 - accuracy: 0.9055 - 0.08s\n",
-"[==============================] 100% Epoch 31/40 - loss: 0.1938 - accuracy: 0.9055 - 0.08s\n",
-"[==============================] 100% Epoch 32/40 - loss: 0.1958 - accuracy: 0.9011 - 0.07s\n",
-"[==============================] 100% Epoch 33/40 - loss: 0.1981 - accuracy: 0.8945 - 0.07s\n",
-"[==============================] 100% Epoch 34/40 - loss: 0.2009 - accuracy: 0.8923 - 0.07s\n",
-"[==============================] 100% Epoch 35/40 - loss: 0.2035 - accuracy: 0.8923 - 0.07s\n",
-"[==============================] 100% Epoch 36/40 - loss: 0.2055 - accuracy: 0.8901 - 0.07s\n",
-"[==============================] 100% Epoch 37/40 - loss: 0.2072 - accuracy: 0.8879 - 0.07s\n",
-"[==============================] 100% Epoch 38/40 - loss: 0.2088 - accuracy: 0.8857 - 0.07s\n",
-"[==============================] 100% Epoch 39/40 - loss: 0.2110 - accuracy: 0.8835 - 0.07s\n",
-"[==============================] 100% Epoch 40/40 - loss: 0.2136 - accuracy: 0.8791 - 0.07s\n"
+"[==============================] 100% Epoch 1/40 - 0.07s - loss: 0.7299 - accuracy: 0.5275\n",
+"[==============================] 100% Epoch 2/40 - 0.08s - loss: 0.5481 - accuracy: 0.6923\n",
+"[==============================] 100% Epoch 3/40 - 0.08s - loss: 0.3936 - accuracy: 0.8418\n",
+"[==============================] 100% Epoch 4/40 - 0.08s - loss: 0.2973 - accuracy: 0.9011\n",
+"[==============================] 100% Epoch 5/40 - 0.07s - loss: 0.2367 - accuracy: 0.9231\n",
+"[==============================] 100% Epoch 6/40 - 0.08s - loss: 0.1948 - accuracy: 0.9495\n",
+"[==============================] 100% Epoch 7/40 - 0.07s - loss: 0.1642 - accuracy: 0.9538\n",
+"[==============================] 100% Epoch 8/40 - 0.07s - loss: 0.1410 - accuracy: 0.9604\n",
+"[==============================] 100% Epoch 9/40 - 0.08s - loss: 0.1227 - accuracy: 0.9626\n",
+"[==============================] 100% Epoch 10/40 - 0.07s - loss: 0.1086 - accuracy: 0.9648\n",
+"[==============================] 100% Epoch 11/40 - 0.07s - loss: 0.0978 - accuracy: 0.9648\n",
+"[==============================] 100% Epoch 12/40 - 0.07s - loss: 0.0889 - accuracy: 0.9692\n",
+"[==============================] 100% Epoch 13/40 - 0.07s - loss: 0.0818 - accuracy: 0.9714\n",
+"[==============================] 100% Epoch 14/40 - 0.08s - loss: 0.0755 - accuracy: 0.9714\n",
+"[==============================] 100% Epoch 15/40 - 0.07s - loss: 0.0697 - accuracy: 0.9714\n",
+"[==============================] 100% Epoch 16/40 - 0.07s - loss: 0.0648 - accuracy: 0.9714\n",
+"[==============================] 100% Epoch 17/40 - 0.07s - loss: 0.0608 - accuracy: 0.9714\n",
+"[==============================] 100% Epoch 18/40 - 0.07s - loss: 0.0573 - accuracy: 0.9714\n",
+"[==============================] 100% Epoch 19/40 - 0.07s - loss: 0.0540 - accuracy: 0.9736\n",
+"[==============================] 100% Epoch 20/40 - 0.07s - loss: 0.0508 - accuracy: 0.9736\n",
+"[==============================] 100% Epoch 21/40 - 0.08s - loss: 0.0480 - accuracy: 0.9736\n",
+"[==============================] 100% Epoch 22/40 - 0.07s - loss: 0.0454 - accuracy: 0.9780\n",
+"[==============================] 100% Epoch 23/40 - 0.07s - loss: 0.0432 - accuracy: 0.9824\n",
+"[==============================] 100% Epoch 24/40 - 0.07s - loss: 0.0412 - accuracy: 0.9846\n",
+"[==============================] 100% Epoch 25/40 - 0.07s - loss: 0.0392 - accuracy: 0.9846\n",
+"[==============================] 100% Epoch 26/40 - 0.07s - loss: 0.0373 - accuracy: 0.9846\n",
+"[==============================] 100% Epoch 27/40 - 0.07s - loss: 0.0358 - accuracy: 0.9846\n",
+"[==============================] 100% Epoch 28/40 - 0.08s - loss: 0.0341 - accuracy: 0.9868\n",
+"[==============================] 100% Epoch 29/40 - 0.07s - loss: 0.0325 - accuracy: 0.9890\n",
+"[==============================] 100% Epoch 30/40 - 0.07s - loss: 0.0308 - accuracy: 0.9890\n",
+"[==============================] 100% Epoch 31/40 - 0.07s - loss: 0.0293 - accuracy: 0.9890\n",
+"[==============================] 100% Epoch 32/40 - 0.07s - loss: 0.0280 - accuracy: 0.9890\n",
+"[==============================] 100% Epoch 33/40 - 0.08s - loss: 0.0267 - accuracy: 0.9912\n",
+"[==============================] 100% Epoch 34/40 - 0.07s - loss: 0.0254 - accuracy: 0.9934\n",
+"[==============================] 100% Epoch 35/40 - 0.06s - loss: 0.0243 - accuracy: 0.9934\n",
+"[==============================] 100% Epoch 36/40 - 0.07s - loss: 0.0232 - accuracy: 0.9934\n",
+"[==============================] 100% Epoch 37/40 - 0.07s - loss: 0.0220 - accuracy: 0.9934\n",
+"[==============================] 100% Epoch 38/40 - 0.07s - loss: 0.0209 - accuracy: 0.9934\n",
+"[==============================] 100% Epoch 39/40 - 0.07s - loss: 0.0199 - accuracy: 0.9934\n",
+"[==============================] 100% Epoch 40/40 - 0.07s - loss: 0.0190 - accuracy: 0.9934\n",
+"\n"
 ]
 }
 ],
@@ -272,7 +274,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Test loss: 2.4638212205449603\n"
+"Test loss: 3.6060167913973595\n"
 ]
 }
 ],
@@ -323,10 +325,10 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Accuracy: 0.9035087719298246\n",
-"Precision: 0.9841269841269841\n",
-"Recall: 0.8611111111111112\n",
-"F1 Score: 0.9185185185185185\n"
+"Accuracy: 0.9824561403508771\n",
+"Precision: 1.0\n",
+"Recall: 0.9722222222222222\n",
+"F1 Score: 0.9859154929577464\n"
 ]
 }
 ],
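
For the metrics block above, the four figures follow directly from the confusion counts of the thresholded predictions; as a sanity check, precision 1.0 with recall 35/36 gives F1 = 2pr/(p+r) = 70/71 ≈ 0.9859, matching the printed value. A short way to reproduce such a report, sketched here with scikit-learn (y_test and a binary y_pred are assumed, as in the notebook):

    # Reproduce the printed report from true labels and 0/1 predictions.
    from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score

    def report_binary_metrics(y_true, y_pred):
        print(f"Accuracy: {accuracy_score(y_true, y_pred)}")
        print(f"Precision: {precision_score(y_true, y_pred)}")
        print(f"Recall: {recall_score(y_true, y_pred)}")
        print(f"F1 Score: {f1_score(y_true, y_pred)}")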

examples/models-usages/mlp-classification-regression/diabete_regression.ipynb

Lines changed: 19 additions & 30 deletions
@@ -145,7 +145,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Sequential(temperature=1.0, gradient_clip_threshold=5.0, enable_padding=False, padding_size=32, random_state=1731611022349365300)\n",
+"Sequential(gradient_clip_threshold=5.0, enable_padding=False, padding_size=32, random_state=1733515038822283600)\n",
 "-------------------------------------------------\n",
 "Layer 1: Input(input_shape=(10,))\n",
 "Layer 2: Dense(units=2)\n",
@@ -157,7 +157,8 @@
 "-------------------------------------------------\n",
 "Loss function: MeanSquaredError\n",
 "Optimizer: Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, clip_norm=None, clip_value=None)\n",
-"-------------------------------------------------\n"
+"-------------------------------------------------\n",
+"\n"
 ]
 }
 ],
@@ -188,21 +189,22 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"[==============================] 100% Epoch 1/10 - loss: 1.2543 - - 0.01s\n",
-"[==============================] 100% Epoch 2/10 - loss: 1.2448 - - 0.01s\n",
-"[==============================] 100% Epoch 3/10 - loss: 1.2368 - - 0.01s\n",
-"[==============================] 100% Epoch 4/10 - loss: 1.2296 - - 0.01s\n",
-"[==============================] 100% Epoch 5/10 - loss: 1.2239 - - 0.01s\n",
-"[==============================] 100% Epoch 6/10 - loss: 1.2188 - - 0.01s\n",
-"[==============================] 100% Epoch 7/10 - loss: 1.2145 - - 0.01s\n",
-"[==============================] 100% Epoch 8/10 - loss: 1.2106 - - 0.01s\n",
-"[==============================] 100% Epoch 9/10 - loss: 1.2071 - - 0.01s\n",
-"[==============================] 100% Epoch 10/10 - loss: 1.2037 - - 0.01s\n"
+"[==============================] 100% Epoch 1/10 - 0.01s - loss: 1.2543 \n",
+"[==============================] 100% Epoch 2/10 - 0.01s - loss: 1.2482 \n",
+"[==============================] 100% Epoch 3/10 - 0.01s - loss: 1.2422 \n",
+"[==============================] 100% Epoch 4/10 - 0.01s - loss: 1.2366 \n",
+"[==============================] 100% Epoch 5/10 - 0.01s - loss: 1.2320 \n",
+"[==============================] 100% Epoch 6/10 - 0.01s - loss: 1.2275 \n",
+"[==============================] 100% Epoch 7/10 - 0.01s - loss: 1.2231 \n",
+"[==============================] 100% Epoch 8/10 - 0.01s - loss: 1.2183 \n",
+"[==============================] 100% Epoch 9/10 - 0.01s - loss: 1.2134 \n",
+"[==============================] 100% Epoch 10/10 - 0.01s - loss: 1.2083 \n",
+"\n"
 ]
 },
 {
 "data": {
-"text/plain": ""
+"text/plain": []
 },
 "execution_count": 6,
 "metadata": {},
@@ -234,7 +236,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Test loss: 1.1178449818025717 function=MeanSquaredError\n"
+"Test loss: 1.1136541600695817 function=MeanSquaredError\n"
 ]
 }
 ],
@@ -264,7 +266,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"MAE: 0.888385818545332\n"
+"MAE: 0.8748635782918366\n"
 ]
 }
 ],
@@ -294,7 +296,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"MAE (original): 67.53570025620354\n"
+"MAE (original): 65.9770599527072\n"
 ]
 }
 ],
@@ -307,24 +309,11 @@
 "mae_original = np.mean(np.abs(y_test_original - y_pred))\n",
 "print(f'MAE (original): {mae_original}')"
 ]
-},
-{
-"cell_type": "code",
-"execution_count": 9,
-"metadata": {
-"collapsed": false,
-"ExecuteTime": {
-"end_time": "2024-11-14T19:03:42.579142200Z",
-"start_time": "2024-11-14T19:03:42.534073600Z"
-}
-},
-"outputs": [],
-"source": []
 }
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "Python 3 (ipykernel)",
+"display_name": "Python 3",
 "language": "python",
 "name": "python3"
 },
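
The diff keeps the notebook's own rescaling step (mae_original = np.mean(np.abs(y_test_original - y_pred))): the two MAE figures differ only in target scale, roughly 0.87 in standardized units versus about 66 in the original units of the diabetes target. A minimal sketch of that back-conversion, assuming the target was z-score standardized (the names here are illustrative, not taken from the notebook):

    import numpy as np

    # Undo a z-score scaling (y_scaled = (y - mean) / std) before computing MAE
    # in the target's original units.
    def mae_in_original_units(y_test_scaled, y_pred_scaled, target_mean, target_std):
        y_test_original = y_test_scaled * target_std + target_mean
        y_pred_original = y_pred_scaled * target_std + target_mean
        return np.mean(np.abs(y_test_original - y_pred_original))

Because MAE is scale-equivariant, this equals target_std times the standardized MAE.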

examples/models-usages/mlp-classification-regression/mnist_loading_saved_model.ipynb

Lines changed: 13 additions & 13 deletions
@@ -143,7 +143,7 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Validation Accuracy: 0.968\n"
+"Validation Accuracy: 0.9619166666666666\n"
 ]
 }
 ],
@@ -174,18 +174,18 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"Test Accuracy: 0.949\n",
+"Test Accuracy: 0.9524\n",
 "Confusion Matrix:\n",
-"[[ 969 0 1 0 0 4 0 1 3 2]\n",
-" [ 0 1113 3 4 0 2 1 2 9 1]\n",
-" [ 6 2 980 8 5 1 5 12 12 1]\n",
-" [ 2 0 7 957 0 16 0 8 14 6]\n",
-" [ 3 1 4 0 898 3 8 8 8 49]\n",
-" [ 12 1 0 22 1 833 4 4 11 4]\n",
-" [ 22 4 8 1 8 14 889 0 12 0]\n",
-" [ 3 4 15 12 4 0 0 970 1 19]\n",
-" [ 6 2 6 18 4 10 3 4 915 6]\n",
-" [ 7 4 1 5 10 1 0 8 7 966]]\n"
+"[[ 966 0 1 1 1 6 1 2 2 0]\n",
+" [ 0 1122 4 1 0 0 1 2 5 0]\n",
+" [ 7 2 981 8 4 0 7 12 8 3]\n",
+" [ 2 2 7 963 0 6 0 13 12 5]\n",
+" [ 2 3 7 0 904 0 7 4 3 52]\n",
+" [ 5 7 1 26 3 824 2 4 11 9]\n",
+" [ 11 5 7 1 9 16 907 0 2 0]\n",
+" [ 2 8 17 4 4 0 0 981 1 11]\n",
+" [ 3 4 6 15 4 9 6 7 910 10]\n",
+" [ 6 4 0 9 7 3 0 8 6 966]]\n"
 ]
 }
 ],
@@ -199,7 +199,7 @@
 ],
 "metadata": {
 "kernelspec": {
-"display_name": "Python 3 (ipykernel)",
+"display_name": "Python 3",
 "language": "python",
 "name": "python3"
 },
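
Read with the usual convention, rows of the confusion matrix above are the true digits and columns the predictions, so the diagonal holds the correct counts: it sums to 9,524 of the 10,000 test digits, matching the 0.9524 test accuracy. A short sketch of how such a report can be produced, using scikit-learn for illustration (the notebook itself may use its own utilities):

    from sklearn.metrics import accuracy_score, confusion_matrix

    def report_test_results(y_true, y_pred):
        # y_true / y_pred: integer class labels 0-9 (take argmax first if you have probabilities).
        print(f"Test Accuracy: {accuracy_score(y_true, y_pred)}")
        print("Confusion Matrix:")
        print(confusion_matrix(y_true, y_pred))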
