From 1654fde6115d5a6780b72ff266de135f85f33ca3 Mon Sep 17 00:00:00 2001
From: ZXF <1150123839@qq.com>
Date: Tue, 15 Sep 2020 19:54:26 +0800
Subject: [PATCH 1/2] add my files

---
 .../README.md"  |  50 +-
 .../code.ipynb" | 667 ++++++++++++++++++
 .../src/1.png"  | Bin 0 -> 1370 bytes
 .../src/2.png"  | Bin 0 -> 1696 bytes
 .../src/3.png"  | Bin 0 -> 7604 bytes
 5 files changed, 714 insertions(+), 3 deletions(-)
 create mode 100644 "1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/code.ipynb"
 create mode 100644 "1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/src/1.png"
 create mode 100644 "1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/src/2.png"
 create mode 100644 "1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/src/3.png"

diff --git "a/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md" "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md"
index b28a0c16..99309b38 100644
--- "a/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md"
+++ "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md"
@@ -1,7 +1,51 @@
 # Challenge 1: Intelligent Search for Lost Pets
 
-As material living standards rise, more and more families choose to keep small animals. They are not just pets: many people treat them as family members and give them every kind of love and care. Even so, accidents are hard to avoid. The unexpected loss of a pet troubles many families, and handing out flyers and posting notices all over town wastes time while achieving little.
+* **Project introduction:**
 
-If artificial intelligence could solve this problem, it would surely be a blessing for many pet-owning families: "dog-face recognition", smart tracking devices, prediction of "runaway" routes, and so on.
+  As material living standards rise, pet dogs play an ever larger role in people's lives. They are not just pets: many people treat them as family members and give them every kind of love and care. Even so, accidents are hard to avoid. The unexpected loss of a pet troubles many families, and handing out flyers and posting notices all over town wastes time while achieving little. In practice there are many kinds of pets, and each kind has many breeds. Taking pet dogs as an example, many breeds look so similar that even experts who handle dogs regularly have trouble telling them apart, so when a pet goes missing, confirming its breed becomes a real problem. To improve breed identification, this project takes the most common pet, the dog, as its subject and trains on the [Stanford Dogs dataset](http://vision.stanford.edu/aditya86/ImageNetDogs/) (20,580 images covering 120 common breeds), producing a dog-breed classification model with high accuracy. The model uses four networks to extract features from dog images, fuses the extracted features, and classifies them with a DNN. Experiments show that the method classifies pet dogs effectively.
+
+* **Screenshots:**
+
+  1. Predicted labels compared with the actual labels (each number encodes one dog breed)
+
+  ![1](.\狗狗种类识别 - Sunburst\src\1.png)
+
+  2. Prediction accuracy
+
+  ![2](.\狗狗种类识别 - Sunburst\src\2.png)
+
+  3. Predictions output as per-breed probabilities
+
+  ![3](.\狗狗种类识别 - Sunburst\src\3.png)
+
+* **Installation and build guide:**
+
+  1. Download the [Stanford Dogs dataset](http://vision.stanford.edu/aditya86/ImageNetDogs/) (the images.tar file), place it in the "狗狗种类识别 - Sunburst" folder, and extract it. Extraction creates an "Images" folder in that directory.
+
+  2. Install the environment the code needs:
+
+     ```
+     numpy
+     pandas
+     tensorflow-gpu==1.15.1
+     matplotlib
+     scikit-learn
+     tqdm
+     keras==2.3.1
+     ```
+  3. Open code.ipynb in Jupyter Notebook and run the cells in order to train the model.
+
+  4. Pass in new image data and call the relevant functions to predict; a sketch of what that can look like is shown below.
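+
+     As a rough illustration of step 4 (this helper is not shipped in the notebook): after running code.ipynb, `get_features`, the four pretrained extractors with their preprocessors, `img_size`, `dog_breeds`, and the trained `dnn` are all in scope, and a new photo can be classified as sketched here. The path `my_dog.jpg` is a placeholder, and each call rebuilds the four extractors, which is slow but keeps the sketch simple.
+
+     ```python
+     import numpy as np
+     from tensorflow.keras.preprocessing.image import load_img
+
+     def predict_breed(img_path):
+         # Build the same fused feature vector the classifier was trained on.
+         img = np.expand_dims(np.array(load_img(img_path, target_size=img_size)), axis=0)
+         features = np.concatenate([
+             get_features(InceptionV3, inception_preprocessor, img_size, img),
+             get_features(Xception, xception_preprocessor, img_size, img),
+             get_features(NASNetLarge, nasnet_preprocessor, img_size, img),
+             get_features(InceptionResNetV2, inc_resnet_preprocessor, img_size, img),
+         ], axis=-1)
+         probs = dnn.predict(features)[0]
+         return dog_breeds[probs.argmax()]
+
+     print(predict_breed('my_dog.jpg'))
+     ```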
+
+* **Team:**
+
+  Team name: Sunburst
+
+  Team member: 赵现锋
+
+  Contact: 17864211005
+
+* **AWS services used:**
+
+  The model was trained on GPUs provided by Amazon SageMaker.
 
-Please develop a product (software or hardware) on the theme of "intelligent search for lost pets" using artificial-intelligence technology.
diff --git "a/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/code.ipynb" "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/code.ipynb"
new file mode 100644
index 00000000..05464de5
--- /dev/null
+++ "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/code.ipynb"
@@ -0,0 +1,667 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "%matplotlib inline"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "Using TensorFlow backend.\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Import the required packages\n",
+    "import numpy as np\n",
+    "import pandas as pd\n",
+    "import os\n",
+    "import tensorflow as tf\n",
+    "import matplotlib.pyplot as plt \n",
+    "import matplotlib.image as mpimg\n",
+    "from sklearn.model_selection import train_test_split\n",
+    "from tqdm import tqdm\n",
+    "np.random.seed(42)\n",
+    "\n",
+    "import keras\n",
+    "from keras.preprocessing.image import ImageDataGenerator\n",
+    "from keras.models import Model\n",
+    "from keras.layers import BatchNormalization, Dense, GlobalAveragePooling2D, Lambda, Dropout, InputLayer, Input\n",
+    "from keras.utils import to_categorical\n",
+    "from tensorflow.keras.preprocessing.image import load_img"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "120\n"
+     ]
+    },
+    {
+     "data": {
+      "text/plain": [
+       "['affenpinscher',\n",
+       " 'afghan_hound',\n",
+       " 'african_hunting_dog',\n",
+       " 'airedale',\n",
+       " 'american_staffordshire_terrier',\n",
+       " 'appenzeller',\n",
+       " 'australian_terrier',\n",
+       " 'basenji',\n",
+       " 'basset',\n",
+       " 'beagle']"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# Collect the breed names from the class folder names\n",
+    "data_dir = './Images'\n",
+    "dog_breeds = []\n",
+    "for name in os.listdir(data_dir):\n",
+    "    dog_breeds.append(name.split('-')[1].lower())\n",
+    "dog_breeds = sorted(dog_breeds)\n",
+    "n_classes = len(dog_breeds)\n",
+    "print(n_classes)\n",
+    "dog_breeds[:10]"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Map each breed name to a numeric class id\n",
+    "class_to_num = dict(zip(dog_breeds, range(n_classes)))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "20580\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Count all image files in the dataset\n",
+    "data_size = sum([len(files) for root,dirs,files in os.walk(data_dir)])\n",
+    "print(data_size)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def images_to_array(data_dir, img_size = (224,224,3)):\n",
+    "    '''\n",
+    "    1- Read the images from the dataset directory\n",
+    "    2- Resize them and store them in one numpy array\n",
+    "    3- Read the breed label of each image\n",
+    "    4- One-hot encode the labels\n",
+    "    5- Shuffle the images and labels together\n",
+    "    '''\n",
+    "    X = np.zeros([data_size, img_size[0], img_size[1], img_size[2]], dtype=np.uint8)\n",
+    "    y = np.zeros([data_size,1], dtype=np.uint8)\n",
+    "    # Read data and labels.\n",
+    "    cnt_dir = 0  # directory counter; os.walk also yields the root, so this ends at n_classes + 1\n",
+    "    cnt_file = 0\n",
+    "    for root,dirs,files in os.walk(data_dir):\n",
+    "        for now_file in files:\n",
+    "            img_dir = os.path.join(root, now_file)\n",
+    "            img_pixels = load_img(img_dir, target_size=img_size)\n",
+    "            X[cnt_file] = img_pixels\n",
+    "\n",
+    "            image_breed = root.split('-')[1].lower()\n",
+    "            y[cnt_file] = class_to_num[image_breed]\n",
+    "            cnt_file += 1\n",
+    "            print(\"%3d / %3d , %5d / %5d done! \"%(cnt_dir,n_classes,cnt_file,data_size),end='\\r')\n",
+    "        cnt_dir += 1\n",
+    "        print(\"%3d / %3d , %5d / %5d done! \"%(cnt_dir,n_classes,cnt_file,data_size),end='\\r')\n",
+    "    \n",
+    "    print()\n",
+    "    # One-hot encode the labels\n",
+    "    print('To one hot encoding! ',end='\\r')\n",
+    "    y = to_categorical(y)\n",
+    "    print('one hot encoding done! ',end='\\r')\n",
+    "    # Shuffle images and labels with the same permutation\n",
+    "    print('shuffling! ',end='\\r')\n",
+    "    ind = np.random.permutation(data_size)\n",
+    "    X = X[ind]\n",
+    "    y = y[ind]\n",
+    "    print('shuffling done! ',end='\\r')\n",
+    "    print('To one hot encoding and shuffling done ')\n",
+    "    print('Output Data Size : ' ,X.shape)\n",
+    "    print('Output Label Size : ' ,y.shape)\n",
+    "    return X, y"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "121 / 120 , 20580 / 20580 done! \n",
+      "To one hot encoding and shuffling done \n",
+      "Output Data Size :  (20580, 331, 331, 3)\n",
+      "Output Label Size :  (20580, 120)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Use an image size of (331, 331, 3), the input size NASNetLarge expects.\n",
+    "# Note: the resulting uint8 array is roughly 6.3 GB, so this step needs ample RAM.\n",
+    "img_size = (331,331,3)\n",
+    "X, y = images_to_array(data_dir, img_size)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "train Data Size :  14406\n",
+      "test Data Size :  6174\n"
+     ]
+    }
+   ],
+   "source": [
+    "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)\n",
+    "print(\"train Data Size : \",X_train.shape[0])\n",
+    "print(\"test Data Size : \",X_test.shape[0])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Free the full arrays now that the split copies exist\n",
+    "del X, y"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def get_features(model_name, data_preprocessor, input_size, data):\n",
+    "    '''\n",
+    "    1- Build a feature extractor: a pretrained network (without its top) behind\n",
+    "       its matching preprocessing step, pooled to one vector per image\n",
+    "    2- Return the features it extracts from `data`\n",
+    "    '''\n",
+    "    # Prepare the pipeline.\n",
+    "    input_layer = Input(input_size)\n",
+    "    preprocessor = Lambda(data_preprocessor)(input_layer)\n",
+    "    base_model = model_name(weights='imagenet', include_top=False,\n",
+    "                            input_shape=input_size)(preprocessor)\n",
+    "    avg = GlobalAveragePooling2D()(base_model)\n",
+    "    feature_extractor = Model(inputs = input_layer, outputs = avg)\n",
+    "    # Extract the features.\n",
+    "    feature_maps = feature_extractor.predict(data, batch_size=128, verbose=1)\n",
+    "    print('Feature maps shape: ', feature_maps.shape)\n",
+    "    return feature_maps"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/ops/resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n",
+      "Instructions for updating:\n",
+      "If using Keras pass *_constraint arguments to layers.\n",
+      "WARNING:tensorflow:From /home/mist/.local/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:4070: The name tf.nn.max_pool is deprecated. Please use tf.nn.max_pool2d instead.\n",
+      "\n",
+      "WARNING:tensorflow:From /home/mist/.local/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:4074: The name tf.nn.avg_pool is deprecated. Please use tf.nn.avg_pool2d instead.\n",
+      "\n",
+      "WARNING:tensorflow:From /home/mist/.local/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py:422: The name tf.global_variables is deprecated. Please use tf.compat.v1.global_variables instead.\n",
+      "\n",
+      "14406/14406 [==============================] - 52s 4ms/step\n",
+      "Feature maps shape:  (14406, 2048)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Use InceptionV3 as a feature extractor\n",
+    "from keras.applications.inception_v3 import InceptionV3, preprocess_input\n",
+    "inception_preprocessor = preprocess_input\n",
+    "inception_features = get_features(InceptionV3,\n",
+    "                                  inception_preprocessor,\n",
+    "                                  img_size, X_train)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "14406/14406 [==============================] - 71s 5ms/step\n",
+      "Feature maps shape:  (14406, 2048)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Use Xception as a feature extractor\n",
+    "from keras.applications.xception import Xception, preprocess_input\n",
+    "xception_preprocessor = preprocess_input\n",
+    "xception_features = get_features(Xception,\n",
+    "                                 xception_preprocessor,\n",
+    "                                 img_size, X_train)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 13,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "14406/14406 [==============================] - 192s 13ms/step\n",
+      "Feature maps shape:  (14406, 4032)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Use NASNetLarge as a feature extractor\n",
+    "from keras.applications.nasnet import NASNetLarge, preprocess_input\n",
+    "nasnet_preprocessor = preprocess_input\n",
+    "nasnet_features = get_features(NASNetLarge,\n",
+    "                               nasnet_preprocessor,\n",
+    "                               img_size, X_train)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 14,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.7/inception_resnet_v2_weights_tf_dim_ordering_tf_kernels_notop.h5\n",
+      "219062272/219055592 [==============================] - 144s 1us/step\n",
+      "14406/14406 [==============================] - 99s 7ms/step\n",
+      "Feature maps shape:  (14406, 1536)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Use InceptionResNetV2 as a feature extractor\n",
+    "from keras.applications.inception_resnet_v2 import InceptionResNetV2, preprocess_input\n",
+    "inc_resnet_preprocessor = preprocess_input\n",
+    "inc_resnet_features = get_features(InceptionResNetV2,\n",
+    "                                   inc_resnet_preprocessor,\n",
+    "                                   img_size, X_train)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 15,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Free memory\n",
+    "del X_train"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 16,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Final feature maps shape (14406, 9664)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Fuse the four feature sets into one vector per image\n",
+    "final_features = np.concatenate([inception_features,\n",
+    "                                 xception_features,\n",
+    "                                 nasnet_features,\n",
+    "                                 inc_resnet_features,], axis=-1)\n",
+    "print('Final feature maps shape', final_features.shape)"
+   ]
+  },
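+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The fused width is simply the sum of the four extractors' pooled output widths: 2048 + 2048 + 4032 + 1536 = 9664. A quick sanity check, added here for illustration (not part of the original run):"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative check: the fused feature width equals the sum of the four widths.\n",
+    "widths = [f.shape[1] for f in (inception_features, xception_features,\n",
+    "                               nasnet_features, inc_resnet_features)]\n",
+    "assert final_features.shape[1] == sum(widths) == 9664"
+   ]
+  },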
"#Prepare call backs\n", + "EarlyStop_callback = keras.callbacks.EarlyStopping(monitor='val_loss', patience=10, restore_best_weights=True)\n", + "my_callback=[EarlyStop_callback]" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:Large dropout rate: 0.7 (>0.5). In TensorFlow 2.x, dropout() uses dropout rate instead of keep_prob. Please ensure that this is intended.\n", + "Train on 12965 samples, validate on 1441 samples\n", + "Epoch 1/60\n", + "12965/12965 [==============================] - 5s 396us/step - loss: 1.3162 - accuracy: 0.7753 - val_loss: 0.1831 - val_accuracy: 0.9403\n", + "Epoch 2/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.1698 - accuracy: 0.9440 - val_loss: 0.1643 - val_accuracy: 0.9480\n", + "Epoch 3/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.1435 - accuracy: 0.9530 - val_loss: 0.1621 - val_accuracy: 0.9452\n", + "Epoch 4/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.1252 - accuracy: 0.9602 - val_loss: 0.1635 - val_accuracy: 0.9507\n", + "Epoch 5/60\n", + "12965/12965 [==============================] - 1s 53us/step - loss: 0.1159 - accuracy: 0.9603 - val_loss: 0.1600 - val_accuracy: 0.9473\n", + "Epoch 6/60\n", + "12965/12965 [==============================] - 1s 51us/step - loss: 0.1015 - accuracy: 0.9665 - val_loss: 0.1630 - val_accuracy: 0.9500\n", + "Epoch 7/60\n", + "12965/12965 [==============================] - 1s 51us/step - loss: 0.0930 - accuracy: 0.9679 - val_loss: 0.1610 - val_accuracy: 0.9473\n", + "Epoch 8/60\n", + "12965/12965 [==============================] - 1s 53us/step - loss: 0.0819 - accuracy: 0.9728 - val_loss: 0.1624 - val_accuracy: 0.9459\n", + "Epoch 9/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.0730 - accuracy: 0.9749 - val_loss: 0.1598 - val_accuracy: 0.9452\n", + "Epoch 10/60\n", + "12965/12965 [==============================] - 1s 51us/step - loss: 0.0716 - accuracy: 0.9762 - val_loss: 0.1578 - val_accuracy: 0.9514\n", + "Epoch 11/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.0642 - accuracy: 0.9782 - val_loss: 0.1698 - val_accuracy: 0.9410\n", + "Epoch 12/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.0630 - accuracy: 0.9799 - val_loss: 0.1689 - val_accuracy: 0.9473\n", + "Epoch 13/60\n", + "12965/12965 [==============================] - 1s 51us/step - loss: 0.0589 - accuracy: 0.9800 - val_loss: 0.1635 - val_accuracy: 0.9486\n", + "Epoch 14/60\n", + "12965/12965 [==============================] - 1s 51us/step - loss: 0.0542 - accuracy: 0.9806 - val_loss: 0.1768 - val_accuracy: 0.9445\n", + "Epoch 15/60\n", + "12965/12965 [==============================] - 1s 51us/step - loss: 0.0515 - accuracy: 0.9825 - val_loss: 0.1629 - val_accuracy: 0.9528\n", + "Epoch 16/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.0462 - accuracy: 0.9856 - val_loss: 0.1656 - val_accuracy: 0.9452\n", + "Epoch 17/60\n", + "12965/12965 [==============================] - 1s 51us/step - loss: 0.0433 - accuracy: 0.9862 - val_loss: 0.1761 - val_accuracy: 0.9417\n", + "Epoch 18/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 0.0436 - accuracy: 0.9869 - val_loss: 0.1703 - val_accuracy: 0.9431\n", + "Epoch 19/60\n", + "12965/12965 [==============================] - 1s 52us/step - loss: 
+  {
+   "cell_type": "code",
+   "execution_count": 19,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "6174/6174 [==============================] - 28s 5ms/step\n",
+      "Feature maps shape:  (6174, 2048)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Extract the test-set features with the same four extractors\n",
+    "inception_features = get_features(InceptionV3, inception_preprocessor, img_size, X_test)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "6174/6174 [==============================] - 37s 6ms/step\n",
+      "Feature maps shape:  (6174, 2048)\n"
+     ]
+    }
+   ],
+   "source": [
+    "xception_features = get_features(Xception, xception_preprocessor, img_size, X_test)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 21,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "6174/6174 [==============================] - 97s 16ms/step\n",
+      "Feature maps shape:  (6174, 4032)\n"
+     ]
+    }
+   ],
+   "source": [
+    "nasnet_features = get_features(NASNetLarge, nasnet_preprocessor, img_size, X_test)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "6174/6174 [==============================] - 55s 9ms/step\n",
+      "Feature maps shape:  (6174, 1536)\n"
+     ]
+    }
+   ],
+   "source": [
+    "inc_resnet_features = get_features(InceptionResNetV2, inc_resnet_preprocessor, img_size, X_test)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Final feature maps shape (6174, 9664)\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Fuse the extracted test-set features\n",
+    "test_features = np.concatenate([inception_features,\n",
+    "                                xception_features,\n",
+    "                                nasnet_features,\n",
+    "                                inc_resnet_features],axis=-1)\n",
+    "print('Final feature maps shape', test_features.shape)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Predict on the test set\n",
+    "y_pred = dnn.predict(test_features, batch_size=128)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 39,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Collapse probabilities to predicted / true class ids\n",
+    "y_result = y_pred.argmax(axis=1)\n",
+    "y_test_result = y_test.argmax(axis=1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 40,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[ 85 62 40 ... 39 114 67]\n",
+      "[ 85 62 40 ... 39 114 67]\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(y_result)\n",
+    "print(y_test_result)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 42,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "0.9442824748947198\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Overall test accuracy: fraction of images whose predicted breed matches the true breed\n",
+    "print((y_result == y_test_result.astype(int)).sum()/y_test_result.shape[0])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 43,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[[1.2796981e-05 1.3250595e-06 7.1861774e-07 ... 1.1318945e-07\n",
+      "  2.0019799e-07 1.4043926e-06]\n",
+      " [1.1272825e-06 9.4958580e-07 1.1536797e-06 ... 5.3315927e-07\n",
+      "  4.4875711e-07 8.8954891e-07]\n",
+      " [1.6093598e-07 8.2706998e-07 1.0283359e-06 ... 2.5320762e-07\n",
+      "  2.4572310e-07 9.4702358e-08]\n",
+      " ...\n",
+      " [8.5133320e-07 4.0933867e-07 6.8925914e-07 ... 7.4626388e-07\n",
+      "  1.1052952e-05 6.5475359e-07]\n",
+      " [9.8053363e-07 8.3240536e-07 1.5329607e-06 ... 8.9162114e-07\n",
+      "  1.3118838e-06 1.4811222e-06]\n",
+      " [1.6023027e-06 2.2533499e-07 2.5402828e-06 ... 4.4936533e-06\n",
+      "  2.0640696e-06 9.9410147e-07]]\n"
+     ]
+    }
+   ],
+   "source": [
+    "print(y_pred)"
+   ]
+  },
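+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Added for illustration (not part of the original run): mapping the probability rows of `y_pred` back to breed names gives a human-readable result, for example the five most likely breeds for one test image."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Illustrative sketch: list the k most likely breeds for one test image.\n",
+    "def top_breeds(probs, k=5):\n",
+    "    top = probs.argsort()[::-1][:k]  # indices of the k largest probabilities\n",
+    "    return [(dog_breeds[i], float(probs[i])) for i in top]\n",
+    "\n",
+    "top_breeds(y_pred[0])"
+   ]
+  },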
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.7"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git "a/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/src/1.png" "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/src/1.png"
new file mode 100644
index 0000000000000000000000000000000000000000..442c41b6022246232fc9ff40cdbfa073cca66fd7
GIT binary patch
literal 1370
[binary image data omitted]
diff --git "a/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/src/2.png" "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/\347\213\227\347\213\227\347\247\215\347\261\273\350\257\206\345\210\253 - Sunburst/src/2.png"
new file mode 100644
GIT binary patch
literal 1696
[binary image data omitted]
zfN`TH3lZl3+%w^FBl1mE*Eca;T2Q&mEl^MxR`-@u&1hX!wl#0B@Z|_YTKv?Zk)c=e z&b(oA^Z<9!kmILONht{);q>1@w-JE@F+dhN;Q{9yVG~9VGr@rY*hFi?wW(JS`o-?T zqdt7s0r!3b`+2{U%AibG5`(-V3)+jEA7}#UfAFlhpu@1A3r2;vk#(XhyQOjtPl?IZ zCQeyaD(J>UvaLvE5i~{;OkdR9FCBUr^`Rs2wx(YtRpOaAeE!9&)$!rsid+tXU#%5q z-4{@U7}}1ek2CRaGuT~IWC>q3HOwJ9cI^oiH#%XNgRwR-Rw%)1{kyV0#Xo+CqblCG zbI`2Vj%#)F#Dj$lTV?1@4h5LgzeM75U z9c_g+q&hvh@58UwL64Z~j@K^zNRq*%zO#Q}I|P&(>^IC~5ynhp6sA*R_L~mhz3YW8 zWBPr*=kx~2hC0Sa&GR7(JoWQ`!yf`2kvyTr$v~5J7073EXnY2)5*uDEEp<5#r*%$& zJ>$11RS0V3S1bLtE07U-`4}bB>33c-PV*38N}AYnNCc?A(fE_}ztZ=`{EyVLYJX$N z5vu|02Z=-&p&`=1RwNBuS6!&BIGM~)fm&;@Vbfwm^Stl*T=gdgvwthtEJ1%c=tSs@ zj2e3?o$P+LK}+WIopV)4CyIVQwR3|*I8gIZ<8z@rA(lm#R%lna*vfyQGuUuRwp4h#a56r_3@xPYOF#8>F;9dD8=%ycV2T9pTs})$(cpX(RGpT`SCZNAjg0Kg2<-@14X_D8*$vHiDdPDXr!(4R&GAodB`IKXcENW0(pIJM|4_xP!;eaP;k z;4A%B59Q5%`KDBVKqD1VFUH9K*a)idO;i#0$sL%&MOsMi{3~JX<{pdvG4qMkSeocL z|Gji@wD5=VZ_?@JcdrIr(BwdA*tgSm^XN=P+V4j6#%}mpM|N=ak(==fW-x-udR7)ObRd z+}Fmk>W2pP{ER=VK~RXm>$)nr2%04+tMOX;B^8U+9ZTPNvbXQA>FE0{Dt}L$4c&-1 z#`gW35Zh}1LUIsolokbKDAweY7nbpeO2?3!MM+iVIvsmVI^TN|Z8C?9ENj!5&ow_Vc?fx~2kM&wcl z+A>=3qb5q$!OxK+m;%C|ryTg(xj+8Ru1&lR2jkPsCp%rfbpy&EcIDUi4Dx?#fZW`; zlk{wvF&RbcOe5N9zqkztcFT?7n|hEF{y{4+4T4Yc>x{7K zIREo5TH||1fj3_vo{5|qNVk1>hK3wZxR%w{gad=KRU6RuTNEHDO)OWCds$yY`G+H- z??leRERJlk;}MzF1FM2lc8N{qUO$!zH9Jx1GxL4wcVrC@NVqT)d%gJP9U5D0ykoTX zgenlqwc5Ct(r19@z)U2sseF^&``H)e8W>9XqD9HjpLW7-Pa-D8b-HX5f)np4T2*@U z*QempUbselkKhfm9P~iHZyT6@UppcCmS#n+Sawg*A}4;eT<^*5nE>6yq~tpxtZ;6J za^==%6Fe^GA^J^Vsh9D%tf>YPwrA2t(XusE_&St%&WjhG#Mz)b*RZnhtE)TyS!-Wy?cf zI&^zI#P!Fa;Rh}C4UaX3)=GV`WfO^O=Bk&xm`Z^zq9ZEFC(lw%vO!pDYeGg8`>5o7 z{4Fl1R2)5N@(+PPhmtcsI{6c>{?h|FgDmPKTafyEx{3@Mn(;gmWqF}2O2)Apz*@M> z(nsh2ij|j8-?G1fW{@|KQcSZ{As{oPBv9H+#LYe4FJo@m!0?MgGZhbsf%V0FuclqX zLW8r$DZs?KAIOY#wANNu?aW z&FaG`1wZm>$HX)17@X676zf+8pv_7S$zkJd)nI))D6ezwI=58E)|NOJxlBXZ@+ zf}ee6$RHX@q6{jhTZb*4q~3ggBb4N9+ZB6q`*$nJM;*RnOb=i+Z7v=Ee!IwfMiS!% z25;J?myQVxm<$fv0}qOC6OM*mV8S!9up;*n^llDn`qg}TXWP$lYUl~D!6axavzEmP zkz_Xdb`AINmjFb-da;UfSN$W0P=6Cn22Pzi;XvCn0*c%}+`Abi2esehFNnj%>1+TVq?CO))}xjmIsu* zR!HENy0~Z7Up&UkhuzP?+w0?a;0i%dTJw_b*~5~;1MKgudZAG=Lov$mnUr(@jK ze?+0pn6hR-?S)T~>m)mzVinUCNJAKcV4eJkDH@pNf$LUDu^Z^B}RcNLP#;5m9 zM`klqOVT0~1wA7QkC!`H>m)LvRBO&Z_ec^cG`SQ%Pfx1;ZmV7TwDmY@YNR}O2G!0A z!2nTDHP;_0kq+C!n9r)5fK$v5?_CzvPFqKG)@c>vGJ|x=PJDTkK22_cOr8?v6jJ(L zghWgkQhEC`_K<>+VTaik-aN}DndUiMmrUlh%w9sienLGmtj`SUP1=HMdmTT}$_M6J z8yCwMc_N))pAhcDYUV;flv2ODw{#P7i5&l8+ljl|*CKN}Wf}L?d%!LvBh?}2dA)7s z>WCKKR5y4Rx%{nXU$A3ZGFu-A5<}VEX?WKW4?VlXp<)D*&DyJF<+~(y$gwwx?}UJmyHC$Q5nVBh>Ym=ZiiB&D zKfsNKka}74v36m8&5IJxS9RI-=Czno_;RAIx}#Rt*MUjO8{1T5gSzW&bV%!POjVzsHgpMIUqKda_(bS5CB-k_qH5U>r#R~Ig;79B(grEB;k`gKr^0AD)v8l>&Xsm9RVaz0gC`ZDkkxY8GLSAL0KOO8!^pr`1AE z><7EDm9^D7U58?(n-6o~1BUrUK(xW?k(Qf5>?A8F5kKYyzp&5!Sg~32>&h{Ju1F+> zH!dR9nfY53(4}YRESr5YqVu$Cs4$F(3?vdH?POeJs4ZR0={Yhi+$`|P=9r7r&O&zM z5!CQNA1dS_?rSTQpu*E=S~a+3r0wy234d%;MYquxTv>`PIF6|i^SYYjDJRn^*D{L;&WFBZZpUsX2BcwEZR!i|Q)S*HRQ73My zxY4+K#_)3>n6d1o^!Qc|&ljK&&p{4pmoMjg1vUhBcDhjSV(ilIn*faF=?LR5pCqFj zvWl`Hz&FBd(S(m!T(ChE!u8R4a`s7`Xz%Dd0%xP;5F9@PuM$)K-ZY){tFi0JJThZE8d6U|> zQSQqKR(p1ey_ylA364dI`jUW2J9^>jrjp9Pc14H*HEAFY)L2m|?%321LyFW@bq27W?zh;f zxB~)cz1Ozx00I=7|09)`T;L_HPwk5o1po!4JE_LL;v74|t6Gu4J5=2`%4%z%IV!Xs ziIX(51JoO#>Y#Uc+?%VO%h1Q2I>oNE=y8-7H}vAK1Ek#ci80jplg^{ z)D&!=mvsn90jOJUD}TQ%+Zl2*mt@?3I^Aw%D?i10*i90k$I~dz(lYV171K~za?Ify zx$7fuz;&>rO!3khHAOnV{;=VA3ofS%UDm2A8b%0$hV1*i2dFG@_y@2e?G36~E7coi zEj*tyjx<3g(9G$kydvds3gFLed;6)WQnlXE9?7P@u1` zYwmUSlg@;1#-mfmjVtL=t2~}cw~25#7gXqhckRM@`* Date: Tue, 15 Sep 2020 20:00:16 +0800 Subject: [PATCH 2/2] Update 

---
 .../README.md" | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git "a/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md" "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md"
index 99309b38..019411d7 100644
--- "a/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md"
+++ "b/1 \351\201\227\345\244\261\345\256\240\347\211\251\347\232\204\346\231\272\350\203\275\345\257\273\346\211\276/README.md"
@@ -8,15 +8,15 @@
   1. Predicted labels compared with the actual labels (each number encodes one dog breed)
 
-  ![1](.\狗狗种类识别 - Sunburst\src\1.png)
+  ![1](https://github.com/ZxfBugProgrammer/aws-hackathon-2020/blob/master/1%20%E9%81%97%E5%A4%B1%E5%AE%A0%E7%89%A9%E7%9A%84%E6%99%BA%E8%83%BD%E5%AF%BB%E6%89%BE/%E7%8B%97%E7%8B%97%E7%A7%8D%E7%B1%BB%E8%AF%86%E5%88%AB%20-%20Sunburst/src/1.png)
 
   2. Prediction accuracy
 
-  ![2](.\狗狗种类识别 - Sunburst\src\2.png)
+  ![2](https://github.com/ZxfBugProgrammer/aws-hackathon-2020/blob/master/1%20%E9%81%97%E5%A4%B1%E5%AE%A0%E7%89%A9%E7%9A%84%E6%99%BA%E8%83%BD%E5%AF%BB%E6%89%BE/%E7%8B%97%E7%8B%97%E7%A7%8D%E7%B1%BB%E8%AF%86%E5%88%AB%20-%20Sunburst/src/2.png)
 
   3. Predictions output as per-breed probabilities
 
-  ![3](.\狗狗种类识别 - Sunburst\src\3.png)
+  ![3](https://github.com/ZxfBugProgrammer/aws-hackathon-2020/blob/master/1%20%E9%81%97%E5%A4%B1%E5%AE%A0%E7%89%A9%E7%9A%84%E6%99%BA%E8%83%BD%E5%AF%BB%E6%89%BE/%E7%8B%97%E7%8B%97%E7%A7%8D%E7%B1%BB%E8%AF%86%E5%88%AB%20-%20Sunburst/src/3.png)
 
 * **Installation and build guide:**