Create a dataset folder and name it (e.g. dataset)
Create a subfolder named train inside the dataset folder
Create a subfolder named val inside the dataset folder
In the train folder, create and name a folder for each object class you want to train on
In the val folder, create and name a folder for each object class you want to train on
Put each class's images into the subfolder of the same name under train. These are the images the model is trained on; to train a reasonably accurate model, I suggest collecting at least about 500 images per class. Put the held-out images for each class into the matching subfolder under val.
The directory structure looks like this:
.
|-- train
| |-- animal
| |-- flower
| |-- guitar
| |-- houses
| `-- plane
`-- val
|-- animal
|-- flower
|-- guitar
|-- houses
`-- plane
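If you prefer, the same skeleton can be created programmatically. A minimal Python sketch, using the five example class names from this tutorial:
import os

classes = ['animal', 'flower', 'guitar', 'houses', 'plane']
for split in ('train', 'val'):
    for cls in classes:
        # exist_ok=True makes the script safe to re-run
        os.makedirs(os.path.join('dataset', split, cls), exist_ok=True)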
Train a ResNet-50 residual network with TensorFlow 2.0:
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.applications.resnet50 import ResNet50
import json
import os
batch_size = 32
epochs = 100
IMG_HEIGHT = 224
IMG_WIDTH = 224
num_classes = 5
PATH = os.path.join('/home/dongli/tensorflow2.0/corpus/dataset/')
train_dir = os.path.join(PATH, 'train')
validation_dir = os.path.join(PATH, 'val')
train_animal_dir = os.path.join(train_dir, 'animal')
train_flower_dir = os.path.join(train_dir, 'flower')
train_guitar_dir = os.path.join(train_dir, 'guitar')
train_houses_dir = os.path.join(train_dir, 'houses')
train_plane_dir = os.path.join(train_dir, 'plane')
validation_animal_dir = os.path.join(validation_dir, 'animal')
validation_flower_dir = os.path.join(validation_dir, 'flower')
validation_guitar_dir = os.path.join(validation_dir, 'guitar')
validation_houses_dir = os.path.join(validation_dir, 'houses')
validation_plane_dir = os.path.join(validation_dir, 'plane')
num_animal_tr = len(os.listdir(train_animal_dir))
num_flower_tr = len(os.listdir(train_flower_dir))
num_guitar_tr = len(os.listdir(train_guitar_dir))
num_houses_tr = len(os.listdir(train_houses_dir))
num_plane_tr = len(os.listdir(train_plane_dir))
num_animal_val = len(os.listdir(validation_animal_dir))
num_flower_val = len(os.listdir(validation_flower_dir))
num_guitar_val = len(os.listdir(validation_guitar_dir))
num_houses_val = len(os.listdir(validation_houses_dir))
num_plane_val = len(os.listdir(validation_plane_dir))
total_train = num_animal_tr+num_flower_tr+num_guitar_tr+num_houses_tr+num_plane_tr
total_val = num_animal_val + num_flower_val+num_guitar_val+num_houses_val+num_plane_val
print("Total training images:", total_train)
print("Total validation images:", total_val)
# Training set
# Apply rescaling plus width-shift and height-shift augmentation to the training images.
image_gen_train = ImageDataGenerator(
    rescale=1./255,
    width_shift_range=0.1,
    height_shift_range=0.1
)
train_data_gen = image_gen_train.flow_from_directory(batch_size=batch_size,
                                                     directory=train_dir,
                                                     shuffle=True,
                                                     target_size=(IMG_HEIGHT, IMG_WIDTH),
                                                     class_mode='categorical')
# Validation set (rescaling only, no augmentation)
image_gen_val = ImageDataGenerator(rescale=1./255)
val_data_gen = image_gen_val.flow_from_directory(batch_size=batch_size,
                                                 directory=validation_dir,
                                                 target_size=(IMG_HEIGHT, IMG_WIDTH),
                                                 class_mode='categorical')
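# If the directory layout is correct, each flow_from_directory call prints a
# line of the form "Found ... images belonging to 5 classes.", a quick sanity
# check before training starts.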
# Create the model
model = ResNet50(include_top=True, weights=None, classes=num_classes)
# Compile the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Print the model summary
model.summary()
# Directory for the saved model and the class-index mapping
model_class_dir = './flower_model/'
class_indices = train_data_gen.class_indices
class_json = {}
for eachClass in class_indices:
    class_json[str(class_indices[eachClass])] = eachClass
with open(os.path.join(model_class_dir, "model_class.json"), "w+") as json_file:
    json.dump(class_json, json_file, indent=4, separators=(",", " : "), ensure_ascii=True)
print("JSON Mapping for the model classes saved to ", os.path.join(model_class_dir, "model_class.json"))
model_name = 'model_ex-{epoch:03d}_acc-{val_accuracy:03f}.h5'
trained_model_dir='./flower_model/'
model_path = os.path.join(trained_model_dir, model_name)
checkpoint = tf.keras.callbacks.ModelCheckpoint(
    filepath=model_path,
    monitor='val_accuracy',
    verbose=2,
    # Only the weights are saved, so loading them later requires rebuilding
    # and compiling an identical model first (see below).
    save_weights_only=True,
    save_best_only=True,
    mode='max',
    period=1)
def lr_schedule(epoch):
    # Step-decay learning-rate schedule: drop the learning rate as training
    # passes fixed fractions of the total number of epochs.
    lr = 1e-3
    total_epochs = epochs  # the global epoch budget, not the current epoch
    check_1 = int(total_epochs * 0.9)
    check_2 = int(total_epochs * 0.8)
    check_3 = int(total_epochs * 0.6)
    check_4 = int(total_epochs * 0.4)
    if epoch > check_1:
        lr *= 1e-4
    elif epoch > check_2:
        lr *= 1e-3
    elif epoch > check_3:
        lr *= 1e-2
    elif epoch > check_4:
        lr *= 1e-1
    return lr
#lr_scheduler = tf.keras.callbacks.LearningRateScheduler(lr_schedule)
# ReduceLROnPlateau is used instead of the fixed schedule above; min_lr must
# sit below Adam's initial learning rate (1e-3) for the reduction to take effect.
lr_scheduler = tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss', factor=0.2,
                                                    patience=5, min_lr=1e-5)
num_train = len(train_data_gen.filenames)
num_test = len(val_data_gen.filenames)
print(num_train,num_test)
# Train the model
# Use fit_generator with the ImageDataGenerator pipelines to train the network.
history = model.fit_generator(
    train_data_gen,
    steps_per_epoch=int(num_train / batch_size),
    epochs=epochs,
    validation_data=val_data_gen,
    validation_steps=int(num_test / batch_size),
    callbacks=[checkpoint, lr_scheduler])
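The returned History object records the per-epoch metrics, so the training curves can be inspected afterwards. A minimal sketch, assuming matplotlib is installed (it is not used elsewhere in this tutorial):
import matplotlib.pyplot as plt

# 'accuracy' / 'val_accuracy' are the metric keys produced by
# compile(..., metrics=['accuracy']) in TF 2.x
plt.plot(history.history['accuracy'], label='train acc')
plt.plot(history.history['val_accuracy'], label='val acc')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.savefig('training_curves.png')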
Saved models (each filename encodes the epoch and validation accuracy, following the ModelCheckpoint pattern above):
flower_model
|-- model_class.json
|-- model_ex-001_acc-0.197690.h5
|-- model_ex-001_acc-0.199728.h5
|-- model_ex-002_acc-0.222826.h5
|-- model_ex-003_acc-0.230299.h5
|-- model_ex-004_acc-0.338315.h5
|-- model_ex-005_acc-0.442255.h5
|-- model_ex-006_acc-0.618886.h5
|-- model_ex-007_acc-0.629755.h5
|-- model_ex-008_acc-0.698370.h5
|-- model_ex-011_acc-0.798234.h5
|-- model_ex-012_acc-0.819973.h5
|-- model_ex-018_acc-0.834239.h5
|-- model_ex-020_acc-0.852582.h5
|-- model_ex-023_acc-0.877038.h5
|-- model_ex-024_acc-0.884511.h5
|-- model_ex-029_acc-0.890625.h5
|-- model_ex-030_acc-0.908967.h5
|-- model_ex-035_acc-0.910326.h5
|-- model_ex-041_acc-0.930707.h5
|-- model_ex-051_acc-0.953804.h5
|-- model_ex-054_acc-0.958560.h5
`-- model_ex-095_acc-0.959239.h5
On loading saved models: if the file contains both the trained weights and the graph structure, import it with load_model(). If only a weights file was saved, as here, use load_weights(): you must first rebuild an identical model and compile it, and only then can the weights be loaded successfully.
class_num = 5  # must match the number of classes the checkpoint was trained with
def create_model():
    base_model = ResNet50(include_top=True, weights=None, classes=class_num)
    model = tf.keras.Model(inputs=base_model.input, outputs=base_model.output)
    return model
# Rebuild the model
model = create_model()
# Compile the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Load the weights file
model.load_weights('./flower_model/model_ex-023_acc-0.877038.h5')
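For comparison, if a complete model had been saved with model.save() (architecture plus weights), it could be restored in a single call with no rebuilding. A sketch, assuming a hypothetical full-model file; the checkpoints in this tutorial are weights-only, so this does not apply to them:
# Hypothetical: only valid for a file written by model.save(), not for the
# save_weights_only=True checkpoints produced above.
full_model = tf.keras.models.load_model('./flower_model/full_model.h5')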
Load the trained model weights to predict on a new image:
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
from PIL import Image
import numpy as np
from io import BytesIO
import json
import requests
from tensorflow.keras.applications.resnet50 import ResNet50

CLASS_INDEX = None
input_image_size = 224
class_num = 5
model_jsonPath = './flower_model/model_class.json'
def preprocess_input(x):
    # Match the training-time rescaling (1./255)
    x *= (1./255)
    return x

def decode_predictions(preds, top=5, model_json=""):
    # Map predicted class indices back to class names via model_class.json
    global CLASS_INDEX
    if CLASS_INDEX is None:
        with open(model_json) as json_file:
            CLASS_INDEX = json.load(json_file)
    results = []
    for pred in preds:
        top_indices = pred.argsort()[-top:][::-1]
        for i in top_indices:
            each_result = []
            each_result.append(CLASS_INDEX[str(i)])
            each_result.append(pred[i])
            results.append(each_result)
    return results
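# Quick sanity check of decode_predictions on a hand-made softmax vector.
# fake_pred is hypothetical, not a real model output; the class names in the
# expected result assume the alphabetical mapping described earlier.
# fake_pred = np.array([[0.05, 0.6, 0.2, 0.1, 0.05]])
# decode_predictions(fake_pred, top=2, model_json=model_jsonPath)
# -> [['flower', 0.6], ['guitar', 0.2]]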
prediction_results = []
prediction_probabilities = []
url='https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1573119512&di=95ad0908ab5e5ce22a674471f0e4d5d1&imgtype=jpg&er=1&src=http%3A%2F%2Fwww.sinaimg.cn%2Fjc%2Fp%2F2007-06-21%2FU2143P27T1D450794F3DT20070621164533.jpg'
# Download the test image and preprocess it the same way as the training data
response = requests.get(url).content
image_input = Image.open(BytesIO(response))
image_input = image_input.convert('RGB')
image_input = image_input.resize((input_image_size, input_image_size))
image_input = np.expand_dims(image_input, axis=0)
image_to_predict = np.asarray(image_input, dtype=np.float64)
image_to_predict = preprocess_input(image_to_predict)
def create_model():
    base_model = ResNet50(include_top=True, weights=None, classes=class_num)
    model = tf.keras.Model(inputs=base_model.input, outputs=base_model.output)
    return model
model = create_model()
# Compile the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
model.load_weights('./flower_model/model_ex-023_acc-0.877038.h5')
prediction = model.predict(x=image_to_predict)
try:
    predictiondata = decode_predictions(prediction, top=int(class_num), model_json=model_jsonPath)
    for result in predictiondata:
        prediction_results.append(str(result[0]))
        prediction_probabilities.append(result[1] * 100)
except Exception as e:
    raise ValueError("An error occurred! Try again.") from e
print(prediction_results[0], prediction_probabilities[0])