import tensorflow as tf
import matplotlib.pyplot as plt
import os,PIL,pathlib
import pandas as pd
import numpy as np
from tensorflow import keras
from tensorflow.keras import layers,models
from tensorflow.keras import layers, models, Input
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Conv2D, Dense, Flatten, Dropout,BatchNormalization,Activation
from tensorflow.keras.layers import MaxPooling2D, AveragePooling2D, Concatenate, Lambda,GlobalAveragePooling2D
from tensorflow.keras import backend as K
# 2. 導入數據 (2. Importing the data)
# 數據形式如下所示 (the data layout is as follows):
# images 中包含 5998 張交通標志的圖片,其中一共是 58 種類別。
# (the images folder holds 5,998 traffic-sign pictures across 58 classes.)
# annotations 中是各個圖片的名稱以及它所代表的種類,一共是 58 種。
# (annotations.csv lists each picture's file name and its class, 58 classes in total.)
# --- Image preprocessing ---

def preprocess_image(image):
    """Decode a JPEG byte string, resize it and scale pixels to [0, 1].

    Args:
        image: scalar string tensor holding raw JPEG file contents.

    Returns:
        float32 tensor of shape (299, 299, 3) with values in [0, 1].
    """
    image = tf.image.decode_jpeg(image, channels=3)
    image = tf.image.resize(image, [299, 299])
    return image / 255.0


def load_and_preprocess_image(path):
    """Read the image file at `path` and return the preprocessed tensor."""
    image = tf.io.read_file(path)
    return preprocess_image(image)
# --- Load the data ---
# Root directory that holds the traffic-sign images.
data_dir = "E:/tmp/.keras/datasets/trasig_photos/images"
data_dir = pathlib.Path(data_dir)

# CSV with one row per image: file_name + integer category label.
train = pd.read_csv("E:/tmp/.keras/datasets/trasig_photos/annotations.csv")

# Base directory prepended to every file name from the CSV.
img_dir = "E:/tmp/.keras/datasets/trasig_photos/images/"

# Training labels, one per image (pandas Series -> plain list).
train_image_label = train["category"].tolist()
train_label_ds = tf.data.Dataset.from_tensor_slices(train_image_label)

# Full path of every training image.
train_image_paths = [img_dir + name for name in train["file_name"]]
train_path_ds = tf.data.Dataset.from_tensor_slices(train_image_paths)

# Decode/resize/normalise images, reading files in parallel.
train_image_ds = train_path_ds.map(
    load_and_preprocess_image,
    num_parallel_calls=tf.data.experimental.AUTOTUNE,
)

# Pair each image tensor with its label.
image_label_ds = tf.data.Dataset.zip((train_image_ds, train_label_ds))
# CNN classifier: three conv/pool stages followed by a dense head that
# predicts one of the 58 traffic-sign classes.
model = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=(299, 299, 3)),
    layers.MaxPooling2D(),
    layers.Conv2D(64, (3, 3), activation='relu'),
    layers.MaxPooling2D(),
    layers.Conv2D(128, (3, 3), activation='relu'),
    layers.AveragePooling2D(),
    layers.Flatten(),
    layers.Dense(1000, activation='relu'),
    layers.Dense(58, activation='softmax'),
])
# Compile with Adam and sparse categorical cross-entropy (integer labels,
# matching the 58-way softmax output above).
# NOTE(review): `lr_sch` (presumably a learning-rate schedule) is not defined
# anywhere in this file as shown — confirm it is created earlier, otherwise
# this line raises NameError at runtime.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=lr_sch),loss='sparse_categorical_crossentropy',metrics=['accuracy'])
# NOTE(review): `train_ds`, `test_ds` and `epochs` are not defined in this
# file as shown, and `image_label_ds` built above is never consumed —
# presumably it should be shuffled/batched and split into train_ds/test_ds
# before this call; confirm against the full script.
history = model.fit(train_ds,validation_data=test_ds,epochs=epochs
)