Notes on deep learning.
It's a pain that the code you have to write changes depending on the versions of CUDA, cuDNN, and TensorFlow.
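Because of that, it helps to print the versions actually in use before anything else. A minimal check, assuming TensorFlow 2.3 or newer for tf.sysconfig.get_build_info() (on CPU-only builds the CUDA/cuDNN keys may be absent):

# coding: UTF-8
import tensorflow as tf

print(tf.__version__)        # TensorFlow version
print(tf.keras.__version__)  # bundled Keras version
# TF 2.3+: the CUDA/cuDNN versions this wheel was built against
build = tf.sysconfig.get_build_info()
print(build.get("cuda_version"), build.get("cudnn_version"))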
Below is the Windows version (standalone Keras).
# coding: UTF-8
import os
import csv
import random
import numpy as np
from PIL import Image
from keras.models import Sequential, Model
from keras.layers import Dense, Activation, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger
from keras.utils import to_categorical
from keras.optimizers import SGD, Adam
def load_train_data():
    train_data_list = []
    with open(r"./data/train_master.tsv", "r") as f:
        reader = csv.reader(f, delimiter='\t')
        # skip the header row
        next(reader)
        for record in reader:
            full_path = os.path.join(r"./data/train", record[0])
            label = int(record[1])  # the label column is read as a string
            train_data_list.append([full_path, label])
    return train_data_list
def train_data_generator(train_data_list, batch_size=32, class_num=20):
    max_len = len(train_data_list)
    while True:
        rtn_images = []
        rtn_labels = []
        for i in range(batch_size):
            # sample with replacement from the training list
            index = random.randint(0, max_len - 1)
            file_path = train_data_list[index][0]
            label = train_data_list[index][1]
            img = Image.open(file_path)
            img = np.asarray(img) / 255.  # scale pixels to [0, 1]
            label = to_categorical(label, num_classes=class_num)
            rtn_images.append(img)
            rtn_labels.append(label)
        yield np.array(rtn_images), np.array(rtn_labels)
def my_model():
    model = Sequential()
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same', input_shape=(32, 32, 3)))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # stride defaults to pool_size, so 'same' padding gives ceil(32/3): 32x32 -> 11x11
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same')))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same'))  # 11x11 -> 4x4
    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same'))  # 4x4 -> 2x2
    model.add(Flatten())
    model.add(Dense(1024))
    model.add(Activation('relu'))
    model.add(Dense(20))  # 20 output classes
    model.add(Activation('softmax'))
    return model
def main():
    batch_size = 32
    save_weight_file = 'practice.{epoch:02d}-{loss:.2f}-{acc:.2f}.hdf5'
    weights_path = r"./weights"
    csv_dir = r"./csv"
    weight_path = os.path.join(weights_path, save_weight_file)
    csv_path = os.path.join(csv_dir, "log.csv")
    if not os.path.exists(weights_path):
        os.makedirs(weights_path)
    if not os.path.exists(csv_dir):
        os.makedirs(csv_dir)
    train_data_list = load_train_data()
    model = my_model()
    model.compile(
        optimizer=Adam(lr=1e-3),
        loss="categorical_crossentropy",
        metrics=["accuracy"]
    )
    model.summary()
    # standalone Keras logs the accuracy metric as 'acc'
    cb_weight = ModelCheckpoint(filepath=weight_path, monitor='acc', verbose=1, save_best_only=False, mode='auto')
    cb_csv = CSVLogger(csv_path)
    model.fit_generator(
        train_data_generator(train_data_list, batch_size=batch_size),
        steps_per_epoch=len(train_data_list) // batch_size,  # roughly one pass over the data per epoch
        epochs=100,
        callbacks=[cb_weight, cb_csv]
    )

if __name__ == '__main__':
    main()
Below is the Linux version. Compared with the Windows one, it imports from tensorflow.keras instead of standalone keras, and tf.keras logs the accuracy metric as 'accuracy' rather than 'acc'.
# coding: UTF-8
import os
import csv
import random
import numpy as np
from PIL import Image
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Dense, Activation, Dropout, Flatten
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.optimizers import SGD, Adam
def load_train_data():
    train_data_list = []
    with open(r"./data/train_master.tsv", "r") as f:
        reader = csv.reader(f, delimiter='\t')
        # skip the header row
        next(reader)
        for record in reader:
            full_path = os.path.join(r"./data/train", record[0])
            label = int(record[1])  # the label column is read as a string
            train_data_list.append([full_path, label])
    return train_data_list
def train_data_generator(train_data_list, batch_size=32, class_num=20):
    max_len = len(train_data_list)
    while True:
        rtn_images = []
        rtn_labels = []
        for i in range(batch_size):
            # sample with replacement from the training list
            index = random.randint(0, max_len - 1)
            file_path = train_data_list[index][0]
            label = train_data_list[index][1]
            img = Image.open(file_path)
            img = np.asarray(img) / 255.  # scale pixels to [0, 1]
            label = to_categorical(label, num_classes=class_num)
            rtn_images.append(img)
            rtn_labels.append(label)
        yield np.array(rtn_images), np.array(rtn_labels)
def my_model():
    model = Sequential()
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same', input_shape=(32, 32, 3)))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    # stride defaults to pool_size, so 'same' padding gives ceil(32/3): 32x32 -> 11x11
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same'))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same'))  # 11x11 -> 4x4
    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same'))  # 4x4 -> 2x2
    model.add(Flatten())
    model.add(Dense(1024))
    model.add(Activation('relu'))
    model.add(Dense(20))  # 20 output classes
    model.add(Activation('softmax'))
    return model
def main():
    batch_size = 256
    save_weight_file = "practice.{epoch:02d}-{loss:.2f}-{accuracy:.2f}.hdf5"
    weights_path = r"./weights"
    csv_dir = r"./csv"
    weight_path = os.path.join(weights_path, save_weight_file)
    csv_path = os.path.join(csv_dir, "log.csv")
    if not os.path.exists(weights_path):
        os.makedirs(weights_path)
    if not os.path.exists(csv_dir):
        os.makedirs(csv_dir)
    train_data_list = load_train_data()
    model = my_model()
    model.compile(
        optimizer=Adam(learning_rate=1e-3),  # tf.keras deprecates the old 'lr' argument
        loss="categorical_crossentropy",
        metrics=["accuracy"]
    )
    model.summary()
    # tf.keras logs the metric as 'accuracy', not 'acc'
    cb_weight = ModelCheckpoint(filepath=weight_path, monitor='accuracy', verbose=1, save_best_only=False, mode='auto')
    cb_csv = CSVLogger(csv_path)
    # in TF2, fit() accepts a generator directly; fit_generator() is deprecated
    model.fit(
        train_data_generator(train_data_list, batch_size=batch_size),
        steps_per_epoch=len(train_data_list) // batch_size,  # roughly one pass over the data per epoch
        epochs=100,
        callbacks=[cb_weight, cb_csv]
    )

if __name__ == '__main__':
    main()
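To reuse a saved checkpoint for inference, the full model can be loaded back from the hdf5 file. A minimal sketch, assuming the tf.keras setup above; the checkpoint name and sample image path are hypothetical placeholders:

# coding: UTF-8
import numpy as np
from PIL import Image
from tensorflow.keras.models import load_model

# hypothetical checkpoint name; use a file that training actually produced
model = load_model(r"./weights/practice.99-0.10-0.97.hdf5")
# same preprocessing as the training generator: scale to [0, 1]
img = np.asarray(Image.open(r"./data/train/sample.png")) / 255.
pred = model.predict(img[np.newaxis, ...])  # add a batch dimension
print(np.argmax(pred[0]))  # predicted class index (0-19)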