Deep

These are my notes on deep learning.

Image classification program

It's a pain that the way you have to write the code changes depending on the versions of CUDA, cuDNN, and TensorFlow.
The following is the Windows version; a quick GPU-visibility check is sketched right after the version list, followed by the training script.

  • tensorflow-gpu==1.13.1
  • keras-nightly==2.5.0.dev2021032900
  • CUDA 10.0, cuDNN 7.4
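
Before launching a long training run, it can be worth confirming that TensorFlow actually sees the GPU. A minimal check for the TF 1.x setup above (my own sketch, not part of the original script):

# coding: UTF-8
# GPU visibility check for tensorflow-gpu 1.13.1 (sketch).
import tensorflow as tf

print(tf.__version__)              # expect 1.13.1
print(tf.test.is_gpu_available())  # True only if CUDA 10.0 / cuDNN 7.4 are picked up
print(tf.test.gpu_device_name())   # e.g. /device:GPU:0

The training script itself follows.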
# coding: UTF-8
import os
import csv
import random
import numpy as np
from PIL import Image
from keras.models import Sequential, Model
from keras.layers import Dense, Activation, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger
from keras.utils import to_categorical
from keras.optimizers import SGD, Adam


def load_train_data():
    # Read (image file name, label) pairs from train_master.tsv and return
    # them as a list of [full_path, label] entries.
    train_data_list = []
    with open(r"./data/train_master.tsv", "r") as f:
        reader = csv.reader(f, delimiter='\t')
        # skip the header row
        next(reader)
        for record in reader:
            full_path = os.path.join(r"./data/train", record[0])
            label = record[1]
            train_data_list.append([full_path, label])

    return train_data_list


def train_data_generator(train_data_list, batch_size=32, class_num=20):
    # Infinite generator: each iteration draws batch_size random samples,
    # scales the images to [0, 1] and one-hot encodes the labels.
    max_len = len(train_data_list)
    while True:
        rtn_images = []
        rtn_labels = []
        for i in range(batch_size):
            index = random.randint(0, max_len-1)
            file_path = train_data_list[index][0]
            label = train_data_list[index][1]
            img = Image.open(file_path)
            img = np.asarray(img)/255.
            label = to_categorical(label, num_classes=class_num)
            rtn_images.append(img)
            rtn_labels.append(label)

        yield np.array(rtn_images), np.array(rtn_labels)


def my_model():
    # Small VGG-style CNN for 32x32 RGB input with 20 output classes.
    model = Sequential()
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same', input_shape=(32, 32, 3)))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same')) # 32×32 -> 11×11 (stride defaults to pool_size=3; use (2, 2) to halve to 16×16)

    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same')) # 11×11 -> 4×4

    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same')) # 4×4 -> 2×2

    model.add(Flatten())

    model.add(Dense(1024))
    model.add(Activation('relu'))
    model.add(Dense(20))
    model.add(Activation('softmax'))

    return model


def main():
    batch_size = 32
    save_weight_file = 'practice.{epoch:02d}-{loss:.2f}-{acc:.2f}.hdf5'
    weights_path = r"./weights"
    csv_dir = r"./csv"
    weight_path = os.path.join(weights_path, save_weight_file)
    csv_path = os.path.join(csv_dir, "log.csv")
    if not os.path.exists(weights_path):
        os.makedirs(weights_path)

    if not os.path.exists(csv_dir):
        os.makedirs(csv_dir)

    train_data_list = load_train_data()
    model = my_model()
    model.compile(
        optimizer=Adam(lr=1e-3),
        loss="categorical_crossentropy",
        metrics=["accuracy"]
    )
    model.summary()

    # Save the weights every epoch and log the metrics to CSV.
    cb_weight = ModelCheckpoint(filepath=weight_path, monitor='acc', verbose=1, save_best_only=False, mode='auto')
    cb_csv = CSVLogger(csv_path)
    model.fit_generator(
        train_data_generator(train_data_list, batch_size=batch_size),
        steps_per_epoch=int(50000/batch_size),  # assumes 50,000 training images
        epochs=100,
        callbacks=[cb_weight, cb_csv]
    )


if __name__ == '__main__':
    main()
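
For reference, a minimal inference sketch (my addition, not in the original post) that loads one of the saved checkpoints and classifies a single image; the checkpoint name and image path below are placeholders:

# coding: UTF-8
import numpy as np
from PIL import Image
from keras.models import load_model

# Hypothetical file names; substitute a real checkpoint from ./weights and a real image.
model = load_model("./weights/practice.99-0.10-0.97.hdf5")
img = np.asarray(Image.open("./data/test/sample.png")) / 255.   # must be 32x32 RGB
pred = model.predict(img[np.newaxis, ...])                      # add the batch dimension
print(np.argmax(pred[0]))                                       # predicted class index (0-19)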

The following is the Linux version; again, a quick GPU check is sketched right after the list, followed by the training script.

  • tensorflow==2.3.0
  • Keras: the tf.keras that is installed automatically together with TensorFlow
  • export LD_LIBRARY_PATH=/usr/local/cuda-10.1/lib64
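
With TF 2.x the GPU check looks slightly different. A minimal sketch (my addition), run after exporting LD_LIBRARY_PATH as above; an empty list means TensorFlow will silently run on the CPU:

# coding: UTF-8
# GPU visibility check for tensorflow 2.3.0 (sketch).
import tensorflow as tf

print(tf.__version__)                          # expect 2.3.0
print(tf.config.list_physical_devices('GPU'))  # e.g. [PhysicalDevice(name='/physical_device:GPU:0', ...)]

The training script itself follows.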
# coding: UTF-8
import os
import csv
import random
import numpy as np
from PIL import Image
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Dense, Activation, Dropout, Flatten
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Conv2D, MaxPooling2D
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.optimizers import SGD, Adam


def load_train_data():
    train_data_list = []
    with open(r"./data/train_master.tsv", "r") as f:
        reader = csv.reader(f, delimiter='\t')
        # skip the header row
        next(reader)
        for record in reader:
            full_path = os.path.join(r"./data/train", record[0])
            label = record[1]
            train_data_list.append([full_path, label])

    return train_data_list


def train_data_generator(train_data_list, batch_size=32, class_num=20):
    max_len = len(train_data_list)
    while True:
        rtn_images = []
        rtn_labels = []
        for i in range(batch_size):
            index = random.randint(0, max_len-1)
            file_path = train_data_list[index][0]
            label = train_data_list[index][1]
            img = Image.open(file_path)
            img = np.asarray(img)/255.
            label = to_categorical(label, num_classes=class_num)
            rtn_images.append(img)
            rtn_labels.append(label)

        yield np.array(rtn_images), np.array(rtn_labels)


def my_model():
    model = Sequential()
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same', input_shape=(32, 32, 3)))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=32, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same')) # 32×32 -> 11×11 (stride follows pool_size=3)

    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same')) # 11×11 -> 4×4

    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(3, 3), padding='same')) # 4×4 -> 2×2

    model.add(Flatten())

    model.add(Dense(1024))
    model.add(Activation('relu'))
    model.add(Dense(20))
    model.add(Activation('softmax'))

    return model


def main():
    batch_size = 256
    save_weight_file = "practice.{epoch:02d}-{loss:.2f}-{accuracy:.2f}.hdf5"
    weights_path = r"./weights"
    csv_dir = r"./csv"
    weight_path = os.path.join(weights_path, save_weight_file)
    csv_path = os.path.join(csv_dir, "log.csv")
    if not os.path.exists(weights_path):
        os.makedirs(weights_path)

    if not os.path.exists(csv_dir):
        os.makedirs(csv_dir)

    train_data_list = load_train_data()
    model = my_model()
    model.compile(
        optimizer=Adam(learning_rate=1e-3),
        loss="categorical_crossentropy",
        metrics=["accuracy"]
    )
    model.summary()

    cb_weight = ModelCheckpoint(filepath=weight_path, monitor='accuracy', verbose=1, save_best_only=False, mode='auto')
    cb_csv = CSVLogger(csv_path)
    # In TF 2.x, Model.fit accepts generators directly (fit_generator is deprecated).
    model.fit(
        train_data_generator(train_data_list, batch_size=batch_size),
        steps_per_epoch=int(50000/batch_size),
        epochs=100,
        callbacks=[cb_weight, cb_csv]
    )


if __name__ == '__main__':
    main()

