본문 바로가기

DEEP LEARNING/Tensorflow Training

cnn 실습_나만의 이미지 데이터를 만들고 학습하기(3) - functional API & save_model

# Build the CNN with the Keras functional API
def create_model2():
    """Return a CNN for 128x128 RGB inputs: three conv/pool stages,
    a 256-unit dense layer with dropout, and a 5-way softmax head."""
    inputs = keras.Input(shape=(128, 128, 3))
    x = inputs
    # Conv/pool stages with doubling filter counts: 32 -> 64 -> 128.
    for n_filters in (32, 64, 128):
        x = keras.layers.Conv2D(filters=n_filters, kernel_size=[3, 3],
                                padding='SAME', activation='relu')(x)
        x = keras.layers.MaxPool2D(padding='SAME')(x)
    x = keras.layers.Flatten()(x)
    x = keras.layers.Dense(units=256, activation='relu')(x)
    x = keras.layers.Dropout(rate=0.4)(x)
    # Softmax over the 5 target classes.
    outputs = keras.layers.Dense(units=5, activation='softmax')(x)
    return keras.Model(inputs=inputs, outputs=outputs)
# Instantiate and compile the functional-API model.
# NOTE(review): changed `tf.keras.optimizers.Adam` to `keras.optimizers.Adam`
# for consistency with the identical compile call further down (new_model);
# both resolve to the same class when keras is `tensorflow.keras`.
# `learning_rate` is assumed to be defined earlier in the notebook.
model2 = create_model2()
model2.compile(optimizer=keras.optimizers.Adam(learning_rate),
               loss='categorical_crossentropy',  # labels are one-hot encoded
               metrics=['accuracy'])
model2.summary()

## Set up the checkpoint directory and per-epoch filename pattern
cur_dir = os.getcwd()
ckpt_dir_name = 'checkpoint'
model_dir_name = 'cnn_sample'
# {epoch:04d} is filled in by ModelCheckpoint, zero-padded to 4 digits.
ckpt_name = 'sample_{epoch:04d}.ckpt'

# <cwd>/checkpoint/cnn_sample, plus the file pattern inside it.
checkpoint_dir = os.path.join(cur_dir, ckpt_dir_name, model_dir_name)
checkpoint_path = os.path.join(cur_dir, ckpt_dir_name, model_dir_name, ckpt_name)


# Checkpoint callback: writes weights after every epoch to checkpoint_path.
# save_weights_only=True stores just the weights; the architecture is
# rebuilt separately via create_model2() before loading.
cp_callback = keras.callbacks.ModelCheckpoint(
    filepath=checkpoint_path,
    save_weights_only=True,
)

## Train, checkpointing the weights after every epoch via cp_callback
history = model2.fit(
    train_dataset,
    epochs=N_EPOCHS,
    steps_per_epoch=steps_per_epoch,
    validation_data=test_dataset,
    validation_steps=validation_steps,
    callbacks=[cp_callback],
)

## Inspect the checkpoint files written during training (IPython shell magic)
!ls 'checkpoint/cnn_sample'

## Locate the most recently written checkpoint in checkpoint_dir
latest = tf.train.latest_checkpoint(checkpoint_dir)

# Rebuild an identical architecture; its weights start out random.
new_model = create_model2()
new_model.compile(
    optimizer=keras.optimizers.Adam(learning_rate),
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)

# Baseline score with untrained (random) weights.
new_model.evaluate(test_dataset)

# Restore the trained weights from the checkpoint...
new_model.load_weights(latest)

# ...and re-evaluate: accuracy should now match the trained model.
new_model.evaluate(test_dataset)

## HDF5 format으로 전체 model 저장하기
save_dir_name = 'saved_models'
os.makedirs(save_dir_name, exist_ok=True)
hdf5_model_path = os.path.join(cur_dir, save_dir_name, 'my_model.h5')
## 저장
model.save(hdf5_model_path)
## 확인
!ls saved_models

## Reload the complete model (no create_model2() needed — the HDF5 file
## contains the architecture as well as the weights)
my_model = keras.models.load_model(hdf5_model_path)
my_model.summary()

## Evaluate the restored model on the test set to confirm it round-tripped
my_model.evaluate(test_dataset)