# CNN:畳み込みニューラルネットワーク
## LeNet
### 概要
1998年、Yann LeCun氏が発案した最初期の畳み込みニューラルネットワーク。
## その後の代表的なモデル（2012年〜2015年ごろ）
### ①VGG16（2014年）
# VGG16 (Simonyan & Zisserman, 2014) built with the Keras functional API.
# Fixes vs. the original snippet: removed the unused top-level `import keras`,
# restored proper multi-line formatting, and replaced the hand-unrolled
# conv stacks with a data-driven loop that emits identical layer names/configs.
from keras.layers import Conv2D, MaxPooling2D, Input, Dense, Flatten
from keras.models import Model

# (number of 3x3 conv layers, filter count) for each of the five VGG blocks;
# every block ends in a 2x2/stride-2 max-pool that halves the spatial size.
VGG16_BLOCKS = [(2, 64), (2, 128), (3, 256), (3, 512), (3, 512)]

inputs = Input(shape=(224, 224, 3))  # ImageNet-sized RGB input

x = inputs
for block_idx, (n_convs, filters) in enumerate(VGG16_BLOCKS, start=1):
    for conv_idx in range(1, n_convs + 1):
        x = Conv2D(filters, (3, 3), activation='relu', padding='same',
                   name='block{}_conv{}'.format(block_idx, conv_idx))(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), padding='valid',
                     name='block{}_pool'.format(block_idx))(x)

# Classifier head: flatten (7*7*512 = 25088), two 4096-unit FC layers,
# then a 1000-way softmax for the ImageNet classes.
flattened = Flatten(name='flatten')(x)
x = Dense(4096, activation='relu', name='fc1')(flattened)
x = Dense(4096, activation='relu', name='fc2')(x)
predictions = Dense(1000, activation='softmax', name='predictions')(x)

model = Model(inputs=inputs, outputs=predictions)
model.summary()
### ②AlexNet（2012年）
# AlexNet-style network (Krizhevsky et al., 2012) in the Keras functional API.
# Fixes vs. the original snippet: removed the unused top-level `import keras`
# and restored proper multi-line formatting; layer order and configuration
# are unchanged. NOTE(review): BatchNormalization is used where the original
# paper used Local Response Normalization — a common modern substitute.
from keras.layers import (Conv2D, MaxPooling2D, Input, Dense, Flatten,
                          BatchNormalization)
from keras.models import Model

inputs = Input(shape=(227, 227, 3))  # 227x227 RGB so conv1 yields 55x55

# Block 1: large 11x11 stride-4 conv, then overlapping 3x3/stride-2 pool.
x = Conv2D(96, (11, 11), padding='valid', strides=(4, 4),
           activation='relu', name='conv1')(inputs)       # out: 55x55x96
x = MaxPooling2D((3, 3), padding='valid', strides=(2, 2),
                 name='pool1')(x)                         # out: 27x27x96
x = BatchNormalization(name='norm1')(x)

# Block 2
x = Conv2D(256, (5, 5), padding='same', strides=(1, 1),
           activation='relu', name='conv2')(x)            # out: 27x27x256
x = MaxPooling2D((3, 3), padding='valid', strides=(2, 2),
                 name='pool2')(x)                         # out: 13x13x256
x = BatchNormalization(name='norm2')(x)

# Block 3: three 3x3 convs, then the final overlapping pool.
x = Conv2D(384, (3, 3), padding='same', strides=(1, 1),
           activation='relu', name='conv3')(x)            # out: 13x13x384
x = Conv2D(384, (3, 3), padding='same', strides=(1, 1),
           activation='relu', name='conv4')(x)            # out: 13x13x384
x = Conv2D(256, (3, 3), padding='same', strides=(1, 1),
           activation='relu', name='conv5')(x)            # out: 13x13x256
x = MaxPooling2D((3, 3), padding='valid', strides=(2, 2),
                 name='pool3')(x)                         # out: 6x6x256

# Classifier head: flatten (6*6*256 = 9216) -> 4096 -> 4096 -> 1000 softmax.
flattened = Flatten(name='flatten')(x)
x = Dense(4096, activation='relu', name='fc1')(flattened)
x = Dense(4096, activation='relu', name='fc2')(x)
predictions = Dense(1000, activation='softmax', name='predictions')(x)

model = Model(inputs=inputs, outputs=predictions)
model.summary()
### ③GoogLeNet（2014年）
### ④ResNet（2015年）