Scheduled update.
There is, however, one nasty bug I still have not solved. It completely broke one of my lab exercises: after my debugging the gradient became 0. I suspect the gradient is vanishing, and the issue still needs to be dealt with; my current skill level is not quite enough yet, so I will leave that hole to fill later.
Next up: natural language processing, with a small movie-review sentiment-analysis project. Stay tuned.
import tensorflow as tf
tf.__version__
'2.6.0'
# Check whether a GPU is available
tf.test.is_gpu_available()
True
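Note: tf.test.is_gpu_available() is deprecated in TensorFlow 2.x. If it prints a deprecation warning on your setup, the recommended replacement gives the same information:
# Recommended replacement for the deprecated tf.test.is_gpu_available()
gpus = tf.config.list_physical_devices('GPU')
print(len(gpus) > 0)  # True if at least one GPU is visible to TensorFlow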
import numpy as np
import IPython.display as display
import PIL.Image
from tensorflow.keras.preprocessing import image
1. First, define the helper functions
# Image de-normalization: map values from [-1, 1] back to [0, 255]
def normalize_image(img):
    img = 255 * (img + 1.0) / 2.0
    return tf.cast(img, tf.uint8)
# Image visualization
import matplotlib.pyplot as plt
def show_image(img):
    display.display(PIL.Image.fromarray(np.array(img)))
    # plt.imshow(np.array(img))
# Save an image to a file
def save_image(img, file_name):
    PIL.Image.fromarray(np.array(img)).save(file_name)
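These helpers work together: the model operates on float images scaled to [-1, 1], and normalize_image maps such a tensor back to uint8 so that show_image and save_image (which expect uint8 pixel values) can handle it. A minimal sketch with a dummy tensor (the file name is just an example):
# Sketch: a dummy float image in [-1, 1] converted back to a displayable uint8 image
dummy = tf.random.uniform((64, 64, 3), minval=-1.0, maxval=1.0)
show_image(normalize_image(dummy))               # displays random noise
save_image(normalize_image(dummy), 'noise.png')  # example output file name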
2. Read the image file to process (it plays the role of the dataset)
def read_image(file_name, max_dim=None):
    '''
    file_name: path to the image file
    max_dim: maximum dimension (width/height) to resize to
    '''
    img = PIL.Image.open(file_name)
    if max_dim:
        img.thumbnail(size=(max_dim, max_dim))  # thumbnail() shrinks the image in place; size sets the maximum width and height
    return np.array(img)
image_file = './jupyter.png'
img = read_image(image_file,max_dim=500)
show_image(img)
img.shape
(276, 500, 3)
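Before this image goes into InceptionV3, it has to be converted to float and scaled to the [-1, 1] range the network was trained on; tf.keras.applications.inception_v3.preprocess_input does exactly that, and normalize_image above is its inverse. A minimal sketch:
# Scale uint8 pixels in [0, 255] to the [-1, 1] range expected by InceptionV3
img_float = tf.keras.applications.inception_v3.preprocess_input(tf.cast(img, tf.float32))
print(img_float.numpy().min(), img_float.numpy().max())  # values now lie within [-1, 1]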
3. Build the model
1. Load the pre-trained model
base_model = tf.keras.applications.InceptionV3(include_top=False,weights='imagenet')
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/inception_v3/inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5
87916544/87910968 [==============================] - 1s 0us/step
87924736/87910968 [==============================] - 1s 0us/step
base_model.summary()
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, None, None, 0
__________________________________________________________________________________________________
conv2d (Conv2D) (None, None, None, 3 864 input_1[0][0]
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, None, None, 3 96 conv2d[0][0]
__________________________________________________________________________________________________
activation (Activation) (None, None, None, 3 0 batch_normalization[0][0]
__________________________________________________________________________________________________
conv2d_1 (Conv2D) (None, None, None, 3 9216 activation[0][0]
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, None, None, 3 96 conv2d_1[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, None, None, 3 0 batch_normalization_1[0][0]
__________________________________________________________________________________________________
conv2d_2 (Conv2D) (None, None, None, 6 18432 activation_1[0][0]
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, None, None, 6 192 conv2d_2[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, None, None, 6 0 batch_normalization_2[0][0]
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D) (None, None, None, 6 0 activation_2[0][0]
__________________________________________________________________________________________________
conv2d_3 (Conv2D) (None, None, None, 8 5120 max_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, None, None, 8 240 conv2d_3[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, None, None, 8 0 batch_normalization_3[0][0]
__________________________________________________________________________________________________
conv2d_4 (Conv2D) (None, None, None, 1 138240 activation_3[0][0]
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, None, None, 1 576 conv2d_4[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, None, None, 1 0 batch_normalization_4[0][0]
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D) (None, None, None, 1 0 activation_4[0][0]
__________________________________________________________________________________________________
conv2d_8 (Conv2D) (None, None, None, 6 12288 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, None, None, 6 192 conv2d_8[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, None, None, 6 0 batch_normalization_8[0][0]
__________________________________________________________________________________________________
conv2d_6 (Conv2D) (None, None, None, 4 9216 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
conv2d_9 (Conv2D) (None, None, None, 9 55296 activation_8[0][0]
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, None, None, 4 144 conv2d_6[0][0]
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, None, None, 9 288 conv2d_9[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, None, None, 4 0 batch_normalization_6[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, None, None, 9 0 batch_normalization_9[0][0]
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, None, None, 1 0 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
conv2d_5 (Conv2D) (None, None, None, 6 12288 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
conv2d_7 (Conv2D) (None, None, None, 6 76800 activation_6[0][0]
__________________________________________________________________________________________________
conv2d_10 (Conv2D) (None, None, None, 9 82944 activation_9[0][0]
__________________________________________________________________________________________________
conv2d_11 (Conv2D) (None, None, None, 3 6144 average_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, None, None, 6 192 conv2d_5[0][0]
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, None, None, 6 192 conv2d_7[0][0]
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, None, None, 9 288 conv2d_10[0][0]
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, None, None, 3 96 conv2d_11[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, None, None, 6 0 batch_normalization_5[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, None, None, 6 0 batch_normalization_7[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, None, None, 9 0 batch_normalization_10[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, None, None, 3 0 batch_normalization_11[0][0]
__________________________________________________________________________________________________
mixed0 (Concatenate) (None, None, None, 2 0 activation_5[0][0]
activation_7[0][0]
activation_10[0][0]
activation_11[0][0]
__________________________________________________________________________________________________
conv2d_15 (Conv2D) (None, None, None, 6 16384 mixed0[0][0]
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, None, None, 6 192 conv2d_15[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, None, None, 6 0 batch_normalization_15[0][0]
__________________________________________________________________________________________________
conv2d_13 (Conv2D) (None, None, None, 4 12288 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_16 (Conv2D) (None, None, None, 9 55296 activation_15[0][0]
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, None, None, 4 144 conv2d_13[0][0]
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, None, None, 9 288 conv2d_16[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, None, None, 4 0 batch_normalization_13[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, None, None, 9 0 batch_normalization_16[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, None, None, 2 0 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_12 (Conv2D) (None, None, None, 6 16384 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_14 (Conv2D) (None, None, None, 6 76800 activation_13[0][0]
__________________________________________________________________________________________________
conv2d_17 (Conv2D) (None, None, None, 9 82944 activation_16[0][0]
__________________________________________________________________________________________________
conv2d_18 (Conv2D) (None, None, None, 6 16384 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, None, None, 6 192 conv2d_12[0][0]
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, None, None, 6 192 conv2d_14[0][0]
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, None, None, 9 288 conv2d_17[0][0]
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, None, None, 6 192 conv2d_18[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, None, None, 6 0 batch_normalization_12[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, None, None, 6 0 batch_normalization_14[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, None, None, 9 0 batch_normalization_17[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, None, None, 6 0 batch_normalization_18[0][0]
__________________________________________________________________________________________________
mixed1 (Concatenate) (None, None, None, 2 0 activation_12[0][0]
activation_14[0][0]
activation_17[0][0]
activation_18[0][0]
__________________________________________________________________________________________________
conv2d_22 (Conv2D) (None, None, None, 6 18432 mixed1[0][0]
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, None, None, 6 192 conv2d_22[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, None, None, 6 0 batch_normalization_22[0][0]
__________________________________________________________________________________________________
conv2d_20 (Conv2D) (None, None, None, 4 13824 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_23 (Conv2D) (None, None, None, 9 55296 activation_22[0][0]
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, None, None, 4 144 conv2d_20[0][0]
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, None, None, 9 288 conv2d_23[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, None, None, 4 0 batch_normalization_20[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, None, None, 9 0 batch_normalization_23[0][0]
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, None, None, 2 0 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_19 (Conv2D) (None, None, None, 6 18432 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_21 (Conv2D) (None, None, None, 6 76800 activation_20[0][0]
__________________________________________________________________________________________________
conv2d_24 (Conv2D) (None, None, None, 9 82944 activation_23[0][0]
__________________________________________________________________________________________________
conv2d_25 (Conv2D) (None, None, None, 6 18432 average_pooling2d_2[0][0]
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, None, None, 6 192 conv2d_19[0][0]
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, None, None, 6 192 conv2d_21[0][0]
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, None, None, 9 288 conv2d_24[0][0]
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, None, None, 6 192 conv2d_25[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, None, None, 6 0 batch_normalization_19[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, None, None, 6 0 batch_normalization_21[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, None, None, 9 0 batch_normalization_24[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, None, None, 6 0 batch_normalization_25[0][0]
__________________________________________________________________________________________________
mixed2 (Concatenate) (None, None, None, 2 0 activation_19[0][0]
activation_21[0][0]
activation_24[0][0]
activation_25[0][0]
__________________________________________________________________________________________________
conv2d_27 (Conv2D) (None, None, None, 6 18432 mixed2[0][0]
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, None, None, 6 192 conv2d_27[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, None, None, 6 0 batch_normalization_27[0][0]
__________________________________________________________________________________________________
conv2d_28 (Conv2D) (None, None, None, 9 55296 activation_27[0][0]
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, None, None, 9 288 conv2d_28[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, None, None, 9 0 batch_normalization_28[0][0]
__________________________________________________________________________________________________
conv2d_26 (Conv2D) (None, None, None, 3 995328 mixed2[0][0]
__________________________________________________________________________________________________
conv2d_29 (Conv2D) (None, None, None, 9 82944 activation_28[0][0]
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, None, None, 3 1152 conv2d_26[0][0]
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, None, None, 9 288 conv2d_29[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, None, None, 3 0 batch_normalization_26[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, None, None, 9 0 batch_normalization_29[0][0]
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D) (None, None, None, 2 0 mixed2[0][0]
__________________________________________________________________________________________________
mixed3 (Concatenate) (None, None, None, 7 0 activation_26[0][0]
activation_29[0][0]
max_pooling2d_2[0][0]
__________________________________________________________________________________________________
conv2d_34 (Conv2D) (None, None, None, 1 98304 mixed3[0][0]
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, None, None, 1 384 conv2d_34[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, None, None, 1 0 batch_normalization_34[0][0]
__________________________________________________________________________________________________
conv2d_35 (Conv2D) (None, None, None, 1 114688 activation_34[0][0]
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, None, None, 1 384 conv2d_35[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, None, None, 1 0 batch_normalization_35[0][0]
__________________________________________________________________________________________________
conv2d_31 (Conv2D) (None, None, None, 1 98304 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_36 (Conv2D) (None, None, None, 1 114688 activation_35[0][0]
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, None, None, 1 384 conv2d_31[0][0]
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, None, None, 1 384 conv2d_36[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, None, None, 1 0 batch_normalization_31[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, None, None, 1 0 batch_normalization_36[0][0]
__________________________________________________________________________________________________
conv2d_32 (Conv2D) (None, None, None, 1 114688 activation_31[0][0]
__________________________________________________________________________________________________
conv2d_37 (Conv2D) (None, None, None, 1 114688 activation_36[0][0]
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, None, None, 1 384 conv2d_32[0][0]
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, None, None, 1 384 conv2d_37[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, None, None, 1 0 batch_normalization_32[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, None, None, 1 0 batch_normalization_37[0][0]
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, None, None, 7 0 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_30 (Conv2D) (None, None, None, 1 147456 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_33 (Conv2D) (None, None, None, 1 172032 activation_32[0][0]
__________________________________________________________________________________________________
conv2d_38 (Conv2D) (None, None, None, 1 172032 activation_37[0][0]
__________________________________________________________________________________________________
conv2d_39 (Conv2D) (None, None, None, 1 147456 average_pooling2d_3[0][0]
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, None, None, 1 576 conv2d_30[0][0]
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, None, None, 1 576 conv2d_33[0][0]
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, None, None, 1 576 conv2d_38[0][0]
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, None, None, 1 576 conv2d_39[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, None, None, 1 0 batch_normalization_30[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, None, None, 1 0 batch_normalization_33[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, None, None, 1 0 batch_normalization_38[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, None, None, 1 0 batch_normalization_39[0][0]
__________________________________________________________________________________________________
mixed4 (Concatenate) (None, None, None, 7 0 activation_30[0][0]
activation_33[0][0]
activation_38[0][0]
activation_39[0][0]
__________________________________________________________________________________________________
conv2d_44 (Conv2D) (None, None, None, 1 122880 mixed4[0][0]
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, None, None, 1 480 conv2d_44[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, None, None, 1 0 batch_normalization_44[0][0]
__________________________________________________________________________________________________
conv2d_45 (Conv2D) (None, None, None, 1 179200 activation_44[0][0]
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, None, None, 1 480 conv2d_45[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, None, None, 1 0 batch_normalization_45[0][0]
__________________________________________________________________________________________________
conv2d_41 (Conv2D) (None, None, None, 1 122880 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_46 (Conv2D) (None, None, None, 1 179200 activation_45[0][0]
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, None, None, 1 480 conv2d_41[0][0]
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, None, None, 1 480 conv2d_46[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, None, None, 1 0 batch_normalization_41[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, None, None, 1 0 batch_normalization_46[0][0]
__________________________________________________________________________________________________
conv2d_42 (Conv2D) (None, None, None, 1 179200 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_47 (Conv2D) (None, None, None, 1 179200 activation_46[0][0]
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, None, None, 1 480 conv2d_42[0][0]
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, None, None, 1 480 conv2d_47[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, None, None, 1 0 batch_normalization_42[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, None, None, 1 0 batch_normalization_47[0][0]
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, None, None, 7 0 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_40 (Conv2D) (None, None, None, 1 147456 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_43 (Conv2D) (None, None, None, 1 215040 activation_42[0][0]
__________________________________________________________________________________________________
conv2d_48 (Conv2D) (None, None, None, 1 215040 activation_47[0][0]
__________________________________________________________________________________________________
conv2d_49 (Conv2D) (None, None, None, 1 147456 average_pooling2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, None, None, 1 576 conv2d_40[0][0]
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, None, None, 1 576 conv2d_43[0][0]
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, None, None, 1 576 conv2d_48[0][0]
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, None, None, 1 576 conv2d_49[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, None, None, 1 0 batch_normalization_40[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, None, None, 1 0 batch_normalization_43[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, None, None, 1 0 batch_normalization_48[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, None, None, 1 0 batch_normalization_49[0][0]
__________________________________________________________________________________________________
mixed5 (Concatenate) (None, None, None, 7 0 activation_40[0][0]
activation_43[0][0]
activation_48[0][0]
activation_49[0][0]
__________________________________________________________________________________________________
conv2d_54 (Conv2D) (None, None, None, 1 122880 mixed5[0][0]
__________________________________________________________________________________________________
batch_normalization_54 (BatchNo (None, None, None, 1 480 conv2d_54[0][0]
__________________________________________________________________________________________________
activation_54 (Activation) (None, None, None, 1 0 batch_normalization_54[0][0]
__________________________________________________________________________________________________
conv2d_55 (Conv2D) (None, None, None, 1 179200 activation_54[0][0]
__________________________________________________________________________________________________
batch_normalization_55 (BatchNo (None, None, None, 1 480 conv2d_55[0][0]
__________________________________________________________________________________________________
activation_55 (Activation) (None, None, None, 1 0 batch_normalization_55[0][0]
__________________________________________________________________________________________________
conv2d_51 (Conv2D) (None, None, None, 1 122880 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_56 (Conv2D) (None, None, None, 1 179200 activation_55[0][0]
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, None, None, 1 480 conv2d_51[0][0]
__________________________________________________________________________________________________
batch_normalization_56 (BatchNo (None, None, None, 1 480 conv2d_56[0][0]
__________________________________________________________________________________________________
activation_51 (Activation) (None, None, None, 1 0 batch_normalization_51[0][0]
__________________________________________________________________________________________________
activation_56 (Activation) (None, None, None, 1 0 batch_normalization_56[0][0]
__________________________________________________________________________________________________
conv2d_52 (Conv2D) (None, None, None, 1 179200 activation_51[0][0]
__________________________________________________________________________________________________
conv2d_57 (Conv2D) (None, None, None, 1 179200 activation_56[0][0]
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, None, None, 1 480 conv2d_52[0][0]
__________________________________________________________________________________________________
batch_normalization_57 (BatchNo (None, None, None, 1 480 conv2d_57[0][0]
__________________________________________________________________________________________________
activation_52 (Activation) (None, None, None, 1 0 batch_normalization_52[0][0]
__________________________________________________________________________________________________
activation_57 (Activation) (None, None, None, 1 0 batch_normalization_57[0][0]
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, None, None, 7 0 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_50 (Conv2D) (None, None, None, 1 147456 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_53 (Conv2D) (None, None, None, 1 215040 activation_52[0][0]
__________________________________________________________________________________________________
conv2d_58 (Conv2D) (None, None, None, 1 215040 activation_57[0][0]
__________________________________________________________________________________________________
conv2d_59 (Conv2D) (None, None, None, 1 147456 average_pooling2d_5[0][0]
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, None, None, 1 576 conv2d_50[0][0]
__________________________________________________________________________________________________
batch_normalization_53 (BatchNo (None, None, None, 1 576 conv2d_53[0][0]
__________________________________________________________________________________________________
batch_normalization_58 (BatchNo (None, None, None, 1 576 conv2d_58[0][0]
__________________________________________________________________________________________________
batch_normalization_59 (BatchNo (None, None, None, 1 576 conv2d_59[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, None, None, 1 0 batch_normalization_50[0][0]
__________________________________________________________________________________________________
activation_53 (Activation) (None, None, None, 1 0 batch_normalization_53[0][0]
__________________________________________________________________________________________________
activation_58 (Activation) (None, None, None, 1 0 batch_normalization_58[0][0]
__________________________________________________________________________________________________
activation_59 (Activation) (None, None, None, 1 0 batch_normalization_59[0][0]
__________________________________________________________________________________________________
mixed6 (Concatenate) (None, None, None, 7 0 activation_50[0][0]
activation_53[0][0]
activation_58[0][0]
activation_59[0][0]
__________________________________________________________________________________________________
conv2d_64 (Conv2D) (None, None, None, 1 147456 mixed6[0][0]
__________________________________________________________________________________________________
batch_normalization_64 (BatchNo (None, None, None, 1 576 conv2d_64[0][0]
__________________________________________________________________________________________________
activation_64 (Activation) (None, None, None, 1 0 batch_normalization_64[0][0]
__________________________________________________________________________________________________
conv2d_65 (Conv2D) (None, None, None, 1 258048 activation_64[0][0]
__________________________________________________________________________________________________
batch_normalization_65 (BatchNo (None, None, None, 1 576 conv2d_65[0][0]
__________________________________________________________________________________________________
activation_65 (Activation) (None, None, None, 1 0 batch_normalization_65[0][0]
__________________________________________________________________________________________________
conv2d_61 (Conv2D) (None, None, None, 1 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_66 (Conv2D) (None, None, None, 1 258048 activation_65[0][0]
__________________________________________________________________________________________________
batch_normalization_61 (BatchNo (None, None, None, 1 576 conv2d_61[0][0]
__________________________________________________________________________________________________
batch_normalization_66 (BatchNo (None, None, None, 1 576 conv2d_66[0][0]
__________________________________________________________________________________________________
activation_61 (Activation) (None, None, None, 1 0 batch_normalization_61[0][0]
__________________________________________________________________________________________________
activation_66 (Activation) (None, None, None, 1 0 batch_normalization_66[0][0]
__________________________________________________________________________________________________
conv2d_62 (Conv2D) (None, None, None, 1 258048 activation_61[0][0]
__________________________________________________________________________________________________
conv2d_67 (Conv2D) (None, None, None, 1 258048 activation_66[0][0]
__________________________________________________________________________________________________
batch_normalization_62 (BatchNo (None, None, None, 1 576 conv2d_62[0][0]
__________________________________________________________________________________________________
batch_normalization_67 (BatchNo (None, None, None, 1 576 conv2d_67[0][0]
__________________________________________________________________________________________________
activation_62 (Activation) (None, None, None, 1 0 batch_normalization_62[0][0]
__________________________________________________________________________________________________
activation_67 (Activation) (None, None, None, 1 0 batch_normalization_67[0][0]
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, None, None, 7 0 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_60 (Conv2D) (None, None, None, 1 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_63 (Conv2D) (None, None, None, 1 258048 activation_62[0][0]
__________________________________________________________________________________________________
conv2d_68 (Conv2D) (None, None, None, 1 258048 activation_67[0][0]
__________________________________________________________________________________________________
conv2d_69 (Conv2D) (None, None, None, 1 147456 average_pooling2d_6[0][0]
__________________________________________________________________________________________________
batch_normalization_60 (BatchNo (None, None, None, 1 576 conv2d_60[0][0]
__________________________________________________________________________________________________
batch_normalization_63 (BatchNo (None, None, None, 1 576 conv2d_63[0][0]
__________________________________________________________________________________________________
batch_normalization_68 (BatchNo (None, None, None, 1 576 conv2d_68[0][0]
__________________________________________________________________________________________________
batch_normalization_69 (BatchNo (None, None, None, 1 576 conv2d_69[0][0]
__________________________________________________________________________________________________
activation_60 (Activation) (None, None, None, 1 0 batch_normalization_60[0][0]
__________________________________________________________________________________________________
activation_63 (Activation) (None, None, None, 1 0 batch_normalization_63[0][0]
__________________________________________________________________________________________________
activation_68 (Activation) (None, None, None, 1 0 batch_normalization_68[0][0]
__________________________________________________________________________________________________
activation_69 (Activation) (None, None, None, 1 0 batch_normalization_69[0][0]
__________________________________________________________________________________________________
mixed7 (Concatenate) (None, None, None, 7 0 activation_60[0][0]
activation_63[0][0]
activation_68[0][0]
activation_69[0][0]
__________________________________________________________________________________________________
conv2d_72 (Conv2D) (None, None, None, 1 147456 mixed7[0][0]
__________________________________________________________________________________________________
batch_normalization_72 (BatchNo (None, None, None, 1 576 conv2d_72[0][0]
__________________________________________________________________________________________________
activation_72 (Activation) (None, None, None, 1 0 batch_normalization_72[0][0]
__________________________________________________________________________________________________
conv2d_73 (Conv2D) (None, None, None, 1 258048 activation_72[0][0]
__________________________________________________________________________________________________
batch_normalization_73 (BatchNo (None, None, None, 1 576 conv2d_73[0][0]
__________________________________________________________________________________________________
activation_73 (Activation) (None, None, None, 1 0 batch_normalization_73[0][0]
__________________________________________________________________________________________________
conv2d_70 (Conv2D) (None, None, None, 1 147456 mixed7[0][0]
__________________________________________________________________________________________________
conv2d_74 (Conv2D) (None, None, None, 1 258048 activation_73[0][0]
__________________________________________________________________________________________________
batch_normalization_70 (BatchNo (None, None, None, 1 576 conv2d_70[0][0]
__________________________________________________________________________________________________
batch_normalization_74 (BatchNo (None, None, None, 1 576 conv2d_74[0][0]
__________________________________________________________________________________________________
activation_70 (Activation) (None, None, None, 1 0 batch_normalization_70[0][0]
__________________________________________________________________________________________________
activation_74 (Activation) (None, None, None, 1 0 batch_normalization_74[0][0]
__________________________________________________________________________________________________
conv2d_71 (Conv2D) (None, None, None, 3 552960 activation_70[0][0]
__________________________________________________________________________________________________
conv2d_75 (Conv2D) (None, None, None, 1 331776 activation_74[0][0]
__________________________________________________________________________________________________
batch_normalization_71 (BatchNo (None, None, None, 3 960 conv2d_71[0][0]
__________________________________________________________________________________________________
batch_normalization_75 (BatchNo (None, None, None, 1 576 conv2d_75[0][0]
__________________________________________________________________________________________________
activation_71 (Activation) (None, None, None, 3 0 batch_normalization_71[0][0]
__________________________________________________________________________________________________
activation_75 (Activation) (None, None, None, 1 0 batch_normalization_75[0][0]
__________________________________________________________________________________________________
max_pooling2d_3 (MaxPooling2D) (None, None, None, 7 0 mixed7[0][0]
__________________________________________________________________________________________________
mixed8 (Concatenate) (None, None, None, 1 0 activation_71[0][0]
activation_75[0][0]
max_pooling2d_3[0][0]
__________________________________________________________________________________________________
conv2d_80 (Conv2D) (None, None, None, 4 573440 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_80 (BatchNo (None, None, None, 4 1344 conv2d_80[0][0]
__________________________________________________________________________________________________
activation_80 (Activation) (None, None, None, 4 0 batch_normalization_80[0][0]
__________________________________________________________________________________________________
conv2d_77 (Conv2D) (None, None, None, 3 491520 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_81 (Conv2D) (None, None, None, 3 1548288 activation_80[0][0]
__________________________________________________________________________________________________
batch_normalization_77 (BatchNo (None, None, None, 3 1152 conv2d_77[0][0]
__________________________________________________________________________________________________
batch_normalization_81 (BatchNo (None, None, None, 3 1152 conv2d_81[0][0]
__________________________________________________________________________________________________
activation_77 (Activation) (None, None, None, 3 0 batch_normalization_77[0][0]
__________________________________________________________________________________________________
activation_81 (Activation) (None, None, None, 3 0 batch_normalization_81[0][0]
__________________________________________________________________________________________________
conv2d_78 (Conv2D) (None, None, None, 3 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_79 (Conv2D) (None, None, None, 3 442368 activation_77[0][0]
__________________________________________________________________________________________________
conv2d_82 (Conv2D) (None, None, None, 3 442368 activation_81[0][0]
__________________________________________________________________________________________________
conv2d_83 (Conv2D) (None, None, None, 3 442368 activation_81[0][0]
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, None, None, 1 0 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_76 (Conv2D) (None, None, None, 3 409600 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_78 (BatchNo (None, None, None, 3 1152 conv2d_78[0][0]
__________________________________________________________________________________________________
batch_normalization_79 (BatchNo (None, None, None, 3 1152 conv2d_79[0][0]
__________________________________________________________________________________________________
batch_normalization_82 (BatchNo (None, None, None, 3 1152 conv2d_82[0][0]
__________________________________________________________________________________________________
batch_normalization_83 (BatchNo (None, None, None, 3 1152 conv2d_83[0][0]
__________________________________________________________________________________________________
conv2d_84 (Conv2D) (None, None, None, 1 245760 average_pooling2d_7[0][0]
__________________________________________________________________________________________________
batch_normalization_76 (BatchNo (None, None, None, 3 960 conv2d_76[0][0]
__________________________________________________________________________________________________
activation_78 (Activation) (None, None, None, 3 0 batch_normalization_78[0][0]
__________________________________________________________________________________________________
activation_79 (Activation) (None, None, None, 3 0 batch_normalization_79[0][0]
__________________________________________________________________________________________________
activation_82 (Activation) (None, None, None, 3 0 batch_normalization_82[0][0]
__________________________________________________________________________________________________
activation_83 (Activation) (None, None, None, 3 0 batch_normalization_83[0][0]
__________________________________________________________________________________________________
batch_normalization_84 (BatchNo (None, None, None, 1 576 conv2d_84[0][0]
__________________________________________________________________________________________________
activation_76 (Activation) (None, None, None, 3 0 batch_normalization_76[0][0]
__________________________________________________________________________________________________
mixed9_0 (Concatenate) (None, None, None, 7 0 activation_78[0][0]
activation_79[0][0]
__________________________________________________________________________________________________
concatenate (Concatenate) (None, None, None, 7 0 activation_82[0][0]
activation_83[0][0]
__________________________________________________________________________________________________
activation_84 (Activation) (None, None, None, 1 0 batch_normalization_84[0][0]
__________________________________________________________________________________________________
mixed9 (Concatenate) (None, None, None, 2 0 activation_76[0][0]
mixed9_0[0][0]
concatenate[0][0]
activation_84[0][0]
__________________________________________________________________________________________________
conv2d_89 (Conv2D) (None, None, None, 4 917504 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_89 (BatchNo (None, None, None, 4 1344 conv2d_89[0][0]
__________________________________________________________________________________________________
activation_89 (Activation) (None, None, None, 4 0 batch_normalization_89[0][0]
__________________________________________________________________________________________________
conv2d_86 (Conv2D) (None, None, None, 3 786432 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_90 (Conv2D) (None, None, None, 3 1548288 activation_89[0][0]
__________________________________________________________________________________________________
batch_normalization_86 (BatchNo (None, None, None, 3 1152 conv2d_86[0][0]
__________________________________________________________________________________________________
batch_normalization_90 (BatchNo (None, None, None, 3 1152 conv2d_90[0][0]
__________________________________________________________________________________________________
activation_86 (Activation) (None, None, None, 3 0 batch_normalization_86[0][0]
__________________________________________________________________________________________________
activation_90 (Activation) (None, None, None, 3 0 batch_normalization_90[0][0]
__________________________________________________________________________________________________
conv2d_87 (Conv2D) (None, None, None, 3 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_88 (Conv2D) (None, None, None, 3 442368 activation_86[0][0]
__________________________________________________________________________________________________
conv2d_91 (Conv2D) (None, None, None, 3 442368 activation_90[0][0]
__________________________________________________________________________________________________
conv2d_92 (Conv2D) (None, None, None, 3 442368 activation_90[0][0]
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, None, None, 2 0 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_85 (Conv2D) (None, None, None, 3 655360 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_87 (BatchNo (None, None, None, 3 1152 conv2d_87[0][0]
__________________________________________________________________________________________________
batch_normalization_88 (BatchNo (None, None, None, 3 1152 conv2d_88[0][0]
__________________________________________________________________________________________________
batch_normalization_91 (BatchNo (None, None, None, 3 1152 conv2d_91[0][0]
__________________________________________________________________________________________________
batch_normalization_92 (BatchNo (None, None, None, 3 1152 conv2d_92[0][0]
__________________________________________________________________________________________________
conv2d_93 (Conv2D) (None, None, None, 1 393216 average_pooling2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_85 (BatchNo (None, None, None, 3 960 conv2d_85[0][0]
__________________________________________________________________________________________________
activation_87 (Activation) (None, None, None, 3 0 batch_normalization_87[0][0]
__________________________________________________________________________________________________
activation_88 (Activation) (None, None, None, 3 0 batch_normalization_88[0][0]
__________________________________________________________________________________________________
activation_91 (Activation) (None, None, None, 3 0 batch_normalization_91[0][0]
__________________________________________________________________________________________________
activation_92 (Activation) (None, None, None, 3 0 batch_normalization_92[0][0]
__________________________________________________________________________________________________
batch_normalization_93 (BatchNo (None, None, None, 1 576 conv2d_93[0][0]
__________________________________________________________________________________________________
activation_85 (Activation) (None, None, None, 3 0 batch_normalization_85[0][0]
__________________________________________________________________________________________________
mixed9_1 (Concatenate) (None, None, None, 7 0 activation_87[0][0]
activation_88[0][0]
__________________________________________________________________________________________________
concatenate_1 (Concatenate) (None, None, None, 7 0 activation_91[0][0]
activation_92[0][0]
__________________________________________________________________________________________________
activation_93 (Activation) (None, None, None, 1 0 batch_normalization_93[0][0]
__________________________________________________________________________________________________
mixed10 (Concatenate) (None, None, None, 2 0 activation_85[0][0]
mixed9_1[0][0]
concatenate_1[0][0]
activation_93[0][0]
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
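The 'mixed0' … 'mixed10' concatenation layers that close each Inception block are the usual DeepDream targets: earlier blocks respond to fine textures, later ones to higher-level patterns. One way to list the candidates is:
# List the Inception block outputs that can serve as DeepDream targets
mixed_layers = [layer.name for layer in base_model.layers if layer.name.startswith('mixed')]
print(mixed_layers)  # ['mixed0', 'mixed1', ..., 'mixed10'] plus the inner 'mixed9_0', 'mixed9_1'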
2. Select the convolutional layers (and channels) to use
# Layers whose activations we will maximize as much as possible
layer_names = ['conv2d_92','mixed10']
layers = [base_model.get_layer(name).output for name in layer_names]
layers
[<KerasTensor: shape=(None, None, None, 384) dtype=float32 (created by layer 'conv2d_92')>,
<KerasTensor: shape=(None, None, None, 2048) dtype=float32 (created by layer 'mixed10')>]
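These two tensors are what the gradient-ascent step will try to maximize. Once the feature-extraction model is built below, the DeepDream loss is usually just the sum of the mean activation of each selected layer. A minimal sketch of that objective (this is the standard formulation from the TensorFlow DeepDream tutorial, not necessarily the exact code used later):
# Sketch of the DeepDream objective: mean activation of each chosen layer, summed
def calc_loss(img, model):
    img_batch = tf.expand_dims(img, axis=0)  # add a batch dimension
    layer_activations = model(img_batch)     # one feature map per selected layer
    if not isinstance(layer_activations, list):
        layer_activations = [layer_activations]
    losses = [tf.reduce_mean(act) for act in layer_activations]
    return tf.reduce_sum(losses)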
3. Create the feature-extraction model
dream_model = tf.keras.Model(inputs=base_model.input,outputs=layers)
dream_model.summary()
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, None, None, 0
__________________________________________________________________________________________________
... (layer listing identical to base_model.summary() above; truncated) ...
__________________________________________________________________________________________________
mixed10 (Concatenate) (None, None, None, 2 0 activation_85[0][0]
mixed9_1[0][0]
concatenate_1[0][0]
activation_93[0][0]
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
4. Training
1. Define the loss
# The loss is the mean activation of the selected layers' outputs
def loss(img, model):
    # channels = [16, 24]  # per-channel variant: indices must stay below the channel count of the selected layer
    # Expand (H, W, 3) to (1, H, W, 3) so the image matches the InceptionV3 input format
    img = tf.expand_dims(img, axis=0)
    res = model(img)
    losses = []
    # Average each selected layer's output, then sum the per-layer means
    for re in res:
        loss_ = tf.reduce_mean(re)
        losses.append(loss_)
    return tf.reduce_sum(losses)
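The commented-out per-channel idea can also be made to work: instead of averaging a whole layer, average only a few of its channels. A minimal sketch, assuming the same dream_model with two outputs as above (the indices 16 and 24 are arbitrary examples and must be smaller than the layer's channel count):
# Per-channel variant (sketch): maximize only selected channels of the first chosen layer
def channel_loss(img, model, channels=(16, 24)):
    img = tf.expand_dims(img, axis=0)
    layer_outputs = model(img)              # list with one tensor per selected layer
    first = layer_outputs[0]                # e.g. the 'conv2d_92' output, shape (1, H, W, 384)
    return tf.reduce_sum([tf.reduce_mean(first[:, :, :, c]) for c in channels])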
2. Define the image-optimization process (gradient ascent)
# Compute the gradient of the loss with respect to the image
def grad(img, model):
    with tf.GradientTape() as tape:
        tape.watch(img)  # img is a plain tensor, not a Variable, so it must be watched explicitly
        loss_ = loss(img, model)
    return tape.gradient(loss_, img), loss_

def deep_dream(model, img, epochs=30, step_size=0.03, verbose=1):
    '''
    model: the feature-extraction model
    img: the input image as a tensor
    epochs: number of gradient-ascent steps
    step_size: similar to a learning rate
    verbose: whether to print progress information (custom flag)
    '''
    for step in range(epochs):
        # Compute the gradient of the loss with respect to the image
        grads, loss_ = grad(img, model)
        # Normalize the gradients by their standard deviation
        grads /= tf.math.reduce_std(grads) + 1e-8
        # Gradient ascent: move the image in the direction that increases the loss
        img += grads * step_size
        # Keep pixel values inside the [-1, 1] range expected by the InceptionV3 preprocessing
        img = tf.clip_by_value(img, -1, 1)
        # Print progress every 10 steps
        if verbose == 1:
            if (step + 1) % 10 == 1:
                print(f'Step{step+1}/{epochs},loss{loss_}')
    return img
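Two small details of the functions above can be checked in isolation (toy values, only for illustration): tape.watch is required because the image is a plain tensor rather than a tf.Variable, and tf.clip_by_value simply saturates values outside the given range.
# Why tape.watch is needed: plain tensors are not tracked by GradientTape automatically
x = tf.constant(3.0)
with tf.GradientTape() as tape:
    tape.watch(x)                       # without this line the gradient below would be None
    y = x * x
print(tape.gradient(y, x))              # tf.Tensor(6.0, shape=(), dtype=float32)
# tf.clip_by_value saturates out-of-range values, keeping the image inside [-1, 1]
print(tf.clip_by_value(tf.constant([-1.7, 0.2, 3.0]), -1, 1))  # [-1.   0.2  1. ]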
5. Application
1. Data preprocessing
# inception_v3.preprocess_input converts the image to a float32 array/tensor with values in [-1, 1]
img = tf.keras.applications.inception_v3.preprocess_input(img)
print(type(img))
img = tf.convert_to_tensor(img) # Convert the given value to a tensor
# img = tf.constant(img)
print(type(img))
<class 'numpy.ndarray'>
<class 'tensorflow.python.framework.ops.EagerTensor'>
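For InceptionV3, preprocess_input rescales pixel values from [0, 255] to [-1, 1] (x / 127.5 - 1). A quick sanity check with toy values:
# Sanity check of the InceptionV3 preprocessing: 0 -> -1, 127.5 -> 0, 255 -> 1
check = tf.keras.applications.inception_v3.preprocess_input(np.array([0.0, 127.5, 255.0]))
print(check)  # [-1.  0.  1.]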
2. Apply
# Start dreaming
import time
start = time.time()
print('Start dreaming…')
# Run the optimization process
dream_img = deep_dream(dream_model, img)
end = time.time()
print('Dream over…')
# Convert the image back to the uint8 [0, 255] range
dream_img = normalize_image(dream_img)
# Show the resulting image
show_image(dream_img)
# Save the resulting image
file_name = f'deepdream_{layer_names}.jpg'
save_image(dream_img, file_name)
print(f'Dream saved as {file_name}')
Start dreaming…
Step1/30,loss0.39452841877937317
Step11/30,loss1.039328694343567
Step21/30,loss1.060717225074768
Dream over…
Dream saved as deepdream_['conv2d_92', 'mixed10'].jpg
6. Optimization 1: multiple scales (octaves)
The result so far has several problems:
- the output is noisy
- the image resolution is low
- the patterns all appear at the same granularity
Gradient ascent can be applied to the image at several scales (octaves): patterns found at smaller scales are carried over and refined when the image is processed at larger scales.
# Start dreaming
import time
start = time.time()
OCTAVE_SCALE = 1.30
print('Start dreaming…')
initial_shape = img.shape[:-1]
print(initial_shape, type(initial_shape))
# Run the optimization process at several scales (octaves)
for octave in range(-2, 3):
    new_size = tf.cast(tf.convert_to_tensor(initial_shape), tf.float32) * (OCTAVE_SCALE ** octave)
    print(new_size)
    img = tf.image.resize(img, tf.cast(new_size, tf.int32))
    dream_img = deep_dream(dream_model, img)
end = time.time()
print('Dream over…')
# Resize the image back to its original size
dream_img = tf.image.resize(dream_img, initial_shape)
# Convert the image back to the uint8 [0, 255] range
dream_img = normalize_image(dream_img)
# Show the resulting image
show_image(dream_img)
# Save the resulting image
file_name = f'deepdream_{layer_names}.jpg'
save_image(dream_img, file_name)
print(f'Dream saved as {file_name}')
Start dreaming…
(276, 500) <class 'tensorflow.python.framework.tensor_shape.TensorShape'>
tf.Tensor([163.31361 295.858 ], shape=(2,), dtype=float32)
Step1/30,loss0.6132913827896118
Step11/30,loss1.716591715812683
Step21/30,loss2.456547260284424
tf.Tensor([212.3077 384.6154], shape=(2,), dtype=float32)
Step1/30,loss0.4894857406616211
Step11/30,loss1.3406188488006592
Step21/30,loss1.8961268663406372
tf.Tensor([276. 500.], shape=(2,), dtype=float32)
Step1/30,loss0.37319672107696533
Step11/30,loss0.9798624515533447
Step21/30,loss1.107140302658081
tf.Tensor([358.8 650. ], shape=(2,), dtype=float32)
Step1/30,loss0.2809557616710663
Step11/30,loss0.7942595481872559
Step21/30,loss1.0603138208389282
tf.Tensor([466.44 845. ], shape=(2,), dtype=float32)
Step1/30,loss0.21746864914894104
Step11/30,loss0.6842349171638489
Step21/30,loss1.0456043481826782
Dream over…
Dream saved as deepdream_['conv2d_92', 'mixed10'].jpg
7. Optimization 2 (unfinished; there is a vanishing-gradient problem)
If a single image is too large, the time and memory needed to compute the gradient grow as well, and some machines may not be able to handle it.
The image can therefore be split into small tiles, the gradient computed tile by tile, and the per-tile gradients combined to produce the final image.
1. Define the image shift (random roll) function
def random_roll(img, maxroll=512):
    # Pick a random vertical/horizontal shift
    shift = tf.random.uniform(shape=[2], minval=-maxroll, maxval=maxroll, dtype=tf.int32)
    shift_down, shift_right = shift[0], shift[1]
    # Roll the image: axis 0 is the vertical (row) direction, axis 1 the horizontal (column) direction
    img_rolled = tf.roll(tf.roll(img, shift_right, axis=1), shift_down, axis=0)
    return shift_down, shift_right, img_rolled
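tf.roll shifts elements along an axis and wraps them around to the other side, which is what makes it possible to roll the accumulated gradients back into place later. A tiny example:
# Rolling a 3x3 matrix by one row: the last row wraps around to the top
demo = tf.reshape(tf.range(9), (3, 3))
print(tf.roll(demo, shift=1, axis=0).numpy())
# [[6 7 8]
#  [0 1 2]
#  [3 4 5]]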
# Shift example
shift_down, shift_right, img_rolled = random_roll(np.array(img))
print(img_rolled)
show_image(normalize_image(img_rolled))
tf.Tensor(
[[[ 0.03375863 0.08721063 0.09207988]
[ 0.03150825 0.08516623 0.09036239]
[ 0.03068571 0.08467986 0.08965567]
...
[ 0.04770055 0.10158308 0.10371731]
[ 0.04204269 0.0956436 0.09888568]
[ 0.03731464 0.09075234 0.09494041]]
[[ 0.06118715 0.11372916 0.11734722]
[ 0.05836392 0.11109311 0.11487096]
[ 0.05730766 0.11033451 0.11389744]
...
[ 0.08053858 0.13347098 0.1353471 ]
[ 0.07257649 0.12523928 0.12785396]
[ 0.06597748 0.11849853 0.12171155]]
[[ 0.08274817 0.13277765 0.13612607]
[ 0.07926945 0.12944232 0.13284971]
[ 0.07801585 0.12841931 0.13166244]
...
[ 0.10768503 0.1580585 0.16038708]
[ 0.09740195 0.14752938 0.15032531]
[ 0.08886529 0.13887693 0.14201552]]
...
[[-0.06078916 -0.00828693 0.00368667]
[-0.06019123 -0.00802475 0.00514615]
[-0.05725315 -0.00517168 0.00854181]
...
[-0.0510629 0.00199292 0.00816277]
[-0.05603319 -0.00299458 0.00532976]
[-0.05935075 -0.00652474 0.00379198]]
[[-0.02253462 0.03045658 0.04034043]
[-0.02449172 0.0284123 0.03943559]
[-0.02411189 0.02879655 0.04010865]
...
[-0.01000494 0.04310312 0.04689666]
[-0.01499609 0.0381572 0.04424807]
[-0.01925799 0.03383339 0.04202073]]
[[ 0.00443651 0.05780583 0.06481148]
[ 0.00251653 0.05602315 0.06365975]
[ 0.00210891 0.05588182 0.06343263]
...
[ 0.01533515 0.06907414 0.07193381]
[ 0.01101077 0.06454635 0.06902351]
[ 0.00732829 0.0607167 0.06662704]]], shape=(466, 845, 3), dtype=float32)
2. Define the tiled gradient computation function
def get_gradients(model, img, size=150):
    print('get_gradients')
    # Randomly roll the image so that tile boundaries differ between calls
    shift_down, shift_right, img_rolled = random_roll(img, size)
    # Initialize the gradient accumulator to zero
    gradients = tf.zeros_like(img_rolled)
    # Generate the tile start coordinates
    xs = tf.range(0, img_rolled.shape[0], size)
    ys = tf.range(0, img_rolled.shape[1], size)
    for x in xs:
        for y in ys:
            # Compute the gradient for this tile
            with tf.GradientTape() as tape:
                tape.watch(img_rolled)
                # Extract the tile from the image (the last tile may be smaller than size)
                img_tile = img_rolled[x:x+size, y:y+size]
                loss_ = loss(img_tile, model)
            # Accumulate the gradient for the whole image
            gradients = gradients + tape.gradient(loss_, img_rolled)
    print('rolling the tiles back to their original positions')
    # Undo the roll so the gradient lines up with the original image
    gradients = tf.roll(tf.roll(gradients, -shift_right, axis=1), -shift_down, axis=0)
    # Normalize the gradient
    gradients /= tf.math.reduce_std(gradients) + 1e-8
    return gradients
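One guess at the zero-gradient/crash issue mentioned in this section's title (an assumption, not verified here): with size=150 and the 466-pixel-tall image left over from the previous section, the last tile start is 450, so the final tile is only 16 rows tall, which is smaller than InceptionV3 can comfortably handle. Dropping the last start coordinate would keep every tile a full size x size square; the rows skipped near the border are covered on other random-roll passes. A small illustration of the coordinate arithmetic:
# Hypothetical illustration of the tile-start arithmetic (h=466 and size=150 are
# taken from the earlier octave run; this is a guess about the failure, not a fix).
h, size = 466, 150
starts = tf.range(0, h, size)
print(starts.numpy())       # [  0 150 300 450]  -> the last tile is only 466-450=16 rows tall
print(starts[:-1].numpy())  # [  0 150 300]      -> only full 150x150 tiles remain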
3. Define the optimization function
def deep_dream_new(img, model, epochs=30, step_size=0.01, OCTAVE_SCALE=1.3, verbose=1):
    initial_shape = img.shape[:-1]
    for octave in range(-2, 3):
        # Resize the image for this octave
        new_size = tf.cast(tf.convert_to_tensor(initial_shape), tf.float32) * (OCTAVE_SCALE ** octave)
        img = tf.image.resize(img, tf.cast(new_size, tf.int32))
        for step in range(epochs):
            # Compute the tiled gradient of the image
            # (get_gradients returns a single, already normalized tensor)
            grads = get_gradients(model, img)
            # Gradient ascent step
            img += grads * step_size
            # Clip pixel values back into [-1, 1]
            img = tf.clip_by_value(img, -1, 1)
            # Progress messages
            if verbose == 1:
                if (step + 1) % 10 == 1:
                    print(f'Step{step+1}/{epochs}')
    return img
All of the parts above run, but a problem shows up at the end.
# Start dreaming
import time
start = time.time()
print('Start dreaming…')
# Run the optimization
dream_img = deep_dream_new(img, dream_model)
end = time.time()
print('Dream over…')
# Resize the result back to the original size
dream_img = tf.image.resize(dream_img, initial_shape)
# Convert back to uint8
dream_img = normalize_image(dream_img)
# Show the result
show_image(dream_img)
# Save the result
file_name = f'deepdream_{layer_names}.jpg'
save_image(dream_img, file_name)
print(f'Dream saved as {file_name}')
Start dreaming…
get_gradients