from tensorflow.keras import layers
from tensorflow.keras import backend as K

x = layers.Conv2D(64, (3, 3), padding='same')(input_tensor)
in_channels = K.int_shape(x)[-1]

### SE (Squeeze-and-Excitation) attention block
in_tensor = x
# Squeeze: collapse each channel's spatial map into a single value
x = layers.GlobalAveragePooling2D()(x)        # (batch, in_channels)
x = layers.Reshape((1, 1, in_channels))(x)    # restore 1x1 spatial dims so 1x1 convolutions can be applied
ratio1 = 0.5                                  # channel reduction factor for the excitation bottleneck
# Excitation: a bottleneck of two 1x1 convolutions produces per-channel weights
out = layers.Conv2D(filters=int(in_channels * ratio1), kernel_size=(1, 1))(x)
out = layers.Activation('relu')(out)
out = layers.Conv2D(filters=in_channels, kernel_size=(1, 1))(out)
out = layers.Activation('sigmoid')(out)       # (batch, 1, 1, in_channels), weights in (0, 1)
# Scale: reweight the input feature map channel-wise (broadcasts over height and width)
scale = layers.Multiply()([in_tensor, out])
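
# ---------------------------------------------------------------------------
# Minimal sketch of the same squeeze-and-excitation pattern wrapped into a
# reusable helper. The function name `se_block`, the default ratio, and the
# 32x32x3 input shape are illustrative assumptions, not part of the original
# snippet.
from tensorflow import keras

def se_block(feature_map, ratio=0.5):
    """Apply squeeze-and-excitation channel attention to a 4-D feature map."""
    channels = feature_map.shape[-1]
    s = layers.GlobalAveragePooling2D()(feature_map)                          # squeeze
    s = layers.Reshape((1, 1, channels))(s)
    e = layers.Conv2D(int(channels * ratio), (1, 1), activation='relu')(s)    # bottleneck
    e = layers.Conv2D(channels, (1, 1), activation='sigmoid')(e)              # per-channel weights
    return layers.Multiply()([feature_map, e])                                # channel-wise reweighting

# Example usage (input shape assumed for illustration):
inputs = keras.Input(shape=(32, 32, 3))
features = layers.Conv2D(64, (3, 3), padding='same')(inputs)
model = keras.Model(inputs, se_block(features))
model.summary()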