tf.keras.layers模块中的函数有哪些

其他教程   发布日期:2023年06月29日   浏览次数:550

本文小编为大家详细介绍“tf.keras.layers模块中的函数有哪些”,内容详细,步骤清晰,细节处理妥当,希望这篇“tf.keras.layers模块中的函数有哪些”文章能帮助大家解决疑惑,下面跟着小编的思路慢慢深入,一起来学习新知识吧。

    tf.keras.layers模块中的函数

    1. from __future__ import print_function as _print_function
    2. import sys as _sys
    3. from . import experimental
    4. from tensorflow.python.keras.engine.base_layer import Layer
    5. from tensorflow.python.keras.engine.input_layer import Input
    6. from tensorflow.python.keras.engine.input_layer import InputLayer
    7. from tensorflow.python.keras.engine.input_spec import InputSpec
    8. from tensorflow.python.keras.feature_column.dense_features_v2 import DenseFeatures
    9. from tensorflow.python.keras.layers.advanced_activations import ELU
    10. from tensorflow.python.keras.layers.advanced_activations import LeakyReLU
    11. from tensorflow.python.keras.layers.advanced_activations import PReLU
    12. from tensorflow.python.keras.layers.advanced_activations import ReLU
    13. from tensorflow.python.keras.layers.advanced_activations import Softmax
    14. from tensorflow.python.keras.layers.advanced_activations import ThresholdedReLU
    15. from tensorflow.python.keras.layers.convolutional import Conv1D
    16. from tensorflow.python.keras.layers.convolutional import Conv1D as Convolution1D
    17. from tensorflow.python.keras.layers.convolutional import Conv1DTranspose
    18. from tensorflow.python.keras.layers.convolutional import Conv1DTranspose as Convolution1DTranspose
    19. from tensorflow.python.keras.layers.convolutional import Conv2D
    20. from tensorflow.python.keras.layers.convolutional import Conv2D as Convolution2D
    21. from tensorflow.python.keras.layers.convolutional import Conv2DTranspose
    22. from tensorflow.python.keras.layers.convolutional import Conv2DTranspose as Convolution2DTranspose
    23. from tensorflow.python.keras.layers.convolutional import Conv3D
    24. from tensorflow.python.keras.layers.convolutional import Conv3D as Convolution3D
    25. from tensorflow.python.keras.layers.convolutional import Conv3DTranspose
    26. from tensorflow.python.keras.layers.convolutional import Conv3DTranspose as Convolution3DTranspose
    27. from tensorflow.python.keras.layers.convolutional import Cropping1D
    28. from tensorflow.python.keras.layers.convolutional import Cropping2D
    29. from tensorflow.python.keras.layers.convolutional import Cropping3D
    30. from tensorflow.python.keras.layers.convolutional import DepthwiseConv2D
    31. from tensorflow.python.keras.layers.convolutional import SeparableConv1D
    32. from tensorflow.python.keras.layers.convolutional import SeparableConv1D as SeparableConvolution1D
    33. from tensorflow.python.keras.layers.convolutional import SeparableConv2D
    34. from tensorflow.python.keras.layers.convolutional import SeparableConv2D as SeparableConvolution2D
    35. from tensorflow.python.keras.layers.convolutional import UpSampling1D
    36. from tensorflow.python.keras.layers.convolutional import UpSampling2D
    37. from tensorflow.python.keras.layers.convolutional import UpSampling3D
    38. from tensorflow.python.keras.layers.convolutional import ZeroPadding1D
    39. from tensorflow.python.keras.layers.convolutional import ZeroPadding2D
    40. from tensorflow.python.keras.layers.convolutional import ZeroPadding3D
    41. from tensorflow.python.keras.layers.convolutional_recurrent import ConvLSTM2D
    42. from tensorflow.python.keras.layers.core import Activation
    43. from tensorflow.python.keras.layers.core import ActivityRegularization
    44. from tensorflow.python.keras.layers.core import Dense
    45. from tensorflow.python.keras.layers.core import Dropout
    46. from tensorflow.python.keras.layers.core import Flatten
    47. from tensorflow.python.keras.layers.core import Lambda
    48. from tensorflow.python.keras.layers.core import Masking
    49. from tensorflow.python.keras.layers.core import Permute
    50. from tensorflow.python.keras.layers.core import RepeatVector
    51. from tensorflow.python.keras.layers.core import Reshape
    52. from tensorflow.python.keras.layers.core import SpatialDropout1D
    53. from tensorflow.python.keras.layers.core import SpatialDropout2D
    54. from tensorflow.python.keras.layers.core import SpatialDropout3D
    55. from tensorflow.python.keras.layers.dense_attention import AdditiveAttention
    56. from tensorflow.python.keras.layers.dense_attention import Attention
    57. from tensorflow.python.keras.layers.embeddings import Embedding
    58. from tensorflow.python.keras.layers.local import LocallyConnected1D
    59. from tensorflow.python.keras.layers.local import LocallyConnected2D
    60. from tensorflow.python.keras.layers.merge import Add
    61. from tensorflow.python.keras.layers.merge import Average
    62. from tensorflow.python.keras.layers.merge import Concatenate
    63. from tensorflow.python.keras.layers.merge import Dot
    64. from tensorflow.python.keras.layers.merge import Maximum
    65. from tensorflow.python.keras.layers.merge import Minimum
    66. from tensorflow.python.keras.layers.merge import Multiply
    67. from tensorflow.python.keras.layers.merge import Subtract
    68. from tensorflow.python.keras.layers.merge import add
    69. from tensorflow.python.keras.layers.merge import average
    70. from tensorflow.python.keras.layers.merge import concatenate
    71. from tensorflow.python.keras.layers.merge import dot
    72. from tensorflow.python.keras.layers.merge import maximum
    73. from tensorflow.python.keras.layers.merge import minimum
    74. from tensorflow.python.keras.layers.merge import multiply
    75. from tensorflow.python.keras.layers.merge import subtract
    76. from tensorflow.python.keras.layers.noise import AlphaDropout
    77. from tensorflow.python.keras.layers.noise import GaussianDropout
    78. from tensorflow.python.keras.layers.noise import GaussianNoise
    79. from tensorflow.python.keras.layers.normalization import LayerNormalization
    80. from tensorflow.python.keras.layers.normalization_v2 import BatchNormalization
    81. from tensorflow.python.keras.layers.pooling import AveragePooling1D
    82. from tensorflow.python.keras.layers.pooling import AveragePooling1D as AvgPool1D
    83. from tensorflow.python.keras.layers.pooling import AveragePooling2D
    84. from tensorflow.python.keras.layers.pooling import AveragePooling2D as AvgPool2D
    85. from tensorflow.python.keras.layers.pooling import AveragePooling3D
    86. from tensorflow.python.keras.layers.pooling import AveragePooling3D as AvgPool3D
    87. from tensorflow.python.keras.layers.pooling import GlobalAveragePooling1D
    88. from tensorflow.python.keras.layers.pooling import GlobalAveragePooling1D as GlobalAvgPool1D
    89. from tensorflow.python.keras.layers.pooling import GlobalAveragePooling2D
    90. from tensorflow.python.keras.layers.pooling import GlobalAveragePooling2D as GlobalAvgPool2D
    91. from tensorflow.python.keras.layers.pooling import GlobalAveragePooling3D
    92. from tensorflow.python.keras.layers.pooling import GlobalAveragePooling3D as GlobalAvgPool3D
    93. from tensorflow.python.keras.layers.pooling import GlobalMaxPooling1D
    94. from tensorflow.python.keras.layers.pooling import GlobalMaxPooling1D as GlobalMaxPool1D
    95. from tensorflow.python.keras.layers.pooling import GlobalMaxPooling2D
    96. from tensorflow.python.keras.layers.pooling import GlobalMaxPooling2D as GlobalMaxPool2D
    97. from tensorflow.python.keras.layers.pooling import GlobalMaxPooling3D
    98. from tensorflow.python.keras.layers.pooling import GlobalMaxPooling3D as GlobalMaxPool3D
    99. from tensorflow.python.keras.layers.pooling import MaxPooling1D
    100. from tensorflow.python.keras.layers.pooling import MaxPooling1D as MaxPool1D
    101. from tensorflow.python.keras.layers.pooling import MaxPooling2D
    102. from tensorflow.python.keras.layers.pooling import MaxPooling2D as MaxPool2D
    103. from tensorflow.python.keras.layers.pooling import MaxPooling3D
    104. from tensorflow.python.keras.layers.pooling import MaxPooling3D as MaxPool3D
    105. from tensorflow.python.keras.layers.recurrent import AbstractRNNCell
    106. from tensorflow.python.keras.layers.recurrent import RNN
    107. from tensorflow.python.keras.layers.recurrent import SimpleRNN
    108. from tensorflow.python.keras.layers.recurrent import SimpleRNNCell
    109. from tensorflow.python.keras.layers.recurrent import StackedRNNCells
    110. from tensorflow.python.keras.layers.recurrent_v2 import GRU
    111. from tensorflow.python.keras.layers.recurrent_v2 import GRUCell
    112. from tensorflow.python.keras.layers.recurrent_v2 import LSTM
    113. from tensorflow.python.keras.layers.recurrent_v2 import LSTMCell
    114. from tensorflow.python.keras.layers.serialization import deserialize
    115. from tensorflow.python.keras.layers.serialization import serialize
    116. from tensorflow.python.keras.layers.wrappers import Bidirectional
    117. from tensorflow.python.keras.layers.wrappers import TimeDistributed
    118. from tensorflow.python.keras.layers.wrappers import Wrapper
    119. del _print_function

    汇总tf.keras模型层layers

      1. tf.keras.layers.Dense()
      :密集连接层。参数个数 = 输入层特征数× 输出层特征数(weight)+ 输出层特征数(bias)
      2. tf.keras.layers.Activation()
      :激活函数层。一般放在Dense层后面,等价于在Dense层中指定activation。
      3. tf.keras.layers.Dropout()
      :随机置零层。训练期间以一定几率将输入置0,一种正则化手段。
      4. tf.keras.layers.BatchNormalization()
      :批标准化层。通过线性变换将输入批次缩放平移到稳定的均值和标准差。可以增强模型对输入不同分布的适应性,加快模型训练速度,有轻微正则化效果。一般在激活函数之前使用。
      5. tf.keras.layers.SpatialDropout2D()
      :空间随机置零层。训练期间以一定几率将整个特征图置0,一种正则化手段,有利于避免特征图之间过高的相关性。
      6. tf.keras.layers.Input()
      :输入层。通常使用Functional API方式构建模型时作为第一层。
      7. tf.keras.layers.DenseFeatures()
      :特征列接入层,用于接收一个特征列列表并产生一个密集连接层。
      8. tf.keras.layers.Flatten()
      :压平层,用于将多维张量压成一维。
      9. tf.keras.layers.Reshape()
      :形状重塑层,改变输入张量的形状。
      10. tf.keras.layers.Concatenate()
      :拼接层,将多个张量在某个维度上拼接。
      11. tf.keras.layers.Add()
      :加法层。
      12. tf.keras.layers.Subtract()
      :减法层。
      13. tf.keras.layers.Maximum()
      :取最大值层。
      14. tf.keras.layers.Minimum()
      :取最小值层。

    卷积网络相关层

      1. tf.keras.layers.Conv1D()
      :普通一维卷积,常用于文本。参数个数 = 输入通道数×卷积核尺寸(如3)×卷积核个数
      2. tf.keras.layers.Conv2D()
      :普通二维卷积,常用于图像。参数个数 = 输入通道数×卷积核尺寸(如3乘3)×卷积核个数
      3. tf.keras.layers.Conv3D()
      :普通三维卷积,常用于视频。参数个数 = 输入通道数×卷积核尺寸(如3乘3乘3)×卷积核个数
      4. tf.keras.layers.SeparableConv2D()
      :二维深度可分离卷积层。不同于普通卷积同时对区域和通道操作,深度可分离卷积先操作区域,再操作通道。即先对每个通道做独立卷积(即先操作区域),再用1乘1卷积跨通道组合(即再操作通道)。参数个数 = 输入通道数×卷积核尺寸 + 输入通道数×1×1×输出通道数。深度可分离卷积的参数数量一般远小于普通卷积,效果一般也更好。
      5. tf.keras.layers.DepthwiseConv2D()
      :二维深度卷积层。仅有SeparableConv2D前半部分操作,即只操作区域,不操作通道,一般输出通道数和输入通道数相同,但也可以通过设置depth_multiplier让输出通道为输入通道的若干倍数。输出通道数 = 输入通道数 × depth_multiplier。参数个数 = 输入通道数×卷积核尺寸× depth_multiplier。
      6. tf.keras.layers.Conv2DTranspose()
      :二维卷积转置层,俗称反卷积层。并非卷积的逆操作,但在卷积核相同的情况下,当其输入尺寸是卷积操作输出尺寸的情况下,卷积转置的输出尺寸恰好是卷积操作的输入尺寸。
      7. tf.keras.layers.LocallyConnected2D()
      :二维局部连接层。类似Conv2D,唯一的差别是没有空间上的权值共享,所以其参数个数远高于二维卷积。
      8. tf.keras.layers.MaxPooling2D()
      :二维最大池化层。也称作下采样层。池化层无参数,主要作用是降维。
      9. tf.keras.layers.AveragePooling2D()
      :二维平均池化层。
      10. tf.keras.layers.GlobalMaxPool2D()
      :全局最大池化层。每个通道仅保留一个值。一般从卷积层过渡到全连接层时使用,是Flatten的替代方案。
      11. tf.keras.layers.GlobalAvgPool2D()
      :全局平均池化层。每个通道仅保留一个值。

    示例代码一、搭建LeNet-5神经网络

    1. import tensorflow as tf
    2. from tensorflow.keras import datasets, layers, optimizers, Sequential, metrics, losses
    3. # 1.数据集准备
    4. (x, y), (x_val, y_val) = datasets.mnist.load_data() # 加载数据集,返回的是两个元组,分别表示训练集和测试集
    5. x = tf.convert_to_tensor(x, dtype=tf.float32) / 255. # 转换为张量,并缩放到0~1
    6. y = tf.convert_to_tensor(y, dtype=tf.int32) # 转换为张量(标签)
    7. print(x.shape, y.shape)
    8. train_dataset = tf.data.Dataset.from_tensor_slices((x, y)) # 构建数据集对象
    9. train_dataset = train_dataset.batch(32).repeat(10) # 设置批量训练的batch为32,要将训练集重复训练10遍
    10. # 2.搭建网络
    11. network = Sequential([ # 搭建网络容器
    12. layers.Conv2D(6, kernel_size=3, strides=1), # 第一个卷积层,6个3*3*1卷积核
    13. layers.MaxPooling2D(pool_size=2, strides=2), # 池化层,卷积核2*2,步长2
    14. layers.ReLU(), # 激活函数
    15. layers.Conv2D(16, kernel_size=3, strides=1), # 第二个卷积层,16个3*3*6卷积核
    16. layers.MaxPooling2D(pool_size=2, strides=2), # 池化层
    17. layers.ReLU(), # 激活函数
    18. layers.Flatten(), # 拉直,方便全连接层处理
    19. layers.Dense(120, activation='relu'), # 全连接层,120个节点
    20. layers.Dense(84, activation='relu'), # 全连接层,84个节点
    21. layers.Dense(10) # 输出层,10个节点
    22. ])
    23. network.build(input_shape=(None, 28, 28, 1)) # 定义输入,batch维度为None表示不固定,输入图片大小是28*28,通道数为1。
    24. network.summary() # 显示出每层的待优化参数量
    25. # 3.模型训练(计算梯度,迭代更新网络参数)
    26. optimizer = optimizers.SGD(lr=0.01) # 声明采用批量随机梯度下降方法,学习率=0.01
    27. acc_meter = metrics.Accuracy() # 新建accuracy测量器
    28. for step, (x, y) in enumerate(train_dataset): # 一次输入batch组数据进行训练
    29. with tf.GradientTape() as tape: # 构建梯度记录环境
    30. x = tf.reshape(x, (32, 28, 28, 1)) # 增加通道维度,[b,28,28]->[b,28,28,1]
    31. # x = tf.expand_dims(x, axis=3)
    32. out = network(x) # 输出[b, 10]
    33. y_onehot = tf.one_hot(y, depth=10) # one-hot编码
    34. loss = tf.square(out - y_onehot)
    35. loss = tf.reduce_sum(loss) / 32 # 定义均方差损失函数,注意此处的32对应为batch的大小
    36. grads = tape.gradient(loss, network.trainable_variables) # 计算网络中各个参数的梯度
    37. optimizer.apply_gradients(zip(grads, network.trainable_variables)) # 更新网络参数
    38. acc_meter.update_state(tf.argmax(out, axis=1), y) # 比较预测值与标签,并计算精确度(写入数据,进行求精度)
    39. if step % 200 == 0: # 每200个step,打印一次结果
    40. print('Step', step, ': Loss is: ', float(loss), ' Accuracy: ', acc_meter.result().numpy()) # 读取数据
    41. acc_meter.reset_states() # 清零测量器

    以上就是tf.keras.layers模块中的函数有哪些的详细内容,更多关于tf.keras.layers模块中的函数有哪些的资料请关注九品源码其它相关文章!