RML2016.10a (CNN, LSTM, CNN-LSTM, CNN-LSTM-Multi-Head-Attention, TensorFlow)

Includes all data and code; supports one-click execution in Jupyter.
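For reference on the data side, here is a minimal loading sketch. It assumes the standard RML2016.10a pickle file (RML2016.10a_dict.pkl), whose keys are (modulation, SNR) tuples and whose values are (N, 2, 128) I/Q sample arrays; the file path and label encoding below are assumptions, not necessarily this repository's own preprocessing.

import pickle
import numpy as np

# Assumed path to the standard RML2016.10a pickle file.
with open('RML2016.10a_dict.pkl', 'rb') as f:
    data = pickle.load(f, encoding='latin1')  # keys: (modulation, SNR) tuples

mods = sorted({mod for mod, snr in data.keys()})  # 11 modulation classes
X, y = [], []
for (mod, snr), samples in data.items():          # samples: (N, 2, 128) I/Q arrays
    X.append(samples)
    y.extend([mods.index(mod)] * samples.shape[0])

X = np.concatenate(X)[..., np.newaxis]            # -> (220000, 2, 128, 1) for Conv2D
y = np.array(y)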

from tensorflow import keras
from tensorflow.keras import layers

# Create the CLMHA (CNN-LSTM-Multi-Head-Attention) model
class CustomMultiHeadAttention(layers.Layer):
    """Self-attention wrapper: the same tensor serves as query, key, and value."""
    def __init__(self, d_model, num_heads):
        super(CustomMultiHeadAttention, self).__init__()
        # keras.layers.MultiHeadAttention takes num_heads first, then the
        # per-head key dimension.
        self.mha = layers.MultiHeadAttention(num_heads=num_heads,
                                             key_dim=d_model // num_heads)

    def call(self, inputs):
        # Self-attention: query, key, and value are all the same sequence.
        # keras.layers.MultiHeadAttention returns only the output tensor.
        return self.mha(query=inputs, value=inputs, key=inputs)

num_heads = 8
d_model = 128

# Build the CLMHA model
CLMHA_model = keras.Sequential([
    # (2, 128, 1) I/Q input -> 1x3 convolutions along the time axis
    layers.Conv2D(32, (1, 3), activation='relu', input_shape=(2, 128, 1)),
    layers.MaxPooling2D((1, 2)),                   # -> (2, 63, 32)
    layers.Reshape((32, -1)),                      # flatten to a 32-step sequence
    layers.LSTM(64, return_sequences=True),        # -> (32, 64)
    CustomMultiHeadAttention(d_model, num_heads),  # self-attention over time steps
    layers.GlobalAveragePooling1D(),
    layers.Dropout(0.5),
    # The original listing is truncated here; an L2 kernel regularizer is
    # assumed for the penultimate Dense layer, and the strength is a guess.
    layers.Dense(64, activation='relu',
                 kernel_regularizer=keras.regularizers.l2(0.01)),
    layers.Dense(11, activation='softmax'),        # 11 modulation classes in RML2016.10a
])
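To check that the model assembles and trains end to end, a minimal training sketch follows. The train/test split, optimizer, batch size, and epoch count are illustrative assumptions, not this repository's actual training configuration; X and y are the arrays from the loading sketch above.

from sklearn.model_selection import train_test_split

# Stratified 80/20 split (assumed ratio).
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, stratify=y, random_state=42)

CLMHA_model.compile(optimizer='adam',
                    loss='sparse_categorical_crossentropy',  # integer labels
                    metrics=['accuracy'])
CLMHA_model.summary()

history = CLMHA_model.fit(X_train, y_train,
                          validation_data=(X_test, y_test),
                          epochs=50, batch_size=256)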
