[AI Notes] Section 5: Based on TensorFlow 2 (Part 2)


For the data download, refer to:
Download stock data
The data is a CSV file structured as shown in the figure below.
[Figure: CSV data structure]
Below is the code implementation of the prediction model:
1. Import the dependency modules:
import tensorflow as tf
import os
import sys
import numpy as np
import time

print("TensorFlow version: {}".format(tf.version.VERSION))
print("Eager execution: {}".format(tf.executing_eagerly()))

# Root directory
ROOT_DIR = os.path.abspath("./")
# GuPiaoLoader (the stock data loader)
sys.path.append(ROOT_DIR)
from DataLoader.gupiao_loader import GuPiaoLoader
2. Encoder: encodes the history records to extract their features:
class Encoder(tf.keras.Model):
    '''Encoder: extracts features from the known history.'''

    def __init__(self):
        super(Encoder, self).__init__()
        # Two stacked GRU layers
        self.gru1 = tf.keras.layers.GRU(128, return_sequences=True, return_state=True,
                                        activation=tf.keras.activations.relu, name='feature_gru1')
        self.gru2 = tf.keras.layers.GRU(128, return_state=True,
                                        activation=tf.keras.activations.relu, name='feature_gru2')

    def call(self, input_data):
        '''
        input_data: batch of known history data (None, history_size, 15)
        '''
        x, gru_state1 = self.gru1(input_data)
        x, gru_state2 = self.gru2(x)
        return x, gru_state1, gru_state2
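A quick shape check of the encoder (a minimal sketch; the batch size of 4, a history_size of 30, and the random inputs are placeholder values for illustration, not from the original data):

encoder = Encoder()
# Dummy batch: 4 samples, 30 history steps, 15 features per step
dummy_history = tf.random.normal((4, 30, 15))
output, state1, state2 = encoder(dummy_history)
print(output.shape)  # (4, 128), last output of gru2
print(state1.shape)  # (4, 128), final state of gru1
print(state2.shape)  # (4, 128), final state of gru2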

3. Attention module, used for memory retention in long-sequence prediction:
class BahdanauAttention(tf.keras.Model):
    '''Attention module'''

    def __init__(self, units):
        super(BahdanauAttention, self).__init__()
        self.W1 = tf.keras.layers.Dense(units, name='feature_denseW1')
        self.W2 = tf.keras.layers.Dense(units, name='feature_denseW2')
        self.V = tf.keras.layers.Dense(1, name='feature_denseV')

    def call(self, query, values):
        '''
        query: previous state (batch_size, hidden_size)
        values: encoder output, i.e. the memory (batch_size, 1, hidden_size)
        '''
        # hidden shape == (batch_size, hidden_size)
        # hidden_with_time_axis shape == (batch_size, 1, hidden_size)
        hidden_with_time_axis = tf.expand_dims(query, 1)
        # score (batch_size, 1, 1): computed from the previous state and the memory,
        # used to decide how much of the memory to keep
        score = self.V(tf.nn.tanh(self.W1(values) + self.W2(hidden_with_time_axis)))
        # attention weights in the 0-1 range, shape (batch_size, 1, 1)
        attention_weights = tf.nn.softmax(score, axis=1)
        # the attention weights decide how much memory information is kept
        context_vector = attention_weights * values
        # sum over the time axis -> (batch_size, hidden_size)
        context_vector = tf.reduce_sum(context_vector, axis=1)
        return context_vector, attention_weights
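The module maps a decoder state plus the encoder memory to a context vector. A minimal shape check (the batch size of 4 and the random tensors are assumptions for illustration):

attention = BahdanauAttention(128)
dummy_state = tf.random.normal((4, 128))      # previous GRU state
dummy_memory = tf.random.normal((4, 1, 128))  # encoder output with a time axis
context, weights = attention(dummy_state, dummy_memory)
print(context.shape)  # (4, 128)
print(weights.shape)  # (4, 1, 1)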
4. Decoder: predicts the sequence data step by step in a loop, using two stacked GRU layers:
class Decoder(tf.keras.Model):
    '''Decoder'''

    def __init__(self, class_num):
        super(Decoder, self).__init__()
        # Attention modules, one per GRU state
        self.attention1 = BahdanauAttention(128)
        self.attention2 = BahdanauAttention(128)
        # Two stacked GRU layers
        self.gru1 = tf.keras.layers.GRU(128, return_sequences=True, return_state=True,
                                        activation=tf.keras.activations.relu, name='feature_gru1')
        self.gru2 = tf.keras.layers.GRU(128, return_state=True,
                                        activation=tf.keras.activations.relu, name='feature_gru2')
        # Output layer
        self.dense1 = tf.keras.layers.Dense(class_num, name='feature_dense1')

    def call(self, input_data, gru_state1, gru_state2, encoder_output):
        '''
        input_data: single-step input (None, 15); it must be rank 2 so the
            concat with the rank-2 context vectors below works
        gru_state1: previous state of gru1 (None, 128)
        gru_state2: previous state of gru2 (None, 128)
        encoder_output: encoder memory, features extracted from the known data
            (None, 1, 128); if the encoder returns (None, 128), expand it with
            tf.expand_dims(..., 1) first
        '''
        context_vector1, _ = self.attention1(gru_state1, encoder_output)
        context_vector2, _ = self.attention2(gru_state2, encoder_output)
        x = tf.concat([context_vector1, context_vector2, input_data], axis=-1)
        x = tf.expand_dims(x, 1)
        x, gru_state1 = self.gru1(x, initial_state=gru_state1)
        x, gru_state2 = self.gru2(x, initial_state=gru_state2)
        x = self.dense1(x)
        return x, gru_state1, gru_state2
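How the three pieces could be chained for multi-step forecasting, as a minimal sketch rather than the author's training code: class_num is assumed to be 15 so each prediction can be fed back as the next input, and the 5-step horizon is arbitrary.

encoder = Encoder()
decoder = Decoder(class_num=15)

history = tf.random.normal((4, 30, 15))   # batch of known history
enc_out, state1, state2 = encoder(history)
memory = tf.expand_dims(enc_out, 1)       # (4, 1, 128), attention memory

step_input = history[:, -1, :]            # last known step, (4, 15)
predictions = []
for _ in range(5):                        # predict 5 future steps
    step_input, state1, state2 = decoder(step_input, state1, state2, memory)
    predictions.append(step_input)
forecast = tf.stack(predictions, axis=1)  # (4, 5, 15)
print(forecast.shape)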