chore: 添加Stock-Prediction-Models项目文件

添加了Stock-Prediction-Models项目的多个文件,包括数据集、模型代码、README文档和CSS样式文件。这些文件用于股票预测模型的训练和展示,涵盖了LSTM、GRU等深度学习模型的应用。
This commit is contained in:
2025-04-27 16:28:06 +08:00
parent f57150dae8
commit 2757a4d0d2
200 changed files with 79402 additions and 0 deletions

View File

@@ -0,0 +1,41 @@
import tensorflow as tf
import numpy as np
import time
def reducedimension(input_, dimension = 2, learning_rate = 0.01, hidden_layer = 256, epoch = 20):
    """Reduce the dimensionality of ``input_`` with a small sigmoid autoencoder.

    Builds a 4-layer autoencoder (input -> hidden_layer -> dimension ->
    hidden_layer -> input), trains it full-batch with RMSProp on the mean
    squared reconstruction error, and returns the bottleneck activations.
    Uses the TensorFlow 1.x graph API (``tf.placeholder`` / ``Session``).

    Parameters
    ----------
    input_ : 2-D array of shape (num_samples, num_features).
    dimension : size of the bottleneck layer, i.e. the output dimensionality.
    learning_rate : RMSProp learning rate.
    hidden_layer : width of the intermediate encoder/decoder layer.
    epoch : number of full-batch training iterations.

    Returns
    -------
    numpy.ndarray of shape (num_samples, dimension) — the encoded vectors.
    """
    input_size = input_.shape[1]
    X = tf.placeholder("float", [None, input_size])
    weights = {
        'encoder_h1': tf.Variable(tf.random_normal([input_size, hidden_layer])),
        'encoder_h2': tf.Variable(tf.random_normal([hidden_layer, dimension])),
        'decoder_h1': tf.Variable(tf.random_normal([dimension, hidden_layer])),
        'decoder_h2': tf.Variable(tf.random_normal([hidden_layer, input_size])),
    }
    biases = {
        'encoder_b1': tf.Variable(tf.random_normal([hidden_layer])),
        'encoder_b2': tf.Variable(tf.random_normal([dimension])),
        'decoder_b1': tf.Variable(tf.random_normal([hidden_layer])),
        'decoder_b2': tf.Variable(tf.random_normal([input_size])),
    }
    first_layer_encoder = tf.nn.sigmoid(tf.add(tf.matmul(X, weights['encoder_h1']), biases['encoder_b1']))
    second_layer_encoder = tf.nn.sigmoid(tf.add(tf.matmul(first_layer_encoder, weights['encoder_h2']), biases['encoder_b2']))
    first_layer_decoder = tf.nn.sigmoid(tf.add(tf.matmul(second_layer_encoder, weights['decoder_h1']), biases['decoder_b1']))
    second_layer_decoder = tf.nn.sigmoid(tf.add(tf.matmul(first_layer_decoder, weights['decoder_h2']), biases['decoder_b2']))
    cost = tf.reduce_mean(tf.pow(X - second_layer_decoder, 2))
    optimizer = tf.train.RMSPropOptimizer(learning_rate).minimize(cost)
    # Plain Session with explicit sess.run(...) calls; InteractiveSession's
    # only extra feature (installing itself as default) was unused here.
    sess = tf.Session()
    try:
        sess.run(tf.global_variables_initializer())
        for i in range(epoch):
            last_time = time.time()
            _, loss = sess.run([optimizer, cost], feed_dict={X: input_})
            if (i + 1) % 10 == 0:
                print('epoch:', i + 1, 'loss:', loss, 'time:', time.time() - last_time)
        vectors = sess.run(second_layer_encoder, feed_dict={X: input_})
    finally:
        # Fix: the original leaked the session (never closed it) and reset the
        # default graph while the session was still open.
        sess.close()
        tf.reset_default_graph()
    return vectors

View File

@@ -0,0 +1,19 @@
import tensorflow as tf
import numpy as np
class Model:
    """Stacked-LSTM regressor built with the TensorFlow 1.x graph API.

    Constructing an instance only builds the graph; training/inference is
    done by running ``self.optimizer`` / ``self.logits`` in a session while
    feeding ``self.X``, ``self.Y`` and ``self.hidden_layer``.
    """

    def __init__(self, learning_rate, num_layers, size, size_layer, output_size, forget_bias = 0.1):
        """Build the graph.

        Parameters
        ----------
        learning_rate : Adam learning rate.
        num_layers : number of stacked LSTM layers.
        size : feature dimension of each input timestep.
        size_layer : number of units per LSTM layer.
        output_size : dimension of the regression target.
        forget_bias : despite the name, this is used as the dropout
            *output_keep_prob* on the RNN outputs (default 0.1).
        """
        # Input sequences (batch, time, size), targets (batch, output_size).
        self.X = tf.placeholder(tf.float32, (None, None, size))
        self.Y = tf.placeholder(tf.float32, (None, output_size))
        # Flat initial state: state_is_tuple=False concatenates each layer's
        # (c, h) pair, hence num_layers * 2 * size_layer.
        self.hidden_layer = tf.placeholder(tf.float32, (None, num_layers * 2 * size_layer))

        def make_cell(units):
            return tf.nn.rnn_cell.LSTMCell(units, state_is_tuple = False)

        stacked_cells = tf.nn.rnn_cell.MultiRNNCell(
            [make_cell(size_layer) for _ in range(num_layers)],
            state_is_tuple = False,
        )
        dropped = tf.contrib.rnn.DropoutWrapper(stacked_cells, output_keep_prob = forget_bias)
        self.outputs, self.last_state = tf.nn.dynamic_rnn(
            dropped, self.X, initial_state = self.hidden_layer, dtype = tf.float32
        )

        # Linear readout on top of the RNN outputs.
        # NOTE(review): with time_major=False, outputs[-1] indexes the last
        # element of the *batch* axis, not the last timestep — this matches
        # the repo's batch-size-1 usage; confirm before using larger batches.
        readout_w = tf.Variable(tf.random_normal((size_layer, output_size)))
        readout_b = tf.Variable(tf.random_normal([output_size]))
        self.logits = tf.matmul(self.outputs[-1], readout_w) + readout_b

        # Mean squared error, minimized with Adam.
        self.cost = tf.reduce_mean(tf.square(self.Y - self.logits))
        self.optimizer = tf.train.AdamOptimizer(learning_rate).minimize(self.cost)