import pandas_datareader.data as web
import datetime
start = datetime.datetime(2000,1,1)
end = datetime.datetime(2024,1,1)
df = web.DataReader('GOOGL', 'stooq',start,end)
def Stock_Price_LSTM_Data_Precesing(df,mem_his_days,pre_days):
df.dropna(inplace=True)
df.sort_index(inplace=True)
df['label']= df['Close'].shift(-pre_days)
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
sca_X=scaler.fit_transform(df.iloc[:,:-1])
mem_his_days = 10
from collections import deque
deq = deque(maxlen=mem_his_days)
X = []
for i in sca_X:
deq.append(list(i))
if len(deq)==mem_his_days:
X.append(list(deq))
X_lately = X[-pre_days:]
X = X[:-pre_days]
y = df['label'].values[mem_his_days-1:-pre_days]
import numpy as np
X = np.array(X)
y = np.array(y)
return X,y,X_lately
X,y,X_lately = Stock_Price_LSTM_Data_Precesing(df,5,10)
pre_days = 10
# mem_days=[5,10,15]
# lstm_layers=[1,2,3]
# dense_layers=[1,2,3]
# units = [16,32]
mem_days =[5]
lstm_layers = [1]
dense_layers = [1]
units = [32]
from tensorflow.keras.callbacks import ModelCheckpoint
for the_mem_days in mem_days:
for the_lstm_layers in lstm_layers:
for the_dense_layers in dense_layers:
for the_units in units:
filepath=f"./chankankushuju/{{val_mape:.2f}}{{epoch:02d}}men{the_lstm_layers}lstm{the_lstm_layers}dense{the_dense_layers}unit{the_units}.weights.h5"
checkpoint = ModelCheckpoint(
filepath=filepath,
save_weights_only=False,
monitor='val_mape',
mode='min',
save_best_only=True)
X,y,X_lately = Stock_Price_LSTM_Data_Precesing(df,the_mem_days,pre_days)
from sklearn.model_selection import train_test_split
X_train,X_test,y_train,y_test = train_test_split(X,y,shuffle=False,test_size=0.1)
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM,Dense,Dropout
model = Sequential()
model.add(LSTM(the_units,input_shape=X.shape[1:],activation='relu',return_sequences=True))
model.add(Dropout(0.1))
for i in range(the_lstm_layers):
model.add(LSTM(the_units,activation='relu',return_sequences=True))
model.add(Dropout(0.1))
model.add(LSTM(the_units,activation='relu'))
model.add(Dropout(0.1))
for i in range(the_dense_layers):
model.add(Dense(the_units,activation='relu'))
model.add(Dropout(0.1))
model.add(Dense(1))
model.compile(optimizer='adam',
loss='mse',
metrics=['mape'])
model.fit(X_train,y_train,batch_size=32,epochs=50,validation_data=(X_test,y_test),callbacks=[checkpoint])
from tensorflow.keras.models import load_model
best_model = load_model('./chankankushuju/10.3250men1lstm1dense1unit32.weights.h5')
这个是我的代码
ValueError Traceback (most recent call last)
Cell In[29], line 16
14 for the_units in units:
15 filepath=f"./chankankushuju/{{val_mape:.2f}}{{epoch:02d}}men{the_lstm_layers}lstm{the_lstm_layers}dense{the_dense_layers}unit{the_units}.weights.h5"
---> 16 checkpoint = ModelCheckpoint(
17 filepath=filepath,
18 save_weights_only=False,
19 monitor='val_mape',
20 mode='min',
21 save_best_only=True)
22 X,y,X_lately = Stock_Price_LSTM_Data_Precesing(df,the_mem_days,pre_days)
23 from sklearn.model_selection import train_test_split
File D:\anaconda\Lib\site-packages\keras\src\callbacks\model_checkpoint.py:191, in ModelCheckpoint.__init__(self, filepath, monitor, verbose, save_best_only, save_weights_only, mode, save_freq, initial_value_threshold)
189 else:
190 if not self.filepath.endswith(".keras"):
--> 191 raise ValueError(
192 "The filepath provided must end in `.keras` "
193 "(Keras model format). Received: "
194 f"filepath={self.filepath}"
195 )
ValueError: The filepath provided must end in `.keras` (Keras model format). Received: filepath=./chankankushuju/{val_mape:.2f}{epoch:02d}men1lstm1dense1unit32.weights.h5
找到了拟合最好的一个文件10.3250men1lstm1dense1unit32.weights.h5,想要查看这个文件的数据,但是报错了,这个错误,这是什么问题呢,应该怎么解决呢