Preface:
If you are not yet familiar with how LSTMs work and how they are structured, the following two posts are recommended; that material is not repeated here:
From deep learning to LSTM: https://blog.csdn.net/hz371071798/article/details/82532183
LSTM structure explained: https://blog.csdn.net/zhangbaoanhadoop/article/details/81952284
On to the main part, a quick walk through the implementation:
Note: as before, the data comes straight from the examples shipped with Facebook's Prophet time-series package.
Imports:
from __future__ import print_function
import numpy as np
import pandas as pd
from keras.layers import Dense, Activation, Dropout, LSTM
# from keras.layers.recurrent import LSTM
from keras.models import Sequential
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
import math
import warnings
warnings.filterwarnings('ignore')
plt.style.use('seaborn-poster')
Helper functions:
def create_dataset(dataset, look_back):
    '''
    Convert an array of values into a supervised time-series dataset.
    :param dataset: 2-D array of shape (n_samples, 1)
    :param look_back: window size, i.e. how many past steps feed each prediction
    :return: dataX with look_back consecutive values per row, and dataY with the
             value immediately following each window
    '''
    dataX, dataY = [], []
    for i in range(len(dataset) - look_back - 1):
        a = dataset[i:(i + look_back), 0]
        dataX.append(a)
        dataY.append(dataset[i + look_back, 0])
    return np.array(dataX), np.array(dataY)

def return_rmse(test, predicted):
    rmse = math.sqrt(mean_squared_error(test, predicted))
    return rmse
Main routine: main_run
# Load data
df = pd.read_csv('data/example_air_passengers.csv')
df.ds = pd.to_datetime(df.ds)
df.index = df.ds
df.drop(['ds'], axis=1, inplace=True)
print(df.head())
# print(df.info())
# Basic parameters; adjust these initial values as needed:
look_back = 7
epochs = 1000
batch_size = 32
# convert type
air_passengers_num = df.y.values.astype('float32')
# reshape to column vector
air_passengers_num = air_passengers_num.reshape(len(air_passengers_num), 1)
print(air_passengers_num)
Normalization (min-max scaling):
# normalize
scaler = MinMaxScaler(feature_range=(0, 1))
air_passengers_num = scaler.fit_transform(air_passengers_num)
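As a quick reminder of what MinMaxScaler does (toy values only): it maps the minimum of the fitted data to 0 and the maximum to 1, and inverse_transform undoes the mapping, which is exactly what is used later to read the predictions back as passenger counts:
demo_scaler = MinMaxScaler(feature_range=(0, 1))
scaled_demo = demo_scaler.fit_transform(np.array([[100.], [200.], [300.]]))
print(scaled_demo.ravel())                                  # [0.  0.5 1. ]
print(demo_scaler.inverse_transform(scaled_demo).ravel())   # [100. 200. 300.]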
Split the dataset and reshape:
# split into train / test sets; a roughly two-thirds training split is assumed here
train_size = int(len(air_passengers_num) * 0.67)
train, test = air_passengers_num[0:train_size, :], air_passengers_num[train_size:len(air_passengers_num), :]
# build the supervised windows
trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)
# reshape to the 3-D format expected by Keras LSTM layers: [samples, time steps, features]
trainX = np.reshape(trainX, (trainX.shape[0], trainX.shape[1], 1))
testX = np.reshape(testX, (testX.shape[0], testX.shape[1], 1))
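A quick shape check (the exact counts depend on the split above) confirms the layout the LSTM layer expects:
print(trainX.shape, trainY.shape)  # (num_train_windows, look_back, 1) and (num_train_windows,)
print(testX.shape, testY.shape)    # (num_test_windows, look_back, 1) and (num_test_windows,)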
Fit the LSTM:
model = Sequential()
# one LSTM layer with 4 units; each input sample has look_back time steps and 1 feature
model.add(LSTM(4, input_shape=(look_back, 1)))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam')
model.fit(trainX, trainY, epochs=epochs, batch_size=batch_size)
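With 1000 epochs on such a small dataset, the model can easily overfit. One optional refinement, not part of the original code, is Keras's EarlyStopping callback; a minimal sketch:
from keras.callbacks import EarlyStopping
# stop once the training loss has not improved for 20 consecutive epochs
early_stop = EarlyStopping(monitor='loss', patience=20)
model.fit(trainX, trainY, epochs=epochs, batch_size=batch_size, callbacks=[early_stop])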
# make predictions on both the training and the test windows
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)
# invert predictions and targets back to the original (unscaled) passenger counts
trainPredict = scaler.inverse_transform(trainPredict)
trainY = scaler.inverse_transform([trainY])
testPredict = scaler.inverse_transform(testPredict)
testY = scaler.inverse_transform([testY])
# calculate rmse
trainScore = return_rmse(trainY[0], trainPredict[:, 0])
print('Train Score: {:.2f} RMSE'.format(trainScore))
testScore = return_rmse(testY[0], testPredict[:, 0])
print('Test Score: {:.2f} RMSE'.format(testScore))
Plot:
# shift predictions of training data for plotting
trainPredictPlot = np.empty_like(air_passengers_num)
trainPredictPlot[:, :] = np.nan
trainPredictPlot[look_back:len(trainPredict) + look_back, :] = trainPredict
# shift predictions of test data for plotting
testPredictPlot = np.empty_like(air_passengers_num)
testPredictPlot[:, :] = np.nan
# Something to think about: why is look_back multiplied by 2 here?
testPredictPlot[len(trainPredict) + (look_back*2) + 1:len(air_passengers_num) - 1, :] = testPredict
# plot baseline and predictions
plt.plot(scaler.inverse_transform(air_passengers_num))
plt.plot(trainPredictPlot)
plt.plot(testPredictPlot)
plt.show()
Result:
Train Score: 24.93 RMSE
Test Score: 55.86 RMSE
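The scores above only measure fit on held-out history. To roll the model forward past the end of the data, one common approach (not shown in the original post) is an iterative forecast that feeds each prediction back in as the newest step of the window; a minimal sketch, with n_future chosen purely for illustration:
# forecast n_future steps beyond the data by sliding the look_back window forward
n_future = 12
window = air_passengers_num[-look_back:].reshape(1, look_back, 1)  # last window, still in scaled space
future_scaled = []
for _ in range(n_future):
    next_val = model.predict(window)[0, 0]
    future_scaled.append(next_val)
    # drop the oldest step and append the new prediction
    window = np.append(window[:, 1:, :], [[[next_val]]], axis=1)
# map the forecasts back to the original passenger scale
future = scaler.inverse_transform(np.array(future_scaled).reshape(-1, 1))
print(future.ravel())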