Python时间序列LSTM预测系列教程(11)-多步预测

来源:互联网 发布:酷狗 mac版 歌手写真 编辑:程序博客网 时间:2024/06/05 07:30

Multi-Step LSTM预测(2)


教程原文链接


关联教程:

Python时间序列LSTM预测系列教程(10)-多步预测

Python时间序列LSTM预测系列教程(11)-多步预测


多步预测的LSTM网络


数据准备

1、变成具有稳定性数据
2、缩放数据
Python时间序列LSTM预测系列教程(2)-单变量

LSTM模型预测过程


1、数据预处理,准备数据
2、定义模型
3、训练模型
4、预测
5、数据逆变换
6、评估

代码


from datetime import datetime  # pandas.datetime was removed in pandas >= 2.0
from math import sqrt

from matplotlib import pyplot
from numpy import array
from pandas import DataFrame
from pandas import Series
from pandas import concat
from pandas import read_csv
from sklearn.metrics import mean_squared_error
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM


# date-time parsing function for loading the dataset
def parser(x):
    """Parse a month token like '1-01' from shampoo-sales.csv into a datetime.

    The CSV stores years as single digits, so '190' is prepended to
    reconstruct e.g. '1901-01'.
    """
    return datetime.strptime('190' + x, '%Y-%m')


# convert time series into supervised learning problem
def series_to_supervised(data, n_in=1, n_out=1, dropnan=True):
    """Frame a time series as a supervised-learning table.

    Args:
        data: list or 2-D array of observations (rows = time steps).
        n_in: number of lag observations used as input (t-n_in ... t-1).
        n_out: number of future observations used as output (t ... t+n_out-1).
        dropnan: drop rows containing NaN produced by the shifting.

    Returns:
        DataFrame with columns named 'var%d(t-%d)' / 'var%d(t+%d)'.
    """
    n_vars = 1 if type(data) is list else data.shape[1]
    df = DataFrame(data)
    cols, names = list(), list()
    # input sequence (t-n, ... t-1)
    for i in range(n_in, 0, -1):
        cols.append(df.shift(i))
        names += [('var%d(t-%d)' % (j + 1, i)) for j in range(n_vars)]
    # forecast sequence (t, t+1, ... t+n)
    for i in range(0, n_out):
        cols.append(df.shift(-i))
        if i == 0:
            names += [('var%d(t)' % (j + 1)) for j in range(n_vars)]
        else:
            names += [('var%d(t+%d)' % (j + 1, i)) for j in range(n_vars)]
    # put it all together
    agg = concat(cols, axis=1)
    agg.columns = names
    # drop rows with NaN values
    if dropnan:
        agg.dropna(inplace=True)
    return agg


# create a differenced series
def difference(dataset, interval=1):
    """Return a Series of dataset[i] - dataset[i - interval] to remove trend."""
    diff = list()
    for i in range(interval, len(dataset)):
        value = dataset[i] - dataset[i - interval]
        diff.append(value)
    return Series(diff)


# transform series into train and test sets for supervised learning
def prepare_data(series, n_test, n_lag, n_seq):
    """Difference, rescale to [-1, 1], and frame the series for the LSTM.

    Returns:
        (scaler, train, test): the fitted MinMaxScaler plus supervised
        arrays, with the last n_test rows held out as the test set.
    """
    # extract raw values
    raw_values = series.values
    # transform data to be stationary
    diff_series = difference(raw_values, 1)
    diff_values = diff_series.values
    diff_values = diff_values.reshape(len(diff_values), 1)
    # rescale values to -1, 1
    scaler = MinMaxScaler(feature_range=(-1, 1))
    scaled_values = scaler.fit_transform(diff_values)
    scaled_values = scaled_values.reshape(len(scaled_values), 1)
    # transform into supervised learning problem X, y
    supervised = series_to_supervised(scaled_values, n_lag, n_seq)
    supervised_values = supervised.values
    # split into train and test sets
    train, test = supervised_values[0:-n_test], supervised_values[-n_test:]
    return scaler, train, test


# fit an LSTM network to training data
def fit_lstm(train, n_lag, n_seq, n_batch, nb_epoch, n_neurons):
    """Train a stateful LSTM on the supervised training data.

    The network maps n_lag inputs to n_seq outputs.  State is reset
    manually between epochs because shuffle=False and stateful=True.
    """
    # reshape training into [samples, timesteps, features]
    X, y = train[:, 0:n_lag], train[:, n_lag:]
    X = X.reshape(X.shape[0], 1, X.shape[1])
    # design network
    model = Sequential()
    model.add(LSTM(n_neurons, batch_input_shape=(n_batch, X.shape[1], X.shape[2]),
                   stateful=True))
    model.add(Dense(y.shape[1]))
    model.compile(loss='mean_squared_error', optimizer='adam')
    # fit network, resetting the LSTM state after each epoch
    for i in range(nb_epoch):
        model.fit(X, y, epochs=1, batch_size=n_batch, verbose=0, shuffle=False)
        model.reset_states()
    return model


# make one forecast with an LSTM
def forecast_lstm(model, X, n_batch):
    """Forecast n_seq steps from one input pattern X; returns a flat list."""
    # reshape input pattern to [samples, timesteps, features]
    X = X.reshape(1, 1, len(X))
    # make forecast
    forecast = model.predict(X, batch_size=n_batch)
    # convert to array
    return [x for x in forecast[0, :]]


# make forecasts for the entire test set
def make_forecasts(model, n_batch, train, test, n_lag, n_seq):
    """Forecast each test row with the LSTM; returns a list of forecasts.

    NOTE(review): `train` is unused here; kept for interface compatibility
    with the tutorial's persistence-model variant of this function.
    """
    forecasts = list()
    for i in range(len(test)):
        X, y = test[i, 0:n_lag], test[i, n_lag:]
        # make forecast
        forecast = forecast_lstm(model, X, n_batch)
        # store the forecast
        forecasts.append(forecast)
    return forecasts


# invert differenced forecast
def inverse_difference(last_ob, forecast):
    """Undo first-order differencing for one forecast given the last observation."""
    # invert first forecast
    inverted = list()
    inverted.append(forecast[0] + last_ob)
    # propagate difference forecast using inverted first value
    for i in range(1, len(forecast)):
        inverted.append(forecast[i] + inverted[i - 1])
    return inverted


# inverse data transform on forecasts
def inverse_transform(series, forecasts, scaler, n_test):
    """Undo scaling and differencing to return forecasts in original units."""
    inverted = list()
    for i in range(len(forecasts)):
        # create array from forecast
        forecast = array(forecasts[i])
        forecast = forecast.reshape(1, len(forecast))
        # invert scaling
        inv_scale = scaler.inverse_transform(forecast)
        inv_scale = inv_scale[0, :]
        # invert differencing: anchor on the last real observation before
        # this forecast's origin
        index = len(series) - n_test + i - 1
        last_ob = series.values[index]
        inv_diff = inverse_difference(last_ob, inv_scale)
        # store
        inverted.append(inv_diff)
    return inverted


# evaluate the RMSE for each forecast time step
def evaluate_forecasts(test, forecasts, n_lag, n_seq):
    """Print RMSE of the forecasts per horizon step (t+1 ... t+n_seq)."""
    for i in range(n_seq):
        actual = [row[i] for row in test]
        predicted = [forecast[i] for forecast in forecasts]
        rmse = sqrt(mean_squared_error(actual, predicted))
        print('t+%d RMSE: %f' % ((i + 1), rmse))


# plot the forecasts in the context of the original dataset
def plot_forecasts(series, forecasts, n_test):
    """Plot the full series in blue and each multi-step forecast in red."""
    # plot the entire dataset in blue
    pyplot.plot(series.values)
    # plot the forecasts in red
    for i in range(len(forecasts)):
        off_s = len(series) - n_test + i - 1
        off_e = off_s + len(forecasts[i]) + 1
        xaxis = [x for x in range(off_s, off_e)]
        # prepend the anchoring observation so the red line connects visually
        yaxis = [series.values[off_s]] + forecasts[i]
        pyplot.plot(xaxis, yaxis, color='red')
    # show the plot
    pyplot.show()


# load dataset
# NOTE: read_csv(..., squeeze=True) was removed in pandas 2.0; use
# .squeeze("columns") to obtain a Series from the single-column frame.
series = read_csv('shampoo-sales.csv', header=0, parse_dates=[0],
                  index_col=0, date_parser=parser).squeeze("columns")
# configure
n_lag = 1
n_seq = 3
n_test = 10
n_epochs = 1500
n_batch = 1
n_neurons = 1
# prepare data
scaler, train, test = prepare_data(series, n_test, n_lag, n_seq)
# fit model
model = fit_lstm(train, n_lag, n_seq, n_batch, n_epochs, n_neurons)
# make forecasts
forecasts = make_forecasts(model, n_batch, train, test, n_lag, n_seq)
# inverse transform forecasts and test
# n_test+2 offsets for the rows lost to differencing and supervised framing
forecasts = inverse_transform(series, forecasts, scaler, n_test + 2)
actual = [row[n_lag:] for row in test]
actual = inverse_transform(series, actual, scaler, n_test + 2)
# evaluate forecasts
evaluate_forecasts(actual, forecasts, n_lag, n_seq)
# plot forecasts
plot_forecasts(series, forecasts, n_test + 2)





阅读全文
0 0
原创粉丝点击