Recurrent Neural Networks Tutorial

Source: Internet | Editor: Programmer Blog Network | Time: 2024/05/17 20:13

http://www.tuicool.com/articles/nQBjUj


# -*- coding: utf-8 -*-
"""
Created on Sat Jun 11 15:30:33 2016

@author: Shemmy
"""
from keras.models import Sequential  
from keras.layers.core import TimeDistributedDense, Dense, Activation, Dropout  
from keras.layers.recurrent import GRU, LSTM
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from keras.optimizers import RMSprop
import numpy as np

def _load_data(data, steps = 4):
    docX, docY = [], []
    for i in range(0, data.shape[0]-steps):
        docX.append(data[i:i+steps,:])
        docY.append(data[i+steps,:])
    return np.array(docX), np.array(docY)

def train_test_split(data, test_size=0.15):
    """Window `data` via _load_data and split it chronologically.

    Parameters
    ----------
    data : 2-D np.ndarray of shape (timesteps, features).
    test_size : float, fraction of windowed samples held out for testing.

    Returns
    -------
    ((X_train, Y_train), (X_test, Y_test)) — the first (1 - test_size)
    share of samples for training, the remainder for testing. No shuffling,
    so temporal order is preserved.
    """
    X, Y = _load_data(data)
    # int(...) is the fix: round() yields a float (always in Python 2, and
    # for the general contract), and NumPy refuses non-integer slice indices.
    ntrn = int(round(X.shape[0] * (1 - test_size)))
    X_train, Y_train = X[0:ntrn], Y[0:ntrn]
    X_test, Y_test = X[ntrn:], Y[ntrn:]
    return (X_train, Y_train), (X_test, Y_test)

np.random.seed(0)  # For reproducibility
# Build a toy dataset: start from the 5x1 column [0..4] and double it ten
# times by appending the array to itself -> 5 * 2**10 = 5120 rows.
# NOTE(review): Python 2 only (`xrange`, `print` statements below).
data = np.arange(5).reshape((5,1))
for i in xrange(10):
    data = np.append(data, data, axis=0)
# Reverse the row order, then window into 4-step sequences and split 85/15
# chronologically (see train_test_split above).
(X_train, y_train), (X_test, y_test) = train_test_split(np.flipud(data))  # retrieve data
print "Data loaded."

# One feature in and out per timestep; a small GRU hidden state.
in_out_neurons = 1
hidden_neurons = 10

# Sequence-to-one regressor: GRU -> Dropout -> Dense -> linear output,
# predicting the next value from the previous 4. Uses the legacy Keras
# API (`input_dim`, `nb_epoch`) — will not run on modern Keras/TF.
model = Sequential()
model.add(GRU(hidden_neurons, input_dim=in_out_neurons, return_sequences=False))
model.add(Dropout(0.2))
model.add(Dense(in_out_neurons))
model.add(Activation("linear"))
model.compile(loss="mean_squared_error", optimizer="rmsprop")
print "Model compiled."

# Train with 10% of the training set held out for validation, then report
# RMSE on the test set (per-output RMSE averaged across outputs).
model.fit(X_train, y_train, batch_size=10, nb_epoch=10, validation_split=0.1)
predicted = model.predict(X_test)
print np.sqrt(((predicted - y_test) ** 2).mean(axis=0)).mean()
print predicted


https://github.com/fchollet/keras/issues/1029

0 0
(Scraped page footer: "original post / follower clicks")