tensorflow CNN_LSTM dimension error


I am using a CNN+LSTM model for prediction and get this error:
ValueError: Error when checking target: expected time_distributed_2 to have 3 dimensions, but got array with shape (5692, 1).
How can I fix this?

# -*- coding: utf-8 -*-
"""
Created on Mon Jul 22 06:17:16 2019

@author: feather
"""

import pandas as pd

foxconndf= pd.read_csv('SPY.csv', index_col=0 )
foxconndf.dropna(how='any',inplace=True)
foxconndf = foxconndf.drop(["Adj Close"], axis=1)

from sklearn import preprocessing

def normalize(df):
    newdf= df.copy()
    min_max_scaler = preprocessing.MinMaxScaler()
    
    newdf['Open'] = min_max_scaler.fit_transform(df.Open.values.reshape(-1,1))
    newdf['Low'] = min_max_scaler.fit_transform(df.Low.values.reshape(-1,1))
    newdf['High'] = min_max_scaler.fit_transform(df.High.values.reshape(-1,1))
    newdf['Volume'] = min_max_scaler.fit_transform(df.Volume.values.reshape(-1,1))
    newdf['Close'] = min_max_scaler.fit_transform(df.Close.values.reshape(-1,1))
    
    return newdf

foxconndf_norm= normalize(foxconndf)

import numpy as np

def data_helper(df, time_frame):
    
    # Data dimensions: open, close, high, low and volume = 5 features
    number_features = len(df.columns)

    # Convert the DataFrame to a NumPy array
    datavalue = df.values

    result = []
    # If the observation window (time_frame) is 20 days, one extra day is needed as the answer for validation
    for index in range( len(datavalue) - (time_frame+1) ): # slide from index 0 of datavalue to the last possible window start
        result.append(datavalue[index: index + (time_frame+1) ]) # take time_frame+1 consecutive candlesticks as one instance
    
    result = np.array(result)
    number_train = round(0.9 * result.shape[0]) # use the first 90% of instances as training data
    
    x_train = result[:int(number_train), :-1] # features: every row of each training window except the last one
    y_train = result[:int(number_train), -1][:,-1] # target: the last value (close price) of the last row of each training window
    
    # Test data
    x_test = result[int(number_train):, :-1]
    y_test = result[int(number_train):, -1][:,-1]
    
    # Reshape into (samples, time steps, features)
    x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], number_features))
    x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], number_features))  

    return [x_train, y_train, x_test, y_test]

# Predict the stock price using a 20-day window
X_train, y_train, X_test, y_test = data_helper(foxconndf_norm, 20)
print('X_train:', np.array(X_train).shape) #X_train: (5692, 20, 5)
print('y_train:', np.array(y_train).shape) #y_train: (5692,)
print('X_test:', np.array(X_test).shape)   #X_test: (633, 20, 5)
print('y_test:', np.array(y_test).shape)   #y_test: (633,)
#print('x:', np.array(X).shape) #x: (113813, 20, 1)

import keras
import keras.backend as K
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten, LSTM, RepeatVector, TimeDistributed
from keras.layers.convolutional import Conv1D, MaxPooling1D

def r2_keras(y_true, y_pred):
    """Coefficient of Determination 
    """
    SS_res =  K.sum(K.square( y_true - y_pred ))
    SS_tot = K.sum(K.square( y_true - K.mean(y_true) ) )
    return ( 1 - SS_res/(SS_tot + K.epsilon()) )


def build_model(input_length, input_dim):    
    # CNN+LSTM
    model = Sequential()
    model.add(Conv1D(filters=64, kernel_size=3, activation='relu', input_shape=(input_length, input_dim)))
    model.add(Conv1D(filters=64, kernel_size=3, activation='relu'))
    model.add(MaxPooling1D(pool_size=2))
    model.add(Flatten())
    n_outputs=1
    model.add(RepeatVector(n_outputs))
    model.add(LSTM(200, activation='relu', return_sequences=True))
    model.add(TimeDistributed(Dense(100, activation='relu')))
    model.add(TimeDistributed(Dense(1)))
    #model.compile(loss='mse', optimizer='adam')  
    model.compile(loss='mean_squared_error', optimizer='rmsprop',metrics=['mae',r2_keras])
    model.summary()
    return model

# 20 days, 5 features
model = build_model( 20, 5 )



# 128 instances per batch, 50 epochs in total
#model.fit( X_train, y_train, batch_size=128, epochs=1, validation_split=0.1, verbose=1)

model_path='binary_model.h5'
history=model.fit( X_train, y_train, epochs=2800, batch_size=10, validation_split=0.1, verbose=2,
          callbacks = [keras.callbacks.EarlyStopping(monitor='val_loss', min_delta=0, patience=10, verbose=0, mode='min'),
                       keras.callbacks.ModelCheckpoint(model_path,monitor='val_loss', save_best_only=True, mode='min', verbose=0)]
          )


def denormalize(df, norm_value):
    original_value = df['Close'].values.reshape(-1,1)
    norm_value = norm_value.reshape(-1,1)
    
    min_max_scaler = preprocessing.MinMaxScaler()
    min_max_scaler.fit_transform(original_value)
    denorm_value = min_max_scaler.inverse_transform(norm_value)
   
    return denorm_value

# Use the trained LSTM model to make predictions on the test set
pred = model.predict(X_test)

# Denormalize the predictions and the ground-truth answers back to the original value range
denorm_pred = denormalize(foxconndf, pred)
denorm_ytest = denormalize(foxconndf, y_test)

import matplotlib.pyplot as plt
#%matplotlib inline  

plt.plot(denorm_pred,color='red', label='Prediction')
plt.plot(denorm_ytest,color='blue', label='Answer')
plt.legend(loc='best')
plt.show()


# list all data in history
print(history.history.keys())

# summarize history for R^2
fig_acc = plt.figure(figsize=(5, 5))
plt.plot(history.history['r2_keras'])
plt.plot(history.history['val_r2_keras'])
plt.title('model r^2')
plt.ylabel('R^2')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()

# summarize history for MAE
fig_acc = plt.figure(figsize=(5, 5))
plt.plot(history.history['mean_absolute_error'])
plt.plot(history.history['val_mean_absolute_error'])
plt.title('model MAE')
plt.ylabel('MAE')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()

# summarize history for Loss
fig_acc = plt.figure(figsize=(5, 5))
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()

The content is as follows:
https://drive.google.com/file/d/1SGolkmQn50GjKsckT3C7TWqVjajvkDSd/view?usp=sharing

1 answer

I code so I am
iT邦高手 1 級 ‧ 2019-08-02 09:32:40

The last layer expects 3-dimensional data, but you are only feeding it 2-dimensional data. Possible causes:

  1. The initial input is wrong
  2. An intermediate layer transformation is wrong

Debugging approach: cut the model down to one or two layers and check whether the error still occurs, then add the layers back one at a time; that will show you where the problem is.
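
In this particular model the mismatch is on the output side: RepeatVector(1) followed by TimeDistributed(Dense(1)) produces a 3-dimensional output of shape (batch, 1, 1), while y_train only has shape (5692,). Below is a minimal sketch of two possible fixes, assuming the data shapes printed above (X: (samples, 20, 5), y: (samples,)); the helper name build_model_single_output is only an illustration and is not part of the original post.

from keras.models import Sequential
from keras.layers import Dense, Flatten, LSTM, RepeatVector
from keras.layers.convolutional import Conv1D, MaxPooling1D

# Fix 1: keep the model as-is and reshape the targets to 3-D so they match
# the (batch, 1, 1) output of TimeDistributed(Dense(1)):
#   y_train_3d = y_train.reshape(-1, 1, 1)
#   model.fit(X_train, y_train_3d, ...)

# Fix 2 (hypothetical variant of build_model, for illustration): end the
# model with a plain Dense(1) so it outputs one value per sample; then
# y_train with shape (samples,) can be used directly.
def build_model_single_output(input_length=20, input_dim=5):
    model = Sequential()
    model.add(Conv1D(filters=64, kernel_size=3, activation='relu',
                     input_shape=(input_length, input_dim)))
    model.add(Conv1D(filters=64, kernel_size=3, activation='relu'))
    model.add(MaxPooling1D(pool_size=2))
    model.add(Flatten())
    model.add(RepeatVector(1))                # length-1 sequence for the LSTM
    model.add(LSTM(200, activation='relu'))   # return_sequences=False -> 2-D output
    model.add(Dense(100, activation='relu'))
    model.add(Dense(1))                       # output shape: (batch, 1)
    model.compile(loss='mean_squared_error', optimizer='rmsprop')
    model.summary()                           # check each layer's output shape here
    return model

Either way, the model.summary() output shows the output shape of every layer, which is the quickest way to see where the 2-D/3-D mismatch appears while adding layers back one at a time.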
