Why does my LSTM model perform so poorly on very smooth data?

(Image 1: plot of the data and model predictions — not reproduced here; see the original post.)

(Image 2: plot of the data and model predictions — not reproduced here; see the original post.)

Why is my model so bad at predicting what looks like regular data?

'''from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers.recurrent import LSTM
from keras.callbacks import ModelCheckpoint, EarlyStopping
# Dropout rate — currently unused (no Dropout layer is added below);
# kept so it can be wired into the architecture during experimentation.
d = 0.2

# Stop training once validation loss fails to improve for 5 consecutive epochs.
early_stop = EarlyStopping(monitor='val_loss', patience=5, mode='auto')

model = Sequential()  # build a layered (sequential) model
# NOTE(review): 'tanh' is the conventional/default LSTM activation; 'relu'
# inside an LSTM can yield unstable gradients on smooth series and may be
# part of the poor fit being asked about — worth A/B testing.
model.add(LSTM(16, activation='relu',
               input_shape=(X_train.shape[1], X_train.shape[2])))
model.add(Dense(1, kernel_initializer='random_uniform'))
# BUG FIX: the compile step was commented out, so the model could never be
# trained. Compile with MSE — the appropriate loss for this regression task
# (the old commented line's 'accuracy' metric is meaningless for continuous
# targets and is dropped).
model.compile(loss='mse', optimizer='rmsprop')
print(model.summary())

'''



Solution 1:[1]

(Image 1: uploaded screenshot — see the original post.)

(Image 2: uploaded screenshot — see the original post.)

I am sorry — this is my first time using this website, and I do not know whether I have uploaded the pictures correctly.

'''

# (mojibake comment — original text, likely Chinese, is unrecoverable)
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
%matplotlib inline
from sklearn.preprocessing import MinMaxScaler
import time
from sklearn.model_selection import train_test_split
from keras.layers import Input, Dense, LSTM
from keras.models import Model
from keras.layers import *
from keras.models import *
from keras.optimizers import *
# --- Inspect the raw frame --------------------------------------------------
# NOTE(review): df is sliced NumPy-style below (df[:n, 14:]), so it is
# presumably an ndarray (or a DataFrame's .values) defined earlier — confirm.
print('df',df,df.shape,'\n')

# Reproducibility: seed TF, and reuse the same seed for the sklearn split.
tf.random.set_seed(42)
random_seed = 42

TIME_STEPS = 16        # length of each sliding input window fed to the LSTM
train_data_rate = 0.8  # fraction of rows used for training
valid_data_rate = 0.2  # fraction of training windows held out for validation
predict_day = 0        # forecast horizon offset (0 = predict the next step)

data_len = df.shape[0]
train_len = int(data_len * train_data_rate)
test_len  = len(df)-train_len
print(data_len,train_len,test_len,'\n')

shuffle_train_data = True   # whether train_test_split shuffles the windows

# Feature block: all columns from index 14 onward.
All_Train = df[:train_len,14:]
print('All_Train','\n',All_Train,All_Train.shape,'\n')

# Scale features to [0, 1]; the scaler is fitted on the TRAINING rows only
# and reused (transform only) on the test rows further down.
scaler = MinMaxScaler(feature_range=(0,1))
All_Train_Scaler = scaler.fit_transform(All_Train)
print('All_Train_Scaler','\n',All_Train_Scaler,All_Train_Scaler.shape,'\n')
print(scaler.data_max_)

# BUG FIX: this line previously read `shuffle_train_data = Tru`, which raises
# NameError at runtime. It is also redundant with the assignment above.
shuffle_train_data = True

# Target series. NOTE(review): SF_RE_T is defined elsewhere in the notebook;
# presumably the (rescaled) target matrix with the target in column 0 — confirm.
SF_train = SF_RE_T[:train_len:,0].reshape(-1,1)
print('SF_train','\n',SF_train,SF_train.shape,'\n')

MF_train = All_Train_Scaler
print('MF_train','\n',MF_train,MF_train.shape,'\n')

# Build sliding windows: X[i] = features[i : i+TIME_STEPS],
# y[i] = target at index i + TIME_STEPS (horizon predict_day is 0 here).
MF_train_3d,SF_train_3d = [],[]
for i in range(train_len- TIME_STEPS - predict_day):
    a = MF_train[i:(i+TIME_STEPS),:]
    MF_train_3d.append(a)
    SF_train_3d.append(SF_train[i + TIME_STEPS, :])
train_x = np.array(MF_train_3d)
train_y = np.array(SF_train_3d)
print('train_x','\n','\n',train_x.shape,'\n')
print('train_y','\n',train_y,train_y.shape)

# Split the windows into training and validation sets.
train_x, valid_x, train_y, valid_y = train_test_split(
    train_x, train_y, test_size=valid_data_rate,
    random_state=random_seed, shuffle=shuffle_train_data)

# --- Test set ---------------------------------------------------------------
All_Test = df[train_len:,14:]
print('All_Test','\n',All_Test,'\n',All_Test.shape,'\n')
# BUG FIX: was scaler.fit_transform(All_Test), which RE-FITS the scaler on the
# test rows — data leakage, and train/test end up on different scales. The
# scaler fitted on the training data must only transform here.
All_Test_Scaler = scaler.transform(All_Test)
print('All_Test_Scaler','\n',All_Test_Scaler,'\n',All_Test_Scaler.shape,'\n')

MF_test = All_Test_Scaler
print('MF_test','\n',MF_test,MF_test.shape,'\n')

MF_test_3d = []
for i in range(test_len- TIME_STEPS - predict_day):
    a = MF_test[i:(i+TIME_STEPS),:]
    MF_test_3d.append(a)
test_x = np.array(MF_test_3d)
# Targets aligned with the test windows: both have
# test_len - TIME_STEPS - predict_day entries.
test_y = SF_RE_T[train_len+TIME_STEPS + predict_day:,0]
print('test_x','\n',test_x.shape,'\n')
print('test_y','\n',test_y,test_y.shape)

# Aliases used by the model-building/training code elsewhere.
X_train = train_x
y_train = train_y
X_test = test_x
y_test = test_y
print(X_train.shape,y_train.shape,valid_x.shape,valid_y.shape,X_test.shape,y_test.shape)

'''

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source
Solution 1