-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathclasstrain1.py
196 lines (189 loc) · 8.5 KB
/
classtrain1.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 22 16:33:01 2018
@author: Wei Yuxuan
@Course Project of LOAD PREDICT: Short-Term Load Forecasting (STLF)
based on classtrain.py
Revision: remove "today_t-1" data source from training and prediction
DATA
last_week_t, last_week_t-1, yesterday_t, yesterday_t-1
and is_weekend_one_hot (1:weekday, 0:weekend) for yesterday, today
"""
import numpy as np
from keras import Sequential
from keras.layers import Dense
from keras import optimizers
import time
import os
class ANN96_1:
    """Short-Term Load Forecasting: one shared ANN (re)trained per 15-min interval.

    The day is split into 96 intervals (15 min each). For each interval j the
    dense network is trained and its weights saved to ``new_weights/<j>.h5``.

    Features per sample (``xcols`` = 6):
        0: last week, interval j          1: last week, interval j-1
        2: yesterday, interval j          3: yesterday, interval j-1
        4: weekday flag for yesterday     5: weekday flag for today
    For j == 0 the "interval j-1" values come from interval 95 of the day
    before. Columns listed in ``norm1`` are min-max normalized with the
    global load min/max; the two weekday flags stay 0/1.

    Day-of-week convention used throughout: day index d is a weekend iff
    d % 7 in {3, 4}; d % 7 in {5, 6} is Monday/Tuesday (days to predict).
    """

    # global constants
    xcols = 6             # last_week_t, last_week_t-1, yesterday_t, yesterday_t-1 + 2 flags
    norm1 = [0, 1, 2, 3]  # feature columns that receive load normalization

    def __init__(self):
        """Build the 2-layer dense model and load the historical load matrix."""
        self.model = Sequential()
        self.model.add(Dense(8, input_shape=(ANN96_1.xcols,), activation='relu'))
        self.model.add(Dense(1, activation=None))
        self.model.summary()
        # loss = mean squared error
        sgd = optimizers.SGD(lr=0.05)
        self.model.compile(optimizer=sgd, loss='mse', metrics=['mse'])
        # historical load: one row per day, 96 interval columns
        # (assumed shape (ndays, 96); zero entries mean missing data — TODO confirm)
        self.load = np.load('load.npy')
        self.loadmax = self.load.max()    # max load ever
        load0 = self.load[self.load > 0]  # drop the 0 (missing) entries
        self.loadmin = load0.min()        # min load ever (not 0)
        self.nepoch = 100                 # default training epochs

    def setEpochs(self, Nepoch):
        """Set the number of training epochs used by train()."""
        self.nepoch = Nepoch

    @staticmethod
    def _is_weekday(day):
        """Return 1 if day index is a weekday, 0 for a weekend (day % 7 in {3, 4}).

        Bug fix: the original training loop tested ``i % 5 == 4`` for one of
        the weekend cases, inconsistent with the ``% 7`` test used everywhere
        else in the class (including predict()).
        """
        return 0 if day % 7 in (3, 4) else 1

    def _feature_row(self, i, j):
        """Build the 1 x xcols feature row for day ``i``, interval ``j``.

        No zero/missing filtering is done here; training callers must check
        validity themselves. dtype is the builtin ``float`` — the ``np.float``
        alias the original used was removed in NumPy 1.24.
        """
        row = np.zeros((1, ANN96_1.xcols), dtype=float)
        if j == 0:
            # interval 0 borrows the "t-1" values from interval 95 of the prior day
            row[0, 0] = self.load[i - 7, 0]
            row[0, 1] = self.load[i - 8, 95]
            row[0, 2] = self.load[i - 1, 0]
            row[0, 3] = self.load[i - 2, 95]
        else:
            row[0, 0] = self.load[i - 7, j]
            row[0, 1] = self.load[i - 7, j - 1]
            row[0, 2] = self.load[i - 1, j]
            row[0, 3] = self.load[i - 1, j - 1]
        row[0, 4] = self._is_weekday(i - 1)  # yesterday
        row[0, 5] = self._is_weekday(i)      # today
        return row

    def train(self, n):
        """Train the 96 interval models on days 0..n-2 and save their weights.

        Samples containing any zero (missing) load value are skipped.
        Sample weights decay with age (0.998 ** distance); Monday/Tuesday
        samples (the days to be predicted) are doubled and weekend samples
        are zeroed out. The interval-0 model gets 10x epochs, as before.
        """
        start_time = time.time()
        os.makedirs('new_weights', exist_ok=True)  # save_weights needs the dir
        span = self.loadmax - self.loadmin
        for j in range(96):
            first = 8 if j == 0 else 7  # j == 0 reaches back to day i-8
            rows = []    # valid feature rows
            y = []       # train labels (actual load)
            index = []   # original day index, for day-of-week sample weighting
            for i in range(first, n - 1):
                row = self._feature_row(i, j)
                # keep only fully observed samples (no zero placeholders)
                if self.load[i, j] != 0 and np.all(row[0, :4] != 0):
                    rows.append(row)
                    y.append(self.load[i, j])
                    index.append(i)
            if rows:
                # single concatenate instead of the original per-row
                # concatenation (which was O(n^2))
                x = np.concatenate(rows, axis=0)
            else:
                x = np.zeros((1, ANN96_1.xcols), dtype=float)
            # min-max normalize the load features and labels
            for k in ANN96_1.norm1:
                x[:, k] = (x[:, k] - self.loadmin) / span
            y = (np.asarray(y) - self.loadmin) / span
            # sample weight:
            # 1. the closer the date, the larger the weight in the loss
            # 2. Mon/Tue weight amplified, weekend samples dropped
            nsample = x.shape[0]
            sw = np.zeros((nsample,))
            for s, day in enumerate(index):
                sw[s] = 0.998 ** (nsample - s)
                if day % 7 in (5, 6):    # Mon or Tue
                    sw[s] *= 2
                elif day % 7 in (3, 4):  # weekend: excluded from training
                    sw[s] = 0
            epochs = 10 * self.nepoch if j == 0 else self.nepoch
            history = self.model.fit(x, y, batch_size=32, epochs=epochs,
                                     sample_weight=sw,
                                     verbose=0 if j == 0 else 1)
            # save this interval's weights
            self.model.save_weights(os.path.join('new_weights', str(j) + '.h5'))
            # report the final training mse for this interval
            mse = history.history['mean_squared_error']
            print(str(j) + '/96: ' + str(mse[-1]))
        end_time = time.time()
        use_time = np.ceil((end_time - start_time) / 60)
        print(str(use_time) + ' min used in training process')

    def predict(self, n):
        """Predict the full 96-interval load profile for day ``n``.

        Loads the per-interval weights saved by train() and de-normalizes
        the network output back to load units. Returns a (96,) array.
        """
        neural = np.zeros((96,))
        span = self.loadmax - self.loadmin
        for j in range(96):
            xp = self._feature_row(n, j)
            for k in ANN96_1.norm1:
                xp[0, k] = (xp[0, k] - self.loadmin) / span
            self.model.load_weights(os.path.join('new_weights', str(j) + '.h5'))
            prediction = self.model.predict(xp)
            neural[j] = span * prediction + self.loadmin
        return neural