""" Steering angle prediction model for SDCND Behavariol cloning project
"""
import os
import argparse
import json
import csv
import pickle
import cv2
import numpy as np
import math
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten, Lambda, ELU
from keras.layers.convolutional import Convolution2D
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split
# Fix error with Keras and TensorFlow
import tensorflow as tf
tf.python.control_flow_ops = tf
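# (Note: this assignment was a commonly used workaround for Keras 1.x running on
#  early TensorFlow releases, where Keras still referenced tf.python.control_flow_ops.)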

def get_model():
    """ Get the model; this is a slight modification of the comma.ai model.
    """
    # Input image size is (45, 160) with 3 color channels
    ch, row, col = 3, 45, 160
    model = Sequential()
    # Normalize pixel values to the range [-0.5, 0.5]
    model.add(Lambda(lambda x: x/255. - 0.5,
                     input_shape=(row, col, ch),
                     output_shape=(row, col, ch)))
    # model.add(Convolution2D(3, 1, 1, border_mode="same"))
    # model.add(ELU())
    # Conv layer 1: 16 filters of size (8, 8) with strides (4, 4)
    model.add(Convolution2D(16, 8, 8, subsample=(4, 4), border_mode="same"))
    model.add(ELU())
    # Conv layer 2: 32 filters of size (5, 5) with strides (2, 2)
    model.add(Convolution2D(32, 5, 5, subsample=(2, 2), border_mode="same"))
    model.add(ELU())
    # Conv layer 3: 64 filters of size (5, 5) with strides (2, 2)
    model.add(Convolution2D(64, 5, 5, subsample=(2, 2), border_mode="same"))
    model.add(Flatten())
    model.add(Dropout(.5))
    model.add(ELU())
    model.add(Dense(512))
    model.add(Dropout(.5))
    model.add(ELU())
    model.add(Dense(1))
    return model
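# Hypothetical sanity-check sketch (not part of the training flow): build the
# network and confirm it maps a (45, 160, 3) frame to a single steering value.
#   m = get_model()
#   print(m.output_shape)  # expected: (None, 1) with the Keras 1.x API used here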

def batch_data_generator(x, y, batch_size):
    """ Generates (inputs, outputs) for training in batches
    """
    total = (len(y) // batch_size) * batch_size
    while True:
        # Choose a random start index for the current batch
        start = np.random.randint(0, total - batch_size)
        end = min(start + batch_size, total)
        x_data = np.empty((0, 45, 160, 3))
        y_data = np.array([])
        for i in np.arange(start, end):
            image = cv2.imread('./data/' + x[i][0])
            # Crop the image vertically to remove unnecessary portions such as sky
            image = image[40:130]
            # Resize image to half, i.e. (45, 160, 3)
            image = cv2.resize(image, (160, 45))
            # Generate extra flipped image
            flipped_image = cv2.flip(image, 1)
            x_data = np.append(x_data, np.array([image]), axis=0)
            y_data = np.append(y_data, y[i])
            x_data = np.append(x_data, np.array([flipped_image]), axis=0)
            # Flip the steering angle too for the flipped image
            y_data = np.append(y_data, -1 * y[i])
        yield x_data, y_data
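# Hypothetical usage sketch: each yielded batch holds the original frames plus
# their horizontal flips, so it contains 2 * batch_size samples.
#   gen = batch_data_generator(x_train, y_train, 16)
#   x_batch, y_batch = next(gen)  # x_batch.shape -> (32, 45, 160, 3)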

def train():
    """ Train model using train data and save weights and architecture
    """
    with open('data.p', 'rb') as data_file:
        data = pickle.load(data_file)
    x_train, y_train, x_val, y_val = data['x_train'], data['y_train'], data['x_val'], data['y_val']
    print("Train data : (X, Y) ", x_train.shape, y_train.shape)
    print("Validation data : (X, Y) ", x_val.shape, y_val.shape)
    nb_epoch = 1
    batch_size = 16
    model = get_model()
    adam = Adam(lr=1e-12)
    model.compile(optimizer=adam, loss="mse")
    # model.load_weights('model.h5')
    model.summary()
    # checkpointer = ModelCheckpoint(filepath="model_best.h5", verbose=1, save_best_only=True)
    history = model.fit_generator(
        batch_data_generator(x_train, y_train, batch_size),
        samples_per_epoch=((len(y_train) // batch_size) * batch_size) * 2,
        nb_epoch=nb_epoch,
        verbose=1,
        validation_data=batch_data_generator(x_val, y_val, batch_size),
        nb_val_samples=((len(y_val) // batch_size) * batch_size) * 2
    )
    # Save weights
    model.save_weights("./model.h5", True)
    # Save model architecture
    with open('./model.json', 'w') as outfile:
        json.dump(model.to_json(), outfile)
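    # Sketch (assuming the Keras 1.x API used above) of how the saved files could
    # be reloaded elsewhere, e.g. in a driving script:
    #   from keras.models import model_from_json
    #   with open('./model.json') as f:
    #       reloaded = model_from_json(json.load(f))
    #   reloaded.load_weights('./model.h5')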

def build_data():
    """ Construct training and validation data from the driving log file
    """
    # Get left, center and right camera image paths and the corresponding steering angles
    with open('./data/driving_log.csv') as log_file:
        reader = csv.DictReader(log_file)
        x, y = [], []
        for row in reader:
            speed = float(row['speed'])
            steering_angle = float(row['steering'])
            # Offset the steering angle by +/-0.25 for the left/right camera images
            x.append(row['left'].strip())
            y.append(steering_angle + 0.25)
            x.append(row['center'].strip())
            y.append(steering_angle)
            x.append(row['right'].strip())
            y.append(steering_angle - 0.25)
    x = np.vstack(x)
    y = np.vstack(y)
    print("Total data : (X, Y) ", x.shape, y.shape)
    # Split into train and validation data
    x_train, x_val, y_train, y_val = train_test_split(x, y, test_size=0.15, random_state=3234)
    data = {
        'x_train': x_train,
        'y_train': y_train,
        'x_val': x_val,
        'y_val': y_val
    }
    # Save as a pickle file
    with open('data.p', "wb") as data_file:
        pickle.dump(data, data_file)
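    # Sketch of how the saved split could be inspected later (same keys as above):
    #   with open('data.p', 'rb') as f:
    #       d = pickle.load(f)
    #   print(d['x_train'].shape, d['x_val'].shape)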

if __name__ == '__main__':
    # Build train and validation data
    # build_data()
    # Start training
    print("Training started")
    train()