trecognizer.py
import cv2
import numpy as np
import os
import sys
import tensorflow as tf
from sklearn.model_selection import train_test_split
EPOCHS = 10
IMG_WIDTH = 30
IMG_HEIGHT = 30
NUM_CATEGORIES = 43
TEST_SIZE = 0.4
def main():
# Check for command-line arguments
if len(sys.argv) not in [2, 3]:
sys.exit("Usage: python traffic.py data_directory [model.h5]")
# Get image arrays and labels for all image files
images, labels = load_data(sys.argv[1])
    # One-hot encode the labels and split data into training and testing sets
    labels = tf.keras.utils.to_categorical(labels)
x_train, x_test, y_train, y_test = train_test_split(
np.array(images), np.array(labels), test_size=TEST_SIZE
)
# Get a compiled neural network
model = get_model()
# Fit model on training data
model.fit(x_train, y_train, epochs=EPOCHS)
# Evaluate neural network performance
model.evaluate(x_test, y_test, verbose=2)
# Save model to file
if len(sys.argv) == 3:
filename = sys.argv[2]
model.save(filename)
print(f"Model saved to {filename}.")
def load_data(data_dir):
"""
Load image data from directory `data_dir`.
Assume `data_dir` has one directory named after each category, numbered
0 through NUM_CATEGORIES - 1. Inside each category directory will be some
number of image files.
Return tuple `(images, labels)`. `images` should be a list of all
of the images in the data directory, where each image is formatted as a
numpy ndarray with dimensions IMG_WIDTH x IMG_HEIGHT x 3. `labels` should
be a list of integer labels, representing the categories for each of the
corresponding `images`.
"""
images = list()
labels = list()
for folder in os.listdir(data_dir):
folder_path = os.path.join(data_dir, folder)
if os.path.isdir(folder_path):
print(f"Loading files from {folder_path}...")
for file in os.listdir(folder_path):
try:
image = cv2.imread(os.path.join(folder_path, file), cv2.IMREAD_COLOR)
image = cv2.resize(image, (IMG_WIDTH, IMG_HEIGHT), interpolation=cv2.INTER_AREA)
images.append(image)
labels.append(int(folder))
except Exception as e:
print(f"There is a problem with file: {file}")
print(str(e))
return images, labels
def get_model():
"""
Returns a compiled convolutional neural network model. Assume that the
`input_shape` of the first layer is `(IMG_WIDTH, IMG_HEIGHT, 3)`.
The output layer should have `NUM_CATEGORIES` units, one for each category.
"""
    # Create a convolutional neural network (CNN) model
model = tf.keras.models.Sequential([
        # Convolutional layer learning 32 filters using a 3x3 kernel
tf.keras.layers.Conv2D(
32, (3, 3), activation="relu", input_shape=(IMG_WIDTH, IMG_HEIGHT, 3)
),
# Max-pooling layer, using 3x3 pool size
tf.keras.layers.MaxPooling2D(pool_size=(3, 3)),
# Flatten units
tf.keras.layers.Flatten(),
tf.keras.layers.Dropout(0.2),
        # Multiple hidden layers with dropout
tf.keras.layers.Dense(NUM_CATEGORIES * 32, activation="relu"),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.Dense(NUM_CATEGORIES * 16, activation="relu"),
tf.keras.layers.Dense(NUM_CATEGORIES * 8, activation="relu"),
# Output layer of the network
tf.keras.layers.Dense(NUM_CATEGORIES, activation="softmax")
])
    # Compile the neural network
model.compile(
optimizer="adam",
loss="categorical_crossentropy",
metrics=["accuracy"]
)
return model
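# A minimal inference sketch (not part of the script itself): assuming the model
# was saved earlier via `python traffic.py data_directory model.h5`, it could be
# reloaded and run on a single image; "model.h5" and "sign.ppm" below are
# hypothetical file names.
#
#   model = tf.keras.models.load_model("model.h5")
#   image = cv2.imread("sign.ppm", cv2.IMREAD_COLOR)
#   image = cv2.resize(image, (IMG_WIDTH, IMG_HEIGHT), interpolation=cv2.INTER_AREA)
#   prediction = model.predict(np.expand_dims(image, axis=0))
#   print(f"Predicted category: {prediction.argmax()}")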
if __name__ == "__main__":
main()