net_predictor.py
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import Adam
from arch_generator import arch_generator
from random import shuffle
import json
import numpy as np
class net_predictor:
    input_dim = 0
    output_dim = 100  # the resolution of accuracy is 100
    explore_depth = 0
    env_model = None
    ag = None

    def __init__(self):
        self.input_dim = 56  # flattened adjacency matrix padded to 49 values + 7 node codes
        self.env_model = self.build_env_model()

    def build_env_model(self):
        # MLP that maps an encoded architecture to a predicted accuracy in [0, 1].
        model = Sequential()
        model.add(Dense(512, input_dim=self.input_dim, activation='relu', use_bias=True,
                        kernel_initializer='RandomUniform', bias_initializer='zeros'))
        model.add(Dense(2048, activation='relu', use_bias=True,
                        kernel_initializer='RandomUniform', bias_initializer='zeros'))
        model.add(Dense(2048, activation='relu', use_bias=True,
                        kernel_initializer='RandomUniform', bias_initializer='zeros'))
        model.add(Dense(512, activation='relu', use_bias=True,
                        kernel_initializer='glorot_uniform', bias_initializer='zeros'))
        model.add(Dense(1, activation='sigmoid', use_bias=True))
        model.compile(loss='mean_squared_error',
                      optimizer=Adam(lr=0.0002), metrics=['mse'])
        # try:
        #     print('------loading from file------')
        #     model.load_weights("env_model_weights.h5")
        # except:
        #     print('no saved model found, starting from scratch')
        return model

    def exp_to_train(self, trained_networks):
        # Split a list of [encoded_network, accuracy] pairs into training inputs and targets.
        exp_codes = []
        acc_codes = []
        if len(trained_networks) <= 0:
            return None, None
        for i in range(len(trained_networks)):
            network = trained_networks[i][0]
            acc = trained_networks[i][1]
            # Accuracies could alternatively be one-hot encoded over output_dim bins (disabled).
            acc_codes.append(acc)
            exp_codes.append(network)
        exp_codes = np.array(exp_codes)
        acc_codes = np.array(acc_codes)
        acc_codes = acc_codes.reshape(exp_codes.shape[0], -1)
        return exp_codes, acc_codes

    def predict(self, network):
        # Input: an OrderedDict with "node_list" and "adj_mat"
        # Output: a float predicted accuracy
        network = predict_encoder(network=network)
        network = np.array(network)
        network = np.reshape(network, [1, len(network)])
        accuracy = self.env_model.predict(network)
        return float(accuracy[0][0])

    def env_train(self, networks):
        # Encode the trained architectures and fit the environment (accuracy) model on them.
        concat_code = encoder(networks)
        exp_codes, acc_codes = self.exp_to_train(concat_code)
        self.env_model.fit(exp_codes, acc_codes, batch_size=128, epochs=20, verbose=1)
def net_encoder(net):
    # Map each node type in the node list (except the last node) to an integer code,
    # then pad the code to length 7 with the value 9.
    net_code = []
    for i in range(len(net) - 1):
        if net[i] == 'input':
            net_code.append(2)
        if net[i] == 'conv1x1-bn-relu':
            net_code.append(3)
        if net[i] == 'maxpool3x3':
            net_code.append(4)
        if net[i] == 'conv3x3-bn-relu':
            net_code.append(5)
        if net[i] == 'output':
            net_code.append(6)
    while len(net_code) < 7:
        net_code.append(9)
    return net_code
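
# Illustrative sketch (not part of the original module): how a node list is turned into
# the fixed-length code above. The node names are assumed examples drawn from the
# operation vocabulary handled by net_encoder.
#
#   net_encoder(['input', 'conv3x3-bn-relu', 'maxpool3x3', 'output'])
#   -> [2, 5, 4, 9, 9, 9, 9]   # last node is skipped, remaining slots padded with 9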
def predict_encoder(network):
    # Input: an OrderedDict with "node_list" and "adj_mat"
    # Output: a flat list of 56 values: the flattened adjacency matrix padded to 49,
    # followed by the 7-element node code.
    net_arch = []
    node_list = network["node_list"]
    adj_mat = network["adj_mat"]
    net_code = net_encoder(node_list)
    for adj in adj_mat:
        for element in adj:
            net_arch.append(element)
    while len(net_arch) < 49:
        net_arch.append(0)
    for code in net_code:
        net_arch.append(code)
    return net_arch
def encoder(arch):
    # Input: a dict mapping a JSON-encoded architecture description to its measured accuracy
    # Output: a shuffled list of [encoded_network, accuracy] pairs
    concat_code = []
    for l, v in arch.items():
        net_info = []
        net_arch = []
        network = json.loads(l)
        node_list = network["node_list"]
        if node_list[-1] == 'term':
            node_list = node_list[:-1]
        adj_mat = network["adj_mat"]
        net_code = net_encoder(node_list)
        for adj in adj_mat:
            for element in adj:
                net_arch.append(element)
        while len(net_arch) < 49:
            net_arch.append(0)
        for code in net_code:
            net_arch.append(code)
        net_info.append(net_arch)
        net_info.append(v)
        concat_code.append(net_info)
    shuffle(concat_code)
    return concat_code
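

# Minimal usage sketch (not part of the original module), showing how the pieces above
# fit together. The architecture below and the accuracy value 0.92 are hypothetical
# examples; model persistence and real training data are omitted.
if __name__ == "__main__":
    predictor = net_predictor()

    # A toy 4-node cell: the flattened 4x4 adjacency matrix (16 values) is padded to 49,
    # then the 7 node codes are appended, giving the 56-dim input expected by env_model.
    arch = {
        "node_list": ['input', 'conv3x3-bn-relu', 'maxpool3x3', 'output'],
        "adj_mat": [[0, 1, 0, 0],
                    [0, 0, 1, 0],
                    [0, 0, 0, 1],
                    [0, 0, 0, 0]],
    }

    # encoder() expects a dict keyed by the JSON string of the architecture,
    # with the measured accuracy as the value.
    trained = {json.dumps(arch): 0.92}
    predictor.env_train(trained)

    # predict() takes the raw architecture dict and returns a float in [0, 1].
    print("predicted accuracy:", predictor.predict(arch))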