@@ -56,7 +56,7 @@ def process_data():
 
     # get MAX_FILE_SIZE
     cwd = os.getcwd()
-    max_file_name = call(['ls', '-S', cwd + '/seeds/']).split('\n')[0].rstrip('\n')
+    max_file_name = call(['ls', '-S', cwd + '/seeds/']).decode('utf8').split('\n')[0].rstrip('\n')
     MAX_FILE_SIZE = os.path.getsize(cwd + '/seeds/' + max_file_name)
 
     # create directories to save label, spliced seeds, variant length seeds, crashes and mutated seeds.
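Note: under Python 3 subprocess.check_output (which `call` appears to alias elsewhere in this file) returns bytes rather than str, so the output must be decoded before str methods such as split() apply. A minimal sketch of the same pattern, with a hypothetical seeds path rather than the project's code:

import subprocess

out = subprocess.check_output(['ls', '-S', './seeds/'])   # bytes under Python 3
largest = out.decode('utf8').split('\n')[0].rstrip('\n')   # decode before str methods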
@@ -84,7 +84,7 @@ def process_data():
         except subprocess.CalledProcessError:
             print("find a crash")
         for line in out.splitlines():
-            edge = line.split(':')[0]
+            edge = line.split(b':')[0]
             tmp_cnt.append(edge)
             tmp_list.append(edge)
         raw_bitmap[f] = tmp_list
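Note: the afl-showmap output is captured as bytes, so splitlines() yields bytes lines and the separator passed to split() must also be bytes; a str separator raises TypeError under Python 3. A tiny illustration with a made-up coverage line:

line = b'001234:1'          # hypothetical afl-showmap line (edge_id:hit_count)
edge = line.split(b':')[0]  # b'001234'; line.split(':') would raise TypeError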
@@ -115,11 +115,11 @@ def generate_training_data(lb, ub):
     seed = np.zeros((ub - lb, MAX_FILE_SIZE))
     bitmap = np.zeros((ub - lb, MAX_BITMAP_SIZE))
     for i in range(lb, ub):
-        tmp = open(seed_list[i], 'r').read()
+        tmp = open(seed_list[i], 'rb').read()
         ln = len(tmp)
         if ln < MAX_FILE_SIZE:
-            tmp = tmp + (MAX_FILE_SIZE - ln) * '\0'
-        seed[i - lb] = [ord(j) for j in list(tmp)]
+            tmp = tmp + (MAX_FILE_SIZE - ln) * b'\x00'
+        seed[i - lb] = [j for j in bytearray(tmp)]
 
     for i in range(lb, ub):
         file_name = "./bitmaps/" + seed_list[i].split('/')[-1] + ".npy"
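Note: reading seeds with 'rb' and padding with b'\x00' keeps everything as bytes; iterating a bytearray then yields ints 0-255 directly, so ord() is no longer needed. A minimal sketch of that vectorization pattern, using a hypothetical seed path and a stand-in for MAX_FILE_SIZE:

import numpy as np

max_len = 16                                   # stand-in for MAX_FILE_SIZE
data = open('./seeds/some_seed', 'rb').read()  # hypothetical seed path
if len(data) < max_len:
    data = data + (max_len - len(data)) * b'\x00'
row = np.zeros((1, max_len))
row[0] = [b for b in bytearray(data)]          # ints 0-255, no ord() needed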
@@ -181,23 +181,23 @@ def train_generate(batch_size):
 
 def vectorize_file(fl):
     seed = np.zeros((1, MAX_FILE_SIZE))
-    tmp = open(fl, 'r').read()
+    tmp = open(fl, 'rb').read()
     ln = len(tmp)
     if ln < MAX_FILE_SIZE:
-        tmp = tmp + (MAX_FILE_SIZE - ln) * '\0'
-    seed[0] = [ord(j) for j in list(tmp)]
+        tmp = tmp + (MAX_FILE_SIZE - ln) * b'\x00'
+    seed[0] = [j for j in bytearray(tmp)]
     seed = seed.astype('float32') / 255
     return seed
 
 # splice two seeds to a new seed
 
 
 def splice_seed(fl1, fl2, idxx):
-    tmp1 = open(fl1, 'r').read()
+    tmp1 = open(fl1, 'rb').read()
     ret = 1
     randd = fl2
     while (ret == 1):
-        tmp2 = open(randd, 'r').read()
+        tmp2 = open(randd, 'rb').read()
         if len(tmp1) >= len(tmp2):
             lenn = len(tmp2)
             head = tmp2
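Note: opening both seeds with 'rb' means the head/tail slicing below operates on the raw bytes of each file. The splice idea itself, sketched on made-up buffers rather than the project's splice_seed logic:

a = b'AAAAAAAA'
b = b'BBBBBBBB'
splice_at = 3
spliced = a[:splice_at] + b[splice_at:]  # b'AAABBBBB'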
@@ -221,8 +221,8 @@ def splice_seed(fl1, fl2, idxx):
         head = list(head)
         tail = list(tail)
         tail[:splice_at] = head[:splice_at]
-        with open('./splice_seeds/tmp_' + str(idxx), 'w') as f:
-            f.write("".join(tail))
+        with open('./splice_seeds/tmp_' + str(idxx), 'wb') as f:
+            f.write(bytearray(tail))
         ret = 0
         print((f_diff, l_diff))
         randd = random.choice(seed_list)
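Note: under Python 3, list() over a bytes object already produces ints, and bytearray() turns that int list back into a bytes-like object that a binary-mode file accepts. A small round-trip sketch with a hypothetical output path:

tail = list(b'XYZ')                          # [88, 89, 90] under Python 3
with open('/tmp/spliced_demo', 'wb') as f:   # hypothetical path
    f.write(bytearray(tail))                 # writes b'XYZ'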
@@ -404,14 +404,14 @@ def setup_server():
     conn, addr = sock.accept()
     print('connected by neuzz execution moduel' + str(addr))
     gen_grad('train')
-    conn.sendall("start")
+    conn.sendall(b"start")
     while True:
         data = conn.recv(1024)
         if not data:
             break
         else:
             gen_grad(data)
-            conn.sendall("start")
+            conn.sendall(b"start")
     conn.close()
 
 
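Note: Python 3 sockets only accept bytes-like payloads, so the handshake token sent to the NEUZZ execution module has to be a bytes literal. A self-contained sketch using a local socket pair instead of the real connection:

import socket

a, b = socket.socketpair()   # stand-in for the accepted connection
a.sendall(b'start')          # a.sendall('start') would raise TypeError here
print(b.recv(1024))          # b'start'
a.close()
b.close()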