@@ -86,60 +86,58 @@ def run_operation(self, **options):
        pass

    def index_list(self, **options):
-        print("Known, managed indexes{}:".format(" (limited results due to --indexes option)" if self.index_names else ""))
+        logger.info("Known, managed indexes{}:".format(
+            " (limited results due to --indexes option)" if self.index_names else ""))
        for name, index in self.indexes.items():
-            print(" - {}".format(name))
+            logger.info(" - {}".format(name))

    def index_init(self, **options):
        for name, index in self.new_indexes.items():
            if not self.client.indices.exists(name):
                self._index_create(index, name, True)
            else:
-                print("Index '{}' already exists. No change made.".format(index._name))
+                logger.info("Index '{}' already exists. No change made.".format(index._name))

    def index_update(self, **options):
        for name, index in self.indexes.items():
            if not self.client.indices.exists(name):
                self._index_create(self.new_indexes[name], name, True)
            else:
-                # index.close(using=self.using)
                try:
                    index.save(using=self.using)
-                    print("Updated index mapping for '{}'.".format(name))
+                    logger.info("Updated index mapping for '{}'.".format(name))
                except elasticsearch.exceptions.RequestError as e:
-                    print(str(e))
-                # index.open(using=self.using)
+                    logger.info(str(e))

    def index_rebuild(self, **options):
-        multiproc = options.get('multiproc', False)
-        if multiproc:
+        if options.get('multi') is not None:
            policy = ParallelStreamingPolicy(self.parallel_prep)
        else:
            policy = StreamingPolicy()

        for name, index in self.new_indexes.items():
            self._index_create(index, name, set_alias=False)

-            print("Updating index settings to be bulk-indexing friendly...")
+            logger.info("Updating index settings to be bulk-indexing friendly...")
            original_settings = index.get_settings(using=self.using).get(index._name, {}).get('settings', {})
            index.put_settings(body={
                "index.number_of_replicas": 0,
                "index.refresh_interval": '-1'
            })

-            print("Indexing data for '{}'...".format(index._name))
+            logger.info("Indexing data for '{}'...".format(index._name))

            for serializer in self._serializers[name]:
-                print(" - processing '{}' documents: ".format(serializer.document.__name__), end='', flush=True)
+                logger.info(" - processing '{}' documents".format(serializer.document.__name__))
                policy.bulk_operation(serializer, index=index._name, client=self.client, **options)
-                print()

-            print("Data indexed data for '{}'.".format(index._name))
+            logger.info("Data indexed for '{}'.".format(index._name))

-            print("Force merging index data...")
+            logger.info("Force merging index data...")
            index.forcemerge()

-            print("Restoring original/default index settings...")
+            logger.info("Restoring original/default index settings...")
            index.put_settings(body={
                "index.number_of_replicas": original_settings.get('index', {}).get('number_of_replicas', 1),
                "index.refresh_interval": original_settings.get('index', {}).get('refresh_interval', '1s')
@@ -153,7 +151,7 @@ def index_rebuild(self, **options):
                    {'add': {'index': index._name, 'alias': name}}
                ]
            })
-            print("Created alias '{}' for '{}'.".format(name, index._name))
+            logger.info("Created alias '{}' for '{}'.".format(name, index._name))

        policy.close()
@@ -166,10 +164,10 @@ def index_cleanup(self, **options):
    def _index_create(self, index, alias, set_alias=False):
        if not set_alias:
            index.create(using=self.using)
-            print("Created index '{}', no alias set.".format(index._name))
+            logger.info("Created index '{}', no alias set.".format(index._name))
        else:
            index.aliases(**{alias: {}}).create(using=self.using)
-            print("Created index '{}', aliased to '{}'.".format(index._name, alias))
+            logger.info("Created index '{}', aliased to '{}'.".format(index._name, alias))

    def _indexes_delete(self, **options):
        old_indexes = []
@@ -182,9 +180,9 @@ def _indexes_delete(self, **options):
        no_input = options.get('no_input', False)
        if no_input:
            self.client.indices.delete(",".join(old_indexes))
-            print("Deleting old unaliased indexes:")
+            logger.info("Deleting old unaliased indexes:")
            for _old in old_indexes:
-                print(" - deleted index '{}'".format(_old))
+                logger.info(" - deleted index '{}'".format(_old))
        else:
            for _old in old_indexes:
                user_input = 'y' if no_input else ''
@@ -194,4 +192,4 @@ def _indexes_delete(self, **options):
                    break
                if user_input == 'y':
                    self.client.indices.delete(_old)
-                    print(" - deleted index '{}'".format(_old))
+                    logger.info(" - deleted index '{}'".format(_old))
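Note: the diff swaps print() calls for a module-level `logger`, but the logger's definition lies outside the changed hunks. Below is a minimal sketch of the setup such a change typically assumes; the logger name and the basicConfig fallback are assumptions for illustration, not part of this commit.

# Hypothetical setup assumed by the print() -> logger.info() change above;
# the actual module may configure logging differently.
import logging

logger = logging.getLogger(__name__)  # module-level logger used by the methods in the diff

if not logging.getLogger().handlers:
    # Fallback so the command still emits output when the host project has no
    # logging configuration (an assumption, not taken from the commit).
    logging.basicConfig(level=logging.INFO, format="%(levelname)s %(name)s: %(message)s")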