diff --git a/readthedocs/search/documents.py b/readthedocs/search/documents.py
index 571472fd242..f50150c389f 100644
--- a/readthedocs/search/documents.py
+++ b/readthedocs/search/documents.py
@@ -43,7 +43,7 @@ class SphinxDomainDocument(DocType):
 
     modified_model_field = 'modified'
 
-    class Meta(object):
+    class Meta:
         model = SphinxDomain
         fields = ('commit',)
         ignore_signals = True
@@ -77,7 +77,7 @@ class ProjectDocument(DocType):
 
     modified_model_field = 'modified_date'
 
-    class Meta(object):
+    class Meta:
         model = Project
         fields = ('name', 'slug', 'description')
         ignore_signals = True
@@ -111,7 +111,7 @@ class PageDocument(DocType):
 
     modified_model_field = 'modified_date'
 
-    class Meta(object):
+    class Meta:
         model = HTMLFile
         fields = ('commit',)
         ignore_signals = True
diff --git a/readthedocs/search/management/commands/reindex_elasticsearch.py b/readthedocs/search/management/commands/reindex_elasticsearch.py
index dc6191e4f8a..32f9a4c8534 100644
--- a/readthedocs/search/management/commands/reindex_elasticsearch.py
+++ b/readthedocs/search/management/commands/reindex_elasticsearch.py
@@ -35,7 +35,7 @@ def _get_indexing_tasks(app_label, model_name, index_name, queryset, document_cl
     def _run_reindex_tasks(self, models, queue):
         apply_async_kwargs = {'priority': 0}
         if queue:
-            log.info('Adding indexing tasks to queue {0}'.format(queue))
+            log.info('Adding indexing tasks to queue %s', queue)
             apply_async_kwargs['queue'] = queue
         else:
             log.info('Adding indexing tasks to default queue')
diff --git a/readthedocs/search/tasks.py b/readthedocs/search/tasks.py
index d4164cc924a..89e8a5dac80 100644
--- a/readthedocs/search/tasks.py
+++ b/readthedocs/search/tasks.py
@@ -43,7 +43,7 @@ def index_objects_to_es(
         document._doc_type.index = index_name
         log.info('Replacing index name %s with %s', old_index_name, index_name)
 
-    log.info("Indexing model: {}, '{}' objects".format(model.__name__, queryset.count()))
+    log.info("Indexing model: %s, '%s' objects", model.__name__, queryset.count())
     doc_obj.update(queryset.iterator())
 
     if index_name:
@@ -59,7 +59,7 @@ def delete_objects_in_es(app_label, model_name, document_class, objects_id):
     doc_obj = document()
     queryset = doc_obj.get_queryset()
     queryset = queryset.filter(id__in=objects_id)
-    log.info("Deleting model: {}, '{}' objects".format(model.__name__, queryset.count()))
+    log.info("Deleting model: %s, '%s' objects", model.__name__, queryset.count())
     doc_obj.update(queryset.iterator(), action='delete')
 
 
@@ -111,6 +111,6 @@ def index_missing_objects(app_label, model_name, document_class, index_generatio
     queryset = document().get_queryset().exclude(**{query_string: index_generation_time})
     document().update(queryset.iterator())
 
-    log.info("Indexed {} missing objects from model: {}'".format(queryset.count(), model.__name__))
+    log.info("Indexed %s missing objects from model: %s", queryset.count(), model.__name__)
 
 # TODO: Figure out how to remove the objects from ES index that has been deleted
diff --git a/readthedocs/search/views.py b/readthedocs/search/views.py
index e2176622a3b..9c9cf239227 100644
--- a/readthedocs/search/views.py
+++ b/readthedocs/search/views.py
@@ -14,7 +14,7 @@
 )
 
 log = logging.getLogger(__name__)
-LOG_TEMPLATE = '(Elastic Search) [{user}:{type}] [{project}:{version}:{language}] {msg}'
+LOG_TEMPLATE = '(Elastic Search) [%(user)s:%(type)s] [%(project)s:%(version)s:%(language)s] %(msg)s'
 
 UserInput = collections.namedtuple(
     'UserInput',
@@ -92,14 +92,15 @@ def elastic_search(request, project_slug=None):
         facets = results.facets
 
         log.info(
-            LOG_TEMPLATE.format(
-                user=request.user,
-                project=user_input.project or '',
-                type=user_input.type or '',
-                version=user_input.version or '',
-                language=user_input.language or '',
-                msg=user_input.query or '',
-            ),
+            LOG_TEMPLATE,
+            {
+                'user': request.user,
+                'project': user_input.project or '',
+                'type': user_input.type or '',
+                'version': user_input.version or '',
+                'language': user_input.language or '',
+                'msg': user_input.query or '',
+            }
         )
 
     # Make sure our selected facets are displayed even when they return 0 results