From e13ee368218ee05f3c12d7552c136835e511d579 Mon Sep 17 00:00:00 2001 From: s-pace Date: Wed, 17 Apr 2019 18:26:42 +0200 Subject: [PATCH] fix: use .format instead of str() --- cli/src/commands/disable_connector.py | 2 +- cli/src/commands/enable_connector.py | 2 +- cli/src/commands/reindex_connector.py | 2 +- deployer/src/algolia_internal_api.py | 25 +++++++++++----------- deployer/src/config_creator.py | 8 +++---- deployer/src/emails.py | 4 ++-- deployer/src/fetchers.py | 2 +- deployer/src/helpdesk_helper.py | 19 ++++++++++------ deployer/src/helpers.py | 19 ++++++++++------ scraper/src/algolia_helper.py | 8 +++---- scraper/src/config/nb_hits_updater.py | 13 ++++++----- scraper/src/index.py | 2 +- scraper/src/strategies/default_strategy.py | 18 ++++++++-------- 13 files changed, 69 insertions(+), 55 deletions(-) diff --git a/cli/src/commands/disable_connector.py b/cli/src/commands/disable_connector.py index 838969ac..5661e677 100644 --- a/cli/src/commands/disable_connector.py +++ b/cli/src/commands/disable_connector.py @@ -21,7 +21,7 @@ def run(self, args): configs, inverted, crawler_ids = get_configs_from_website() connector_name = args[0] - make_request("/" + str(inverted[connector_name]) + "/deactivate", + make_request('/{}/deactivate'.format(inverted[connector_name]), "PUT") remove_crawling_issue(connector_name) diff --git a/cli/src/commands/enable_connector.py b/cli/src/commands/enable_connector.py index d477bf1b..7c3909d8 100644 --- a/cli/src/commands/enable_connector.py +++ b/cli/src/commands/enable_connector.py @@ -20,7 +20,7 @@ def run(self, args): configs, inverted, crawler_ids = get_configs_from_website() connector_name = args[0] - make_request("/" + str(inverted[connector_name]) + "/activate", "PUT") + make_request('/{}/activate'.format(inverted[connector_name]), "PUT") send_slack_notif([{ "title": "Enable connectors", diff --git a/cli/src/commands/reindex_connector.py b/cli/src/commands/reindex_connector.py index 0a824d4e..c43e9e41 100644 --- a/cli/src/commands/reindex_connector.py +++ b/cli/src/commands/reindex_connector.py @@ -22,7 +22,7 @@ def run(self, args): configs, inverted, crawler_ids = get_configs_from_website() connector_name = args[0] - make_request("/" + str(inverted[connector_name]) + "/reindex", "POST") + make_request('/{}/reindex'.format(inverted[connector_name]), "POST") send_slack_notif([{ "title": "Manually reindexed connectors", diff --git a/deployer/src/algolia_internal_api.py b/deployer/src/algolia_internal_api.py index 689e6aa0..d071fdc4 100644 --- a/deployer/src/algolia_internal_api.py +++ b/deployer/src/algolia_internal_api.py @@ -18,7 +18,7 @@ def get_headers(): app_id = environ.get('APPLICATION_ID_PROD').encode() admin_api_key = environ.get('API_KEY_PROD').encode() auth_token = b64encode(app_id + b":" + admin_api_key).decode().replace('=', - '').replace( + '').replace( "\n", '') return { @@ -92,7 +92,7 @@ def add_user_to_index(index_name, user_email): # User has already access to some other indices if right: - endpoint = get_endpoint('/application_rights/' + str(right['id'])) + endpoint = get_endpoint('/application_rights/{}'.format(right['id'])) requests.patch(endpoint, json=payload, headers=headers) print( user_email + " is already registered on algolia dashboard (has right to other DOCSEARCH indices), analytics granted to " + index_name) @@ -132,18 +132,19 @@ def remove_user_from_index(index_name, user_email): indices.remove(index_name) if len(indices) > 0: - requests.patch(get_endpoint('/application_rights/' + str(right['id'])), - json={ - 
                           'application_right': {
-                               'application_id': APPLICATION_ID_PROD_INTERNAL,
-                               'user_email': user_email,
-                               'indices': indices,
-                               'analytics': True
-                           }
-                       }, headers=get_headers())
+        requests.patch(
+            get_endpoint('/application_rights/{}'.format(right['id'])),
+            json={
+                'application_right': {
+                    'application_id': APPLICATION_ID_PROD_INTERNAL,
+                    'user_email': user_email,
+                    'indices': indices,
+                    'analytics': True
+                }
+            }, headers=get_headers())
     else:
         requests.delete(
-            get_endpoint('/application_rights/' + str(right['id'])),
+            get_endpoint('/application_rights/{}'.format(right['id'])),
             headers=get_headers())
 
     print(user_email + " uninvite from " + index_name)
diff --git a/deployer/src/config_creator.py b/deployer/src/config_creator.py
index 1a79f8af..29928852 100644
--- a/deployer/src/config_creator.py
+++ b/deployer/src/config_creator.py
@@ -322,12 +322,12 @@ def create_config(u=None):
         config['start_urls'] = urls
 
     user_index_name = helpers.get_user_value(
-        "index_name is " + "\033[1;33m" + config[
-            'index_name'] + "\033[0m" + ' [enter to confirm]: ')
+        'index_name is \033[1;33m{}\033[0m [enter to confirm]: '.format(config[
+            "index_name"]))
     if user_index_name != "":
         config['index_name'] = user_index_name
-        print("index_name is now " + "\033[1;33m" + config[
-            'index_name'] + "\033[0m")
+        print('index_name is now \033[1;33m{}\033[0m'.format(config[
+            "index_name"]))
 
     return config
 
diff --git a/deployer/src/emails.py b/deployer/src/emails.py
index ddecc9e4..9e2abd58 100644
--- a/deployer/src/emails.py
+++ b/deployer/src/emails.py
@@ -51,7 +51,7 @@ def _prompt_command(emails):
 
 
 def _retrieve(config_name, config_dir):
-    file_path = path.join(config_dir, 'infos', config_name + '.json')
+    file_path = path.join(config_dir, 'infos', '{}.json'.format(config_name))
 
     if path.isfile(file_path):
         with open(file_path, 'r') as f:
@@ -80,7 +80,7 @@ def _commit_push(config_name, action, config_dir):
 
 
 def _write(emails, config_name, config_dir):
-    file_path = path.join(config_dir, 'infos', config_name + '.json')
+    file_path = path.join(config_dir, 'infos', '{}.json'.format(config_name))
 
     new_file = True
     obj = OrderedDict((
diff --git a/deployer/src/fetchers.py b/deployer/src/fetchers.py
index e1d8e84c..1ebf95f1 100644
--- a/deployer/src/fetchers.py
+++ b/deployer/src/fetchers.py
@@ -31,7 +31,7 @@ def get_configs_from_repos():
             txt = f.read()
             config = json.loads(txt, object_pairs_hook=OrderedDict)
             configs[config['index_name']] = config
-    print(str(len(configs)) + " docs in public and private repo")
+    print('{} docs in public and private repo'.format(len(configs)))
 
     return configs
 
diff --git a/deployer/src/helpdesk_helper.py b/deployer/src/helpdesk_helper.py
index 847412c9..c8bebf67 100644
--- a/deployer/src/helpdesk_helper.py
+++ b/deployer/src/helpdesk_helper.py
@@ -25,16 +25,19 @@ def get_conversation_ID_from_url(hs_url):
 
     if not len(cuid) > 0:
         raise ValueError(
-            "Wrong help scout url " + hs_url + ", must have a conversation sub part with ID")
+            'Wrong help scout url {}, must have a conversation sub part with ID'.format(
+                hs_url))
 
     if not RepresentsInt(cuid):
-        raise ValueError("Conversation ID : " + cuid + " must be an integer")
+        raise ValueError(
+            'Conversation ID : {} must be an integer'.format(cuid))
 
     return cuid
 
 
 def get_conversation(cuid):
-    conversation_endpoint = "https://api.helpscout.net/v1/conversations/" + cuid + ".json"
+    conversation_endpoint = 'https://api.helpscout.net/v1/conversations/{}.json'.format(
+        cuid)
     hs_api_key = get_helpscout_api_key()
 
     response_json = json.loads(helpers.make_request(conversation_endpoint,
@@ -67,8 +70,9 @@ def get_start_url_from_conversation(conversation):
             "First thread from the conversation thread wasn't sent by customer")
 
     print(
-        "URL fetched is \033[1;36m" + url_from_conversation + "\033[0m sent by \033[1;33m" + first_thread.get(
-            "customer").get("email") + "\033[0m")
+        'URL fetched is \033[1;36m{}\033[0m sent by \033[1;33m{}\033[0m'.format(
+            url_from_conversation, first_thread.get(
+                "customer").get("email")))
 
     return url_from_conversation
 
@@ -109,7 +113,8 @@ def get_emails_from_conversation(conversation):
 
 
 def add_note(cuid, body):
-    conversation_endpoint = "https://api.helpscout.net/v1/conversations/" + cuid + ".json"
+    conversation_endpoint = 'https://api.helpscout.net/v1/conversations/{}.json'.format(
+        cuid)
 
     hs_api_key = get_helpscout_api_key()
 
@@ -134,7 +139,7 @@ def get_conversation_url_from_cuid(cuid):
     if not cuid:
         raise ValueError("Wrong input conversation ID")
 
-    return "https://secure.helpscout.net/conversation/" + cuid
+    return 'https://secure.helpscout.net/conversation/{}'.format(cuid)
 
 
 def is_docusaurus_conversation(conversation):
diff --git a/deployer/src/helpers.py b/deployer/src/helpers.py
index c77841b5..e85b046e 100644
--- a/deployer/src/helpers.py
+++ b/deployer/src/helpers.py
@@ -67,8 +67,9 @@ def make_request(endpoint, type=None, data=None, username=None, password=None,
                           data=data)
 
         if r.status_code // 100 != 2:
-            print("ISSUE for POST request : " + url + " with params: " + str(
-                data))
+            print(
+                'ISSUE for POST request : {} with params: {}'.format(url,
+                                                                     data))
             print(r.text)
 
         return r
@@ -77,8 +78,9 @@ def make_request(endpoint, type=None, data=None, username=None, password=None,
                             auth=(username, password))
 
         if r.status_code not in success_codes:
-            print("ISSUE for DELETE request : " + url + " with params: " + str(
-                data))
+            print(
+                'ISSUE for DELETE request : {} with params: {}'.format(url,
+                                                                       data))
         return r
 
     if type == 'PUT':
@@ -87,8 +89,9 @@ def make_request(endpoint, type=None, data=None, username=None, password=None,
                          data=data)
         print(r.status_code)
         if r.status_code // 100 != 2:
-            print("ISSUE for PUT request : " + url + " with params: " + str(
-                data))
+            print(
+                'ISSUE for PUT request : {} with params: {}'.format(url,
+                                                                    data))
         return r
 
     if data != None:
@@ -100,7 +103,9 @@ def make_request(endpoint, type=None, data=None, username=None, password=None,
                      auth=(username, password))
 
     if r.status_code // 100 != 2:
-        print("ISSUE for GET request : " + url + " with params:" + data)
+        print(
+            'ISSUE for GET request : {} with params: {}'.format(url,
+                                                                data))
 
     if json_request:
         r.json()
diff --git a/scraper/src/algolia_helper.py b/scraper/src/algolia_helper.py
index 4388b3e0..a953c5cd 100644
--- a/scraper/src/algolia_helper.py
+++ b/scraper/src/algolia_helper.py
@@ -31,8 +31,8 @@ def add_records(self, records, url, from_sitemap):
         color = "96" if from_sitemap else "94"
 
         print(
-            "\033[" + color + "m> DocSearch: \033[0m" + url + " (\033[93m" + str(
-                record_count) + " records\033[0m)")
+            '\033[{}m> DocSearch: \033[0m{} (\033[93m{} records\033[0m)'.format(
+                color, url, record_count))
 
     def add_synonyms(self, synonyms):
         synonyms_list = []
@@ -41,8 +41,8 @@ def add_synonyms(self, synonyms):
         self.algolia_index_tmp.batch_synonyms(synonyms_list)
 
         print(
-            "\033[94m> DocSearch: \033[0m" + "Synonyms" + " (\033[93m" + str(
-                len(synonyms_list)) + " synonyms\033[0m)")
+            '\033[94m> DocSearch: \033[0mSynonyms (\033[93m{} synonyms\033[0m)'.format(
+                len(synonyms_list)))
 
     def commit_tmp_index(self):
         """Overwrite the real
index with the temporary one""" diff --git a/scraper/src/config/nb_hits_updater.py b/scraper/src/config/nb_hits_updater.py index 808bf41f..a05fe9ac 100644 --- a/scraper/src/config/nb_hits_updater.py +++ b/scraper/src/config/nb_hits_updater.py @@ -19,21 +19,24 @@ def __init__(self, config_file, config_content, previous_nb_hits, def update(self, perform_update): if self._update_needed(): - print("previous nb_hits: " + str(self.previous_nb_hits) + "\n") - + print('previous nb_hits: {}\n'.format(self.previous_nb_hits)) if perform_update is None: if sys.stdout.isatty(): - perform_update = confirm('Do you want to update the nb_hits in ' + self.config_file + ' ?') + perform_update = confirm( + 'Do you want to update the nb_hits in {} ?'.format( + self.config_file)) else: perform_update = True if perform_update: try: self._update_config() - print("\n[OK] " + self.config_file + " has been updated") + print( + '\n[OK] {} has been updated'.format(self.config_file)) except Exception: print( - "\n[KO] " + "Was not able to update " + self.config_file) + '\n[KO] Was not able to update {}'.format( + self.config_file)) def _update_needed(self): return self.previous_nb_hits is None or self.previous_nb_hits != self.new_nb_hit diff --git a/scraper/src/index.py b/scraper/src/index.py index ea4036b7..529c33fe 100644 --- a/scraper/src/index.py +++ b/scraper/src/index.py @@ -83,7 +83,7 @@ def run_config(config): if DocumentationSpider.NB_INDEXED > 0: algolia_helper.commit_tmp_index() - print('Nb hits: ' + str(DocumentationSpider.NB_INDEXED)) + print('Nb hits: {}'.format(DocumentationSpider.NB_INDEXED)) config.update_nb_hits_value(DocumentationSpider.NB_INDEXED) else: print('Crawling issue: nbHits 0 for ' + config.index_name) diff --git a/scraper/src/strategies/default_strategy.py b/scraper/src/strategies/default_strategy.py index d39de421..b602a274 100644 --- a/scraper/src/strategies/default_strategy.py +++ b/scraper/src/strategies/default_strategy.py @@ -40,9 +40,9 @@ def get_records_from_response(self, response): def _update_hierarchy_with_global_content(self, hierarchy, current_level_int): for index in range(0, current_level_int + 1): - if 'lvl' + str(index) in self.global_content: - hierarchy['lvl' + str(index)] = self.global_content[ - 'lvl' + str(index)] + if 'lvl{}'.format(index) in self.global_content: + hierarchy['lvl{}'.format(index)] = self.global_content[ + 'lvl{}'.format(index)] return hierarchy @@ -92,8 +92,8 @@ def get_records_from_dom(self, current_page_url=None): anchors[current_level] = Anchor.get_anchor(node) for index in range(current_level_int + 1, 7): - hierarchy['lvl' + str(index)] = None - anchors['lvl' + str(index)] = None + hierarchy['lvl{}'.format(index)] = None + anchors['lvl{}'.format(index)] = None previous_hierarchy = hierarchy if self.config.only_content_level: @@ -111,7 +111,7 @@ def get_records_from_dom(self, current_page_url=None): node, self.get_strip_chars(current_level, selectors)) if ( - content is None or content == "") and current_level == 'content': + content is None or content == "") and current_level == 'content': continue hierarchy, content = self._handle_default_values(hierarchy, @@ -190,7 +190,7 @@ def _get_text_content_for_level(self, node, current_level, selectors): if 'attributes' in selectors[current_level]: attributes = {} for attribute_name in list(selectors[current_level][ - 'attributes'].keys()): + 'attributes'].keys()): matching_nodes = node.xpath( selectors[current_level]['attributes'][attribute_name][ 'selector']) @@ -212,7 +212,7 @@ def 
_get_text_content_for_level(self, node, current_level, selectors): def _get_closest_anchor(anchors): # Getting the element anchor as the closest one for index in list(range(6, -1, -1)): - potential_anchor = anchors['lvl' + str(index)] + potential_anchor = anchors['lvl{}'.format(index)] if potential_anchor is None: continue return potential_anchor @@ -324,7 +324,7 @@ def _body_contains_stop_content(self, response): def _get_url_with_anchor(self, current_page_url, anchor): if ( - not self.config.js_render or not self.config.use_anchors) and anchor is not None: + not self.config.js_render or not self.config.use_anchors) and anchor is not None: return current_page_url + '#' + anchor return current_page_url
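
The conversion pattern applied throughout the patch, shown on a standalone
sketch (the values below are illustrative placeholders, not taken from the
files above):

    from os import path

    connector_id = 42            # illustrative value
    config_dir = 'configs'       # illustrative value
    config_name = 'docsearch'    # illustrative value

    # Before: manual concatenation, with an explicit str() cast for non-strings
    endpoint = "/" + str(connector_id) + "/reindex"

    # After: a single format string; .format() converts its arguments itself
    endpoint = '/{}/reindex'.format(connector_id)

    # Path components still go to os.path.join as separate arguments so the
    # platform separator is inserted; only the file name is built with .format()
    file_path = path.join(config_dir, 'infos', '{}.json'.format(config_name))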