diff --git a/README.md b/README.md
index 57068d304..e114aaddb 100644
--- a/README.md
+++ b/README.md
@@ -50,8 +50,10 @@ $ python3 -m pip install -r requirements.txt
 $ python3 sherlock --help
 usage: sherlock [-h] [--version] [--verbose] [--folderoutput FOLDEROUTPUT]
                 [--output OUTPUT] [--tor] [--unique-tor] [--csv]
+                [--merge MERGE]
                 [--site SITE_NAME] [--proxy PROXY_URL] [--json JSON_FILE]
                 [--timeout TIMEOUT] [--print-all] [--print-found] [--no-color]
+                [--no-txt]
                 [--browse] [--local]
                 USERNAMES [USERNAMES ...]
 
@@ -77,6 +79,7 @@ optional arguments:
                         request; increases runtime; requires Tor to be
                         installed and in system path.
   --csv                 Create Comma-Separated Values (CSV) File.
+  --merge MERGE, -m MERGE  Merges output from multiple username searches into one file
   --site SITE_NAME      Limit analysis to just the listed sites. Add multiple
                         options to specify more than one site.
   --proxy PROXY_URL, -p PROXY_URL
@@ -93,6 +96,7 @@ optional arguments:
   --print-all           Output sites where the username was not found.
   --print-found         Output sites where the username was found.
   --no-color            Don't color terminal output
+  --no-txt              Don't create txt output file
   --browse, -b          Browse to all results on default browser.
   --local, -l           Force the use of the local data.json file.
 ```
diff --git a/sherlock/sherlock.py b/sherlock/sherlock.py
index 49517c457..0e229231b 100644
--- a/sherlock/sherlock.py
+++ b/sherlock/sherlock.py
@@ -506,6 +506,8 @@ def main():
                         action="store_true", dest="csv", default=False,
                         help="Create Comma-Separated Values (CSV) File."
                         )
+    parser.add_argument("--merge", "-m", dest="merge",
+                        help="Merges output from multiple username searches into one file")
     parser.add_argument("--site",
                         action="append", metavar="SITE_NAME",
                         dest="site_list", default=None,
@@ -538,6 +540,10 @@ def main():
                         action="store_true", dest="no_color", default=False,
                         help="Don't color terminal output"
                         )
+    parser.add_argument("--no-txt",
+                        action="store_true", dest="no_txt", default=False,
+                        help="Don't create txt output file"
+                        )
     parser.add_argument("username",
                         nargs="+", metavar="USERNAMES",
                         action="store",
@@ -579,7 +585,7 @@ def main():
 
     if args.tor or args.unique_tor:
         print("Using Tor to make requests")
-        
+
         print(
             "Warning: some websites might refuse connecting over Tor, so note that using this option might increase connection errors.")
 
@@ -648,6 +654,7 @@ def main():
 
     # Run report on all specified users.
     all_usernames = []
+    csv_rows = []  # Accumulated across all usernames for --merge
     for username in args.username:
         if(CheckForParameter(username)):
             for name in MultipleUsernames(username):
@@ -663,7 +670,6 @@ def main():
                           unique_tor=args.unique_tor,
                           proxy=args.proxy,
                           timeout=args.timeout)
-
         if args.output:
             result_file = args.output
         elif args.folderoutput:
@@ -671,18 +677,19 @@ def main():
             # If the folder doesn't exist, create it first
             os.makedirs(args.folderoutput, exist_ok=True)
             result_file = os.path.join(args.folderoutput, f"{username}.txt")
         else:
             result_file = f"{username}.txt"
 
-        with open(result_file, "w", encoding="utf-8") as file:
-            exists_counter = 0
-            for website_name in results:
-                dictionary = results[website_name]
-                if dictionary.get("status").status == QueryStatus.CLAIMED:
-                    exists_counter += 1
-                    file.write(dictionary["url_user"] + "\n")
-            file.write(
-                f"Total Websites Username Detected On : {exists_counter}\n")
+        if not args.no_txt:
+            with open(result_file, "w", encoding="utf-8") as file:
+                exists_counter = 0
+                for website_name in results:
+                    dictionary = results[website_name]
+                    if dictionary.get("status").status == QueryStatus.CLAIMED:
+                        exists_counter += 1
+                        file.write(dictionary["url_user"] + "\n")
+                file.write(
+                    f"Total Websites Username Detected On : {exists_counter}\n")
 
         if args.csv:
             result_file = f"{username}.csv"
@@ -691,6 +698,9 @@ def main():
                 # If the folder doesn't exist, create it first
                 os.makedirs(args.folderoutput, exist_ok=True)
                 result_file = os.path.join(args.folderoutput, result_file)
+            if args.merge:
+                # This is the filepath for the merged file
+                result_file = f"{args.merge}.csv"
 
             with open(result_file, "w", newline='', encoding="utf-8") as csv_report:
                 writer = csv.writer(csv_report)
@@ -707,15 +717,21 @@ def main():
                     response_time_s = results[site]["status"].query_time
                     if response_time_s is None:
                         response_time_s = ""
-                    writer.writerow([username,
+
+                    result_output = [username,
                                      site,
                                      results[site]["url_main"],
                                      results[site]["url_user"],
                                      str(results[site]["status"].status),
                                      results[site]["http_status"],
-                                     response_time_s
-                                     ]
-                                    )
+                                     response_time_s]
+                    csv_rows.append(result_output)
+
+                    if not args.merge:
+                        writer.writerow(result_output)
+
+                if args.merge:
+                    writer.writerows(csv_rows)
 
         print()
     query_notify.finish()
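
Usage sketch (not part of the patch): with the diff above applied, the new flags combine with the existing --csv output roughly as shown below; "alice", "bob", and the merge filename "combined" are placeholders.

    $ python3 sherlock alice bob --csv --merge combined --no-txt

Since the merge branch sits inside the if args.csv: block, --merge should only take effect together with --csv; the command above collects the rows for both usernames into combined.csv (via result_file = f"{args.merge}.csv") and, with --no-txt, skips creating alice.txt and bob.txt.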
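Design note: csv_rows is created once, before the username loop, so it accumulates rows across every username in the run. In merge mode the shared CSV is reopened with mode "w" (truncating it) on each pass and rewritten from the full accumulated list, so the file left after the last username contains all rows; assuming the header row is still written right after csv.writer() is constructed (unchanged code between the hunks shown), the finished file carries a single header row.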