Skip to content

Commit

Permalink
Merge pull request #103 from aldokkani/Feature_PEP8
Browse files Browse the repository at this point in the history
Feature pep8
  • Loading branch information
PSNAppz authored Jul 26, 2018
2 parents a89409b + d99c11c commit 877a91e
Show file tree
Hide file tree
Showing 5 changed files with 17 additions and 19 deletions.
2 changes: 1 addition & 1 deletion modules/getemails.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ def getMails(soup):

emails = get_urls_from_page(soup, email=True)

"""Pretty print output as below"""
# Pretty print output as below
print('')
print(b_colors.OKGREEN+'Mails Found - '+b_colors.ENDC+str(len(emails)))
print('-------------------------------')
Expand Down
2 changes: 1 addition & 1 deletion modules/getweblinks.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ def get_links(soup, ext=False, live=False):
b_colors = Bcolors()
if isinstance(soup, BeautifulSoup):
websites = get_urls_from_page(soup, extension=ext)
"""Pretty print output as below"""
# Pretty print output as below
print(''.join((b_colors.OKGREEN,
'Websites Found - ', b_colors.ENDC, str(len(websites)))))
print('------------------------------------')
Expand Down
11 changes: 4 additions & 7 deletions modules/net_utils.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,8 @@
import re
import requests

from requests.exceptions import ConnectionError, HTTPError


def check_connection(url):

print("Attempting to connect to {site}".format(site=url))
if get_url_status(url) != 0:
return 1
Expand All @@ -28,20 +25,20 @@ def get_url_status(url, headers=False):
"""
try:
if headers:
resp = requests.get(url, headers=headers)
resp = requests.get(url, headers=headers)
else:
resp = requests.get(url)
resp = requests.get(url)
resp.raise_for_status()
return resp
except (ConnectionError, HTTPError):
except (requests.exceptions.ConnectionError, requests.exceptions.HTTPError):
return 0


def is_url(url):
    """Return 1 if *url* looks like an http(s) URL, else 0.

    Fixes over the original pattern:
    - character classes contained literal commas/spaces
      (``[a-z,A-Z,0-9]``, ``[a-z, A-Z]``), so hosts such as
      ``exa,mple.com`` were wrongly accepted;
    - forward slashes need no escaping in a Python regex;
    - ``-`` added to the host class, since hyphens are valid in
      domain labels (backward-compatible: accepts strictly more
      real URLs, rejects the malformed ones above).
    """
    pattern = r"^https?://(www\.)?([a-zA-Z0-9-]*)\.([a-zA-Z]+)(.*)"
    regex = re.compile(pattern)
    if regex.match(url):
        return 1
    return 0


Expand Down
17 changes: 9 additions & 8 deletions modules/pagereader.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import sys
from bs4 import BeautifulSoup
from modules.net_utils import get_url_status
from modules.bcolors import Bcolors
from sys import exit


def connection_msg(site):
Expand All @@ -24,31 +24,32 @@ def read_first_page(site):
continue

if attempts_left == 2:
https_url = 'https://'+site
https_url = 'https://' + site
print(next(connection_msg(https_url)))
response = get_url_status(https_url, headers)
if response != 0:
page = BeautifulSoup(response.text, 'html.parser')
return page, response
page = BeautifulSoup(response.text, 'html.parser')
return page, response
else:
attempts_left -= 1
continue

if attempts_left == 1:
http_url = 'http://'+site
http_url = 'http://' + site
print(next(connection_msg(http_url)))
response = get_url_status(http_url, headers)
if response != 0:
page = BeautifulSoup(response.text, 'html.parser')
return page, response
page = BeautifulSoup(response.text, 'html.parser')
return page, response
else:
attempts_left -= 1
continue

if not attempts_left:
msg = ''.join(("There has been an {err} while attempting to ",
"connect to {site}.")).format(err=err, site=site)
exit(msg)
sys.exit(msg)


def get_ip():
"""Returns users tor ip address
Expand Down
4 changes: 2 additions & 2 deletions torBot.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ def get_args():
return parser.parse_args()


def main(conn=False):
def main():
args = get_args()
connect(args.ip, args.port)
link = args.url
Expand Down Expand Up @@ -199,7 +199,7 @@ def main(conn=False):
if __name__ == '__main__':

try:
main(conn=True)
main()

except KeyboardInterrupt:
print("Interrupt received! Exiting cleanly...")

0 comments on commit 877a91e

Please sign in to comment.