diff --git a/modules/getemails.py b/modules/getemails.py
index 7e1d5b0b..960c4e82 100644
--- a/modules/getemails.py
+++ b/modules/getemails.py
@@ -11,16 +11,16 @@
def get_mails(soup):
"""
- Searches for tags for links then checks if link contains the
- substring 'mailto' indicating that it's an email. If it is determined
- to be an email then the link is split and the username is appeneded to
- the list
+ Searches for tags for links then checks if link contains the
+ substring 'mailto' indicating that it's an email. If it is determined
+ to be an email then the link is split and the username is appended to
+ the list
- Args:
- soup: BeautifulSoup isntance that will be used for parsing
+ Args:
+ soup: BeautifulSoup instance that will be used for parsing
- Returns:
- emails: list of email IDs
+ Returns:
+ emails: list of email IDs
"""
if isinstance(type(soup), type(BeautifulSoup)):
diff --git a/modules/pagereader.py b/modules/pagereader.py
index fc78ef84..8dfe478f 100644
--- a/modules/pagereader.py
+++ b/modules/pagereader.py
@@ -12,14 +12,14 @@
def display_url(url):
"""
- Prints the status of a url based on if it can be reached using a GET
- request. url is printed with a color based on status.
- Green for a reachable status code and red for not reachable.
-
- Args:
- url (str): url to be printed
- Returns:
- None
+ Prints the status of a url based on if it can be reached using a GET
+ request. url is printed with a color based on status.
+ Green for a reachable status code and red for not reachable.
+
+ Args:
+ url (str): url to be printed
+ Returns:
+ None
"""
resp = get_url_status(url)
if resp != 0:
diff --git a/modules/savefile.py b/modules/savefile.py
index 95375a4b..11a09ce8 100644
--- a/modules/savefile.py
+++ b/modules/savefile.py
@@ -4,11 +4,11 @@
def saveJson(datatype, data):
"""
- Creates json file and stores json
+ Creates json file and stores json
- Args:
- datatype: the type of the object being passed
- data = data that is being stored with object
+ Args:
+ datatype: the type of the object being passed
+ data: data that is being stored with object
"""
timestr = time.strftime("%Y%m%d-%H%M%S")
diff --git a/modules/updater.py b/modules/updater.py
index fb6974d0..df790e97 100644
--- a/modules/updater.py
+++ b/modules/updater.py
@@ -4,9 +4,8 @@
def updateTor():
"""
- Currently updates Tor by calling terminal commands using subprocess
- Not a great method and will be replaced in the future.
-
+ Currently updates Tor by calling terminal commands using subprocess
+ Not a great method and will be replaced in the future.
"""
print("Checking for latest stable release")
diff --git a/modules/utils.py b/modules/utils.py
index eb3041e5..07911ebd 100644
--- a/modules/utils.py
+++ b/modules/utils.py
@@ -14,23 +14,23 @@
def bfs_urls(urls, add_exts, rec_depth=0, stop_depth=None, target_url=None):
"""
- Traverses urls passed using Breadth First Search. You can specify stop
- depth or specify a target to look for. The rec_depth argument is used
- for recursion.
-
- *NOTE: This function uses a GET request for each url found, this can
- be very expensive so avoid if possible try to acquire the urls to
- be traversed and use bfs function.
-
- Args:
- urls (list): urls to traverse
- add_exts (str): additional extensions to use
- rec_depth (int): used for recursion
- stop_depth (int): stops traversing at this depth if specified
- target_url (str): stops at this url if specified
-
- Returns:
- rec_depth (int): depth stopped at
+ Traverses urls passed using Breadth First Search. You can specify stop
+ depth or specify a target to look for. The rec_depth argument is used
+ for recursion.
+
+ *NOTE: This function uses a GET request for each url found, this can
+ be very expensive so avoid if possible try to acquire the urls to
+ be traversed and use bfs function.
+
+ Args:
+ urls (list): urls to traverse
+ add_exts (str): additional extensions to use
+ rec_depth (int): used for recursion
+ stop_depth (int): stops traversing at this depth if specified
+ target_url (str): stops at this url if specified
+
+ Returns:
+ rec_depth (int): depth stopped at
"""
if rec_depth == stop_depth:
@@ -62,18 +62,18 @@ def bfs_urls(urls, add_exts, rec_depth=0, stop_depth=None, target_url=None):
def bfs(nodes, target_node=None, rec_depth=0, stop_depth=None):
"""
- Traverses nodes using Breadth First Search. You can specify stop
- depth or specify a target to look for. The rec_depth argument is used
- for recursion.
-
- Args:
- nodes (list): objects to traverse
- target_node (object): object being searched for
- rec_depth (int): used for recursion
- stop_depth (int): stops traversing at this depth if specified
-
- Returns:
- rec_depth (int): depth stopped at
+ Traverses nodes using Breadth First Search. You can specify stop
+ depth or specify a target to look for. The rec_depth argument is used
+ for recursion.
+
+ Args:
+ nodes (list): objects to traverse
+ target_node (object): object being searched for
+ rec_depth (int): used for recursion
+ stop_depth (int): stops traversing at this depth if specified
+
+ Returns:
+ rec_depth (int): depth stopped at
"""
if rec_depth == stop_depth:
@@ -103,15 +103,15 @@ def bfs(nodes, target_node=None, rec_depth=0, stop_depth=None):
def exec_tasks(que, task_func, tasks_args=tuple()):
"""
- Executes tasks inside of queue using function and arguments passed
- inside of threads
-
- Args:
- que (queue.Queue): contains tasks
- task_func (function): function to be executed on tasks and args
- task_args (tuple): contains arguments for function
- Returns:
- None
+ Executes tasks inside of queue using function and arguments passed
+ inside of threads
+
+ Args:
+ que (queue.Queue): contains tasks
+ task_func (function): function to be executed on tasks and args
+ task_args (tuple): contains arguments for function
+ Returns:
+ None
"""
while True:
task = que.get()
@@ -124,15 +124,15 @@ def exec_tasks(que, task_func, tasks_args=tuple()):
def queue_tasks(tasks, task_func, tasks_args=tuple()):
"""
- Starts threads with tasks and queue, then queues tasks and spawned
- threads begin to pull tasks off queue to execute
-
- Args:
- tasks (list): lists of values that you'd like to operate on
- task_func (function): function that you would like to use
- tasks_args (tuple): arguments for function
- Returns:
- None
+ Starts threads with tasks and queue, then queues tasks and spawned
+ threads begin to pull tasks off queue to execute
+
+ Args:
+ tasks (list): lists of values that you'd like to operate on
+ task_func (function): function that you would like to use
+ tasks_args (tuple): arguments for function
+ Returns:
+ None
"""
que = Queue(len(tasks)*2)
for _ in tasks:
@@ -157,16 +157,16 @@ def queue_tasks(tasks, task_func, tasks_args=tuple()):
def get_url_status(url, headers=False):
"""
- Uses GET request to check if website exists
+ Uses GET request to check if website exists
- *NOTE: May look into changing this to HEAD requests to improve perf
+ *NOTE: May look into changing this to HEAD requests to improve perf
- Args:
- url (str): url to be tested
+ Args:
+ url (str): url to be tested
- Return:
- something? (int/Response object): return value of the connection
- object's GET request if successful & zero upon failure
+ Return:
+ something? (int/Response object): return value of the connection
+ object's GET request if successful & zero upon failure
"""
try:
if headers: