From e3f909ce562ab829fa4d4b47def5afede52a6e8f Mon Sep 17 00:00:00 2001
From: Farham Aghdasi
Date: Mon, 28 Jul 2025 20:19:18 +0330
Subject: [PATCH 1/2] Optimize IP scanning with async requests and enhanced
 logging

---
 README.md        | 113 ++++++++++++++++++++++++++++++-----------------
 check-ips.py     | 100 +++++++++++++++++++++++++++++------------
 requirements.txt |   3 --
 3 files changed, 144 insertions(+), 72 deletions(-)
 delete mode 100644 requirements.txt

diff --git a/README.md b/README.md
index 4a2cb4a..0c06c94 100644
--- a/README.md
+++ b/README.md
@@ -1,67 +1,100 @@
 # X-UI Panel Cracker
-
 ![Screenshot of Tool](tool_screenshot.png)
-
 ---
+## Overview
 
-## Features
-
-- Fast
-- Log Full
-- Save Goods
+X-UI Panel Cracker is a Python-based tool that tests login credentials for X-UI panels and scans given IP addresses for accessible panels. The tool is optimized for speed and reliability, with asynchronous scanning, proxy support, and detailed logging.
 
 ---
 
-## 📄 Usage
-
-1. **input files:**
+## Features
 
-- `urls.txt` — Example:
+- **Fast Scanning**: Uses asynchronous HTTP requests for efficient IP scanning.
+- **Comprehensive Logging**: Logs all scan activity and errors to a dedicated log file (`scan_log.txt`).
+- **Proxy Support**: Optional proxy usage for added privacy during scans.
+- **Flexible Output**: Saves valid results as JSON (`good-sites.json`) for easier analysis.
+- **Multi-Protocol Support**: Checks both HTTP and HTTPS for panel accessibility.
+- **Progress Tracking**: Displays a progress bar while scanning.
 
-```
+---
 
-http://111.62.217:2053
-https://111.255.198:2053
-121.236.138:2053
+## 📄 Usage
 
-```
+### Prerequisites
 
-- `combos.txt` — Example:
+Ensure you have Python 3.8+ installed, then install the required dependencies:
 
+```bash
+pip install -r requirements.txt
 ```
-admin:admin
-user:password123
-
-````
-
-2. **Run the script:**
-
+
+### Input Files
+
+1. **`urls.txt`** — List of IP addresses/URLs to scan. Example:
+   ```
+   http://111.62.217:2053
+   https://111.255.198:2053
+   121.236.138:2053
+   ```
+
+2. **`combos.txt`** — List of username:password combinations for login testing. Example:
+   ```
+   admin:admin
+   user:password123
+   ```
+
+3. **`proxies.txt`** (optional) — List of proxy addresses to route scans through. Example:
+   ```
+   http://proxy1:port
+   https://proxy2:port
+   ```
+
+### Running the Tool
+
+1. **Scan IPs for Accessible Panels**:
+   Run the IP checker script to identify accessible X-UI panels:
+   ```bash
+   python check-ips.py --ip-file urls.txt --workers 50 --timeout 5.0 --proxy-file proxies.txt
+   ```
+   - Default values: `--ip-file urls.txt`, `--workers 50`, `--timeout 5.0`.
+   - Omit `--proxy-file` if you are not using proxies.
+
+   **Output**: Results are saved to `good-sites.json`; logs go to `scan_log.txt`.
+
+2. **Test Login Credentials**:
+   Run the main script to test login credentials against the identified panels:
+   ```bash
+   python main.py
+   ```
+   Or double-click `start.bat` on Windows.
+
+   **Output**: Valid logins are saved to `valid-logins.txt` and `all-good.txt`.
+
+### Example Command
 ```bash
-python main.py
-````
-
-Or simply double-click on:
-
-```
-start.bat
+python check-ips.py --ip-file urls.txt --workers 100 --timeout 3.0 --proxy-file proxies.txt
 ```
 
-3. **Output:**
-
-All valid login results will be saved in:
-
-* `valid-logins.txt` and `all-good.txt`
+---
 
-## Note
+## Notes
 
-1. **Checker panel on IPs**
-* `python check-ips.py`
+- **Ethical Use**: This tool is intended for authorized penetration testing only. Unauthorized use may be illegal.
+- **Performance**: Adjust `--workers` to match your system's resources and avoid overload.
+- **Dependencies**: Ensure all dependencies listed in `requirements.txt` are installed:
+  ```
+  aiohttp>=3.8.0
+  tqdm>=4.65.0
+  requests>=2.31.0
+  colorama>=0.4.6
+  pystyle>=1.1.0
+  ```
 
 ---
 
-📢 **Telegram Channel:** [t.me/secabuser](https://t.me/secabuser)
+## 📢 Contact
+
+Join our **Telegram Channel** for updates: [t.me/secabuser](https://t.me/secabuser)
\ No newline at end of file
diff --git a/check-ips.py b/check-ips.py
index d989533..cc44d61 100644
--- a/check-ips.py
+++ b/check-ips.py
@@ -1,26 +1,36 @@
-import requests
-import threading
-import concurrent.futures
+import aiohttp
+import asyncio
 import random
 from os import system, name
 from pystyle import Colors, Colorate, Center
+from tqdm import tqdm
+import logging
+import json
+import argparse
+
+logging.basicConfig(
+    filename="scan_log.txt",
+    level=logging.INFO,
+    format="%(asctime)s - %(levelname)s - %(message)s"
+)
 
 b = '''
 ▄▀ ▗▖ ▗▖▗▖ ▗▖▗▄▄▄▖ ▗▖ ▗▖ ▗▄▖ ▗▖ ▗▄▄▄▖▗▄▄▄ ▗▄▖▗▄▄▄▖▗▄▖ ▗▄▄▖
 █ ▝▚▞▘ ▐▌ ▐▌ █ ▐▌ ▐▌▐▌ ▐▌▐▌ █ ▐▌ █ ▐▌ ▐▌ █ ▐▌ ▐▌▐▌ ▐▌
 ▄▀ ▐▌ ▐▌ ▐▌ █ ▐▌ ▐▌▐▛▀▜▌▐▌ █ ▐▌ █ ▐▛▀▜▌ █ ▐▌ ▐▌▐▛▀▚▖
 ▗▞▘▝▚▖▝▚▄▞▘▗▄█▄▖ ▝▚▞▘ ▐▌ ▐▌▐▙▄▄▖▗▄█▄▖▐▙▄▄▀ ▐▌ ▐▌ █ ▝▚▄▞▘▐▌ ▐▌
-     t.me/secabuser
 '''
 
-class Checker:
-    def __init__(self, ip_file, workers, timeout):
+class IPChecker:
+    def __init__(self, ip_file, workers, timeout, proxy_file=None):
         self.ip_file = ip_file
         self.workers = workers
         self.timeout = timeout
-        self.lock = threading.Lock()
+        self.proxy_file = proxy_file
         self.targets = []
+        self.proxies = []
+        self.good_sites = []
 
     def load_ips(self):
         try:
@@ -29,10 +39,23 @@ def load_ips(self):
             with open(self.ip_file, "r") as f:
                 for line in f:
                     clean = line.strip()
                     if clean and ':' in clean:
                         self.targets.append(clean)
+            logging.info(f"Loaded {len(self.targets)} IPs from {self.ip_file}")
         except FileNotFoundError:
+            logging.error(f"IP file {self.ip_file} not found!")
             print("IP file not found!")
             exit(1)
 
+    def load_proxies(self):
+        if self.proxy_file:
+            try:
+                with open(self.proxy_file, "r") as f:
+                    self.proxies = [line.strip() for line in f if line.strip()]
+                logging.info(f"Loaded {len(self.proxies)} proxies from {self.proxy_file}")
+            except FileNotFoundError:
+                logging.error(f"Proxy file {self.proxy_file} not found!")
+                print("Proxy file not found!")
+                exit(1)
+
     def user_agent(self):
         agents = [
             "Mozilla/5.0 (Windows NT 10.0; Win64; x64)",
@@ -43,44 +66,70 @@ def user_agent(self):
         ]
         return random.choice(agents)
 
-    def check_ip(self, target):
+    async def check_ip(self, session, target, progress):
         for proto in ["http://", "https://"]:
             url = f"{proto}{target}/xui"
             headers = {"User-Agent": self.user_agent()}
+            proxy = random.choice(self.proxies) if self.proxies else None
             try:
-                r = requests.get(url, headers=headers, timeout=self.timeout)
-                if r.status_code not in [400, 404, 407, 503]:
-                    with self.lock:
-                        with open("good-sites.txt", "a") as f:
-                            f.write(target + "\n")
-                    print(f"[Good] {target} ({r.status_code})")
-                    return
-            except requests.RequestException:
+                async with session.get(url, headers=headers, timeout=self.timeout, proxy=proxy) as r:
+                    if r.status not in [400, 404, 407, 503]:
+                        self.good_sites.append({"url": target, "status": r.status})
+                        logging.info(f"[Good] {target} ({r.status})")
+                        print(f"[Good] {target} ({r.status})")
+                        progress.update(1)
+                        return
+            except Exception as e:
+                logging.warning(f"[Not Good] {target} - Error: {str(e)}")
                 continue
         print(f"[Not Good] {target}")
+        progress.update(1)
 
-    def run(self):
+    async def run(self):
         self.load_ips()
+        self.load_proxies()
         if not self.targets:
             print("No valid IPs loaded.")
+            logging.error("No valid IPs loaded.")
             return
-        with concurrent.futures.ThreadPoolExecutor(max_workers=self.workers) as executor:
-            executor.map(self.check_ip, self.targets)
+
+        # Cap concurrency at --workers instead of spawning every request at once.
+        semaphore = asyncio.Semaphore(self.workers)
+
+        async def bounded_check(session, target, progress):
+            async with semaphore:
+                await self.check_ip(session, target, progress)
+
+        async with aiohttp.ClientSession() as session:
+            with tqdm(total=len(self.targets), desc="Scanning IPs") as progress:
+                tasks = [bounded_check(session, target, progress) for target in self.targets]
+                await asyncio.gather(*tasks, return_exceptions=True)
+
+        if self.good_sites:
+            with open("good-sites.json", "w") as f:
+                json.dump(self.good_sites, f, indent=4)
+            print("\nResults saved to good-sites.json")
+            logging.info(f"Saved {len(self.good_sites)} good sites to good-sites.json")
         print("\nDone!")
 
-if __name__ == "__main__":
+def clear_screen():
     try:
         system("cls" if name == "nt" else "clear")
-    except:
+    except Exception:
         pass
+
+def main():
+    parser = argparse.ArgumentParser(description="X-uiCracker IP Scanner")
+    parser.add_argument("--ip-file", default="urls.txt", help="File containing IP addresses")
+    parser.add_argument("--workers", type=int, default=50, help="Maximum number of concurrent workers")
+    parser.add_argument("--timeout", type=float, default=5.0, help="Timeout for HTTP requests (seconds)")
+    parser.add_argument("--proxy-file", help="File containing proxy addresses")
+    args = parser.parse_args()
+
+    clear_screen()
     print(Colorate.Diagonal(Colors.red_to_blue, Center.XCenter(b)))
-    ip_file = input("IP list file > ").strip()
-    try:
-        workers = int(input("Max workers > ").strip())
-        timeout = float(input("Timeout > ").strip())
-    except ValueError:
-        print("Invalid input!")
-        exit(1)
-    checker = Checker(ip_file, workers, timeout)
-    checker.run()
+    checker = IPChecker(args.ip_file, args.workers, args.timeout, args.proxy_file)
+    asyncio.run(checker.run())
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 5b74ed4..0000000
--- a/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-requests>=2.31.0
-colorama>=0.4.6
-pystyle>=1.1.0
\ No newline at end of file

From 74fd3a11583b3e3ede688c45df672272aca73d0d Mon Sep 17 00:00:00 2001
From: Farham Aghdasi
Date: Mon, 28 Jul 2025 20:55:26 +0330
Subject: [PATCH 2/2] Updated requirements.txt File

---
 requirements.txt | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 requirements.txt

diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..eddd89c
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+aiohttp>=3.8.0
+tqdm>=4.65.0
+requests>=2.31.0
+colorama>=0.4.6
+pystyle>=1.1.0
\ No newline at end of file
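
A note on the output format introduced by PATCH 1/2: each entry `check-ips.py` writes to `good-sites.json` has the shape `{"url": "<host:port>", "status": <HTTP status>}`, matching the dict built in `check_ip`. Below is a minimal sketch of consuming that file to drive the credential-testing step; `test_login` is a hypothetical placeholder for whatever logic `main.py` actually performs, since that script is not part of these patches:

```python
import json

def load_good_sites(path="good-sites.json"):
    """Return the host:port targets recorded by check-ips.py."""
    with open(path, "r") as f:
        entries = json.load(f)  # list of {"url": ..., "status": ...} dicts
    return [entry["url"] for entry in entries]

def test_login(target, username, password):
    # Hypothetical stand-in: the real login check lives in main.py.
    print(f"would try {username}:{password} against {target}")

if __name__ == "__main__":
    # combos.txt holds one username:password pair per line, as in the README.
    with open("combos.txt") as f:
        combos = [line.strip().split(":", 1) for line in f if ":" in line]
    for target in load_good_sites():
        for username, password in combos:
            test_login(target, username, password)
```

Since the scanner only records targets whose status is outside `[400, 404, 407, 503]`, downstream code can treat every listed entry as a candidate panel without re-filtering.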