113 changes: 73 additions & 40 deletions README.md
@@ -1,67 +1,100 @@
# X-UI Panel Cracker


![Screenshot of Tool](tool_screenshot.png)


---

## Overview

X-UI Panel Cracker is a Python-based tool designed to test login credentials for X-UI panels and scan for accessible panels on given IP addresses. The tool is optimized for speed and reliability, with features like asynchronous scanning, proxy support, and detailed logging.

---

## Features

- **Fast Scanning**: Utilizes asynchronous HTTP requests for efficient IP scanning.
- **Comprehensive Logging**: Logs all scan activity and errors to a dedicated log file (`scan_log.txt`).
- **Proxy Support**: Optional proxy usage to enhance privacy during scans.
- **Flexible Output**: Saves valid results in JSON format (`good-sites.json`) for easier analysis (see the sketch after this list).
- **Multi-Protocol Support**: Checks both HTTP and HTTPS protocols for panel accessibility.
- **Progress Tracking**: Displays a progress bar during IP scanning.
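
Since `check-ips.py` writes each hit as `{"url": ..., "status": ...}`, downstream analysis can be a few lines of Python — a minimal sketch:

```python
import json

# Read the scan results produced by check-ips.py.
with open("good-sites.json") as f:
    results = json.load(f)

# Each entry has the shape {"url": "<host:port>", "status": <HTTP status code>}.
for entry in results:
    print(f"{entry['url']} responded with HTTP {entry['status']}")
```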

---

## 📄 Usage

### Prerequisites

Ensure you have Python 3.8+ installed. Install the required dependencies by running:

```bash
pip install -r requirements.txt
```


### Input Files

1. **`urls.txt`** — List of IP addresses/URLs to scan. Example:
   ```
   http://111.62.217:2053
   https://111.255.198:2053
   121.236.138:2053
   ```

2. **`combos.txt`** — List of `username:password` combinations for login testing (see the parsing sketch after this list). Example:
   ```
   admin:admin
   user:password123
   ```

3. **`proxies.txt`** (Optional) — List of proxy addresses for scanning. Example:
   ```
   http://proxy1:port
   https://proxy2:port
   ```
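
Because passwords may themselves contain colons, consumers of `combos.txt` should split on the first `:` only. A minimal parsing sketch (`load_combos` is an illustrative helper, not part of this repo):

```python
def load_combos(path="combos.txt"):
    """Parse username:password pairs, skipping blank or malformed lines."""
    combos = []
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line and ":" in line:
                user, _, password = line.partition(":")  # split on the first ':' only
                combos.append((user, password))
    return combos
```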

### Running the Tool

1. **Scan IPs for Accessible Panels**:
   Run the IP checker script to identify accessible X-UI panels:
   ```bash
   python check-ips.py --ip-file urls.txt --workers 50 --timeout 5.0 --proxy-file proxies.txt
   ```
   - Default values: `--ip-file urls.txt`, `--workers 50`, `--timeout 5.0`.
   - Omit `--proxy-file` if not using proxies.

   **Output**: Results are saved in `good-sites.json` and logs in `scan_log.txt` (a sample log entry is shown after this list).

2. **Test Login Credentials**:
   Run the main script to test login credentials on identified panels:
   ```bash
   python main.py
   ```
   Or double-click `start.bat` on Windows.

   **Output**: Valid logins are saved in `valid-logins.txt` and `all-good.txt`.
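
Given the `%(asctime)s - %(levelname)s - %(message)s` format configured in `check-ips.py`, `scan_log.txt` entries look like the following (timestamps and targets are illustrative):

```
2025-01-01 12:00:00,000 - INFO - [Good] 121.236.138:2053 (200)
2025-01-01 12:00:01,234 - WARNING - [Not Good] 111.62.217:2053 - Error: Cannot connect to host 111.62.217:2053
```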

### Example Command

```bash
python check-ips.py --ip-file urls.txt --workers 100 --timeout 3.0 --proxy-file proxies.txt
```

---

## Notes

- **Ethical Use**: This tool is intended for authorized penetration testing only. Unauthorized use may be illegal.
- **Performance**: Adjust `--workers` based on your system's resources to avoid overload.
- **Dependencies**: Ensure all dependencies listed in `requirements.txt` are installed:
  ```
  aiohttp>=3.8.0
  tqdm>=4.65.0
  requests>=2.31.0
  colorama>=0.4.6
  pystyle>=1.1.0
  ```

---

## 📢 Contact

Join our **Telegram Channel** for updates: [t.me/secabuser](https://t.me/secabuser)
100 changes: 71 additions & 29 deletions check-ips.py
@@ -1,26 +1,36 @@
import aiohttp
import asyncio
import random
from os import system, name
from pystyle import Colors, Colorate, Center
from tqdm import tqdm
import logging
import json
import argparse

logging.basicConfig(
    filename="scan_log.txt",
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s"
)

b = '''
▄▀ ▗▖ ▗▖▗▖ ▗▖▗▄▄▄▖ ▗▖ ▗▖ ▗▄▖ ▗▖ ▗▄▄▄▖▗▄▄▄ ▗▄▖▗▄▄▄▖▗▄▖ ▗▄▄▖
█ ▝▚▞▘ ▐▌ ▐▌ █ ▐▌ ▐▌▐▌ ▐▌▐▌ █ ▐▌ █ ▐▌ ▐▌ █ ▐▌ ▐▌▐▌ ▐▌
▄▀ ▐▌ ▐▌ ▐▌ █ ▐▌ ▐▌▐▛▀▜▌▐▌ █ ▐▌ █ ▐▛▀▜▌ █ ▐▌ ▐▌▐▛▀▚▖
▗▞▘▝▚▖▝▚▄▞▘▗▄█▄▖ ▝▚▞▘ ▐▌ ▐▌▐▙▄▄▖▗▄█▄▖▐▙▄▄▀ ▐▌ ▐▌ █ ▝▚▄▞▘▐▌ ▐▌

t.me/secabuser
'''

class IPChecker:
    def __init__(self, ip_file, workers, timeout, proxy_file=None):
        self.ip_file = ip_file
        self.workers = workers
        self.timeout = timeout
        self.proxy_file = proxy_file
        self.targets = []
        self.proxies = []
        self.good_sites = []

    def load_ips(self):
        try:
@@ -29,10 +39,23 @@ def load_ips(self):
                    clean = line.strip()
                    if clean and ':' in clean:
                        self.targets.append(clean)
            logging.info(f"Loaded {len(self.targets)} IPs from {self.ip_file}")
        except FileNotFoundError:
            logging.error(f"IP file {self.ip_file} not found!")
            print("IP file not found!")
            exit(1)

    def load_proxies(self):
        if self.proxy_file:
            try:
                with open(self.proxy_file, "r") as f:
                    self.proxies = [line.strip() for line in f if line.strip()]
                logging.info(f"Loaded {len(self.proxies)} proxies from {self.proxy_file}")
            except FileNotFoundError:
                logging.error(f"Proxy file {self.proxy_file} not found!")
                print("Proxy file not found!")
                exit(1)

    def user_agent(self):
        agents = [
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64)",
@@ -43,44 +66,63 @@ def user_agent(self):
        ]
        return random.choice(agents)

    async def check_ip(self, session, target, progress):
        for proto in ["http://", "https://"]:
            url = f"{proto}{target}/xui"
            headers = {"User-Agent": self.user_agent()}
            proxy = random.choice(self.proxies) if self.proxies else None
            try:
                # Explicit total timeout (aiohttp also accepts a bare number here).
                async with session.get(url, headers=headers,
                                       timeout=aiohttp.ClientTimeout(total=self.timeout),
                                       proxy=proxy) as r:
                    if r.status not in [400, 404, 407, 503]:
                        self.good_sites.append({"url": target, "status": r.status})
                        logging.info(f"[Good] {target} ({r.status})")
                        print(f"[Good] {target} ({r.status})")
                        progress.update(1)  # count this target before the early return
                        return
            except Exception as e:
                logging.warning(f"[Not Good] {target} - Error: {str(e)}")
                continue
        print(f"[Not Good] {target}")
        progress.update(1)

    async def run(self):
        self.load_ips()
        self.load_proxies()
        if not self.targets:
            print("No valid IPs loaded.")
            logging.error("No valid IPs loaded.")
            return

        # NOTE: this schedules every target at once; --workers is stored but not
        # enforced here (see the bounded-concurrency sketch after this file).
        async with aiohttp.ClientSession() as session:
            with tqdm(total=len(self.targets), desc="Scanning IPs") as progress:
                tasks = [self.check_ip(session, target, progress) for target in self.targets]
                await asyncio.gather(*tasks, return_exceptions=True)

        if self.good_sites:
            with open("good-sites.json", "w") as f:
                json.dump(self.good_sites, f, indent=4)
            print("\nResults saved to good-sites.json")
            logging.info(f"Saved {len(self.good_sites)} good sites to good-sites.json")
        print("\nDone!")

def clear_screen():
    try:
        system("cls" if name == "nt" else "clear")
    except Exception:
        pass

def main():
    parser = argparse.ArgumentParser(description="X-uiCracker IP Scanner")
    parser.add_argument("--ip-file", default="urls.txt", help="File containing IP addresses")
    parser.add_argument("--workers", type=int, default=50, help="Maximum number of concurrent workers")
    parser.add_argument("--timeout", type=float, default=5.0, help="Timeout for HTTP requests (seconds)")
    parser.add_argument("--proxy-file", help="File containing proxy addresses")
    args = parser.parse_args()

    clear_screen()
    print(Colorate.Diagonal(Colors.red_to_blue, Center.XCenter(b)))

    checker = IPChecker(args.ip_file, args.workers, args.timeout, args.proxy_file)
    asyncio.run(checker.run())

if __name__ == "__main__":
    main()
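
One caveat with the async rewrite above: `asyncio.gather` launches a coroutine for every target simultaneously, so `--workers` is parsed and stored but never actually bounds concurrency. A minimal sketch of enforcing that limit with `asyncio.Semaphore` — `run_bounded` is an illustrative helper, not part of this diff:

```python
import asyncio

async def run_bounded(checker, session, targets, progress):
    # Allow at most `checker.workers` requests in flight at any moment.
    semaphore = asyncio.Semaphore(checker.workers)

    async def guarded(target):
        async with semaphore:
            await checker.check_ip(session, target, progress)

    # Same gather call as IPChecker.run(), but each coroutine must acquire
    # the semaphore before issuing its HTTP requests.
    await asyncio.gather(*(guarded(t) for t in targets), return_exceptions=True)
```
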
6 changes: 4 additions & 2 deletions requirements.txt
@@ -1,3 +1,5 @@
aiohttp>=3.8.0
tqdm>=4.65.0
requests>=2.31.0
colorama>=0.4.6
pystyle>=1.1.0