refactor(core): centralize on-hold user logic in traffic.py
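
In short: bulk-created users now start without an account_creation_date and are treated as "On-hold"; traffic_status() in core/traffic.py stamps the creation date the first time a user is online or shows traffic, instead of the scheduler doing this in a separate pass. A minimal sketch of that rule (illustrative only, assuming users.json entries shaped like the ones in this diff; apply_on_hold_rule is a hypothetical helper, not code from the repository):

import datetime

def apply_on_hold_rule(users_data):
    """Activate on-hold users on first activity, otherwise keep them marked On-hold."""
    today = datetime.datetime.now().strftime("%Y-%m-%d")
    for username, user in users_data.items():
        if user.get("account_creation_date"):
            continue  # already activated; expiry is counted from this date
        is_online = user.get("status") == "Online"
        has_traffic = user.get("download_bytes", 0) > 0 or user.get("upload_bytes", 0) > 0
        if is_online or has_traffic:
            user["account_creation_date"] = today  # first activity starts the clock
        else:
            user["status"] = "On-hold"  # still waiting for the first connection
    return users_data
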
@@ -35,7 +1,7 @@ def add_bulk_users(traffic_gb, expiration_days, count, prefix, start_number, unl
    existing_users_lower = {u.lower() for u in users_data}
    new_users_to_add = {}
    creation_date = datetime.now().strftime("%Y-%m-%d")
    creation_date = None

    try:
        password_process = subprocess.run(['pwgen', '-s', '32', str(count)], capture_output=True, text=True, check=True)

@@ -1,13 +1,9 @@
#!/usr/bin/env python3
import os
import sys
import time
import schedule
import logging
import subprocess
import fcntl
import datetime
import json
from pathlib import Path
from paths import *

@@ -72,46 +68,7 @@ def check_traffic_status():
    try:
        success = run_command(f"python3 {CLI_PATH} traffic-status --no-gui", log_success=False)
        if not success:
            logger.error("Failed to run traffic-status command. Aborting check.")
            return

        if not os.path.exists(USERS_FILE):
            logger.warning(f"{USERS_FILE} not found. Skipping on-hold user check.")
            return

        try:
            with open(USERS_FILE, 'r') as f:
                users_data = json.load(f)
        except (json.JSONDecodeError, IOError) as e:
            logger.error(f"Error reading or parsing {USERS_FILE}: {e}")
            return

        users_updated = False
        today_date = datetime.datetime.now().strftime("%Y-%m-%d")

        for username, user_data in users_data.items():
            is_on_hold = not user_data.get("account_creation_date")

            if is_on_hold:
                is_online = user_data.get("status") == "Online"

                if is_online:
                    logger.info(f"On-hold user '{username}' connected. Activating account with creation date {today_date}.")
                    user_data["account_creation_date"] = today_date
                    users_updated = True
                else:
                    if user_data.get("status") != "On-hold":
                        user_data["status"] = "On-hold"
                        users_updated = True

        if users_updated:
            try:
                with open(USERS_FILE, 'w') as f:
                    json.dump(users_data, f, indent=4)
                logger.info("Successfully updated users.json for on-hold users.")
            except IOError as e:
                logger.error(f"Error writing updates to {USERS_FILE}: {e}")

        pass
    finally:
        release_lock(lock_fd)

@@ -132,10 +89,8 @@ def main():
    schedule.every(1).minutes.do(check_traffic_status)
    schedule.every(6).hours.do(backup_hysteria)

    # logger.info("Performing initial runs on startup...")
    check_traffic_status()
    backup_hysteria()
    # logger.info("Initial runs complete. Entering main loop.")

    while True:
        try:

@@ -149,6 +149,7 @@ async def remove_user_api(username: str):
        cli_api.kick_user_by_name(username)
        cli_api.traffic_status(display_output=False)
        cli_api.remove_user(username)
        cli_api.traffic_status(display_output=False)
        return DetailResponse(detail=f'User {username} has been removed.')
    except HTTPException:

core/traffic.py

@@ -16,16 +16,6 @@ LOCKFILE = "/tmp/kick.lock"
BACKUP_FILE = f"{USERS_FILE}.bak"
MAX_WORKERS = 8

# import logging
# logging.basicConfig(
# level=logging.INFO,
# format='%(asctime)s: [%(levelname)s] %(message)s',
# datefmt='%Y-%m-%d %H:%M:%S'
# )
# logger = logging.getLogger()
# null_handler = logging.NullHandler()
# logger.handlers = [null_handler]

def acquire_lock():
    """Acquires a lock file to prevent concurrent execution"""
    try:

@@ -36,14 +26,7 @@ def acquire_lock():
        sys.exit(1)

def traffic_status(no_gui=False):
    """Updates and retrieves traffic statistics for all users.

    Args:
        no_gui (bool): If True, suppresses output to console

    Returns:
        dict: User data including upload/download bytes and status
    """
    """Updates and retrieves traffic statistics for all users."""
    green = '\033[0;32m'
    cyan = '\033[0;36m'
    NC = '\033[0m'

@@ -90,8 +73,7 @@ def traffic_status(no_gui=False):
            users_data[user_id]["status"] = "Online" if status.is_online else "Offline"
        else:
            users_data[user_id] = {
                "upload_bytes": 0,
                "download_bytes": 0,
                "upload_bytes": 0, "download_bytes": 0,
                "status": "Online" if status.is_online else "Offline"
            }

@@ -102,11 +84,23 @@ def traffic_status(no_gui=False):
        else:
            online = user_id in online_status and online_status[user_id].is_online
            users_data[user_id] = {
                "upload_bytes": stats.upload_bytes,
                "download_bytes": stats.download_bytes,
                "upload_bytes": stats.upload_bytes, "download_bytes": stats.download_bytes,
                "status": "Online" if online else "Offline"
            }

    today_date = datetime.datetime.now().strftime("%Y-%m-%d")
    for username, user_data in users_data.items():
        is_on_hold = not user_data.get("account_creation_date")

        if is_on_hold:
            is_online = user_data.get("status") == "Online"
            has_traffic = user_data.get("download_bytes", 0) > 0 or user_data.get("upload_bytes", 0) > 0

            if is_online or has_traffic:
                user_data["account_creation_date"] = today_date
            else:
                user_data["status"] = "On-hold"

    with open(USERS_FILE, 'w') as users_file:
        json.dump(users_data, users_file, indent=4)

@@ -137,66 +131,49 @@ def display_traffic_data(data, green, cyan, NC):
        print(f"{user:<15} {green}{formatted_tx:<15}{NC} {cyan}{formatted_rx:<15}{NC} {status:<10}")
    print("-------------------------------------------------")

def format_bytes(bytes):
def format_bytes(bytes_val):
    """Format bytes as human-readable string"""
    if bytes < 1024:
        return f"{bytes}B"
    elif bytes < 1048576:
        return f"{bytes / 1024:.2f}KB"
    elif bytes < 1073741824:
        return f"{bytes / 1048576:.2f}MB"
    elif bytes < 1099511627776:
        return f"{bytes / 1073741824:.2f}GB"
    else:
        return f"{bytes / 1099511627776:.2f}TB"
    if bytes_val < 1024: return f"{bytes_val}B"
    elif bytes_val < 1048576: return f"{bytes_val / 1024:.2f}KB"
    elif bytes_val < 1073741824: return f"{bytes_val / 1048576:.2f}MB"
    elif bytes_val < 1099511627776: return f"{bytes_val / 1073741824:.2f}GB"
    else: return f"{bytes_val / 1099511627776:.2f}TB"

def kick_users(usernames, secret):
    """Kicks specified users from the server"""
    try:
        client = Hysteria2Client(
            base_url=API_BASE_URL,
            secret=secret
        )

        client = Hysteria2Client(base_url=API_BASE_URL, secret=secret)
        client.kick_clients(usernames)
        return True
    except Exception:
        return False

def process_user(username, user_data, config_secret, users_data):
def process_user(username, user_data, users_data):
    """Process a single user to check if they should be kicked"""
    blocked = user_data.get('blocked', False)
    if user_data.get('blocked', False): return None

    if blocked:
        return None
    account_creation_date = user_data.get('account_creation_date')
    if not account_creation_date: return None

    max_download_bytes = user_data.get('max_download_bytes', 0)
    expiration_days = user_data.get('expiration_days', 0)
    account_creation_date = user_data.get('account_creation_date')
    current_download_bytes = user_data.get('download_bytes', 0)
    current_upload_bytes = user_data.get('upload_bytes', 0)

    total_bytes = current_download_bytes + current_upload_bytes

    if not account_creation_date:
        return None
    total_bytes = user_data.get('download_bytes', 0) + user_data.get('upload_bytes', 0)

    should_block = False
    try:
        current_date = datetime.datetime.now().timestamp()
        creation_date = datetime.datetime.fromisoformat(account_creation_date.replace('Z', '+00:00'))
        expiration_date = (creation_date + datetime.timedelta(days=expiration_days)).timestamp()

        should_block = False

        if max_download_bytes > 0 and total_bytes >= 0 and expiration_days > 0:
            if total_bytes >= max_download_bytes or current_date >= expiration_date:
        if expiration_days > 0:
            creation_date = datetime.datetime.strptime(account_creation_date, "%Y-%m-%d")
            expiration_date = creation_date + datetime.timedelta(days=expiration_days)
            if datetime.datetime.now() >= expiration_date:
                should_block = True

        if should_block:
            users_data[username]['blocked'] = True
            return username
        if not should_block and max_download_bytes > 0 and total_bytes >= max_download_bytes:
            should_block = True

    except Exception:
        if should_block:
            users_data[username]['blocked'] = True
            return username
    except (ValueError, TypeError):
        return None

    return None

@@ -206,59 +183,39 @@ def kick_expired_users():
    lock_file = acquire_lock()

    try:
        if not os.path.exists(USERS_FILE): return
        shutil.copy2(USERS_FILE, BACKUP_FILE)

        try:
            with open(CONFIG_FILE, 'r') as f:
                config = json.load(f)
            secret = config.get('trafficStats', {}).get('secret', '')
            if not secret:
                sys.exit(1)
            if not secret: sys.exit(1)
        except Exception:
            shutil.copy2(BACKUP_FILE, USERS_FILE)
            sys.exit(1)

        try:
            with open(USERS_FILE, 'r') as f:
                users_data = json.load(f)
        except json.JSONDecodeError:
            shutil.copy2(BACKUP_FILE, USERS_FILE)
            sys.exit(1)
        except Exception:
            shutil.copy2(BACKUP_FILE, USERS_FILE)
            sys.exit(1)
        with open(USERS_FILE, 'r') as f:
            users_data = json.load(f)

        users_to_kick = []
        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
            future_to_user = {
                executor.submit(process_user, username, user_data, secret, users_data): username
                for username, user_data in users_data.items()
            }

            for future in future_to_user:
                username = future.result()
                if username:
                    users_to_kick.append(username)
            futures = [executor.submit(process_user, u, d, users_data) for u, d in users_data.items()]
            for future in futures:
                result = future.result()
                if result:
                    users_to_kick.append(result)

        if users_to_kick:
            for retry in range(3):
                try:
                    with open(USERS_FILE, 'w') as f:
                        json.dump(users_data, f, indent=2)
                    break
                except Exception:
                    time.sleep(1)
                    if retry == 2:
                        raise
        with open(USERS_FILE, 'w') as f:
            json.dump(users_data, f, indent=4)

        if users_to_kick:
            batch_size = 50
            for i in range(0, len(users_to_kick), batch_size):
                batch = users_to_kick[i:i+batch_size]
            for i in range(0, len(users_to_kick), 50):
                batch = users_to_kick[i:i+50]
                kick_users(batch, secret)

    except Exception:
        shutil.copy2(BACKUP_FILE, USERS_FILE)
        if os.path.exists(BACKUP_FILE):
            shutil.copy2(BACKUP_FILE, USERS_FILE)
        sys.exit(1)
    finally:
        fcntl.flock(lock_file, fcntl.LOCK_UN)
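
For reference, the reworked expiry check in process_user() boils down to the following (a simplified sketch under the same users.json assumptions, not the repository code; should_block_user is a hypothetical name): users with no account_creation_date are still on hold and are never kicked; otherwise the account is blocked once the %Y-%m-%d creation date plus expiration_days has passed, or once combined traffic exceeds max_download_bytes.

import datetime

def should_block_user(user):
    """Return True when a user should be blocked and kicked (sketch of the new rule)."""
    if user.get('blocked', False):
        return False  # already blocked; nothing more to do
    created = user.get('account_creation_date')
    if not created:
        return False  # on-hold accounts never expire
    total = user.get('download_bytes', 0) + user.get('upload_bytes', 0)
    try:
        if user.get('expiration_days', 0) > 0:
            expires = (datetime.datetime.strptime(created, "%Y-%m-%d")
                       + datetime.timedelta(days=user['expiration_days']))
            if datetime.datetime.now() >= expires:
                return True  # time-based expiry
        limit = user.get('max_download_bytes', 0)
        return limit > 0 and total >= limit  # traffic quota exceeded
    except (ValueError, TypeError):
        return False  # malformed date or fields: skip rather than kick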