| import subprocess |
| import os |
| import threading |
| import time |
| import yaml |
| from datetime import datetime |
| import signal |
| import psutil |
| import glob |
| import re |
| import pytz |
| import requests |
| import json |
|
|
# Optional single-blob configuration: a JSON object whose keys override the
# individual environment variables read below (applied by assign_vars_from_json).
DATA_JSON = os.environ.get('DATA_JSON', '')
|
|
def assign_vars_from_json(config: dict):
    """Inject every key/value pair of *config* into this module's globals.

    Lets a single DATA_JSON environment variable override the individual
    configuration variables defined below.
    """
    module_globals = globals()
    for name in config:
        module_globals[name] = config[name]
|
|
# Configuration comes either from the DATA_JSON blob or from individual
# environment variables with defaults.
if DATA_JSON:
    try:
        config_dict = json.loads(DATA_JSON)
        assign_vars_from_json(config_dict)
    except json.JSONDecodeError:
        raise ValueError("DATA_JSON 环境变量不是合法的 JSON 字符串")
else:
    # NOTE(review): these defaults apply only on this branch. A DATA_JSON
    # payload that omits any of these keys leaves the name undefined and
    # causes a NameError later — confirm deployments always send a complete
    # object when DATA_JSON is used.
    N_PORT = os.environ.get('N_PORT', '8008')                # dashboard listen port
    ARGO_PORT = os.environ.get('ARGO_PORT', '8009')          # nginx listen port behind the tunnel
    ARGO_DOMAIN = os.environ.get('ARGO_DOMAIN', '')          # public Argo tunnel domain
    ARGO_AUTH = os.environ.get('ARGO_AUTH', '')              # cloudflared tunnel token
    DASHBOARD_VERSION = os.environ.get('DASHBOARD_VERSION', 'v1.13.2')  # nezha dashboard release tag
    NZV1_VERSION = os.environ.get('NZV1_VERSION', 'v1.13.1')            # nezha agent release tag
    BACKUP_TIME = os.environ.get('BACKUP_TIME', '3600')      # seconds between backups (repeat_task)
    RESTART_TIME = os.environ.get('RESTART_TIME', '14400')   # seconds; not referenced in this chunk


    # Hugging Face account used for git-based backups of the dashboard data.
    HF_USER1 = os.environ.get('HF_USER1', '')
    HF_REPO = os.environ.get('HF_REPO', '')
    HF_EMAIL = os.environ.get('HF_EMAIL', '')
    HF_TOKEN1 = os.environ.get('HF_TOKEN1', '')

    # Hugging Face Space to factory-restart when resource usage is too high.
    HF_USER2 = os.environ.get('HF_USER2', '')
    HF_ID = os.environ.get('HF_ID', '')
    HF_TOKEN2 = os.environ.get('HF_TOKEN2', '')
| |
# Nezha agent (client) configuration, serialized to /data/config.yml by
# nv1_agent(). The agent dials the dashboard through the Argo tunnel domain.
# NOTE(review): client_secret and uuid are hard-coded credentials — consider
# moving them to environment variables.
agent_config = {
    'client_secret': 'MLcD6YnifhoY08B9n129UP5cg2139NYa',
    'debug': False,
    'disable_auto_update': True,
    'disable_command_execute': False,
    'disable_force_update': False,
    'disable_nat': False,
    'disable_send_query': False,
    'gpu': False,
    'insecure_tls': False,
    'ip_report_period': 1800,
    'report_delay': 3,
    'self_update_period': 0,
    'server': f'{ARGO_DOMAIN}:443',  # dashboard endpoint reached via the tunnel
    'skip_connection_count': False,
    'skip_procs_count': False,
    'temperature': True,
    'tls': True,
    'use_gitee_to_upgrade': False,
    'use_ipv6_country_code': False,
    'uuid': '18a49016-bc2d-4be9-0ddb-5357fdbf0b3d'
}
# Nezha dashboard (server) configuration, written to /data/dv1/data/config.yaml
# by dv1() when no backup archive is available to restore from.
dashboard_config = {
    'admin_template': 'admin-dist',
    'agent_secret_key': '',
    'avg_ping_count': 2,
    'cover': 1,
    'https': {},
    'install_host': f'{ARGO_DOMAIN}:443',
    'ip_change_notification_group_id': 0,
    'jwt_secret_key': '',
    'jwt_timeout': 300,
    'language': 'zh_CN',
    'listen_port': f'{N_PORT}',  # NOTE(review): written as a string here, while dv1() rewrites it to int(N_PORT) on restore — confirm both forms are accepted
    'location': 'Asia/Shanghai',
    'site_name': '鸡子探针平台-柒蓝',
    'tls': True,
    'user_template': 'user-dist'
}
|
|
# Minimal nginx mime.types file; nginx() writes this to
# /data/nginx1.24/mime.types (the path included by nginx_conf below).
mime_types_content = """types {
    text/html                             html htm shtml;
    text/css                              css;
    text/javascript                       js;
    image/gif                             gif;
    image/jpeg                            jpeg jpg;
    image/png                             png;
    text/plain                            txt;
    application/json                      json;
    application/xml                       xml;
    application/octet-stream              bin;
}"""
|
|
# nginx reverse-proxy configuration template. The six %s slots are filled,
# in order, with: upstream dashboard port (N_PORT), listen port (ARGO_PORT),
# commented-out IPv6 listen port (ARGO_PORT), server_name (ARGO_DOMAIN),
# WebSocket proxy target port (N_PORT) and plain-HTTP proxy target port
# (N_PORT). gRPC (agent reports), WebSocket (terminal/file/server streams)
# and regular web traffic are all multiplexed on one listener.
nginx_conf = """
# 全局配置
worker_processes auto;
pid /tmp/nginx.pid;
error_log /tmp/nginx_error.log;

events {
    worker_connections 768;
}

http {
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 65;
    types_hash_max_size 2048;

    # 修改 mime.types 路径
    include /data/nginx1.24/mime.types;
    default_type application/octet-stream;

    # SSL配置
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_prefer_server_ciphers on;

    access_log /tmp/nginx_access.log;
    gzip on;

    # 上游服务器配置
    upstream dashboard {
        server 127.0.0.1:%s;
        keepalive 512;
    }

    # 服务器块
    server {
        listen %s;
        #listen [::]:%s;
        server_name %s;

        # 删除所有 real_ip 相关配置
        underscores_in_headers on;

        # gRPC相关
        location ^~ /proto.NezhaService/ {
            grpc_set_header Host $host;
            grpc_read_timeout 600s;
            grpc_send_timeout 600s;
            grpc_socket_keepalive on;
            client_max_body_size 10m;
            grpc_buffer_size 4m;
            grpc_pass grpc://dashboard;
        }

        # WebSocket相关
        location ~* ^/api/v1/ws/(server|terminal|file)(.*)$ {
            proxy_set_header Host $host;
            proxy_set_header Origin https://$host;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "upgrade";
            proxy_read_timeout 3600s;
            proxy_send_timeout 3600s;
            proxy_pass http://127.0.0.1:%s;
        }

        # Web请求处理
        location / {
            proxy_set_header Host $host;
            proxy_read_timeout 3600s;
            proxy_send_timeout 3600s;
            proxy_buffer_size 128k;
            proxy_buffers 4 256k;
            proxy_busy_buffers_size 256k;
            proxy_max_temp_file_size 0;
            proxy_pass http://127.0.0.1:%s;
        }

        # 安全头部
        add_header X-Frame-Options SAMEORIGIN;
        add_header X-Content-Type-Options nosniff;
        add_header Strict-Transport-Security "max-age=31536000; includeSubDomains" always;
    }
}
""" % (N_PORT, ARGO_PORT, ARGO_PORT, ARGO_DOMAIN, N_PORT, N_PORT)
|
|
# Global shutdown flag; NOTE(review): never set or waited on in this chunk.
stop_event = threading.Event()
|
|
def kill_processes():
    """Terminate leftover worker processes from a previous run.

    Targets the tunnel client (f2), agent (nv1), dashboard (dv1) and nginx.
    Each match gets a polite SIGTERM first and a SIGKILL if it is still
    alive after 3 seconds. A summary of what was killed is printed.
    """
    targets = {'f2', 'nv1', 'dv1', 'nginx'}
    killed = []

    for proc in psutil.process_iter(['name']):
        if proc.info['name'] not in targets:
            continue
        try:
            pid = proc.pid  # capture before the process can disappear
            proc.terminate()
            try:
                proc.wait(timeout=3)
            except psutil.TimeoutExpired:
                proc.kill()  # escalate: it ignored SIGTERM
            killed.append(f"{proc.info['name']} (PID: {pid})")
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            # Process vanished or is untouchable — nothing to do.
            pass

    if killed:
        print("已结束以下进程:")
        for entry in killed:
            print(entry)
    else:
        print("未找到匹配的进程")


kill_processes()
def get_latest_local_package(directory, pattern='*.tar.gz'):
    """Return the most recently modified file in *directory* matching *pattern*.

    Prints a notice and returns None when nothing matches or on any error.
    """
    try:
        matches = glob.glob(os.path.join(directory, pattern))
        if not matches:
            print("未找到匹配的 nezha-hf 压缩包")
            return None

        # Newest by filesystem mtime, not by name.
        newest = max(matches, key=os.path.getmtime)
        print(f"找到最新的包: {newest}")
        return newest
    except Exception as e:
        print(f"获取最新包时发生错误: {e}")
        return None
|
|
def delete_huggingface_lfs_file(filename, repo_id, token):
    """Delete the LFS record for *filename* from the HF model repo *repo_id*.

    Looks the file up via the `lfs-files` API, then deletes it by OID with
    `rewriteHistory=true` so the blob's storage is actually reclaimed.
    Returns True on success, False on any failure (errors are printed,
    never raised).
    """
    try:
        url = f"https://huggingface.co/api/models/{repo_id}/lfs-files"
        headers = {
            "content-type": "application/json",
            "Authorization": f"Bearer {token}",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
        }

        response = requests.get(url, headers=headers)
        if response.status_code != 200:
            print(f"查询LFS文件失败: {response.status_code} - {response.text}")
            return False

        lfs_files = response.json()

        # Find the record whose filename matches the one to drop.
        file_to_delete = None
        for file_info in lfs_files:
            if file_info.get('filename') == filename:
                file_to_delete = file_info
                break

        if not file_to_delete:
            # BUGFIX: the message previously omitted the filename.
            print(f"未找到对应的LFS文件记录: {filename}")
            return False

        file_oid = file_to_delete['fileOid']
        delete_url = f"https://huggingface.co/api/models/{repo_id}/lfs-files/{file_oid}?rewriteHistory=true"

        delete_response = requests.delete(delete_url, headers=headers)
        if delete_response.status_code == 200:
            # BUGFIX: the message previously omitted the filename.
            print(f"成功删除LFS文件记录: {filename} (OID: {file_oid})")
            return True
        else:
            print(f"删除LFS文件记录失败: {delete_response.status_code} - {delete_response.text}")
            return False

    except Exception as e:
        print(f"删除LFS文件记录时出错: {e}")
        return False
|
|
|
|
def safe_git_cleanup(repo_path, files_to_remove):
    """
    Safe git cleanup: drop *files_to_remove* from the git index of *repo_path*
    without touching the other backup files in the working directory.

    Commits any pending changes first, runs `git rm --cached` for each file,
    commits the removal, then runs `git gc --auto` and `git lfs prune`.
    Returns True on success, False on any error (printed, never raised).
    """
    original_dir = os.getcwd()  # captured before chdir so the finally always works
    try:
        os.chdir(repo_path)

        print(f"执行安全Git清理: {files_to_remove}")

        # Commit outstanding changes first so the cleanup commit stays minimal.
        result = subprocess.run(['git', 'status', '--porcelain'], capture_output=True, text=True)
        if result.stdout.strip():
            print("工作目录有未提交的更改,先提交...")
            subprocess.run(['git', 'add', '.'], capture_output=True)
            subprocess.run(['git', 'commit', '-m', '自动提交: 清理前的更改'], capture_output=True)

        for filename in files_to_remove:
            if os.path.exists(filename):
                # BUGFIX: messages previously omitted the filename.
                print(f"从Git索引中删除 {filename} (文件仍保留在工作目录)")
                # --cached removes only the index entry, never the file itself.
                subprocess.run(['git', 'rm', '--cached', filename], capture_output=True)
            else:
                print(f"文件 {filename} 不存在于工作目录,只清理Git引用")

        if files_to_remove:
            subprocess.run(['git', 'commit', '-m', f'清理已删除的文件: {", ".join(files_to_remove)}'], capture_output=True)

        # Reclaim loose objects and unreferenced LFS blobs.
        subprocess.run(['git', 'gc', '--auto'], capture_output=True)
        subprocess.run(['git', 'lfs', 'prune'], capture_output=True)

        print("安全Git清理完成")
        return True

    except Exception as e:
        print(f"安全Git清理时出错: {e}")
        return False
    finally:
        # BUGFIX: always restore the caller's cwd. The original could fail to
        # restore it (or reference original_dir before assignment) when an
        # early error occurred.
        os.chdir(original_dir)
|
|
|
|
def get_remote_lfs_files(repo_id, token):
    """
    Fetch the list of all LFS files tracked in the remote HF repo *repo_id*.

    Returns the parsed JSON list on success, or [] on any failure
    (the error is printed, never raised).
    """
    api_url = f"https://huggingface.co/api/models/{repo_id}/lfs-files"
    request_headers = {
        "content-type": "application/json",
        "Authorization": f"Bearer {token}",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
    }
    try:
        resp = requests.get(api_url, headers=request_headers)
        if resp.status_code == 200:
            return resp.json()
        print(f"获取远程LFS文件失败: {resp.status_code}")
        return []
    except Exception as e:
        print(f"获取远程LFS文件时出错: {e}")
        return []
|
|
|
|
def cleanup_orphaned_lfs_references(repo_path, repo_id, token, keep_count=3):
    """
    Remove orphaned LFS references: files the remote HF repo still tracks
    but that no longer exist locally in *repo_path*.

    keep_count is accepted for interface compatibility but unused here;
    retention itself is handled by compress_folder().
    """
    original_dir = os.getcwd()  # captured before chdir so the finally always works
    try:
        os.chdir(repo_path)

        print("检查孤儿LFS引用...")

        remote_files = get_remote_lfs_files(repo_id, token)
        if not remote_files:
            print("无法获取远程LFS文件列表")
            return

        # Local archives are the source of truth; anything remote-only is orphaned.
        local_files = set(glob.glob('*.tar.gz'))

        orphaned_files = []
        for remote_file in remote_files:
            filename = remote_file.get('filename')
            if filename and filename not in local_files:
                orphaned_files.append(filename)

        if orphaned_files:
            print(f"发现孤儿LFS引用: {orphaned_files}")

            for filename in orphaned_files:
                # BUGFIX: the message previously omitted the filename.
                print(f"删除孤儿LFS引用: {filename}")
                delete_huggingface_lfs_file(filename, repo_id, token)

        print("孤儿LFS引用清理完成")

    except Exception as e:
        print(f"清理孤儿LFS引用时出错: {e}")
    finally:
        # BUGFIX: the early "no remote files" return previously left the
        # process cwd inside repo_path; restore it unconditionally.
        os.chdir(original_dir)
|
|
|
|
def compress_folder(folder_path, output_dir, keep_count=3):
    """Archive *folder_path* into *output_dir* as <epoch-ms>.tar.gz.

    Enforces the retention policy first: if *output_dir* already holds
    keep_count or more archives, the oldest are deleted locally, their
    Hugging Face LFS records removed, and the git index cleaned via
    safe_git_cleanup(), so that after adding the new archive at most
    keep_count remain.

    Returns "<name> MB:<size>MB TIME:<time>" on success, None on failure.
    NOTE(review): reads module globals HF_USER1 / HF_REPO / HF_TOKEN1.
    """
    try:
        os.makedirs(output_dir, exist_ok=True)

        china_tz = pytz.timezone('Asia/Shanghai')

        # Millisecond epoch timestamp doubles as the archive's sortable name.
        timestamp = str(int(datetime.now(china_tz).timestamp() * 1000))
        output_path = os.path.join(output_dir, f'{timestamp}.tar.gz')

        existing_archives = glob.glob(os.path.join(output_dir, '*.tar.gz'))

        # Archives are named <epoch-ms>.tar.gz; non-matching names sort oldest.
        def extract_timestamp(filename):
            match = re.search(r'(\d+)\.tar\.gz$', filename)
            return int(match.group(1)) if match else 0

        files_to_cleanup = []

        # Retention check happens BEFORE compressing the new archive.
        if len(existing_archives) >= keep_count:

            existing_archives.sort(key=extract_timestamp)

            # +1 accounts for the archive about to be created.
            delete_count = len(existing_archives) - keep_count + 1

            for i in range(delete_count):
                oldest_archive = existing_archives[i]
                oldest_filename = os.path.basename(oldest_archive)

                try:
                    os.remove(oldest_archive)
                    print(f"删除最旧的压缩包:{oldest_filename}")

                    files_to_cleanup.append(oldest_filename)

                    # Also drop the remote LFS record so HF storage is reclaimed.
                    print(f"正在删除Hugging Face LFS文件记录: {oldest_filename}")
                    delete_huggingface_lfs_file(oldest_filename, f"{HF_USER1}/{HF_REPO}", HF_TOKEN1)

                except Exception as e:
                    print(f"删除失败 {oldest_archive}: {e}")

        # Remove the deleted archives from the git index (other files untouched).
        if files_to_cleanup:
            print(f"执行安全Git清理: {files_to_cleanup}")
            safe_git_cleanup(output_dir, files_to_cleanup)

        # NOTE(review): folder_path is absolute, so tar stores paths with the
        # leading '/' stripped (e.g. data/dv1/...) — dv1() relies on this
        # layout when restoring. Confirm tar's "Removing leading /" behavior
        # on the target image.
        result = subprocess.run(
            ['tar', '-czf', output_path, folder_path],
            capture_output=True,
            text=True
        )

        if result.returncode == 0:
            file_size = os.path.getsize(output_path) / 1024 / 1024  # bytes -> MB

            china_time = datetime.now(china_tz)
            formatted_time = china_time.strftime('%Y-%m-%d %H:%M:%S')

            print(f"压缩成功:{output_path}")
            print(f"压缩大小:{file_size:.2f} MB")
            print(f"压缩时间:{formatted_time}")
            print(f"保留策略:最多保留 {keep_count} 个备份包")

            # Callers split this on 'MB:' (see github(2)).
            return f"{os.path.basename(output_path)} MB:{file_size:.2f}MB TIME:{formatted_time}"
        else:
            print("压缩失败")
            print("错误信息:", result.stderr)
            return None

    except Exception as e:
        print(f"压缩出错: {e}")
        return None
|
|
|
|
| |
| |
def github(type):
    """Backup orchestration against the Hugging Face git repo.

    type == 1: (re)clone https://huggingface.co/{HF_USER1}/{HF_REPO} under
               /data, configure the git identity, and chdir into the repo.
    type == 2: prune orphaned LFS references, archive /data/dv1 into the
               repo via compress_folder(), commit and push to `main`.

    NOTE(review): the parameter keeps its original name `type` (shadowing
    the builtin) so existing keyword callers stay compatible.
    """
    if type == 1:
        # BUGFIX: chdir to /data first. repeat_task() can leave the process
        # cwd inside /data/{HF_REPO}; removing the cwd and then cloning from
        # it made the clone fail. Cloning from /data also guarantees the repo
        # lands at /data/{HF_REPO}.
        os.chdir('/data')
        os.system(f'rm -rf /data/{HF_REPO}')
        if not os.path.exists(f'/data/{HF_REPO}'):
            # Token is embedded in the remote URL for push access.
            git = f"git clone https://{HF_USER1}:{HF_TOKEN1}@huggingface.co/{HF_USER1}/{HF_REPO}"
            print(git)
            os.system(git)
            os.system(f'git config --global user.email "{HF_EMAIL}"')
            os.system(f'git config --global user.name "{HF_USER1}"')
        os.chdir(f'/data/{HF_REPO}')
    if type == 2:
        repo_path = f'/data/{HF_REPO}'
        repo_id = f"{HF_USER1}/{HF_REPO}"
        os.chdir(f'/data/{HF_REPO}')
        print("开始备份上传HF")

        # Drop remote LFS records for archives no longer present locally.
        cleanup_orphaned_lfs_references(repo_path, repo_id, HF_TOKEN1, keep_count=3)

        os.system('git lfs prune')
        os.system('git gc --auto')

        new_archive_info = compress_folder('/data/dv1', f'/data/{HF_REPO}', keep_count=3)
        if new_archive_info:
            # Format: "<name> MB:<size>MB TIME:<time>" — the tail becomes the commit message.
            new_archive, file_size_info = new_archive_info.split('MB:')
            os.system(f'git add .')
            os.system(f'git commit -m "{file_size_info}"')

            push_result = os.system('git push origin main')
            if push_result != 0:
                print("推送失败,可能有冲突,尝试拉取并合并...")
                os.system('git pull origin main --rebase')
                os.system('git push origin main')

            os.system('git gc --auto')
            os.system('git lfs prune')
        else:
            print("压缩失败,无法提交")
def nginx():
    """Write the nginx mime.types and main config files, then launch nginx.

    Uses the module-level mime_types_content / nginx_conf templates and the
    nginx build expected at /data/nginx1.24.
    """
    os.makedirs('/data/nginx1.24', exist_ok=True)

    mime_path = '/data/nginx1.24/mime.types'
    with open(mime_path, 'w') as handle:
        handle.write(mime_types_content)
    os.chmod(mime_path, 0o644)
    print("mime.types 文件已创建")

    with open('/data/nginx.conf', 'w') as handle:
        handle.write(nginx_conf)
    os.system("/data/nginx1.24/sbin/nginx -c /data/nginx.conf")
|
|
def dv1():
    """Provision and start the nezha dashboard (dv1).

    Restores /data/dv1 from the newest local backup archive when one exists;
    otherwise downloads a fresh dashboard release plus a seed sqlite.db.
    Once the binary is in place, spawns the backup / nginx / tunnel / agent /
    resource-check daemon threads and runs the dashboard in the foreground
    (os.system blocks this thread for the dashboard's lifetime).
    """
    os.system("rm -rf /data/dv1.zip /data/dashboard-linux-amd64 /data/dv1 /data/data")
    latest_package = get_latest_local_package(f'/data/{HF_REPO}')
    if latest_package:


        print(f"最新压缩包路径: {latest_package}")
        print("通过备份包启动")


        # Archives store paths relative to / (data/dv1/...), so extract under
        # /data and then lift data/dv1 up one level.
        print(f"解压:tar -xzvf {latest_package} -C /data")
        os.system(f"tar -xzvf {latest_package} -C /data")

        os.system("mv /data/data/dv1/ /data")
        os.system("rm -rf /data/data")
        os.chdir('/data/dv1')


        # The restored config may carry an old port; force the current N_PORT.
        with open('/data/dv1/data/config.yaml', 'r') as f:
            config = yaml.safe_load(f)

        config['listen_port'] = int(N_PORT)

        with open('/data/dv1/data/config.yaml', 'w') as f:
            yaml.dump(config, f, default_flow_style=False)

    else:
        print("通过下载程序启动")
        if not os.path.exists('/data/dv1'):
            os.makedirs('/data/dv1')
        if not os.path.exists('/data/dv1/data'):
            # NOTE(review): this rm targets files under a directory that was
            # just determined not to exist, so it is effectively a no-op.
            os.system("rm -rf /data/dv1/data/config.yaml /data/dv1/data/sqlite.db")
            os.makedirs('/data/dv1/data')
            with open('/data/dv1/data/config.yaml', 'w') as file:
                yaml.dump(dashboard_config, file, default_flow_style=False)
            print("配置文件已写入 /data/dv1/data/config.yaml")
            # Seed database download — presumably contains pre-provisioned
            # accounts; verify against the upstream repo.
            print("下载'https://github.com/qilan28/hf-nezha/raw/refs/heads/main/sqlite.db'")
            os.system("wget -O '/data/dv1/data/sqlite.db' 'https://github.com/qilan28/hf-nezha/raw/refs/heads/main/sqlite.db'")
        os.chdir('/data/dv1')
        print(f"下载'https://github.com/nezhahq/nezha/releases/download/{DASHBOARD_VERSION}/dashboard-linux-amd64.zip'")
        os.system(f"wget -O '/data/dv1/dv1.zip' -q 'https://github.com/nezhahq/nezha/releases/download/{DASHBOARD_VERSION}/dashboard-linux-amd64.zip'")
        os.system("unzip -o /data/dv1/dv1.zip -d /data/dv1")
        os.system("rm -rf /data/dv1/dv1.zip")
        os.system("chmod +x /data/dv1/dashboard-linux-amd64")
        os.system("mv /data/dv1/dashboard-linux-amd64 /data/dv1/dv1")
    if os.path.exists('/data/dv1/dv1') and os.path.isfile('/data/dv1/dv1'):
        print("dv1存在开始启动")
        # Side services run as daemon threads so they die with the process.
        threading.Thread(target=repeat_task, daemon=True).start()
        threading.Thread(target=nginx, daemon=True).start()
        threading.Thread(target=f2, daemon=True).start()
        threading.Thread(target=nv1_agent, daemon=True).start()
        threading.Thread(target=check_system_resources, daemon=True).start()
        os.system('/data/dv1/dv1')

    else:
        print("dv1不存在")
| |
def nv1_agent():
    """Download the nezha agent, write its YAML config, and start it in the
    background via nohup."""
    os.system("rm -rf /data/nv1.zip /data/nezha-agent /data/nv1")
    release_url = f'https://github.com/nezhahq/agent/releases/download/{NZV1_VERSION}/nezha-agent_linux_amd64.zip'
    print(f"下载'{release_url}'")
    os.system(f"wget -O '/data/nv1.zip' -q '{release_url}'")
    time.sleep(2)
    os.system("unzip -o /data/nv1.zip -d /data")
    os.system("chmod +x /data/nezha-agent")
    os.makedirs('/data', exist_ok=True)

    with open('/data/config.yml', 'w') as cfg:
        yaml.dump(agent_config, cfg, default_flow_style=False)
    print("配置文件已写入 /data/config.yml")
    time.sleep(2)
    os.system("rm -rf /data/nv1.zip")
    os.system("mv /data/nezha-agent /data/nv1")
    os.system("nohup /data/nv1 -c /data/config.yml >> /dev/null 2>&1 &")
| |
def f2():
    """Download the tunnel client binary (saved as f2) and run the Argo
    tunnel in the foreground using the ARGO_AUTH token."""
    binary = '/data/f2'
    os.system(f"rm -rf {binary}")
    os.system(f"wget -O '{binary}' -q 'https://huggingface.co/datasets/Qilan2/ff/resolve/main/nv1/cf-linux-amd64?download=true'")
    os.system(f"chmod +x {binary}")
    os.system(f'{binary} tunnel run --protocol http2 --token {ARGO_AUTH}')
| |
def _reconstruct_token(partial_token):
    """Rebuild a token stored with embedded spaces by removing every space
    character (other whitespace is left untouched)."""
    return "".join(partial_token.split(" "))
def restart_huggingface_space(space_name, space_id, partial_token):
    """Factory-restart the Hugging Face Space *space_name*/*space_id*.

    The token may be stored with spaces; it is reassembled first.
    Returns a dict with status_code, a success flag, and the response body
    (or the error message when the request itself failed).
    """
    token = _reconstruct_token(partial_token)
    endpoint = f"https://huggingface.co/api/spaces/{space_name}/{space_id}/restart?factory=true"
    request_headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {token}",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/137.0.0.0 Safari/537.36"
    }
    try:
        resp = requests.post(endpoint, headers=request_headers, json={})
    except requests.RequestException as exc:
        return {
            "status_code": None,
            "success": False,
            "message": str(exc)
        }
    return {
        "status_code": resp.status_code,
        "success": resp.status_code == 200,
        "message": resp.text
    }
def check_system_resources():
    """One-shot resource check run ~2 minutes after startup.

    If CPU usage >= 90% or memory usage >= 95%, factory-restarts the
    fallback Space (HF_USER2/HF_ID); otherwise just reports that resources
    look fine.
    """
    time.sleep(120)  # let the startup spike settle before measuring
    cpu_usage = psutil.cpu_percent(interval=1)
    memory_usage = psutil.virtual_memory().percent

    if cpu_usage < 90 and memory_usage < 95:
        print("系统资源正常")
        return

    print("占用过高")
    print(HF_USER2, HF_ID, HF_TOKEN2)
    result = restart_huggingface_space(HF_USER2, HF_ID, HF_TOKEN2)
    print(result)
| |
def repeat_task():
    """Backup loop: every BACKUP_TIME seconds, push a fresh backup to HF."""
    print('备份线程启动')
    while True:
        print(f"打包时间:{BACKUP_TIME} 秒")
        time.sleep(int(BACKUP_TIME))
        github(2)


# Startup sequence: clone the backup repo, return to /data, then launch the
# dashboard (dv1 spawns the nginx / agent / tunnel / backup threads itself).
github(1)
os.chdir('/data/')
dv1()
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|