# Base image: Debian slim with Node.js 22 (required by openclaw and the
# other npm CLIs installed below).
FROM node:22-slim

# System packages, all in one layer:
#  - fetch/archive utilities (git, curl, zip/unzip, tar, xz, bzip2)
#  - python3/pip for the huggingface_hub restore/backup helper
#  - shared libraries needed by headless Chromium (GTK, NSS, ALSA, ...)
#  - rsync, used at runtime to merge restored dataset state
# `npx playwright install-deps chromium` installs any remaining Chromium
# dependencies, and the apt lists are removed in the same layer to keep
# the image small.
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \
    git \
    curl \
    zip \
    unzip \
    tar \
    xz-utils \
    bzip2 \
    python3 \
    python3-pip \
    libglib2.0-0 \
    libnss3 \
    libnspr4 \
    libatk1.0-0 \
    libatk-bridge2.0-0 \
    libcups2 \
    libdrm2 \
    libxkbcommon0 \
    libxcomposite1 \
    libxdamage1 \
    libxrandr2 \
    libgbm1 \
    libgtk-3-0 \
    libasound2 \
    libpango-1.0-0 \
    libcairo2 \
    rsync \
    && npx playwright install-deps chromium \
    && rm -rf /var/lib/apt/lists/*
|
|
# Optional override for the Telegram Bot API host (e.g. a self-hosted
# telegram-bot-api server). Exported as ENV so both the build-time sed
# patch and the runtime process see the same value.
ARG TELEGRAM_BOT_APIBASE
ENV TELEGRAM_BOT_APIBASE=${TELEGRAM_BOT_APIBASE}

# Global CLI tools: the openclaw gateway itself plus browser automation,
# PDF, and Gemini helpers.
RUN npm install -g openclaw@latest agent-browser pdfnano @google/gemini-cli
|
|
| |
# Rewrite the hard-coded Telegram API hostname in openclaw's installed
# sources to point at the custom Bot API host. Guarded: if the build arg
# is empty, the original sed would have replaced "api.telegram.org" with
# an empty string and silently broken Telegram connectivity.
RUN echo "TELEGRAM_BOT_APIBASE=$TELEGRAM_BOT_APIBASE" && \
    if [ -n "$TELEGRAM_BOT_APIBASE" ]; then \
        find /usr/local/lib/node_modules/openclaw -type f -name "*.js" \
            -exec sed -i "s|api.telegram.org|${TELEGRAM_BOT_APIBASE}|g" {} + ; \
    fi
|
|
| |
# huggingface_hub powers the dataset restore/backup in the start script.
# --break-system-packages is needed on Debian's PEP 668 "externally
# managed" Python to allow a system-wide pip install.
RUN pip3 install --no-cache-dir --break-system-packages huggingface_hub
|
|
| |
# Install Chromium via agent-browser, then expose the real (non-headless-
# shell) binary as /usr/bin/google-chrome. The find result is captured,
# quoted, and checked: the original unquoted $(find ...) would create a
# bogus or dangling symlink (and mask the error) if no binary was found.
RUN agent-browser install chromium && \
    chrome_bin="$(find /root/.cache/ms-playwright -name chrome -type f | grep -v "shell" | head -n 1)" && \
    test -n "$chrome_bin" && \
    ln -s "$chrome_bin" /usr/bin/google-chrome
|
|
| |
# Pre-create the OpenClaw state layout so the runtime restore/config steps
# always have a target directory, even on a fresh container.
RUN mkdir -p /root/.openclaw/agents/main/agent \
             /root/.openclaw/agents/main/sessions \
             /root/.openclaw/credentials \
             /root/.openclaw/workspace

# Hugging Face Spaces routes traffic to port 7860.
ENV PORT=7860
ENV OPENCLAW_GATEWAY_MODE=local
EXPOSE 7860
|
|
| |
# Write the container entry point (BuildKit heredoc). The quoted 'EOF'
# delimiter disables build-time expansion: everything below is interpreted
# only when the container runs.
RUN cat > /usr/local/bin/start-openclaw << 'EOF'
#!/usr/bin/env bash
# start-openclaw — container entry point:
#   1. optionally restore persisted state from a Hugging Face dataset,
#   2. generate openclaw.json on first boot from environment variables,
#   3. start a background loop that backs state up to the dataset,
#   4. exec the openclaw gateway on port 7860.
# The shebang is required: CMD uses exec form, and the script uses bash
# syntax ([[ ]]), so it must not fall back to /bin/sh (dash).
# Required env on first boot: GEMINI_APIKEY. Optional: HF_DATASET,
# HF_DATASET_TOKEN, TELEGRAM_BOT_TOKEN, TELEGRAM_USER, NVIDIA_NIM_API_KEY.
set -e

OPENCLAW_DIR="/root/.openclaw"
HF_TMP="/tmp/hf-restore"

# Some container runtimes ship an empty/broken resolv.conf; force public
# DNS. Best-effort only — the file may be read-only in some sandboxes.
echo "nameserver 1.1.1.1" > /etc/resolv.conf 2>/dev/null || true
echo "nameserver 8.8.8.8" >> /etc/resolv.conf 2>/dev/null || true

# ---- Restore persisted state from the HF dataset (optional) ----
if [[ -n "$HF_DATASET_TOKEN" && -n "$HF_DATASET" ]]; then
  echo "[HF] Restoring dataset: $HF_DATASET"

  rm -rf "$HF_TMP"
  mkdir -p "$HF_TMP"

  # Quoted 'EOP': the python source is literal; it reads env vars itself.
  python3 << 'EOP'
import os
from huggingface_hub import snapshot_download

snapshot_download(
    repo_id=os.environ["HF_DATASET"],
    repo_type="dataset",
    local_dir="/tmp/hf-restore",
    allow_patterns=["data/**"],
    token=os.environ["HF_DATASET_TOKEN"]
)
print("[HF] Download complete")
EOP

  # Merge (not replace) so image-baked defaults survive partial backups.
  if [ -d "$HF_TMP/data" ]; then
    rsync -a "$HF_TMP/data/" "$OPENCLAW_DIR/"
    echo "[HF] Data merged into OpenClaw workspace"
  fi
fi

# ---- First-boot configuration ----
if [ ! -f "$OPENCLAW_DIR/openclaw.json" ]; then
  echo "[CFG] Generating initial openclaw.json"

  if [ -z "$GEMINI_APIKEY" ]; then
    echo "ERROR: GEMINI_APIKEY not set"
    exit 1
  fi

  # Unquoted EOC delimiter: the ${...} placeholders below are expanded
  # from the container environment right now.
  cat > "$OPENCLAW_DIR/openclaw.json" << EOC
{
  "env": {
    "GOOGLE_API_KEY": "${GEMINI_APIKEY}",
    "GEMINI_API_KEY": "${GEMINI_APIKEY}",
    "GOOGLE_GENERATIVE_AI_API_KEY": "${GEMINI_APIKEY}",
    "TELEGRAM_BOT_TOKEN": "${TELEGRAM_BOT_TOKEN}"
  },
  "gateway": {
    "port": 7860,
    "mode": "local",
    "bind": "lan",
    "controlUi": {
      "allowedOrigins": [
        "http://localhost:7860",
        "http://127.0.0.1:7860",
        "https://amangs-cbot.hf.space"
      ],
      "allowInsecureAuth": true,
      "dangerouslyDisableDeviceAuth": true
    },
    "auth": {
      "mode": "token"
    }
  },
  "models": {
    "providers": {
      "nvidia": {
        "baseUrl": "https://integrate.api.nvidia.com/v1",
        "apiKey": "${NVIDIA_NIM_API_KEY}",
        "api": "openai-completions",
        "models": [
          { "id": "deepseek-ai/deepseek-r1-distill-qwen-7b", "name": "Deepseek R1 Distill Qwen 7b", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "deepseek-ai/deepseek-v3.2", "name": "Deepseek v3.2", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "openai/gpt-oss-120b", "name": "GPT OSS 120b", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "openai/gpt-oss-20b", "name": "GPT OSS 20b", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "qwen/qwen2.5-7b-instruct", "name": "Qwen 2.5 7B Instruct", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "qwen/qwen2.5-coder-32b-instruct", "name": "Qwen 2.5 Coder 32B", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "qwen/qwen2.5-coder-7b-instruct", "name": "Qwen 2.5 Coder 7B", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "mistralai/mistral-large-2-instruct", "name": "Mistral Large 2 Instruct", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "databricks/dbrx-instruct", "name": "DBRX Instruct", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "mistralai/mistral-7b-instruct-v0.3", "name": "Mistral 7B Instruct v0.3", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "minimaxai/minimax-m2.1", "name": "MiniMax M2.1", "contextWindow": 200000, "maxTokens": 8192 },
          { "id": "z-ai/glm4.7", "name": "GLM 4.7", "contextWindow": 200000, "maxTokens": 8192 }
        ]
      }
    }
  },
  "agents": {
    "defaults": {
      "model": {
        "primary": "google/gemini-3-flash-preview",
        "fallbacks": [
          "google/gemini-3-pro-preview",
          "google/gemini-2.0-flash",
          "google/gemini-2.5-flash",
          "google/gemini-2.5-pro",
          "google/gemini-3-flash-preview"
        ]
      },
      "models": {
        "google/gemini-3-pro-preview": {},
        "google/gemini-2.0-flash": {},
        "google/gemini-2.5-flash": {},
        "google/gemini-2.5-pro": {},
        "google/gemini-3-flash-preview": {},
        "nvidia/deepseek-ai/deepseek-r1-distill-qwen-7b": {},
        "nvidia/deepseek-ai/deepseek-v3.2": {},
        "nvidia/openai/gpt-oss-120b": {},
        "nvidia/openai/gpt-oss-20b": {},
        "nvidia/qwen/qwen2.5-7b-instruct": {},
        "nvidia/qwen/qwen2.5-coder-32b-instruct": {},
        "nvidia/qwen/qwen2.5-coder-7b-instruct": {},
        "nvidia/mistralai/mistral-large-2-instruct": {},
        "nvidia/mistralai/mistral-7b-instruct-v0.3": {},
        "nvidia/databricks/dbrx-instruct": {},
        "nvidia/minimaxai/minimax-m2.1": {},
        "nvidia/z-ai/glm4.7": {}
      },
      "workspace": "/root/.openclaw/workspace",
      "compaction": { "mode": "safeguard" },
      "maxConcurrent": 4,
      "subagents": { "maxConcurrent": 8 }
    }
  },
  "commands": {
    "native": "auto",
    "nativeSkills": "auto",
    "restart": true
  },
  "plugins": {
    "entries": {
      "telegram": {
        "enabled": true
      },
      "whatsapp": {
        "enabled": false
      },
      "discord": {
        "enabled": false
      },
      "qwen-portal-auth": {
        "enabled": false
      }
    }
  },
  "channels": {
    "telegram": {
      "enabled": true,
      "allowFrom": ["${TELEGRAM_USER}"]
    }
  }
}
EOC
fi

# State dir holds tokens and credentials; restrict to root.
chmod -R 700 "$OPENCLAW_DIR"

# ---- Periodic backup to the HF dataset (every 30 min, optional) ----
# NOTE(review): the ignore list names "openclawd.json" but the file
# generated above is "openclaw.json" and it embeds API keys — confirm
# secrets are not being uploaded to the dataset.
if [[ -n "$HF_DATASET_TOKEN" && -n "$HF_DATASET" ]]; then
  (
    while true; do
      sleep 1800
      echo "[HF] Backup running"

      python3 << 'EOP'
import os
from huggingface_hub import HfApi

api = HfApi(token=os.environ["HF_DATASET_TOKEN"])
api.upload_folder(
    repo_id=os.environ["HF_DATASET"],
    repo_type="dataset",
    folder_path="/root/.openclaw",
    path_in_repo="data",
    ignore_patterns=[
        "openclawd.json",
        "agents/**/sessions/**",
        "workspace/node_modules/**",
        "browser/**",
        "workspace/.cache/**",
        "*.key",
        ".env"
    ]
)
print("[HF] Backup complete")
EOP
    done
  ) &
fi

# Replace the shell so the gateway is PID 1 and receives signals directly.
exec openclaw gateway run --port 7860
EOF
|
|
# Exec-form CMD runs the script directly (no shell wrapper), so the script
# itself must be executable and carry a valid shebang.
RUN chmod +x /usr/local/bin/start-openclaw
CMD ["/usr/local/bin/start-openclaw"]