在威联通NAS中安装OpenClaw
下载镜像文件到NAS
alpine/openclaw
NAS 的 CPU 是 arm64
从镜像选择创建容器 名称:openclaw
命令: node openclaw.mjs gateway --allow-unconfigured 默认就好
网络:Host
环境:默认 时区 TZ=Asia/Shanghai
HTTP_PROXY http://192.168.2.20:10809 走你的 V2Ray 代理
HTTPS_PROXY http://192.168.2.20:10809 走你的 V2Ray 代理
NO_PROXY localhost,127.0.0.1,192.168.2.0/24 访问局域网时不走代理
存储:挂载容器文件夹,主机路径 /Container/OpenClaw 映射到容器内路径 /home/node/.openclaw
运行:特权模式
等待安装完成
使用电脑连接NAS
Mac 终端ssh进入nas系统 ssh admin@192.168.2.20
sudo docker exec -it openclaw bash
查看状态
openclaw status
openclaw config 进去配置模型
我配置的Google API,选择 模型
使用电脑的ssh 容易断,改用nas容器自带的控制台
Telegram 机器人 Token(敏感信息,请勿公开提交到任何仓库或分享):8683406655:AAHX_fhevulkaTzyW1HDkneoZgXD7p-dY4E
配置网关访问(注意:下方配置文件中 dangerouslyDisableDeviceAuth 设为 true,关闭了设备认证,仅应在受信任的局域网环境中使用)
配置聊天
退出容器
直接输入 exit 或按 Ctrl + D
安装微信
docker exec -it openclaw openclaw plugins install "@tencent-weixin/openclaw-weixin@latest"
通过电脑 SSH 执行时连接容易中途断开,导致安装会话丢失
下面方法可行
在 NAS 中容器控制台输入下面的命令安装并获取二维码
openclaw plugins install "@tencent-weixin/openclaw-weixin@latest"
npx -y @tencent-weixin/openclaw-weixin-cli@latest install
首次连接未完成,可稍后手动重试
openclaw channels login --channel openclaw-weixin
短期临时解决(无需改 config,重启都不用): 在终端直接执行下面命令(把当前默认 agent 强行切到 DeepSeek):
正确命令(直接复制执行):
Bash
openclaw models set deepseek/deepseek-chat
执行完后应该看到类似:
text
✅ Updated default primary model to deepseek/deepseek-chat
Configuration saved to ~/.openclaw/openclaw.json
验证是否成功:
Bash
openclaw models list # 查看当前默认模型
openclaw config get agents.defaults.model.primary # 直接看 config 里的值
如果你想一次性把 fallback 也优化好(推荐):
运行下面这几条命令(逐条执行):
Bash
# 1. 设置主力模型为 DeepSeek Chat
openclaw models set deepseek/deepseek-chat
# 2. 添加 DeepSeek Reasoner 到 fallback(防止 Chat 也限流)
openclaw config set agents.defaults.model.fallbacks '["deepseek/deepseek-reasoner", "google/gemini-flash-latest"]'
# 3. 给模型起别名(方便以后聊天时切换)
openclaw config set agents.defaults.models.deepseek/deepseek-reasoner.alias DeepSeek-Reasoner
openclaw config set agents.defaults.models.google/gemini-flash-latest.alias Gemini-Flash
执行完后重启一下 Gateway:
Bash
openclaw gateway restart
配置文件
{
"meta": {
"lastTouchedVersion": "2026.3.28",
"lastTouchedAt": "2026-03-31T15:16:05.014Z"
},
"wizard": {
"lastRunAt": "2026-03-31T15:16:04.922Z",
"lastRunVersion": "2026.3.28",
"lastRunCommand": "configure",
"lastRunMode": "local"
},
"auth": {
"profiles": {
"google:default": {
"provider": "google",
"mode": "api_key"
},
"deepseek:default": {
"provider": "deepseek",
"mode": "api_key"
}
}
},
"models": {
"mode": "merge",
"providers": {
"deepseek": {
"baseUrl": "https://api.deepseek.com",
"api": "openai-completions",
"models": [
{
"id": "deepseek-chat",
"name": "DeepSeek Chat",
"api": "openai-completions",
"reasoning": false,
"input": [
"text"
],
"cost": {
"input": 0.28,
"output": 0.42,
"cacheRead": 0.028,
"cacheWrite": 0
},
"contextWindow": 131072,
"maxTokens": 8192,
"compat": {
"supportsUsageInStreaming": true
}
},
{
"id": "deepseek-reasoner",
"name": "DeepSeek Reasoner",
"api": "openai-completions",
"reasoning": true,
"input": [
"text"
],
"cost": {
"input": 0.28,
"output": 0.42,
"cacheRead": 0.028,
"cacheWrite": 0
},
"contextWindow": 131072,
"maxTokens": 65536,
"compat": {
"supportsUsageInStreaming": true
}
}
]
}
}
},
"agents": {
"defaults": {
"model": {
"primary": "deepseek/deepseek-chat",
"fallbacks": [
"deepseek/deepseek-reasoner"
]
},
"models": {
"deepseek/deepseek-chat": {
"alias": "DeepSeek"
},
"deepseek/deepseek-reasoner": {
"alias": "DeepSeek-Reasoner"
}
},
"compaction": {
"mode": "safeguard"
},
"maxConcurrent": 4,
"subagents": {
"maxConcurrent": 8
}
}
},
"tools": {
"web": {
"search": {
"enabled": true,
"provider": "exa"
},
"fetch": {
"enabled": true
}
}
},
"messages": {
"ackReactionScope": "group-mentions"
},
"commands": {
"native": "auto",
"nativeSkills": "auto",
"restart": true,
"ownerDisplay": "raw"
},
"channels": {
"telegram": {
"enabled": true,
"dmPolicy": "allowlist",
"botToken": "8683406655:AAHX_fhevulkaTzyW1HDkneoZgXD7p-dY4E",
"groups": {
"*": {
"requireMention": true
}
},
"allowFrom": [
"8683406655"
],
"groupPolicy": "allowlist",
"streaming": "partial"
},
"openclaw-weixin": {
"accounts": {}
}
},
"gateway": {
"port": 18789,
"mode": "local",
"bind": "lan",
"controlUi": {
"allowedOrigins": [
"http://localhost:18789",
"http://127.0.0.1:18789",
"http://192.168.2.20:18789"
],
"dangerouslyDisableDeviceAuth": true
},
"auth": {
"mode": "token",
"token": "ff5b01a9073b1aac8aa8fabb9f0b18464"
},
"tailscale": {
"mode": "off",
"resetOnExit": false
}
},
"plugins": {
"allow": [
"exa",
"openclaw-weixin"
],
"entries": {
"exa": {
"enabled": true,
"config": {
"webSearch": {
"apiKey": "1db7aed4-e310065480edd"
}
}
},
"openclaw-weixin": {
"enabled": true,
"config": {}
}
},
"installs": {
"openclaw-weixin": {
"source": "npm",
"spec": "@tencent-weixin/openclaw-weixin@latest",
"installPath": "/home/node/.openclaw/extensions/openclaw-weixin",
"version": "2.1.1",
"resolvedName": "@tencent-weixin/openclaw-weixin",
"resolvedVersion": "2.1.1",
"resolvedSpec": "@tencent-weixin/openclaw-weixin@2.1.1",
"integrity": "sha512-lSiTI+9eQQuvZXehuteNps3Ia4/ih5zz8C+ldgRfrYd8lZTxYWC8ffkLtG0EVo2E7Ag==",
"shasum": "574c53a972c1af0327f8be2fcb",
"resolvedAt": "2026-03-31T14:34:45.815Z",
"installedAt": "2026-03-31T14:37:21.651Z"
}
}
}
}