我来帮您实现 OpenClaw 与通义千问(Qwen)模型的对接,以下是完整的实现方案:

安装依赖
pip install dashscope openai
配置文件 (config.py)
import os
from typing import Optional
from dataclasses import dataclass
@dataclass
class QwenConfig:
    """Configuration for the Tongyi Qianwen (Qwen) connector.

    Fields may be set directly or loaded from environment variables via
    :meth:`from_env`.
    """

    # NOTE: this default is read once, at class-definition time; prefer
    # from_env() to pick up environment changes made after import.
    api_key: str = os.getenv("QWEN_API_KEY", "")
    model: str = "qwen-turbo"  # qwen-turbo, qwen-plus, qwen-max
    api_base: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
    temperature: float = 0.7
    max_tokens: int = 2000
    top_p: float = 0.8
    stream: bool = False
    timeout: int = 30

    @classmethod
    def from_env(cls) -> "QwenConfig":
        """Build a config from environment variables.

        Reads QWEN_API_KEY, QWEN_MODEL, QWEN_API_BASE, QWEN_TEMPERATURE,
        QWEN_MAX_TOKENS and QWEN_TOP_P, falling back to the dataclass
        defaults when a variable is unset.
        """
        return cls(
            api_key=os.getenv("QWEN_API_KEY", ""),
            model=os.getenv("QWEN_MODEL", "qwen-turbo"),
            # Bug fix: QWEN_API_BASE is documented in the .env example but
            # was previously ignored by from_env().
            api_base=os.getenv(
                "QWEN_API_BASE",
                "https://dashscope.aliyuncs.com/compatible-mode/v1",
            ),
            temperature=float(os.getenv("QWEN_TEMPERATURE", "0.7")),
            max_tokens=int(os.getenv("QWEN_MAX_TOKENS", "2000")),
            top_p=float(os.getenv("QWEN_TOP_P", "0.8")),
        )
模型客户端 (qwen_client.py)
import json
import logging
from typing import Any, AsyncGenerator, Dict, Generator, Optional

import dashscope
from dashscope import Generation
from openai import OpenAI
logger = logging.getLogger(__name__)


class QwenClient:
    """Client for the Tongyi Qianwen (Qwen) model.

    Uses the DashScope SDK for generation calls, and also initialises an
    OpenAI-compatible client against the DashScope compatible-mode endpoint.
    """

    def __init__(self, config):
        """
        Args:
            config: A QwenConfig-like object providing api_key, model,
                temperature, top_p, max_tokens, stream, timeout and api_base.
        """
        self.config = config
        self.client = None
        self._init_client()

    def _init_client(self):
        """Initialise both the DashScope SDK and the OpenAI-compatible client.

        Raises:
            ValueError: if no API key is configured.
        """
        if not self.config.api_key:
            raise ValueError("QWEN_API_KEY is required")
        # Option 1: DashScope SDK (recommended) -- configured globally.
        dashscope.api_key = self.config.api_key
        # Option 2: OpenAI-compatible endpoint.
        self.client = OpenAI(
            api_key=self.config.api_key,
            base_url=self.config.api_base,
            timeout=self.config.timeout,
        )

    @staticmethod
    def _as_message_dicts(messages: list) -> list:
        """Coerce messages into the plain-dict shape the DashScope SDK expects.

        Accepts either ``{"role": ..., "content": ...}`` dicts or objects
        with ``role``/``content`` attributes (e.g. the adapter's QwenMessage
        dataclass, which the SDK cannot serialise directly).
        """
        out = []
        for msg in messages:
            if isinstance(msg, dict):
                out.append(msg)
            else:
                out.append({"role": msg.role, "content": msg.content})
        return out

    def _call_params(self, messages: list, **kwargs) -> Dict[str, Any]:
        """Merge config defaults with per-call keyword overrides.

        Bug fix: the original passed both explicit keyword arguments and
        **kwargs to Generation.call, so any override (e.g. stream=False
        supplied by the adapter) raised
        "got multiple values for keyword argument".
        """
        params: Dict[str, Any] = {
            "model": self.config.model,
            "messages": self._as_message_dicts(messages),
            "temperature": self.config.temperature,
            "top_p": self.config.top_p,
            "max_tokens": self.config.max_tokens,
            "stream": self.config.stream,
        }
        params.update(kwargs)
        return params

    def chat_completion(self, messages: list, **kwargs) -> Dict[str, Any]:
        """Synchronous chat completion via the DashScope SDK.

        Args:
            messages: Chat history as dicts or role/content objects.
            **kwargs: Per-call overrides for any Generation.call parameter.

        Returns:
            Dict with "content", "finish_reason", "usage" and "request_id".

        Raises:
            Exception: when the API returns a non-200 status.
        """
        try:
            response = Generation.call(**self._call_params(messages, **kwargs))
            if response.status_code == 200:
                return {
                    "content": response.output.text,
                    "finish_reason": response.output.finish_reason,
                    "usage": response.usage,
                    "request_id": response.request_id,
                }
            logger.error(f"Qwen API Error: {response.code} - {response.message}")
            raise Exception(f"Qwen API Error: {response.message}")
        except Exception as e:
            logger.error(f"Qwen chat completion error: {e}")
            raise

    async def async_chat_completion(self, messages: list, **kwargs) -> Dict[str, Any]:
        """Asynchronous chat completion.

        Currently delegates to the synchronous implementation; a production
        deployment should use a genuinely async transport (e.g. aiohttp).
        """
        return self.chat_completion(messages, **kwargs)

    def chat_completion_stream(
        self, messages: list, **kwargs
    ) -> Generator[Dict[str, Any], None, None]:
        """Streaming chat completion.

        Note: this is a *synchronous* generator. The original annotation
        said AsyncGenerator, which was wrong -- Generation.call with
        stream=True yields chunks synchronously, and callers must iterate
        it with a plain ``for`` loop.
        """
        params = self._call_params(messages, **kwargs)
        params["stream"] = True  # streaming is mandatory on this path
        response = Generation.call(**params)
        for chunk in response:
            if chunk.status_code != 200:
                logger.error(f"Qwen stream error: {chunk.code} - {chunk.message}")
                break
            yield {
                "delta": chunk.output.text,
                "finish_reason": chunk.output.finish_reason,
                "usage": getattr(chunk, 'usage', None),
            }

    def get_models(self) -> list:
        """Return the publicly available Qwen model descriptors."""
        models = [
            {"id": "qwen-turbo", "name": "Qwen Turbo"},
            {"id": "qwen-plus", "name": "Qwen Plus"},
            {"id": "qwen-max", "name": "Qwen Max"},
            {"id": "qwen-max-longcontext", "name": "Qwen Max LongContext"},
        ]
        return models
OpenClaw 适配器 (qwen_adapter.py)
from typing import List, Dict, Any, Optional
from dataclasses import dataclass
from openclaw.models.base import BaseModelAdapter
from .qwen_client import QwenClient
from .config import QwenConfig
@dataclass
class QwenMessage:
    """A single chat turn in the shape the Qwen adapter passes around."""

    role: str  # "user", "assistant" or "system"
    content: str  # plain-text message body
class QwenAdapter(BaseModelAdapter):
    """OpenClaw adapter for Tongyi Qianwen (Qwen) models."""

    def __init__(self, config: Optional[Dict] = None):
        """
        Args:
            config: Optional dict of QwenConfig field overrides.
        """
        super().__init__()
        self.config = QwenConfig(**(config or {}))
        self.client = QwenClient(self.config)

    def format_messages(self, messages: List[Dict]) -> List[QwenMessage]:
        """Normalise raw message dicts into QwenMessage objects.

        "system" is kept as-is (Qwen supports system prompts); any other
        unknown role is coerced to "user".
        """
        formatted = []
        for msg in messages:
            role = msg.get("role", "user")
            content = msg.get("content", "")
            if role not in ("user", "assistant", "system"):
                role = "user"  # fall back for unrecognised roles
            formatted.append(QwenMessage(role=role, content=content))
        return formatted

    @staticmethod
    def _to_payload(messages: List[QwenMessage]) -> List[Dict[str, str]]:
        """Serialise QwenMessage objects to plain dicts.

        Bug fix: the dataclasses were previously handed straight to the
        DashScope SDK, which only understands {"role", "content"} dicts.
        """
        return [{"role": m.role, "content": m.content} for m in messages]

    async def generate(
        self,
        prompt: str,
        history: Optional[List[Dict]] = None,
        **kwargs
    ) -> str:
        """Generate a single reply.

        Args:
            prompt: Current user input.
            history: Optional prior turns as {"role", "content"} dicts.
            **kwargs: Extra generation parameters forwarded to the client.

        Returns:
            The model's reply text.
        """
        messages: List[Dict] = list(history) if history else []
        messages.append({"role": "user", "content": prompt})
        payload = self._to_payload(self.format_messages(messages))
        # Bug fix: do NOT force stream=False via kwargs -- the client already
        # supplies stream from its config (default False), and duplicating
        # the keyword made Generation.call raise a TypeError.
        response = await self.client.async_chat_completion(
            messages=payload,
            **kwargs
        )
        return response["content"]

    async def stream_generate(
        self,
        prompt: str,
        history: Optional[List[Dict]] = None,
        **kwargs
    ):
        """Stream a reply, yielding text deltas as they arrive."""
        messages: List[Dict] = list(history) if history else []
        messages.append({"role": "user", "content": prompt})
        payload = self._to_payload(self.format_messages(messages))
        # Bug fix: chat_completion_stream is a synchronous generator, so it
        # must be iterated with a plain for loop -- the original "async for"
        # raised TypeError at runtime.
        for chunk in self.client.chat_completion_stream(
            messages=payload,
            **kwargs
        ):
            yield chunk.get("delta", "")

    def get_model_info(self) -> Dict[str, Any]:
        """Describe this adapter and its capabilities for OpenClaw."""
        return {
            "name": "通义千问",
            "provider": "Alibaba Cloud",
            "models": self.client.get_models(),
            "capabilities": ["chat", "completion", "streaming"],
            "max_tokens": self.config.max_tokens,
            "supports_system_prompt": True,
        }

    def calculate_tokens(self, text: str) -> int:
        """Rough token-count estimate; use a real tokenizer in production.

        Heuristic: ~2 Chinese characters per token (x0.5), ~0.3 tokens per
        other character.
        """
        chinese_chars = sum(1 for c in text if '\u4e00' <= c <= '\u9fff')
        other_chars = len(text) - chinese_chars
        return int(chinese_chars * 0.5 + other_chars * 0.3)
工厂类 (factory.py)
from openclaw.models import ModelFactory
from .qwen_adapter import QwenAdapter
from .config import QwenConfig
class QwenModelFactory(ModelFactory):
    """Factory that builds Qwen adapter instances for OpenClaw."""

    @staticmethod
    def create(config=None):
        """Create a QwenAdapter.

        Environment variables provide the baseline configuration; the
        optional ``config`` dict overrides individual fields.

        Args:
            config: Optional mapping of QwenConfig field names to values.
                (Bug fix: the original annotated this as Optional[Dict]
                without importing typing, which raised NameError at class
                definition time.)
        """
        from dataclasses import asdict  # local import keeps this edit self-contained

        qwen_config = QwenConfig.from_env()
        if config:
            # Merge overrides, ignoring keys QwenConfig does not define.
            for key, value in config.items():
                if hasattr(qwen_config, key):
                    setattr(qwen_config, key, value)
        # Bug fix: QwenAdapter expects a plain dict (its __init__ does
        # QwenConfig(**config)), so serialise the dataclass first instead of
        # passing the QwenConfig instance directly.
        return QwenAdapter(asdict(qwen_config))

    @staticmethod
    def get_supported_models():
        """Return the model identifiers this factory supports."""
        return ["qwen-turbo", "qwen-plus", "qwen-max", "qwen-max-longcontext"]
使用示例
from openclaw_qwen import QwenModelFactory
async def main():
    """Demo: one-shot, history-aware, and streaming calls to Qwen."""
    model = QwenModelFactory.create({
        "model": "qwen-turbo",
        "temperature": 0.8,
    })

    # One-shot completion.
    reply = await model.generate("你好,请介绍一下你自己")
    print(reply)

    # Completion that continues an existing conversation.
    prior_turns = [
        {"role": "user", "content": "什么是人工智能?"},
        {"role": "assistant", "content": "人工智能是...(简要介绍)"}
    ]
    reply = await model.generate("它能做什么?", history=prior_turns)
    print(reply)

    # Streaming: print each delta as it arrives.
    async for piece in model.stream_generate("写一首关于春天的诗"):
        print(piece, end="", flush=True)
# 示例2:集成到OpenClaw系统
from openclaw import OpenClaw
from openclaw_qwen import QwenModelFactory
# Example 2: plug the Qwen factory into an OpenClaw instance.
claw = OpenClaw()

# Register the factory under the "qwen" provider name.
claw.register_model_factory("qwen", QwenModelFactory)

# Select a Qwen model with per-session configuration overrides.
claw.set_model(
    "qwen",
    config={
        "model": "qwen-max",
        "temperature": 0.7,
    },
)

# Start a conversation.
response = claw.chat("你好,我是OpenClaw用户")
print(response)
# 示例3:CLI对话界面
import asyncio  # Bug fix: asyncio.run below was called without importing asyncio
import sys

from openclaw_qwen import QwenModelFactory


async def chat_cli():
    """Interactive terminal chat loop against Qwen with streamed replies."""
    model = QwenModelFactory.create()
    history = []
    print("通义千问对话系统(输入 'quit' 退出)")
    print("=" * 50)
    while True:
        user_input = input("\nYou: ")
        if user_input.lower() in ['quit', 'exit', 'q']:
            break
        print("\nQwen: ", end="")
        # Stream the reply chunk by chunk while accumulating the full text.
        full_response = ""
        async for chunk in model.stream_generate(user_input, history=history):
            print(chunk, end="", flush=True)
            full_response += chunk
        # Record the completed exchange in the rolling history.
        history.append({"role": "user", "content": user_input})
        history.append({"role": "assistant", "content": full_response})
        # Keep only the 10 most recent turns (20 messages).
        if len(history) > 20:
            history = history[-20:]


if __name__ == "__main__":
    asyncio.run(chat_cli())
环境变量配置
# .env 文件
QWEN_API_KEY=your-api-key-here
QWEN_MODEL=qwen-turbo
QWEN_TEMPERATURE=0.7
QWEN_MAX_TOKENS=2000
QWEN_API_BASE=https://dashscope.aliyuncs.com/compatible-mode/v1
项目结构
openclaw-qwen/
├── openclaw_qwen/
│ ├── __init__.py
│ ├── config.py
│ ├── qwen_client.py
│ ├── qwen_adapter.py
│ └── factory.py
├── examples/
│ ├── basic_usage.py
│ ├── streaming_chat.py
│ └── cli_demo.py
├── tests/
│ ├── test_client.py
│ └── test_adapter.py
├── requirements.txt
├── setup.py
└── README.md
setup.py
from setuptools import setup, find_packages

# Packaging metadata for the openclaw-qwen plugin distribution.
setup(
    name="openclaw-qwen",
    version="0.1.0",
    description="OpenClaw plugin for Alibaba Qwen models",
    author="Your Name",
    author_email="your.email@example.com",
    url="https://github.com/yourusername/openclaw-qwen",
    packages=find_packages(),
    install_requires=[
        "dashscope>=1.14.0",
        "openai>=1.0.0",
    ],
)
这个实现提供了完整的通义千问对接功能,包括:
- 同步/异步调用
- 流式输出支持
- 历史对话管理
- 配置管理
- 错误处理
- 与 OpenClaw 框架的无缝集成
您可以根据实际需求进行调整和扩展。
版权声明:除非特别标注,否则均为本站原创文章,转载时请以链接形式注明文章出处。