from openai import OpenAI
import os


class ChatAPIWrapper:
    def __init__(self, api_key, base_url="https://api.deepseek.com"):
        """
        Initialize the OpenAI-compatible client.
        :param api_key: your API key
        :param base_url: base URL of the API
        """
        self.client = OpenAI(api_key=api_key, base_url=base_url)

    def chat_stream(self, model, messages):
        """
        Send a streaming chat request.
        :param model: name of the model to use
        :param messages: conversation history in the form
                         [{"role": "system|user|assistant", "content": "text"}]
        :return: a generator that yields the streamed content chunk by chunk
        """
        response = self.client.chat.completions.create(
            model=model,
            messages=messages,
            stream=True
        )

        # Generator that extracts the incremental text from each streamed chunk
        def stream_generator():
            for event in response:
                print("Event received:", event)  # debug output
                if hasattr(event, 'choices') and event.choices:
                    for choice in event.choices:
                        # Skip chunks without a text delta (e.g. role-only or final chunks)
                        if hasattr(choice, 'delta') and getattr(choice.delta, 'content', None):
                            yield choice.delta.content

        return stream_generator()
# Example invocation
if __name__ == "__main__":
    api_key = "sk-<omitted here>"  # API key omitted
    wrapper = ChatAPIWrapper(api_key=api_key)

    # Read the news text from a file located next to this script
    file_path = os.path.join(os.path.dirname(__file__), "国际news内容3.txt")
    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            news_content = file.read()
        print("News content loaded.")  # confirm the file was read successfully
    except FileNotFoundError:
        print(f"File not found: {file_path}")
        exit(1)

    # Build the message list
    messages = [
        {"role": "system", "content": "You are a helpful assistant"},
        {"role": "user", "content": f"Please summarize the following news content in about 200 words:\n{news_content}"},
    ]
    # Send a non-streaming request
    try:
        response = wrapper.client.chat.completions.create(
            model="deepseek-reasoner",
            messages=messages,
            stream=False
        )
        print("Full reply:")
        print(response.choices[0].message.content)
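
        # Hedged extra (not in the original example): per the DeepSeek docs, the
        # deepseek-reasoner model also exposes its chain of thought as
        # message.reasoning_content; getattr keeps this optional in case the field
        # is absent in your SDK or response version.
        reasoning = getattr(response.choices[0].message, "reasoning_content", None)
        if reasoning:
            print("Reasoning content:")
            print(reasoning)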
    except Exception as e:
        print("An error occurred:", str(e))