from openai import OpenAI
import os

# ====================== Vendor configuration (switch as needed) ======================

# 1. OpenAI (native)
# api_key = "your OpenAI API key"
# base_url = "https://api.openai.com/v1"
# model = "gpt-3.5-turbo"

# 2. NVIDIA (NVIDIA NIM)
# api_key = "your NVIDIA API key"
# base_url = "https://integrate.api.nvidia.com/v1"
# model = "meta/llama3-70b-instruct"  # a model available through NVIDIA NIM

# 3. ByteDance (Volcano Ark)
# api_key = "your Volcano Ark API key"
# base_url = "https://ark.cn-beijing.volces.com/api/v3"
# model = "skylark-pro"

# 4. Baidu Wenxin Qianfan (OpenAI-compatible format)
# api_key = "your Wenxin Qianfan API key"
# base_url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-4.0"
# model = "ernie-4.0"

# 5. Alibaba Cloud Tongyi Qianwen (Qwen)
# api_key = "your Tongyi Qianwen API key"
# base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
# model = "qwen-turbo"

# 6. Tencent Cloud Hunyuan
# api_key = "your Tencent Hunyuan API key"
# base_url = "https://hunyuan.tencentcloudapi.com/v1/chat/completions"
# model = "hunyuan-pro"

# 7. Zhipu AI (GLM)
# api_key = "your Zhipu AI API key"
# base_url = "https://open.bigmodel.cn/api/paas/v4/"
# model = "glm-4"

# 8. iFlytek Spark
# api_key = "your iFlytek Spark API key"
# base_url = "https://spark-api.xfyun.cn/v3/chat/completions"
# model = "spark-3.5"

# 9. Azure OpenAI
# api_key = "your Azure OpenAI API key"
# base_url = "https://<your-resource-name>.openai.azure.com/openai/deployments/<deployment-name>/chat/completions?api-version=2024-02-15-preview"
# model = "gpt-3.5-turbo"

# 10. Anthropic (Claude)
# api_key = "your Anthropic API key"
# base_url = "https://api.anthropic.com/v1/messages"
# model = "claude-3-opus-20240229"

# 11. Mistral AI
# api_key = "your Mistral AI API key"
# base_url = "https://api.mistral.ai/v1"
# model = "mistral-large-latest"

# 12. Groq
# api_key = "your Groq API key"
# base_url = "https://api.groq.com/openai/v1"
# model = "llama3-70b-8192"

# ====================== Active configuration ======================
api_key = "replace with your API key"
base_url = "replace with the vendor's base_url"
model = "replace with the vendor's model name"

# Initialize the OpenAI client
client = OpenAI(
    api_key=api_key,
    base_url=base_url  # Key point: point the client at the third-party vendor's endpoint
)


def chat_with_llm(prompt):
    try:
        response = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": prompt}],
            temperature=0.7,  # sampling randomness
            max_tokens=1000   # maximum number of tokens to generate
        )
        return response.choices[0].message.content.strip()
    except Exception as e:
        return f"Request failed: {str(e)}"


# Test call
if __name__ == "__main__":
    prompt = "Please briefly introduce your core capabilities"
    result = chat_with_llm(prompt)
    print("Model response:\n", result)
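
# ====================== Optional: environment variables and streaming (sketch) ======================
# A minimal sketch using the `os` module imported above: read credentials from environment
# variables instead of hard-coding them. The variable names LLM_API_KEY / LLM_BASE_URL /
# LLM_MODEL are illustrative assumptions, not defined by any vendor.
# api_key = os.getenv("LLM_API_KEY", "replace with your API key")
# base_url = os.getenv("LLM_BASE_URL", "https://api.openai.com/v1")
# model = os.getenv("LLM_MODEL", "gpt-3.5-turbo")

# Streaming sketch: the OpenAI Python SDK also accepts stream=True and yields chunks;
# whether a given OpenAI-compatible vendor supports streaming should be checked in its docs.
# stream = client.chat.completions.create(
#     model=model,
#     messages=[{"role": "user", "content": "hello"}],
#     stream=True
# )
# for chunk in stream:
#     delta = chunk.choices[0].delta.content
#     if delta:
#         print(delta, end="", flush=True)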