# app/core/llm.py
import os

from dotenv import load_dotenv
from langchain_openai import AzureChatOpenAI, ChatOpenAI

# Load environment variables from the .env file
load_dotenv()

# Read the configured LLM provider
LLM_PROVIDER = os.getenv("LLM_PROVIDER", "openai").lower()

llm = None

print(f"--- [Core] Initializing LLM with provider: {LLM_PROVIDER} ---")

if LLM_PROVIDER == "azure":
    # --- Azure OpenAI configuration ---
    required_vars = [
        "AZURE_OPENAI_ENDPOINT",
        "AZURE_OPENAI_API_KEY",
        "OPENAI_API_VERSION",
        "AZURE_OPENAI_CHAT_DEPLOYMENT_NAME",
    ]
    if not all(os.getenv(var) for var in required_vars):
        raise ValueError("One or more Azure OpenAI environment variables for chat are not set.")

    llm = AzureChatOpenAI(
        azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
        api_key=os.getenv("AZURE_OPENAI_API_KEY"),
        api_version=os.getenv("OPENAI_API_VERSION"),
        azure_deployment=os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"),
        temperature=0,
    )
elif LLM_PROVIDER == "openai":
    # --- Standard OpenAI configuration ---
    if not os.getenv("OPENAI_API_KEY"):
        raise ValueError("OPENAI_API_KEY is not set for the 'openai' provider.")

    llm = ChatOpenAI(
        api_key=os.getenv("OPENAI_API_KEY"),
        model_name=os.getenv("OPENAI_MODEL_NAME", "gpt-4o"),
        temperature=0,
    )
else:
    raise ValueError(f"Unsupported LLM_PROVIDER: {LLM_PROVIDER}. Please use 'azure' or 'openai'.")
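
# A minimal smoke-test sketch, not part of the original module (assumptions:
# `app.core.llm` is importable as a package so `python -m app.core.llm` works,
# and the prompt below is purely illustrative). It shows how callers typically
# consume the shared `llm` instance via BaseChatModel.invoke().
if __name__ == "__main__":
    # invoke() accepts a plain string and returns an AIMessage; .content holds the text.
    reply = llm.invoke("Reply with the single word: pong")
    print(f"--- [Core] Smoke-test reply: {reply.content} ---")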