Skip to content

Commit 2b619bf

Browse files
Merge pull request #535 from MervinPraison/claude/issue-332-20250528_161954
fix: improve API key validation for LM Studio and local servers
2 parents 2f49566 + 6b88a7f commit 2b619bf

2 files changed

Lines changed: 42 additions & 5 deletions

File tree

src/praisonai-agents/praisonaiagents/main.py

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -362,7 +362,24 @@ class ReflectionOutput(BaseModel):
362362
reflection: str
363363
satisfactory: Literal["yes", "no"]
364364

365-
client = OpenAI(api_key=(os.environ["OPENAI_API_KEY"] if os.environ.get("OPENAI_API_KEY") else "xxxx"))
365+
# Constants
LOCAL_SERVER_API_KEY_PLACEHOLDER = "not-needed"

# Build the module-level OpenAI client, resolving credentials from the
# environment. Local OpenAI-compatible servers (e.g. LM Studio) accept any
# token, so a placeholder key is substituted whenever a custom base URL is
# configured without an explicit key.
api_key = os.environ.get("OPENAI_API_KEY")
base_url = os.environ.get("OPENAI_API_BASE") or os.environ.get("OPENAI_BASE_URL")

if not api_key:
    if base_url:
        # A custom endpoint with no key: assume a local server that does not
        # validate credentials.
        api_key = LOCAL_SERVER_API_KEY_PLACEHOLDER
    else:
        # Default OpenAI endpoint genuinely requires a real key; fail loudly
        # at import time with guidance for the local-server case.
        raise ValueError(
            "OPENAI_API_KEY environment variable is required for the default OpenAI service. "
            "If you are targeting a local server (e.g., LM Studio), ensure OPENAI_API_BASE is set "
            f"(e.g., 'http://localhost:1234/v1') and you can use a placeholder API key by setting OPENAI_API_KEY='{LOCAL_SERVER_API_KEY_PLACEHOLDER}'"
        )

client = OpenAI(api_key=api_key, base_url=base_url)
366383

367384
class TaskOutput(BaseModel):
368385
model_config = ConfigDict(arbitrary_types_allowed=True)

src/praisonai/praisonai/inc/models.py

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,13 @@
11
# praisonai/inc/models.py
22
import os
33
import logging
4+
from urllib.parse import urlparse
45
logger = logging.getLogger(__name__)
56
logging.basicConfig(level=os.environ.get('LOGLEVEL', 'INFO').upper(), format='%(asctime)s - %(levelname)s - %(message)s')
67

8+
# Constants
9+
LOCAL_SERVER_API_KEY_PLACEHOLDER = "not-needed"  # dummy API key accepted by local OpenAI-compatible servers (e.g. LM Studio)
10+
711
# Conditionally import modules based on availability
812
try:
913
from langchain_openai import ChatOpenAI # pip install langchain-openai
@@ -71,11 +75,27 @@ def __init__(self, model=None, api_key_var=None, base_url=None, api_key=None):
7175
self.model_name = self.model.replace("openrouter/", "")
7276
else:
7377
self.api_key_var = api_key_var or "OPENAI_API_KEY"
74-
self.base_url = base_url or os.environ.get("OPENAI_API_BASE", "https://api.openai.com/v1")
78+
self.base_url = base_url or os.environ.get("OPENAI_API_BASE") or os.environ.get("OPENAI_BASE_URL") or "https://api.openai.com/v1"
7579
self.model_name = self.model
7680
logger.debug(f"Initialized PraisonAIModel with model {self.model_name}, api_key_var {self.api_key_var}, and base_url {self.base_url}")
77-
# Use explicit API key if provided, otherwise fall back to environment variable
78-
self.api_key = api_key or os.environ.get(self.api_key_var, "nokey")
81+
82+
# Get API key from environment
83+
self.api_key = api_key or os.environ.get(self.api_key_var)
84+
85+
# For local servers, allow placeholder API key if base_url is set to non-OpenAI endpoint
86+
if not self.api_key and self.base_url:
87+
parsed_url = urlparse(self.base_url)
88+
is_local = (parsed_url.hostname in ["localhost", "127.0.0.1"] or
89+
"api.openai.com" not in self.base_url)
90+
if is_local:
91+
self.api_key = LOCAL_SERVER_API_KEY_PLACEHOLDER
92+
93+
if not self.api_key:
94+
raise ValueError(
95+
f"{self.api_key_var} environment variable is required for the default OpenAI service. "
96+
f"For local servers, set {self.api_key_var}='{LOCAL_SERVER_API_KEY_PLACEHOLDER}' and OPENAI_API_BASE to your local endpoint."
97+
)
98+
7999

80100
def get_model(self):
81101
"""
@@ -127,4 +147,4 @@ def get_model(self):
127147
raise ImportError(
128148
"Required Langchain Integration 'langchain-openai' not found. "
129149
"Please install with 'pip install langchain-openai'"
130-
)
150+
)

0 commit comments

Comments
 (0)