Merge pull request #1548 from maharjun/use_openai_context_manager

Use OpenAI client context manager
This commit is contained in:
Daniel.y
2025-05-09 14:33:48 +08:00
committed by GitHub

View File

@@ -177,14 +177,15 @@ async def openai_complete_if_cache(
     logger.debug("===== Sending Query to LLM =====")

     try:
-        if "response_format" in kwargs:
-            response = await openai_async_client.beta.chat.completions.parse(
-                model=model, messages=messages, **kwargs
-            )
-        else:
-            response = await openai_async_client.chat.completions.create(
-                model=model, messages=messages, **kwargs
-            )
+        async with openai_async_client:
+            if "response_format" in kwargs:
+                response = await openai_async_client.beta.chat.completions.parse(
+                    model=model, messages=messages, **kwargs
+                )
+            else:
+                response = await openai_async_client.chat.completions.create(
+                    model=model, messages=messages, **kwargs
+                )
     except APIConnectionError as e:
         logger.error(f"OpenAI API Connection Error: {e}")
         raise
@@ -421,7 +422,8 @@ async def openai_embed(
         api_key=api_key, base_url=base_url, client_configs=client_configs
     )
-    response = await openai_async_client.embeddings.create(
-        model=model, input=texts, encoding_format="float"
-    )
-    return np.array([dp.embedding for dp in response.data])
+    async with openai_async_client:
+        response = await openai_async_client.embeddings.create(
+            model=model, input=texts, encoding_format="float"
+        )
+        return np.array([dp.embedding for dp in response.data])