wiseflow/core/llms/openai_wrapper.py

import os

from openai import OpenAI

# Read the API endpoint and key from the environment; both default to empty strings.
base_url = os.environ.get('LLM_API_BASE', "")
token = os.environ.get('LLM_API_KEY', "")

# Build a single shared client; omit api_key when no token is provided
# (e.g. for local, keyless OpenAI-compatible endpoints).
if token:
    client = OpenAI(api_key=token, base_url=base_url)
else:
    client = OpenAI(base_url=base_url)


def openai_llm(messages: list, model: str, logger=None, **kwargs) -> str:
    """Send a chat-completion request and return the assistant's reply text.

    Returns an empty string if the request fails.
    """
    if logger:
        logger.debug(f'messages:\n {messages}')
        logger.debug(f'model: {model}')
        logger.debug(f'kwargs:\n {kwargs}')

    try:
        response = client.chat.completions.create(messages=messages, model=model, **kwargs)
    except Exception as e:
        if logger:
            logger.error(f'openai_llm error: {e}')
        return ''

    if logger:
        logger.debug(f'result:\n {response.choices[0]}')
        logger.debug(f'usage:\n {response.usage}')
    return response.choices[0].message.content
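

# --- Usage sketch (not part of the original module) ---
# A minimal example of calling openai_llm(). It assumes LLM_API_BASE and
# LLM_API_KEY are already exported in the environment, and the model name
# "gpt-4o-mini" is purely illustrative; substitute whatever model your
# endpoint actually serves.
if __name__ == '__main__':
    reply = openai_llm(
        messages=[{"role": "user", "content": "Say hello in one short sentence."}],
        model="gpt-4o-mini",   # assumed model name, not taken from the original code
        temperature=0.2,
    )
    print(reply)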