This commit is contained in:
Larfii
2024-10-07 17:28:18 +08:00
parent 25df09a8ac
commit 13c67c2bcf
2 changed files with 3 additions and 20 deletions

View File

@@ -97,11 +97,7 @@ class LightRAG:
addon_params: dict = field(default_factory=dict)
convert_response_to_json_func: callable = convert_response_to_json
def __post_init__(self):
    # NOTE(review): this variant force-injects a hardcoded local proxy into the
    # process-wide environment, which redirects *every* HTTP client in the
    # interpreter and breaks on machines with no proxy at 127.0.0.1:7890.
    # Prefer honoring pre-existing env/config. (This is the variant the commit
    # deletes; it also duplicated the __post_init__ name, so only the later
    # definition ever took effect.)
    # use proxy
    os.environ['http_proxy'] = 'http://127.0.0.1:7890'
    os.environ['https_proxy'] = 'http://127.0.0.1:7890'
def __post_init__(self):
    """Set up per-instance logging under the configured working directory.

    Writes log output to ``<working_dir>/lightrag.log`` via ``set_logger`` and
    emits an info record confirming initialization.
    """
    log_file = os.path.join(self.working_dir, "lightrag.log")
    set_logger(log_file)
    logger.info(f"Logger initialized for working directory: {self.working_dir}")

View File

@@ -17,7 +17,7 @@ from .utils import compute_args_hash, wrap_embedding_func_with_attrs
retry=retry_if_exception_type((RateLimitError, APIConnectionError, Timeout)),
)
async def openai_complete_if_cache(
model, prompt, api_key='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'
model, prompt, api_key=''
, system_prompt=None, history_messages=[], **kwargs
) -> str:
openai_async_client = AsyncOpenAI(api_key=api_key)
@@ -72,26 +72,13 @@ async def gpt_4o_mini_complete(
wait=wait_exponential(multiplier=1, min=4, max=10),
retry=retry_if_exception_type((RateLimitError, APIConnectionError, Timeout)),
)
async def openai_embedding(texts: list[str]) -> np.ndarray:
api_key = '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'
async def openai_embedding(
    texts: list[str],
    api_key: str = '',
    model: str = "text-embedding-3-small",
) -> np.ndarray:
    """Embed a batch of texts with the OpenAI embeddings API.

    Args:
        texts: Strings to embed; one embedding row is returned per input.
        api_key: OpenAI API key. When empty (the default), fall back to the
            client's own resolution (the ``OPENAI_API_KEY`` environment
            variable) instead of authenticating with a blank key, which is
            guaranteed to fail.
        model: Embedding model name. The default preserves the previously
            hard-coded "text-embedding-3-small"; callers may now override it.

    Returns:
        np.ndarray of shape (len(texts), embedding_dim), dtype float.
    """
    # `api_key or None` lets AsyncOpenAI pick up OPENAI_API_KEY itself when no
    # explicit key is supplied, rather than sending "" as the credential.
    openai_async_client = AsyncOpenAI(api_key=api_key or None)
    response = await openai_async_client.embeddings.create(
        model=model, input=texts, encoding_format="float"
    )
    return np.array([dp.embedding for dp in response.data])
async def moonshot_complete(
    prompt, system_prompt=None, history_messages=None, api_key=None, **kwargs
) -> str:
    """Complete `prompt` with Moonshot's moonshot-v1-128k via the OpenAI-compatible path.

    Args:
        prompt: User prompt to complete.
        system_prompt: Optional system message prepended to the conversation.
        history_messages: Optional prior chat messages; defaults to an empty
            history. (Previously a mutable ``[]`` default — shared across
            calls; fixed to a ``None`` sentinel.)
        api_key: Moonshot API key. Defaults to the ``MOONSHOT_API_KEY``
            environment variable.
        **kwargs: Forwarded verbatim to ``openai_complete_if_cache``.

    Returns:
        The completion text.
    """
    # SECURITY: a previous revision committed a live Moonshot API key as a
    # string literal here. Credentials must never be hard-coded — read them
    # from the environment (or caller-supplied argument) instead.
    import os

    if api_key is None:
        api_key = os.environ.get("MOONSHOT_API_KEY", "")
    return await openai_complete_if_cache(
        "moonshot-v1-128k",
        prompt,
        api_key=api_key,
        system_prompt=system_prompt,
        history_messages=history_messages if history_messages is not None else [],
        **kwargs,
    )
if __name__ == "__main__":
import asyncio