Merge pull request #720 from danielaskdd/add-keyword-extraction-param-for-llm

fix: add keyword_extraction param support for the LLM functions of the API server
Authored by zrguo on 2025-02-06 23:31:53 +08:00; committed by GitHub.

@@ -17,6 +17,7 @@ import argparse
 from typing import List, Any, Optional, Union, Dict
 from pydantic import BaseModel
 from lightrag import LightRAG, QueryParam
+from lightrag.types import GPTKeywordExtractionFormat
 from lightrag.api import __api_version__
 from lightrag.utils import EmbeddingFunc
 from enum import Enum
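
The only change in this hunk is the new import. GPTKeywordExtractionFormat is a Pydantic model that the wrappers below pass as an OpenAI response_format, i.e. a structured-output schema for the keyword-extraction prompt. Its definition lives in lightrag/types.py and is not part of this diff; a minimal sketch of the assumed shape (the field names are assumptions for illustration, not confirmed by this PR):

# Sketch of the schema assumed to back the new import. The real class is
# imported from lightrag.types; the field names below are illustrative.
from typing import List

from pydantic import BaseModel


class GPTKeywordExtractionFormat(BaseModel):
    high_level_keywords: List[str]  # broad themes behind the query
    low_level_keywords: List[str]   # concrete entities and terms in the query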
@@ -756,6 +757,9 @@ def create_app(args):
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        keyword_extraction = kwargs.pop("keyword_extraction", None)
+        if keyword_extraction:
+            kwargs["response_format"] = GPTKeywordExtractionFormat
         return await openai_complete_if_cache(
             args.llm_model,
             prompt,
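
This hunk is the whole fix for the OpenAI-compatible wrapper: keyword_extraction is popped out of **kwargs so the flag never reaches openai_complete_if_cache (and ultimately the OpenAI client) as an unexpected keyword argument, and when it is truthy the structured schema is injected via response_format instead. A minimal, self-contained sketch of that pattern; llm_model_func_sketch and its dict return value are illustrative stand-ins, not code from this PR:

# Minimal sketch of the keyword_extraction -> response_format mapping.
# The real wrappers forward to openai_complete_if_cache /
# azure_openai_complete_if_cache instead of returning the kwargs.
import asyncio
from typing import Any, Dict

from lightrag.types import GPTKeywordExtractionFormat


async def llm_model_func_sketch(prompt: str, **kwargs: Any) -> Dict[str, Any]:
    # Pop the flag so it is never forwarded to the underlying client.
    keyword_extraction = kwargs.pop("keyword_extraction", None)
    if keyword_extraction:
        # Request a structured keyword payload instead of free-form text.
        kwargs["response_format"] = GPTKeywordExtractionFormat
    return {"prompt": prompt, "forwarded_kwargs": kwargs}


async def main() -> None:
    out = await llm_model_func_sketch("Extract keywords ...", keyword_extraction=True)
    # keyword_extraction is gone; response_format now carries the schema.
    print(out["forwarded_kwargs"])


asyncio.run(main())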
@@ -773,6 +777,9 @@ def create_app(args):
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        keyword_extraction = kwargs.pop("keyword_extraction", None)
+        if keyword_extraction:
+            kwargs["response_format"] = GPTKeywordExtractionFormat
         return await azure_openai_complete_if_cache(
             args.llm_model,
             prompt,
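
The Azure wrapper receives the identical guard because either wrapper can be wired up as the server's llm_model_func; presumably LightRAG's query pipeline calls that function with keyword_extraction=True when it needs query keywords, so both entry points must strip the flag and switch to structured output in the same way.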