From eb9883d8dadb5ac4aa7c46ad8bb123b223c9f2e0 Mon Sep 17 00:00:00 2001
From: yangdx
Date: Thu, 6 Feb 2025 15:56:18 +0800
Subject: [PATCH] fix: add keyword_extraction param support for LLM func of API Server

---
 lightrag/api/lightrag_server.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/lightrag/api/lightrag_server.py b/lightrag/api/lightrag_server.py
index d80518c6..12ee9a03 100644
--- a/lightrag/api/lightrag_server.py
+++ b/lightrag/api/lightrag_server.py
@@ -17,6 +17,7 @@ import argparse
 from typing import List, Any, Optional, Union, Dict
 from pydantic import BaseModel
 from lightrag import LightRAG, QueryParam
+from lightrag.types import GPTKeywordExtractionFormat
 from lightrag.api import __api_version__
 from lightrag.utils import EmbeddingFunc
 from enum import Enum
@@ -756,6 +757,9 @@ def create_app(args):
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        keyword_extraction = kwargs.pop("keyword_extraction", None)
+        if keyword_extraction:
+            kwargs["response_format"] = GPTKeywordExtractionFormat
         return await openai_complete_if_cache(
             args.llm_model,
             prompt,
@@ -773,6 +777,9 @@ def create_app(args):
         keyword_extraction=False,
         **kwargs,
     ) -> str:
+        keyword_extraction = kwargs.pop("keyword_extraction", None)
+        if keyword_extraction:
+            kwargs["response_format"] = GPTKeywordExtractionFormat
         return await azure_openai_complete_if_cache(
             args.llm_model,
             prompt,
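
Note on the change: both hunks apply the same pattern. The per-backend wrapper pops keyword_extraction out of **kwargs and, when it is truthy, injects response_format=GPTKeywordExtractionFormat before delegating to the underlying *_complete_if_cache helper, so the backend receives a structured-output schema rather than an unrecognized flag. Below is a minimal standalone sketch of that pattern, not LightRAG code: KeywordSchema and fake_backend are hypothetical stand-ins for GPTKeywordExtractionFormat and openai_complete_if_cache.

# Standalone sketch of the keyword_extraction -> response_format translation.
# KeywordSchema and fake_backend are hypothetical stand-ins, not LightRAG APIs.
import asyncio
from typing import List

from pydantic import BaseModel


class KeywordSchema(BaseModel):
    # Plays the role of GPTKeywordExtractionFormat: a schema the backend can
    # use to constrain the model's output to structured keywords.
    high_level_keywords: List[str]
    low_level_keywords: List[str]


async def fake_backend(model: str, prompt: str, **kwargs) -> str:
    # A real backend would forward response_format to the LLM client;
    # here we only report what it received.
    return f"model={model}, response_format={kwargs.get('response_format')}"


async def llm_model_func(prompt: str, **kwargs) -> str:
    # Pop the flag so the backend never sees an unexpected keyword argument,
    # and replace it with the structured-output schema when requested.
    if kwargs.pop("keyword_extraction", None):
        kwargs["response_format"] = KeywordSchema
    return await fake_backend("some-model", prompt, **kwargs)


if __name__ == "__main__":
    print(asyncio.run(llm_model_func("Extract keywords", keyword_extraction=True)))

Translating the flag into response_format keeps the generic completion signature unchanged while still letting the keyword-extraction query path request schema-constrained output.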