Replace verbose_debug with logger.debug for token logging.
- Removed unused verbose_debug import
- Updated debug logging in kg_query
- Updated debug logging in mix_kg_vector_query
- Updated debug logging in kg_query_with_keywords
@@ -24,7 +24,6 @@ from .utils import (
     CacheData,
     statistic_data,
     get_conversation_turns,
-    verbose_debug,
 )
 from .base import (
     BaseGraphStorage,
@@ -689,7 +688,7 @@ async def kg_query(
         return sys_prompt

     len_of_prompts = len(encode_string_by_tiktoken(query + sys_prompt))
-    verbose_debug(f"[kg_query]Prompt Tokens: {len_of_prompts}")
+    logger.debug(f"[kg_query]Prompt Tokens: {len_of_prompts}")

     response = await use_model_func(
         query,
@@ -978,7 +977,7 @@ async def mix_kg_vector_query(
         return sys_prompt

     len_of_prompts = len(encode_string_by_tiktoken(query + sys_prompt))
-    verbose_debug(f"[mix_kg_vector_query]Prompt Tokens: {len_of_prompts}")
+    logger.debug(f"[mix_kg_vector_query]Prompt Tokens: {len_of_prompts}")

     # 6. Generate response
     response = await use_model_func(
@@ -1808,7 +1807,7 @@ async def kg_query_with_keywords(
|
|||||||
return sys_prompt
|
return sys_prompt
|
||||||
|
|
||||||
len_of_prompts = len(encode_string_by_tiktoken(query + sys_prompt))
|
len_of_prompts = len(encode_string_by_tiktoken(query + sys_prompt))
|
||||||
verbose_debug(f"[kg_query_with_keywords]Prompt Tokens: {len_of_prompts}")
|
logger.debug(f"[kg_query_with_keywords]Prompt Tokens: {len_of_prompts}")
|
||||||
|
|
||||||
response = await use_model_func(
|
response = await use_model_func(
|
||||||
query,
|
query,
|
||||||
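
For reference, the pattern this commit converges on is ordinary DEBUG-level token logging. Below is a minimal, self-contained sketch of that pattern, not the repository's code: it assumes the standard logging module and the tiktoken package in place of the project's own logger and encode_string_by_tiktoken helpers, and the count_tokens function and "lightrag" logger name are illustrative only.

import logging

import tiktoken  # assumed stand-in for encode_string_by_tiktoken

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("lightrag")  # illustrative logger name


def count_tokens(text: str) -> int:
    # Hypothetical helper: count tokens with tiktoken's cl100k_base encoding.
    encoding = tiktoken.get_encoding("cl100k_base")
    return len(encoding.encode(text))


def log_prompt_tokens(query: str, sys_prompt: str) -> None:
    # Same shape as the updated kg_query / mix_kg_vector_query /
    # kg_query_with_keywords paths: the token count goes to logger.debug
    # instead of the removed verbose_debug helper.
    len_of_prompts = count_tokens(query + sys_prompt)
    logger.debug(f"[kg_query]Prompt Tokens: {len_of_prompts}")

Routing the count through logger.debug makes it visible under a normal DEBUG log level, with no separate verbose_debug helper left to import.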