clean comments and unused libs
@@ -1,46 +1,3 @@
-"""
-Azure OpenAI LLM Interface Module
-==========================
-
-This module provides interfaces for interacting with aure openai's language models,
-including text generation and embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added async chat completion support
-    * Added embedding generation
-    * Added stream response capability
-
-Dependencies:
-    - openai
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.azure_openai import azure_openai_model_complete, azure_openai_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
-
 import os
 import pipmaster as pm  # Pipmaster for dynamic library install
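The module docstrings are removed throughout, but the dynamic dependency handling stays in place. For reference, the pipmaster pattern these interface modules rely on looks roughly like the sketch below; the package name "openai" and the client import are illustrative only, not part of this commit.

import pipmaster as pm  # Pipmaster for dynamic library install

# Install the client library at import time if it is missing (illustrative sketch).
if not pm.is_installed("openai"):
    pm.install("openai")

from openai import AsyncAzureOpenAI  # example import once the dependency is available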
@@ -1,46 +1,3 @@
-"""
-Bedrock LLM Interface Module
-==========================
-
-This module provides interfaces for interacting with Bedrock's language models,
-including text generation and embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added async chat completion support
-    * Added embedding generation
-    * Added stream response capability
-
-Dependencies:
-    - aioboto3, tenacity
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.bebrock import bebrock_model_complete, bebrock_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
-
 import copy
 import os
 import json
@@ -1,43 +1,3 @@
-"""
-Jina Embedding Interface Module
-==========================
-
-This module provides interfaces for interacting with jina system,
-including embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added embedding generation
-
-Dependencies:
-    - tenacity
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.jina import jina_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
 import os
 import pipmaster as pm  # Pipmaster for dynamic library install
@@ -1,45 +1,3 @@
-"""
-LMDeploy LLM Interface Module
-==========================
-
-This module provides interfaces for interacting with LMDeploy's language models,
-including text generation and embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added async chat completion support
-    * Added embedding generation
-    * Added stream response capability
-
-Dependencies:
-    - tenacity
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.lmdeploy import lmdeploy_model_complete, lmdeploy_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
 import pipmaster as pm  # Pipmaster for dynamic library install

 # install specific modules
@@ -1,66 +1,3 @@
-"""
-LoLLMs (Lord of Large Language Models) Interface Module
-=====================================================
-
-This module provides the official interface for interacting with LoLLMs (Lord of Large Language and multimodal Systems),
-a unified framework for AI model interaction and deployment.
-
-LoLLMs is designed as a "one tool to rule them all" solution, providing seamless integration
-with various AI models while maintaining high performance and user-friendly interfaces.
-
-Author: ParisNeo
-Created: 2024-01-24
-License: Apache 2.0
-
-Copyright (c) 2024 ParisNeo
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Version: 2.0.0
-
-Change Log:
-- 2.0.0 (2024-01-24):
-    * Added async support for model inference
-    * Implemented streaming capabilities
-    * Added embedding generation functionality
-    * Enhanced parameter handling
-    * Improved error handling and timeout management
-
-Dependencies:
-    - aiohttp
-    - numpy
-    - Python >= 3.10
-
-Features:
-    - Async text generation with streaming support
-    - Embedding generation
-    - Configurable model parameters
-    - System prompt and chat history support
-    - Timeout handling
-    - API key authentication
-
-Usage:
-    from llm_interfaces.lollms import lollms_model_complete, lollms_embed
-
-Project Repository: https://github.com/ParisNeo/lollms
-Documentation: https://github.com/ParisNeo/lollms/docs
-"""
-
-__version__ = "1.0.0"
-__author__ = "ParisNeo"
-__status__ = "Production"
-__project_url__ = "https://github.com/ParisNeo/lollms"
-__doc_url__ = "https://github.com/ParisNeo/lollms/docs"
 import sys

 if sys.version_info < (3, 9):
@@ -1,46 +1,3 @@
-"""
-OpenAI LLM Interface Module
-==========================
-
-This module provides interfaces for interacting with openai's language models,
-including text generation and embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added async chat completion support
-    * Added embedding generation
-    * Added stream response capability
-
-Dependencies:
-    - openai
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.nvidia_openai import nvidia_openai_model_complete, nvidia_openai_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
-
 import sys
 import os
@@ -48,6 +5,7 @@ if sys.version_info < (3, 9):
     pass
 else:
     pass

 import pipmaster as pm  # Pipmaster for dynamic library install
+
 # install specific modules
@@ -1,45 +1,3 @@
-"""
-OpenAI LLM Interface Module
-==========================
-
-This module provides interfaces for interacting with openai's language models,
-including text generation and embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added async chat completion support
-    * Added embedding generation
-    * Added stream response capability
-
-Dependencies:
-    - openai
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.openai import openai_model_complete, openai_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
 from ..utils import verbose_debug, VERBOSE_DEBUG
 import sys
 import os
@@ -1,43 +1,3 @@
-"""
-SiliconCloud Embedding Interface Module
-==========================
-
-This module provides interfaces for interacting with SiliconCloud system,
-including embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added embedding generation
-
-Dependencies:
-    - tenacity
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.siliconcloud import siliconcloud_model_complete, siliconcloud_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
 import sys

 if sys.version_info < (3, 9):
@@ -1,45 +1,3 @@
-"""
-Zhipu LLM Interface Module
-==========================
-
-This module provides interfaces for interacting with LMDeploy's language models,
-including text generation and embedding capabilities.
-
-Author: Lightrag team
-Created: 2024-01-24
-License: MIT License
-
-Copyright (c) 2024 Lightrag
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-Version: 1.0.0
-
-Change Log:
-- 1.0.0 (2024-01-24): Initial release
-    * Added async chat completion support
-    * Added embedding generation
-    * Added stream response capability
-
-Dependencies:
-    - tenacity
-    - numpy
-    - pipmaster
-    - Python >= 3.10
-
-Usage:
-    from llm_interfaces.zhipu import zhipu_model_complete, zhipu_embed
-"""
-
-__version__ = "1.0.0"
-__author__ = "lightrag Team"
-__status__ = "Production"
-
 import sys
 import re
 import json
@@ -1,8 +1,9 @@
 from __future__ import annotations
+from typing import Any

 GRAPH_FIELD_SEP = "<SEP>"

-PROMPTS = {}
+PROMPTS: dict[str, Any] = {}

 PROMPTS["DEFAULT_LANGUAGE"] = "English"
 PROMPTS["DEFAULT_TUPLE_DELIMITER"] = "<|>"
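The annotation added here lets static type checkers accept the mixed value types stored in PROMPTS further down the module. A minimal sketch of what dict[str, Any] permits; the list-valued key is a hypothetical example, not taken from this commit:

from __future__ import annotations
from typing import Any

PROMPTS: dict[str, Any] = {}
PROMPTS["DEFAULT_LANGUAGE"] = "English"              # str value, as in the hunk above
PROMPTS["example_steps"] = ["extract", "summarize"]  # hypothetical list value, allowed under dict[str, Any]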
@@ -1,10 +1,3 @@
-"""
-3D GraphML Viewer using Dear ImGui and ModernGL
-Author: ParisNeo, ArnoChen
-Description: An interactive 3D GraphML viewer using imgui_bundle and ModernGL
-Version: 2.0
-"""
-
 from typing import Optional, Tuple, Dict, List
 import numpy as np
 import networkx as nx
@@ -13,9 +13,7 @@ python-dotenv
 setuptools
 tenacity
-

 # LLM packages
 tiktoken
-xxhash

 # Extra libraries are installed when needed using pipmaster