A clearly commented version and a pipeline summary in my_progress.md

hhh2210
2025-03-16 19:47:24 +08:00
parent d5ff164818
commit aae6377500
4 changed files with 57 additions and 2 deletions

View File

@@ -140,6 +140,13 @@ async def _handle_entity_relation_summary(
description: str,
global_config: dict,
) -> str:
"""Summarize the entity or relation description,is used during entity extraction and when merging nodes or edges in the knowledge graph
Args:
entity_or_relation_name: entity or relation name
description: description
global_config: global configuration
"""
use_llm_func: callable = global_config["cheap_model_func"]
llm_max_tokens = global_config["cheap_model_max_token_size"]
tiktoken_model_name = global_config["tiktoken_model_name"]
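
For context, the pattern this function follows can be sketched as below. This is a simplified illustration, not the repository's exact code: the prompt wording and the summarize_description name are placeholders, while the global_config keys are the ones shown in the diff above.

import tiktoken

async def summarize_description(name: str, description: str, global_config: dict) -> str:
    # Hypothetical sketch: only call the cheap LLM when the description
    # exceeds its token budget, measured with tiktoken.
    use_llm_func = global_config["cheap_model_func"]
    llm_max_tokens = global_config["cheap_model_max_token_size"]
    enc = tiktoken.encoding_for_model(global_config["tiktoken_model_name"])
    tokens = enc.encode(description)
    if len(tokens) <= llm_max_tokens:  # short enough, keep verbatim
        return description
    truncated = enc.decode(tokens[:llm_max_tokens])  # fit the model's window
    prompt = f"Summarize the following descriptions of {name} into one coherent description:\n{truncated}"
    return await use_llm_func(prompt)
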
@@ -311,6 +318,17 @@ async def extract_hierarchical_entities(
entity_vdb: BaseVectorStorage,
global_config: dict,
)-> Union[BaseGraphStorage, None]:
"""Extract entities and relations from text chunks
Args:
chunks: text chunks
knowledge_graph_inst: knowledge graph instance
entity_vdb: entity vector database
global_config: global configuration
Returns:
Union[BaseGraphStorage, None]: knowledge graph instance
"""
use_llm_func: callable = global_config["best_model_func"]
entity_extract_max_gleaning = global_config["entity_extract_max_gleaning"]
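
The entity_extract_max_gleaning setting drives a "gleaning" loop: after the first extraction pass, the best model is re-prompted a bounded number of times to pick up entities it missed. A minimal sketch of that loop, assuming only the two global_config keys shown above (the prompt text and function name are illustrative, not the repository's):

async def extract_with_gleaning(chunk_text: str, global_config: dict) -> str:
    # Hypothetical sketch of the gleaning loop; not the repo's exact prompts.
    use_llm_func = global_config["best_model_func"]
    max_gleaning = global_config["entity_extract_max_gleaning"]

    prompt = f"Extract entities and relations from the text below.\n{chunk_text}"
    result = await use_llm_func(prompt)
    for _ in range(max_gleaning):
        glean = await use_llm_func(
            f"{prompt}\n\nAlready extracted:\n{result}\n"
            "Many entities were missed in the last pass. Add the remaining ones."
        )
        result += "\n" + glean
    return result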

View File

@@ -199,7 +199,10 @@ class NetworkXStorage(BaseGraphStorage):
async def _leiden_clustering(self):
"""Cluster the graph with the hierarchical Leiden algorithm, using the hierarchical_leiden function from the graspologic library. Called from the HiRAG.ainsert pipeline.
"""
from graspologic.partition import hierarchical_leiden
graph = NetworkXStorage.stable_largest_connected_component(self._graph)
community_mapping = hierarchical_leiden(
graph,

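graspologic's hierarchical_leiden returns one record per node and level, each carrying node, cluster, and level fields, which downstream code can group into communities. A standalone sketch (the toy graph and parameter values are illustrative, not the repository's configuration):

from collections import defaultdict

import networkx as nx
from graspologic.partition import hierarchical_leiden

# Toy graph instead of the project's knowledge graph.
graph = nx.karate_club_graph()
partitions = hierarchical_leiden(graph, max_cluster_size=10, random_seed=42)

# Group nodes by (level, cluster id) to form hierarchical communities.
communities = defaultdict(list)
for part in partitions:
    communities[(part.level, part.cluster)].append(part.node)

for (level, cluster_id), nodes in sorted(communities.items()):
    print(f"level={level} cluster={cluster_id} size={len(nodes)}")
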
View File

@@ -533,7 +533,7 @@ Entity description list: {entity_description_list}
#######
Output:
"""
# Entity type definitions
PROMPTS["DEFAULT_ENTITY_TYPES"] = ["organization", "person", "geo", "event"]
PROMPTS["META_ENTITY_TYPES"] = ["organization", "person", "location", "event", "product", "technology", "industry", "mathematics", "social sciences"]
PROMPTS["DEFAULT_TUPLE_DELIMITER"] = "<|>"