Mirror of https://github.com/getzep/graphiti.git (synced 2024-09-08 19:13:11 +03:00)
Node Distance Reranker: Limit max hops (and cleanup prints) (#72)
* limit SHORTEST max hops
* cleanup prints
@@ -60,6 +60,5 @@ class OpenAIClient(LLMClient):
             result = response.choices[0].message.content or ''
             return json.loads(result)
         except Exception as e:
-            print(openai_messages)
             logger.error(f'Error in generating LLM response: {e}')
             raise
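This hunk removes the stray print(openai_messages) debug call and relies on the existing logger.error before re-raising. Below is a minimal sketch of the resulting pattern, assuming a generic OpenAI-style async client; the function name, model, and the call that produces response are illustrative, not graphiti's exact code.

import json
import logging

logger = logging.getLogger(__name__)


async def generate_json_response(client, openai_messages: list[dict[str, str]]) -> dict:
    """Call a chat-completions endpoint and parse the reply as JSON."""
    try:
        # Hypothetical call; the real client may use a different model and parameters.
        response = await client.chat.completions.create(
            model='gpt-4o-mini',
            messages=openai_messages,
        )
        result = response.choices[0].message.content or ''
        return json.loads(result)
    except Exception as e:
        # Log and re-raise instead of printing the raw messages to stdout.
        logger.error(f'Error in generating LLM response: {e}')
        raise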
@@ -424,7 +424,7 @@ async def node_distance_reranker(
     records, _, _ = await driver.execute_query(
         """
         MATCH (source:Entity)-[r:RELATES_TO {uuid: $edge_uuid}]->(target:Entity)
-        MATCH p = SHORTEST 1 (center:Entity)-[:RELATES_TO]-+(n:Entity)
+        MATCH p = SHORTEST 1 (center:Entity)-[:RELATES_TO*1..10]->(n:Entity)
         WHERE center.uuid = $center_uuid AND n.uuid IN [source.uuid, target.uuid]
         RETURN min(length(p)) AS score, source.uuid AS source_uuid, target.uuid AS target_uuid
         """,
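This is the hop-limit change from the commit title: the unbounded quantified path is replaced by a traversal capped at 10 hops, so the shortest-path search cannot wander across the whole graph. Below is a minimal sketch of how such a score could drive edge reranking with the neo4j async driver; only the Cypher text is taken from the hunk above, while rerank_edges, the sentinel score, and the connection details are assumptions for illustration.

from neo4j import AsyncGraphDatabase

QUERY = """
MATCH (source:Entity)-[r:RELATES_TO {uuid: $edge_uuid}]->(target:Entity)
MATCH p = SHORTEST 1 (center:Entity)-[:RELATES_TO*1..10]->(n:Entity)
WHERE center.uuid = $center_uuid AND n.uuid IN [source.uuid, target.uuid]
RETURN min(length(p)) AS score, source.uuid AS source_uuid, target.uuid AS target_uuid
"""


async def rerank_edges(driver, center_uuid: str, edge_uuids: list[str]) -> list[tuple[str, int]]:
    """Order edges by graph distance between their endpoints and the center node."""
    scored: list[tuple[str, int]] = []
    for edge_uuid in edge_uuids:
        records, _, _ = await driver.execute_query(
            QUERY, edge_uuid=edge_uuid, center_uuid=center_uuid
        )
        if records:
            # Smaller score means fewer hops from the center, so it ranks higher.
            scored.append((edge_uuid, records[0]['score']))
        else:
            # No path within 10 hops: push the edge to the back of the ranking.
            scored.append((edge_uuid, 2**31))
    return sorted(scored, key=lambda pair: pair[1])


# Usage (hypothetical connection details):
# driver = AsyncGraphDatabase.driver('bolt://localhost:7687', auth=('neo4j', 'password'))
# ranked = await rerank_edges(driver, center_uuid, edge_uuids)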
@@ -70,7 +70,6 @@ async def extract_edges(
     }

     llm_response = await llm_client.generate_response(prompt_library.extract_edges.v2(context))
-    print(llm_response)
     edges_data = llm_response.get('edges', [])

     end = time()
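The same cleanup applies in extract_edges: the raw LLM response is no longer printed to stdout. If visibility is still wanted, a debug-level log line is a common substitute. A hedged sketch follows, where llm_client, prompt_library, and context come from the hunk above and everything else is assumed for illustration.

import logging
from time import time

logger = logging.getLogger(__name__)


async def extract_edges_sketch(llm_client, prompt_library, context: dict) -> list[dict]:
    """Run edge extraction and report the raw response at debug level."""
    start = time()
    llm_response = await llm_client.generate_response(prompt_library.extract_edges.v2(context))
    logger.debug(f'extract_edges response: {llm_response}')  # replaces the removed print
    edges_data = llm_response.get('edges', [])
    end = time()
    logger.debug(f'extract_edges took {(end - start) * 1000:.0f} ms')
    return edges_data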