@@ -312,6 +312,7 @@ async def node_degrees_batch(self, node_ids: list[str]) -> dict[str, int]:
         with self._connection_pool.session_context(
             self.USERNAME, self.PASSWORD
         ) as session:
+            session.execute(f"USE {self._space_name}")
             query = f"""

                 MATCH (v:base)-[r]-(n)
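Aside on the added USE statement: sessions handed out by the nebula3-python connection pool are not bound to a graph space by default, so space-scoped nGQL fails until one is selected. A minimal sketch of that pattern in isolation; the host, credentials, and space name below are placeholders, not values from this repository.

from nebula3.Config import Config
from nebula3.gclient.net import ConnectionPool

config = Config()
pool = ConnectionPool()
pool.init([("127.0.0.1", 9669)], config)  # placeholder address

with pool.session_context("root", "nebula") as session:  # placeholder credentials
    # Select the working space first; later statements run against it.
    result = session.execute("USE my_space")  # hypothetical space name
    assert result.is_succeeded(), result.error_msg()
    print(session.execute("MATCH (v:base) RETURN id(v) LIMIT 5"))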
@@ -458,9 +459,12 @@ async def get_edges_batch(
         with self._connection_pool.session_context(
             self.USERNAME, self.PASSWORD
         ) as session:
+            session.execute(f"USE {self._space_name}")
             sCondition = ""
             for adata in pairs:
-                sCondition += f" or (id(start)={adata["src"]} and id(end)={adata["tag"]}) "
+                asrc = adata["src"]
+                atag = adata["tag"]
+                sCondition += f" or (id(start)={asrc} and id(end)={atag}) "


             query = f"""
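The rewritten loop also sidesteps a Python version pitfall: before Python 3.12, an f-string expression cannot reuse the surrounding quote character, so f"...{adata["src"]}..." is a SyntaxError. Binding the values to locals first keeps the statement valid on older interpreters. A standalone sketch, assuming pairs is a list of dicts with src and tag keys, as the loop implies:

# Hypothetical input mirroring the structure the loop expects.
pairs = [{"src": "node-1", "tag": "node-2"}, {"src": "node-3", "tag": "node-4"}]

sCondition = ""
for adata in pairs:
    # Pull the values out before interpolating; nesting adata["src"] inside
    # the f-string's own double quotes only became legal in Python 3.12.
    asrc = adata["src"]
    atag = adata["tag"]
    sCondition += f" or (id(start)={asrc} and id(end)={atag}) "

print(sCondition)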
@@ -584,6 +588,7 @@ async def get_nodes_edges_batch(
             self.USERNAME, self.PASSWORD
         ) as session:
             # Query to get both outgoing and incoming edges
+            session.execute(f"USE {self._space_name}")
             query = f"""

                 MATCH (n:base)-[r]-(connected:base)
@@ -692,11 +697,19 @@ async def get_knowledge_graph(
         self,
         node_label: str,
         max_depth: int = 3,
-        min_degree: int = 0,
-        inclusive: bool = False,
+        max_nodes: int = MAX_GRAPH_NODES,
     ) -> KnowledgeGraph:
         """
-        Retrieve a connected subgraph of nodes
+        Retrieve a connected subgraph of nodes where the label includes the specified node_label.
+
+        Args:
+            node_label: Label of the starting node, * means all nodes
+            max_depth: Maximum depth of the subgraph, defaults to 3
+            max_nodes: Maximum nodes to return by BFS, defaults to 1000
+
+        Returns:
+            KnowledgeGraph object containing nodes and edges, with an is_truncated flag
+            indicating whether the graph was truncated due to max_nodes limit
         """
         result = KnowledgeGraph()
         seen_nodes = set()
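For callers, this signature change swaps the old min_degree/inclusive filters for a single max_nodes budget. A hedged usage sketch; graph_storage stands in for an already-initialized instance of this storage class and is not defined in the diff.

async def demo(graph_storage):
    # graph_storage: an initialized NebulaGraph storage object exposing the
    # updated get_knowledge_graph signature.
    kg = await graph_storage.get_knowledge_graph(
        node_label="*",  # "*" asks for nodes across the whole space
        max_depth=3,     # traversal depth for the BFS expansion
        max_nodes=500,   # hard cap; kg.is_truncated flags when it is hit
    )
    print(len(kg.nodes), len(kg.edges), kg.is_truncated)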
@@ -714,15 +727,15 @@ async def get_knowledge_graph(
                     f"MATCH (n:base) "
                     f"WHERE id(n) >= '' "
                     f"RETURN id(n) AS id, properties(n) AS props "
-                    f"LIMIT {MAX_GRAPH_NODES} "
+                    f"LIMIT {max_nodes} "
                 )
             else:
                 # Start from specific node and traverse
                 query = f"""
                     MATCH (n)
-                    WHERE id(n) {'CONTAINS' if inclusive else '=='} "{node_label}"
+                    WHERE id(n) == "{node_label}"
                     RETURN id(n) AS id, properties(n) AS props
-                    LIMIT {MAX_GRAPH_NODES}
+                    LIMIT {max_nodes}
                 """

             try:
@@ -752,7 +765,7 @@ async def get_knowledge_graph(
                     MATCH (src)-[e*1..{max_depth}]-(neighbor)
                     WHERE id(src) IN {matched_ids}
                     RETURN DISTINCT id(neighbor) AS id,properties(neighbor) AS props
-                    LIMIT {MAX_GRAPH_NODES - len(matched_ids)}
+                    LIMIT {max_nodes - len(matched_ids)}
                 """

                 result_set = session.execute(neighbors_query).as_data_frame()
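The LIMIT arithmetic keeps the seed nodes and their neighbors under one shared budget: whatever the label match already consumed is subtracted before the traversal runs. A tiny numeric illustration with made-up values:

# Made-up numbers, only to show the budget calculation from the query above.
max_nodes = 1000
matched_ids = ["ent-a", "ent-b", "ent-c"]  # hypothetical seed node ids

neighbor_limit = max_nodes - len(matched_ids)
assert neighbor_limit == 997  # at most 997 additional neighbors may be returned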
@@ -902,13 +915,14 @@ async def drop(self) -> dict[str, str]:
                 self.USERNAME, self.PASSWORD
             ) as session:
                 # Delete all nodes and relationships
+                session.execute(f"USE {self._space_name}")

-                query = f"USE {self._space_name};CLEAR SPACE; "
+                query = f" drop space {self._space_name} "
                 session.execute(query)
                 logger.info(
-                    f"Process {os.getpid()} drop nebula database {self._space_name}"
+                    f"Process {os.getpid()} drop nebula space {self._space_name}"
                 )
                 return {"status": "success", "message": "data dropped"}
         except Exception as e:
-            logger.error(f"Error dropping nebula database {self._space_name}: {e}")
+            logger.error(f"Error dropping nebula space {self._space_name}: {e}")
             return {"status": "error", "message": str(e)}
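Note the semantics shift in drop(): CLEAR SPACE empties the data while keeping the space and its schema, whereas DROP SPACE removes the space itself, so reusing the same name later means recreating the space and its tags. A minimal sketch of the new path, with placeholder connection details:

from nebula3.Config import Config
from nebula3.gclient.net import ConnectionPool

pool = ConnectionPool()
pool.init([("127.0.0.1", 9669)], Config())  # placeholder address

space_name = "my_space"  # hypothetical space name
with pool.session_context("root", "nebula") as session:  # placeholder credentials
    session.execute(f"USE {space_name}")
    # DROP SPACE deletes the space, its schema, and all data in one statement.
    result = session.execute(f"DROP SPACE {space_name}")
    if not result.is_succeeded():
        raise RuntimeError(result.error_msg())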