diff --git a/src/memos/graph_dbs/base.py b/src/memos/graph_dbs/base.py
index 87a50d443..130b66a3d 100644
--- a/src/memos/graph_dbs/base.py
+++ b/src/memos/graph_dbs/base.py
@@ -19,12 +19,13 @@ def add_node(self, id: str, memory: str, metadata: dict[str, Any]) -> None:
         """

     @abstractmethod
-    def update_node(self, id: str, fields: dict[str, Any]) -> None:
+    def update_node(self, id: str, fields: dict[str, Any], user_name: str | None = None) -> None:
         """
         Update attributes of an existing node.

         Args:
             id: Node identifier to be updated.
             fields: Dictionary of fields to update.
+            user_name: given user_name
         """

     @abstractmethod
@@ -70,7 +71,7 @@ def edge_exists(self, source_id: str, target_id: str, type: str) -> bool:

     # Graph Query & Reasoning
     @abstractmethod
-    def get_node(self, id: str, include_embedding: bool = False) -> dict[str, Any] | None:
+    def get_node(self, id: str, include_embedding: bool = False, **kwargs) -> dict[str, Any] | None:
         """
         Retrieve the metadata and content of a node.
         Args:
diff --git a/src/memos/mem_reader/multi_modal_struct.py b/src/memos/mem_reader/multi_modal_struct.py
index be9f02b22..9edcd0a55 100644
--- a/src/memos/mem_reader/multi_modal_struct.py
+++ b/src/memos/mem_reader/multi_modal_struct.py
@@ -474,13 +474,12 @@ def _get_maybe_merged_memory(
             )

             if not search_results:
-                # No similar memories found, return original
                 return extracted_memory_dict

             # Get full memory details
             similar_memory_ids = [r["id"] for r in search_results if r.get("id")]
             similar_memories_list = [
-                self.graph_db.get_node(mem_id, include_embedding=False)
+                self.graph_db.get_node(mem_id, include_embedding=False, user_name=user_name)
                 for mem_id in similar_memory_ids
             ]

@@ -505,7 +504,6 @@ def _get_maybe_merged_memory(
             )

             if not filtered_similar:
-                # No valid similar memories, return original
                 return extracted_memory_dict

             # Create a temporary TextualMemoryItem for merge check
@@ -529,14 +527,12 @@ def _get_maybe_merged_memory(
             if merge_result:
                 # Return merged memory dict
                 merged_dict = extracted_memory_dict.copy()
-                merged_dict["value"] = merge_result.get("value", mem_text)
-                merged_dict["merged_from"] = merge_result.get("merged_from", [])
-                logger.info(
-                    f"[MultiModalFine] Merged memory with {len(merged_dict['merged_from'])} existing memories"
-                )
+                merged_content = merge_result.get("value", mem_text)
+                merged_dict["value"] = merged_content
+                merged_from_ids = merge_result.get("merged_from", [])
+                merged_dict["merged_from"] = merged_from_ids
                 return merged_dict
             else:
-                # No merge needed, return original
                 return extracted_memory_dict

         except Exception as e:
@@ -648,6 +644,7 @@ def _process_one_item(fast_item: TextualMemoryItem) -> list[TextualMemoryItem]:
                         extracted_memory_dict=m,
                         mem_text=m.get("value", ""),
                         sources=sources,
+                        original_query=mem_str,
                         **kwargs,
                     )
                     # Normalize memory_type (same as simple_struct)
@@ -680,6 +677,7 @@ def _process_one_item(fast_item: TextualMemoryItem) -> list[TextualMemoryItem]:
                         extracted_memory_dict=resp,
                         mem_text=resp.get("value", "").strip(),
                         sources=sources,
+                        original_query=mem_str,
                         **kwargs,
                     )
                     node = self._make_memory_item(
diff --git a/src/memos/mem_scheduler/general_scheduler.py b/src/memos/mem_scheduler/general_scheduler.py
index 8755de281..d4ac09cc3 100644
--- a/src/memos/mem_scheduler/general_scheduler.py
+++ b/src/memos/mem_scheduler/general_scheduler.py
@@ -899,7 +899,7 @@ def _process_memories_with_reader(
             )

             # Mark merged_from memories as archived when provided in memory metadata
-            if self.mem_reader and self.mem_reader.graph_db:
+            if self.mem_reader.graph_db:
                 for memory in flattened_memories:
                     merged_from = (memory.metadata.info or {}).get("merged_from")
                     if merged_from:
diff --git a/src/memos/memories/textual/tree_text_memory/retrieve/recall.py b/src/memos/memories/textual/tree_text_memory/retrieve/recall.py
index 2db2fd08b..4541b118b 100644
--- a/src/memos/memories/textual/tree_text_memory/retrieve/recall.py
+++ b/src/memos/memories/textual/tree_text_memory/retrieve/recall.py
@@ -77,6 +77,7 @@ def retrieve(
                 include_embedding=self.include_embedding,
                 user_name=user_name,
                 filter=search_filter,
+                status="activated",
             )
             return [TextualMemoryItem.from_dict(record) for record in working_memories[:top_k]]

@@ -247,7 +248,7 @@ def process_node(node):

         # Load nodes and post-filter
         node_dicts = self.graph_store.get_nodes(
-            list(candidate_ids), include_embedding=self.include_embedding
+            list(candidate_ids), include_embedding=self.include_embedding, user_name=user_name
         )

         final_nodes = []
@@ -277,7 +278,9 @@ def process_node(node):
                 {"field": "key", "op": "in", "value": parsed_goal.keys},
                 {"field": "memory_type", "op": "=", "value": memory_scope},
             ]
-            key_ids = self.graph_store.get_by_metadata(key_filters, user_name=user_name)
+            key_ids = self.graph_store.get_by_metadata(
+                key_filters, user_name=user_name, status="activated"
+            )
             candidate_ids.update(key_ids)

         # 2) tag-based OR branch
@@ -286,7 +289,9 @@ def process_node(node):
                 {"field": "tags", "op": "contains", "value": parsed_goal.tags},
                 {"field": "memory_type", "op": "=", "value": memory_scope},
             ]
-            tag_ids = self.graph_store.get_by_metadata(tag_filters, user_name=user_name)
+            tag_ids = self.graph_store.get_by_metadata(
+                tag_filters, user_name=user_name, status="activated"
+            )
             candidate_ids.update(tag_ids)

         # No matches → return empty
@@ -422,9 +427,11 @@ def _bm25_recall(
                 value = search_filter[key]
                 key_filters.append({"field": key, "op": "=", "value": value})
             corpus_name += "".join(list(search_filter.values()))
-        candidate_ids = self.graph_store.get_by_metadata(key_filters, user_name=user_name)
+        candidate_ids = self.graph_store.get_by_metadata(
+            key_filters, user_name=user_name, status="activated"
+        )
         node_dicts = self.graph_store.get_nodes(
-            list(candidate_ids), include_embedding=self.include_embedding
+            list(candidate_ids), include_embedding=self.include_embedding, user_name=user_name
         )

         bm25_query = " ".join(list({query, *parsed_goal.keys}))
diff --git a/src/memos/multi_mem_cube/single_cube.py b/src/memos/multi_mem_cube/single_cube.py
index ffe8fe989..426cf32be 100644
--- a/src/memos/multi_mem_cube/single_cube.py
+++ b/src/memos/multi_mem_cube/single_cube.py
@@ -846,7 +846,9 @@ def _process_text_mem(
                     for old_id in old_ids:
                         try:
                             self.mem_reader.graph_db.update_node(
-                                str(old_id), {"status": "archived"}
+                                str(old_id),
+                                {"status": "archived"},
+                                user_name=user_context.mem_cube_id,
                             )
                             self.logger.info(
                                 f"[SingleCubeView] Archived merged_from memory: {old_id}"