"""Graphiti knowledge graph -- central client singleton.

Used by ingest_episode_event_step and query_graph_step.
Lazily initialised on the first call to get_graphiti().
"""

import asyncio
import os
from typing import Any, Optional

from graphiti_core import Graphiti
from graphiti_core.embedder import OpenAIEmbedder, OpenAIEmbedderConfig
from graphiti_core.llm_client import LLMConfig
from graphiti_core.llm_client.openai_generic_client import OpenAIGenericClient

from services.graphiti_tracer import build_langfuse_tracer


class GraphitiError(Exception):
    """Raised when the Graphiti client cannot be built or accessed."""


# Module-level singleton; creation is serialised by _graphiti_init_lock.
_graphiti_client: Graphiti | None = None
_graphiti_init_lock = asyncio.Lock()


async def get_graphiti(ctx: Optional[Any] = None) -> Graphiti:
    """Return the cached Graphiti client (singleton).

    Args:
        ctx: Optional Motia context used for logging during initialisation.

    Raises:
        GraphitiError: If a required environment variable is missing.

    Required environment variables:
        NEO4J_URI               -- bolt://host:7687
        NEO4J_USER              -- optional, default: neo4j
        NEO4J_PASSWORD          -- required
        XAI_API_KEY             -- xAI key for the LLM (grok)
        XAI_BASE_URL            -- optional, default: https://api.x.ai/v1
        XAI_MODEL               -- optional, default: grok-4-1-fast-reasoning
        OPENAI_API_KEY          -- OpenAI key for embeddings
        GRAPHITI_EMBED_BASE_URL -- optional, default: https://api.openai.com/v1
        GRAPHITI_EMBED_MODEL    -- optional, default: text-embedding-3-small
    """
    global _graphiti_client
    if _graphiti_client is None:
        async with _graphiti_init_lock:
            # Re-check after acquiring the lock: another coroutine may have
            # finished initialisation while this one was waiting.
            if _graphiti_client is None:
                _log(ctx, "Initialisiere Graphiti-Client...")
                try:
                    _graphiti_client = await _build_graphiti()
                    _log(ctx, "Graphiti-Client bereit.")
                except KeyError as e:
                    raise GraphitiError(
                        f"Konfigurationsfehler – Umgebungsvariable fehlt: {e}"
                    ) from e
    return _graphiti_client


def _log(ctx: Optional[Any], message: str, level: str = "info") -> None:
    """Log via ctx.logger when available, otherwise fall back to print."""
    if ctx is not None and hasattr(ctx, "logger"):
        # Unknown level names fall back to .info instead of raising
        # AttributeError on the logger.
        log_fn = getattr(ctx.logger, level, None) or ctx.logger.info
        log_fn(f"[GraphitiClient] {message}")
    else:
        print(f"[GraphitiClient] {message}")


async def _build_graphiti() -> Graphiti:
    """Construct a fully configured Graphiti client.

    Reads all connection settings from the environment (see get_graphiti),
    wires up the xAI LLM client, the OpenAI embedder and the Langfuse tracer,
    then creates the Neo4j indices and constraints.

    Raises:
        KeyError: If a mandatory environment variable is missing.
    """
    neo4j_uri = os.environ["NEO4J_URI"]
    neo4j_user = os.environ.get("NEO4J_USER", "neo4j")
    neo4j_password = os.environ["NEO4J_PASSWORD"]

    xai_base_url = os.environ.get("XAI_BASE_URL", "https://api.x.ai/v1")
    xai_api_key = os.environ["XAI_API_KEY"]
    xai_model = os.environ.get("XAI_MODEL", "grok-4-1-fast-reasoning")

    # Embedding key preference: OPENAI_API_KEY, then GRAPHITI_EMBED_API_KEY,
    # then the xAI key. NOTE(review): the xAI-key fallback combined with the
    # default OpenAI base URL looks unlikely to authenticate -- confirm this
    # fallback is intentional.
    embed_api_key = os.environ.get("OPENAI_API_KEY") or os.environ.get(
        "GRAPHITI_EMBED_API_KEY", xai_api_key
    )
    embed_base_url = os.environ.get(
        "GRAPHITI_EMBED_BASE_URL", "https://api.openai.com/v1"
    )
    embed_model = os.environ.get("GRAPHITI_EMBED_MODEL", "text-embedding-3-small")

    llm_client = OpenAIGenericClient(
        config=LLMConfig(
            api_key=xai_api_key,
            base_url=xai_base_url,
            model=xai_model,
        )
    )
    embedder = OpenAIEmbedder(
        config=OpenAIEmbedderConfig(
            api_key=embed_api_key,
            base_url=embed_base_url,
            embedding_model=embed_model,
            embedding_dim=1536,
        )
    )
    tracer = build_langfuse_tracer(span_prefix="graphiti", ctx=None)

    client = Graphiti(
        uri=neo4j_uri,
        user=neo4j_user,
        password=neo4j_password,
        llm_client=llm_client,
        embedder=embedder,
        tracer=tracer,
    )
    try:
        await client.build_indices_and_constraints()
    except Exception:
        # Don't leak the open Neo4j driver when index creation fails.
        await client.close()
        raise
    return client