Add comprehensive test scripts for thumbnail generation and xAI collections API

- Implemented `test_thumbnail_generation.py` to validate the complete flow of document thumbnail generation in EspoCRM, including document creation, file upload, webhook triggering, and preview verification.
- Created `test_xai_collections_api.py` to test critical operations of the xAI Collections API, covering file uploads, collection CRUD operations, document management, and response validation.
- Both scripts include detailed logging for success and error states, ensuring robust testing and easier debugging.
This commit is contained in:
bsiggel
2026-03-03 17:03:08 +00:00
parent c45bfb7233
commit bcb6454b2a
15 changed files with 505 additions and 1259 deletions

View File

@@ -1,6 +0,0 @@
"""
EspoCRM Generic Webhooks
Empfängt Webhooks von EspoCRM für verschiedene Entities.
Zentrale Anlaufstelle für alle EspoCRM-Events außerhalb VMH-Kontext.
"""

View File

@@ -1,208 +0,0 @@
"""EspoCRM Webhook - Document Create
Empfängt Create-Webhooks von EspoCRM für Documents.
Loggt detailliert alle Payload-Informationen für Analyse.
"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse
# Motia step configuration: attaches this step to the 'vmh-documents' flow,
# exposes an HTTP POST trigger for the EspoCRM create webhook, and declares
# the queue topic this handler enqueues to.
config = {
    "name": "VMH Webhook Document Create",
    "description": "Empfängt Create-Webhooks von EspoCRM für Document Entities",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/vmh/webhook/document/create")
    ],
    "enqueues": ["vmh.document.create"],
}
async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document creation in EspoCRM.

    Payload-analysis mode: logs request headers, payload type and full
    payload, extracts every document ID and emits one 'vmh.document.create'
    queue event per unique ID for downstream processing (xAI sync, etc.).

    Args:
        request: Incoming HTTP request; ``body`` is either a list of entity
            dicts (batch) or a single entity dict.
        ctx: Motia flow context providing ``logger`` and ``enqueue``.

    Returns:
        ApiResponse: 200 with the collected IDs (or a warning when none were
        found), 500 with error details on unexpected failures.
    """
    try:
        payload = request.body or []
        # ═══════════════════════════════════════════════════════════════
        # DETAILED LOGGING FOR ANALYSIS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 EspoCRM DOCUMENT CREATE WEBHOOK EMPFANGEN")
        ctx.logger.info("=" * 80)
        # Request headers (guarded: not every request object exposes them)
        ctx.logger.info("\n🔍 REQUEST HEADERS:")
        if hasattr(request, 'headers'):
            for key, value in request.headers.items():
                ctx.logger.info(f"  {key}: {value}")
        else:
            ctx.logger.info("  (keine Headers verfügbar)")
        # Payload type & structure
        ctx.logger.info(f"\n📦 PAYLOAD TYPE: {type(payload).__name__}")
        ctx.logger.info(f"📦 PAYLOAD LENGTH: {len(payload) if isinstance(payload, (list, dict)) else 'N/A'}")
        # Full payload (pretty-printed)
        ctx.logger.info("\n📄 FULL PAYLOAD:")
        ctx.logger.info(json.dumps(payload, indent=2, ensure_ascii=False))
        # ═══════════════════════════════════════════════════════════════
        # PAYLOAD ANALYSIS & ID EXTRACTION
        # ═══════════════════════════════════════════════════════════════
        entity_ids = set()
        if isinstance(payload, list):
            ctx.logger.info(f"\n✅ Payload ist LIST mit {len(payload)} Einträgen")
            for idx, entity in enumerate(payload):
                if isinstance(entity, dict):
                    entity_id = entity.get('id')
                    if entity_id:
                        entity_ids.add(entity_id)
                        ctx.logger.info(f"\n  📄 Document #{idx + 1}:")
                        ctx.logger.info(f"    ID: {entity_id}")
                        ctx.logger.info(f"    Name: {entity.get('name', 'N/A')}")
                        ctx.logger.info(f"    Type: {entity.get('type', 'N/A')}")
                        ctx.logger.info(f"    Size: {entity.get('size', 'N/A')} bytes")
                        ctx.logger.info(f"    Verfügbare Felder: {', '.join(entity.keys())}")
                        # xAI-related fields (if present)
                        xai_fields = {k: v for k, v in entity.items()
                                      if 'xai' in k.lower() or 'collection' in k.lower()}
                        if xai_fields:
                            ctx.logger.info(f"    🤖 xAI-Felder: {json.dumps(xai_fields, ensure_ascii=False)}")
                        # Parent/relationship fields
                        rel_fields = {k: v for k, v in entity.items()
                                      if 'parent' in k.lower() or 'related' in k.lower() or
                                      'link' in k.lower() or k.endswith('Id') or k.endswith('Ids')}
                        if rel_fields:
                            ctx.logger.info(f"    🔗 Relationship-Felder: {json.dumps(rel_fields, ensure_ascii=False)}")
        elif isinstance(payload, dict):
            ctx.logger.info("\n✅ Payload ist SINGLE DICT")
            entity_id = payload.get('id')
            if entity_id:
                entity_ids.add(entity_id)
                ctx.logger.info("\n  📄 Document:")
                ctx.logger.info(f"    ID: {entity_id}")
                ctx.logger.info(f"    Name: {payload.get('name', 'N/A')}")
                ctx.logger.info(f"    Type: {payload.get('type', 'N/A')}")
                ctx.logger.info(f"    Size: {payload.get('size', 'N/A')} bytes")
                ctx.logger.info(f"    Verfügbare Felder: {', '.join(payload.keys())}")
                # xAI-related fields
                xai_fields = {k: v for k, v in payload.items()
                              if 'xai' in k.lower() or 'collection' in k.lower()}
                if xai_fields:
                    ctx.logger.info(f"    🤖 xAI-Felder: {json.dumps(xai_fields, ensure_ascii=False)}")
                # Relationship fields
                rel_fields = {k: v for k, v in payload.items()
                              if 'parent' in k.lower() or 'related' in k.lower() or
                              'link' in k.lower() or k.endswith('Id') or k.endswith('Ids')}
                if rel_fields:
                    ctx.logger.info(f"    🔗 Relationship-Felder: {json.dumps(rel_fields, ensure_ascii=False)}")
        else:
            ctx.logger.warning(f"⚠️ Unerwarteter Payload-Typ: {type(payload)}")
        # ═══════════════════════════════════════════════════════════════
        # EMIT QUEUE EVENTS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info(f"📊 ZUSAMMENFASSUNG: {len(entity_ids)} Document(s) gefunden")
        ctx.logger.info("=" * 80)
        if not entity_ids:
            ctx.logger.warning("⚠️ Keine Document-IDs im Payload gefunden!")
            return ApiResponse(
                status=200,
                body={
                    'status': 'received',
                    'action': 'create',
                    'ids_count': 0,
                    'warning': 'No document IDs found in payload'
                }
            )
        # Emit events for queue processing (deduplication happens in the
        # event handler via a lock).  Try to determine the entity type from
        # the payload; fall back to the VMH default.  The first-element
        # access is guarded so a non-dict list element cannot raise.
        entity_type = 'CDokumente'  # default for VMH
        if isinstance(payload, list) and payload and isinstance(payload[0], dict):
            entity_type = payload[0].get('entityType') or payload[0].get('_scope') or 'CDokumente'
        elif isinstance(payload, dict):
            entity_type = payload.get('entityType') or payload.get('_scope') or 'CDokumente'
        ctx.logger.info(f"📝 Entity-Type: {entity_type}")
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.document.create',
                'data': {
                    'entity_id': entity_id,
                    'entity_type': entity_type,
                    'action': 'create',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })
            ctx.logger.info(f"✅ Event emittiert: vmh.document.create für ID {entity_id} (Type: {entity_type})")
        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info("✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
        ctx.logger.info("=" * 80)
        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'create',
                'ids_count': len(entity_ids),
                'document_ids': list(entity_ids)
            }
        )
    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ FEHLER beim Verarbeiten des Document Create Webhooks")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error Type: {type(e).__name__}")
        ctx.logger.error(f"Error Message: {str(e)}")
        # Local import: traceback is only needed on the error path.
        import traceback
        ctx.logger.error(f"Stack Trace:\n{traceback.format_exc()}")
        return ApiResponse(
            status=500,
            body={
                'error': 'Internal server error',
                'error_type': type(e).__name__,
                'details': str(e)
            }
        )

View File

@@ -1,184 +0,0 @@
"""EspoCRM Webhook - Document Delete
Empfängt Delete-Webhooks von EspoCRM für Documents.
Loggt detailliert alle Payload-Informationen für Analyse.
"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse
# Motia step configuration: attaches this step to the 'vmh-documents' flow,
# exposes an HTTP POST trigger for the EspoCRM delete webhook, and declares
# the queue topic this handler enqueues to.
config = {
    "name": "VMH Webhook Document Delete",
    "description": "Empfängt Delete-Webhooks von EspoCRM für Document Entities",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/vmh/webhook/document/delete")
    ],
    "enqueues": ["vmh.document.delete"],
}
async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document deletion in EspoCRM.

    Payload-analysis mode: logs request headers, payload type and full
    payload, extracts every document ID and emits one 'vmh.document.delete'
    queue event per unique ID.

    Note: on deletion EspoCRM may deliver only the ID, not the full
    entity data.

    Args:
        request: Incoming HTTP request; ``body`` is either a list of entity
            dicts (batch) or a single entity dict.
        ctx: Motia flow context providing ``logger`` and ``enqueue``.

    Returns:
        ApiResponse: 200 with the collected IDs (or a warning when none were
        found), 500 with error details on unexpected failures.
    """
    try:
        payload = request.body or []
        # ═══════════════════════════════════════════════════════════════
        # DETAILED LOGGING FOR ANALYSIS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 EspoCRM DOCUMENT DELETE WEBHOOK EMPFANGEN")
        ctx.logger.info("=" * 80)
        # Request headers (guarded: not every request object exposes them)
        ctx.logger.info("\n🔍 REQUEST HEADERS:")
        if hasattr(request, 'headers'):
            for key, value in request.headers.items():
                ctx.logger.info(f"  {key}: {value}")
        else:
            ctx.logger.info("  (keine Headers verfügbar)")
        # Payload type & structure
        ctx.logger.info(f"\n📦 PAYLOAD TYPE: {type(payload).__name__}")
        ctx.logger.info(f"📦 PAYLOAD LENGTH: {len(payload) if isinstance(payload, (list, dict)) else 'N/A'}")
        # Full payload (pretty-printed)
        ctx.logger.info("\n📄 FULL PAYLOAD:")
        ctx.logger.info(json.dumps(payload, indent=2, ensure_ascii=False))
        # ═══════════════════════════════════════════════════════════════
        # PAYLOAD ANALYSIS & ID EXTRACTION
        # ═══════════════════════════════════════════════════════════════
        entity_ids = set()
        if isinstance(payload, list):
            ctx.logger.info(f"\n✅ Payload ist LIST mit {len(payload)} Einträgen")
            for idx, entity in enumerate(payload):
                if isinstance(entity, dict):
                    entity_id = entity.get('id')
                    if entity_id:
                        entity_ids.add(entity_id)
                        ctx.logger.info(f"\n  🗑️ Document #{idx + 1}:")
                        ctx.logger.info(f"    ID: {entity_id}")
                        ctx.logger.info(f"    Verfügbare Felder: {', '.join(entity.keys())}")
                        # Delete payloads often carry only minimal data
                        if 'name' in entity:
                            ctx.logger.info(f"    Name: {entity.get('name')}")
                        if 'deletedAt' in entity or 'deleted' in entity:
                            ctx.logger.info(f"    Deleted At: {entity.get('deletedAt', entity.get('deleted', 'N/A'))}")
                        # xAI-related fields (if present)
                        xai_fields = {k: v for k, v in entity.items()
                                      if 'xai' in k.lower() or 'collection' in k.lower()}
                        if xai_fields:
                            ctx.logger.info(f"    🤖 xAI-Felder: {json.dumps(xai_fields, ensure_ascii=False)}")
        elif isinstance(payload, dict):
            ctx.logger.info("\n✅ Payload ist SINGLE DICT")
            entity_id = payload.get('id')
            if entity_id:
                entity_ids.add(entity_id)
                ctx.logger.info(f"\n  🗑️ Document:")
                ctx.logger.info(f"    ID: {entity_id}")
                ctx.logger.info(f"    Verfügbare Felder: {', '.join(payload.keys())}")
                if 'name' in payload:
                    ctx.logger.info(f"    Name: {payload.get('name')}")
                if 'deletedAt' in payload or 'deleted' in payload:
                    ctx.logger.info(f"    Deleted At: {payload.get('deletedAt', payload.get('deleted', 'N/A'))}")
                # xAI-related fields
                xai_fields = {k: v for k, v in payload.items()
                              if 'xai' in k.lower() or 'collection' in k.lower()}
                if xai_fields:
                    ctx.logger.info(f"    🤖 xAI-Felder: {json.dumps(xai_fields, ensure_ascii=False)}")
        else:
            ctx.logger.warning(f"⚠️ Unerwarteter Payload-Typ: {type(payload)}")
        # ═══════════════════════════════════════════════════════════════
        # EMIT QUEUE EVENTS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info(f"📊 ZUSAMMENFASSUNG: {len(entity_ids)} Document(s) gefunden")
        ctx.logger.info("=" * 80)
        if not entity_ids:
            ctx.logger.warning("⚠️ Keine Document-IDs im Payload gefunden!")
            return ApiResponse(
                status=200,
                body={
                    'status': 'received',
                    'action': 'delete',
                    'ids_count': 0,
                    'warning': 'No document IDs found in payload'
                }
            )
        # Emit events for queue processing.
        # Try to determine the entity type from the payload; fall back to
        # the VMH default.
        entity_type = 'CDokumente'  # default for VMH
        if isinstance(payload, list) and payload:
            entity_type = payload[0].get('entityType') or payload[0].get('_scope') or 'CDokumente'
        elif isinstance(payload, dict):
            entity_type = payload.get('entityType') or payload.get('_scope') or 'CDokumente'
        ctx.logger.info(f"📝 Entity-Type: {entity_type}")
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.document.delete',
                'data': {
                    'entity_id': entity_id,
                    'entity_type': entity_type,
                    'action': 'delete',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })
            ctx.logger.info(f"✅ Event emittiert: vmh.document.delete für ID {entity_id} (Type: {entity_type})")
        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
        ctx.logger.info("=" * 80)
        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'delete',
                'ids_count': len(entity_ids),
                'document_ids': list(entity_ids)
            }
        )
    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"❌ FEHLER beim Verarbeiten des Document Delete Webhooks")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error Type: {type(e).__name__}")
        ctx.logger.error(f"Error Message: {str(e)}")
        # Local import: traceback is only needed on the error path.
        import traceback
        ctx.logger.error(f"Stack Trace:\n{traceback.format_exc()}")
        return ApiResponse(
            status=500,
            body={
                'error': 'Internal server error',
                'error_type': type(e).__name__,
                'details': str(e)
            }
        )

View File

@@ -1,206 +0,0 @@
"""EspoCRM Webhook - Document Update
Empfängt Update-Webhooks von EspoCRM für Documents.
Loggt detailliert alle Payload-Informationen für Analyse.
"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse
# Motia step configuration: attaches this step to the 'vmh-documents' flow,
# exposes an HTTP POST trigger for the EspoCRM update webhook, and declares
# the queue topic this handler enqueues to.
config = {
    "name": "VMH Webhook Document Update",
    "description": "Empfängt Update-Webhooks von EspoCRM für Document Entities",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/vmh/webhook/document/update")
    ],
    "enqueues": ["vmh.document.update"],
}
async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document updates in EspoCRM.

    Payload-analysis mode: logs request headers, payload type and full
    payload, extracts every document ID and emits one 'vmh.document.update'
    queue event per unique ID for processing (xAI sync, etc.).

    Note: loop prevention should be implemented on the EspoCRM side —
    xAI field updates must not trigger new webhooks.

    Args:
        request: Incoming HTTP request; ``body`` is either a list of entity
            dicts (batch) or a single entity dict.
        ctx: Motia flow context providing ``logger`` and ``enqueue``.

    Returns:
        ApiResponse: 200 with the collected IDs (or a warning when none were
        found), 500 with error details on unexpected failures.
    """
    try:
        payload = request.body or []
        # ═══════════════════════════════════════════════════════════════
        # DETAILED LOGGING FOR ANALYSIS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 EspoCRM DOCUMENT UPDATE WEBHOOK EMPFANGEN")
        ctx.logger.info("=" * 80)
        # Request headers (guarded: not every request object exposes them)
        ctx.logger.info("\n🔍 REQUEST HEADERS:")
        if hasattr(request, 'headers'):
            for key, value in request.headers.items():
                ctx.logger.info(f"  {key}: {value}")
        else:
            ctx.logger.info("  (keine Headers verfügbar)")
        # Payload type & structure
        ctx.logger.info(f"\n📦 PAYLOAD TYPE: {type(payload).__name__}")
        ctx.logger.info(f"📦 PAYLOAD LENGTH: {len(payload) if isinstance(payload, (list, dict)) else 'N/A'}")
        # Full payload (pretty-printed)
        ctx.logger.info("\n📄 FULL PAYLOAD:")
        ctx.logger.info(json.dumps(payload, indent=2, ensure_ascii=False))
        # ═══════════════════════════════════════════════════════════════
        # PAYLOAD ANALYSIS & ID EXTRACTION
        # ═══════════════════════════════════════════════════════════════
        entity_ids = set()
        if isinstance(payload, list):
            ctx.logger.info(f"\n✅ Payload ist LIST mit {len(payload)} Einträgen")
            for idx, entity in enumerate(payload):
                if isinstance(entity, dict):
                    entity_id = entity.get('id')
                    if entity_id:
                        entity_ids.add(entity_id)
                        ctx.logger.info(f"\n  📄 Document #{idx + 1}:")
                        ctx.logger.info(f"    ID: {entity_id}")
                        ctx.logger.info(f"    Name: {entity.get('name', 'N/A')}")
                        ctx.logger.info(f"    Modified At: {entity.get('modifiedAt', 'N/A')}")
                        ctx.logger.info(f"    Modified By: {entity.get('modifiedById', 'N/A')}")
                        ctx.logger.info(f"    Verfügbare Felder: {', '.join(entity.keys())}")
                        # Check whether CHANGED fields are delivered
                        changed_fields = entity.get('changedFields') or entity.get('changed') or entity.get('modifiedFields')
                        if changed_fields:
                            ctx.logger.info(f"    🔄 Geänderte Felder: {json.dumps(changed_fields, ensure_ascii=False)}")
                        # xAI-related fields
                        xai_fields = {k: v for k, v in entity.items()
                                      if 'xai' in k.lower() or 'collection' in k.lower()}
                        if xai_fields:
                            ctx.logger.info(f"    🤖 xAI-Felder: {json.dumps(xai_fields, ensure_ascii=False)}")
                        # Relationship fields
                        rel_fields = {k: v for k, v in entity.items()
                                      if 'parent' in k.lower() or 'related' in k.lower() or
                                      'link' in k.lower() or k.endswith('Id') or k.endswith('Ids')}
                        if rel_fields:
                            ctx.logger.info(f"    🔗 Relationship-Felder: {json.dumps(rel_fields, ensure_ascii=False)}")
        elif isinstance(payload, dict):
            ctx.logger.info("\n✅ Payload ist SINGLE DICT")
            entity_id = payload.get('id')
            if entity_id:
                entity_ids.add(entity_id)
                ctx.logger.info(f"\n  📄 Document:")
                ctx.logger.info(f"    ID: {entity_id}")
                ctx.logger.info(f"    Name: {payload.get('name', 'N/A')}")
                ctx.logger.info(f"    Modified At: {payload.get('modifiedAt', 'N/A')}")
                ctx.logger.info(f"    Modified By: {payload.get('modifiedById', 'N/A')}")
                ctx.logger.info(f"    Verfügbare Felder: {', '.join(payload.keys())}")
                # Changed fields
                changed_fields = payload.get('changedFields') or payload.get('changed') or payload.get('modifiedFields')
                if changed_fields:
                    ctx.logger.info(f"    🔄 Geänderte Felder: {json.dumps(changed_fields, ensure_ascii=False)}")
                # xAI-related fields
                xai_fields = {k: v for k, v in payload.items()
                              if 'xai' in k.lower() or 'collection' in k.lower()}
                if xai_fields:
                    ctx.logger.info(f"    🤖 xAI-Felder: {json.dumps(xai_fields, ensure_ascii=False)}")
                # Relationship fields
                rel_fields = {k: v for k, v in payload.items()
                              if 'parent' in k.lower() or 'related' in k.lower() or
                              'link' in k.lower() or k.endswith('Id') or k.endswith('Ids')}
                if rel_fields:
                    ctx.logger.info(f"    🔗 Relationship-Felder: {json.dumps(rel_fields, ensure_ascii=False)}")
        else:
            ctx.logger.warning(f"⚠️ Unerwarteter Payload-Typ: {type(payload)}")
        # ═══════════════════════════════════════════════════════════════
        # EMIT QUEUE EVENTS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info(f"📊 ZUSAMMENFASSUNG: {len(entity_ids)} Document(s) gefunden")
        ctx.logger.info("=" * 80)
        if not entity_ids:
            ctx.logger.warning("⚠️ Keine Document-IDs im Payload gefunden!")
            return ApiResponse(
                status=200,
                body={
                    'status': 'received',
                    'action': 'update',
                    'ids_count': 0,
                    'warning': 'No document IDs found in payload'
                }
            )
        # Emit events for queue processing.
        # Try to determine the entity type from the payload; fall back to
        # the VMH default.
        entity_type = 'CDokumente'  # default for VMH
        if isinstance(payload, list) and payload:
            entity_type = payload[0].get('entityType') or payload[0].get('_scope') or 'CDokumente'
        elif isinstance(payload, dict):
            entity_type = payload.get('entityType') or payload.get('_scope') or 'CDokumente'
        ctx.logger.info(f"📝 Entity-Type: {entity_type}")
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.document.update',
                'data': {
                    'entity_id': entity_id,
                    'entity_type': entity_type,
                    'action': 'update',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })
            ctx.logger.info(f"✅ Event emittiert: vmh.document.update für ID {entity_id} (Type: {entity_type})")
        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
        ctx.logger.info("=" * 80)
        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'update',
                'ids_count': len(entity_ids),
                'document_ids': list(entity_ids)
            }
        )
    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"❌ FEHLER beim Verarbeiten des Document Update Webhooks")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error Type: {type(e).__name__}")
        ctx.logger.error(f"Error Message: {str(e)}")
        # Local import: traceback is only needed on the error path.
        import traceback
        ctx.logger.error(f"Stack Trace:\n{traceback.format_exc()}")
        return ApiResponse(
            status=500,
            body={
                'error': 'Internal server error',
                'error_type': type(e).__name__,
                'details': str(e)
            }
        )

View File

@@ -0,0 +1,77 @@
"""VMH Webhook - Document Create"""
import json
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse
# Motia step configuration: attaches this step to the 'vmh-documents' flow,
# exposes an HTTP POST trigger for the EspoCRM create webhook, and declares
# the queue topic this handler enqueues to.
config = {
    "name": "VMH Webhook Document Create",
    "description": "Empfängt Create-Webhooks von EspoCRM für Documents",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/vmh/webhook/document/create")
    ],
    "enqueues": ["vmh.document.create"],
}
async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document creation in EspoCRM.

    Receives batch (list) or single-entity (dict) notifications and emits
    one 'vmh.document.create' queue event per unique entity ID so each
    document can be synced to xAI.

    Args:
        request: Incoming HTTP request; ``body`` is a list of entity dicts
            or a single entity dict.
        ctx: Motia flow context providing ``logger`` and ``enqueue``.

    Returns:
        ApiResponse: 200 with the enqueued IDs, 500 with the error message
        on unexpected failures.
    """
    try:
        payload = request.body or []
        ctx.logger.info("VMH Webhook Document Create empfangen")
        ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        # Normalize batch and single payloads into one list of entity dicts.
        if isinstance(payload, list):
            entities = [e for e in payload if isinstance(e, dict)]
        elif isinstance(payload, dict):
            entities = [payload]
        else:
            entities = []
        # Deduplicate by ID while keeping each entity's own metadata.
        # (The previous implementation applied the *last* entity's type to
        # every event and only read the first entity's timestamp.)
        docs = {}
        for entity in entities:
            entity_id = entity.get('id')
            if entity_id:
                docs[entity_id] = entity
        ctx.logger.info(f"{len(docs)} Document IDs zum Create-Sync gefunden")
        # Emit events für Queue-Processing (deduplication happens in the
        # event handler via a lock).
        for entity_id, entity in docs.items():
            await ctx.enqueue({
                'topic': 'vmh.document.create',
                'data': {
                    'entity_id': entity_id,
                    # Per-entity type/timestamp instead of batch-wide values.
                    'entity_type': entity.get('entityType', 'CDokumente'),
                    'action': 'create',
                    'timestamp': entity.get('modifiedAt')
                }
            })
        return ApiResponse(
            status_code=200,
            body={
                'success': True,
                'message': f'{len(docs)} Document(s) zum Sync enqueued',
                'entity_ids': list(docs)
            }
        )
    except Exception as e:
        ctx.logger.error(f"Fehler im Document Create Webhook: {e}")
        ctx.logger.error(f"Payload: {request.body}")
        return ApiResponse(
            status_code=500,
            body={
                'success': False,
                'error': str(e)
            }
        )

View File

@@ -0,0 +1,76 @@
"""VMH Webhook - Document Delete"""
import json
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse
# Motia step configuration: attaches this step to the 'vmh-documents' flow,
# exposes an HTTP POST trigger for the EspoCRM delete webhook, and declares
# the queue topic this handler enqueues to.
config = {
    "name": "VMH Webhook Document Delete",
    "description": "Empfängt Delete-Webhooks von EspoCRM für Documents",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/vmh/webhook/document/delete")
    ],
    "enqueues": ["vmh.document.delete"],
}
async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document deletion in EspoCRM.

    Receives batch (list) or single-entity (dict) notifications and emits
    one 'vmh.document.delete' queue event per unique entity ID so each
    document can be removed from xAI.

    Args:
        request: Incoming HTTP request; ``body`` is a list of entity dicts
            or a single entity dict.
        ctx: Motia flow context providing ``logger`` and ``enqueue``.

    Returns:
        ApiResponse: 200 with the enqueued IDs, 500 with the error message
        on unexpected failures.
    """
    try:
        payload = request.body or []
        ctx.logger.info("VMH Webhook Document Delete empfangen")
        ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        # Normalize batch and single payloads into one list of entity dicts.
        if isinstance(payload, list):
            entities = [e for e in payload if isinstance(e, dict)]
        elif isinstance(payload, dict):
            entities = [payload]
        else:
            entities = []
        # Deduplicate by ID while keeping each entity's own metadata.
        # (The previous implementation applied the *last* entity's type to
        # every event and only read the first entity's timestamp.)
        docs = {}
        for entity in entities:
            entity_id = entity.get('id')
            if entity_id:
                docs[entity_id] = entity
        ctx.logger.info(f"{len(docs)} Document IDs zum Delete-Sync gefunden")
        # Emit events für Queue-Processing
        for entity_id, entity in docs.items():
            await ctx.enqueue({
                'topic': 'vmh.document.delete',
                'data': {
                    'entity_id': entity_id,
                    # Per-entity type/timestamp instead of batch-wide values.
                    'entity_type': entity.get('entityType', 'CDokumente'),
                    'action': 'delete',
                    'timestamp': entity.get('deletedAt')
                }
            })
        return ApiResponse(
            status_code=200,
            body={
                'success': True,
                'message': f'{len(docs)} Document(s) zum Delete enqueued',
                'entity_ids': list(docs)
            }
        )
    except Exception as e:
        ctx.logger.error(f"Fehler im Document Delete Webhook: {e}")
        ctx.logger.error(f"Payload: {request.body}")
        return ApiResponse(
            status_code=500,
            body={
                'success': False,
                'error': str(e)
            }
        )

View File

@@ -0,0 +1,76 @@
"""VMH Webhook - Document Update"""
import json
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse
# Motia step configuration: attaches this step to the 'vmh-documents' flow,
# exposes an HTTP POST trigger for the EspoCRM update webhook, and declares
# the queue topic this handler enqueues to.
config = {
    "name": "VMH Webhook Document Update",
    "description": "Empfängt Update-Webhooks von EspoCRM für Documents",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/vmh/webhook/document/update")
    ],
    "enqueues": ["vmh.document.update"],
}
async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document updates in EspoCRM.

    Receives batch (list) or single-entity (dict) notifications and emits
    one 'vmh.document.update' queue event per unique entity ID so each
    document can be synced to xAI.

    Args:
        request: Incoming HTTP request; ``body`` is a list of entity dicts
            or a single entity dict.
        ctx: Motia flow context providing ``logger`` and ``enqueue``.

    Returns:
        ApiResponse: 200 with the enqueued IDs, 500 with the error message
        on unexpected failures.
    """
    try:
        payload = request.body or []
        ctx.logger.info("VMH Webhook Document Update empfangen")
        ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        # Normalize batch and single payloads into one list of entity dicts.
        if isinstance(payload, list):
            entities = [e for e in payload if isinstance(e, dict)]
        elif isinstance(payload, dict):
            entities = [payload]
        else:
            entities = []
        # Deduplicate by ID while keeping each entity's own metadata.
        # (The previous implementation applied the *last* entity's type to
        # every event and only read the first entity's timestamp.)
        docs = {}
        for entity in entities:
            entity_id = entity.get('id')
            if entity_id:
                docs[entity_id] = entity
        ctx.logger.info(f"{len(docs)} Document IDs zum Update-Sync gefunden")
        # Emit events für Queue-Processing
        for entity_id, entity in docs.items():
            await ctx.enqueue({
                'topic': 'vmh.document.update',
                'data': {
                    'entity_id': entity_id,
                    # Per-entity type/timestamp instead of batch-wide values.
                    'entity_type': entity.get('entityType', 'CDokumente'),
                    'action': 'update',
                    'timestamp': entity.get('modifiedAt')
                }
            })
        return ApiResponse(
            status_code=200,
            body={
                'success': True,
                'message': f'{len(docs)} Document(s) zum Sync enqueued',
                'entity_ids': list(docs)
            }
        )
    except Exception as e:
        ctx.logger.error(f"Fehler im Document Update Webhook: {e}")
        ctx.logger.error(f"Payload: {request.body}")
        return ApiResponse(
            status_code=500,
            body={
                'success': False,
                'error': str(e)
            }
        )