Files
motia-iii/steps/espocrm_webhooks/document_create_webhook_api_step.py
bsiggel c45bfb7233 Enhance EspoCRM API and Webhook Handling
- Improved logging for file uploads in EspoCRMAPI to include upload parameters and error details.
- Updated cron job configurations for calendar sync and participant sync to trigger every 15 minutes on the first minute of the hour.
- Enhanced document create, delete, and update webhook handlers to determine and log the entity type.
- Refactored document sync event handler to include entity type in sync operations and logging.
- Added a new test script for uploading preview images to EspoCRM and verifying the upload process.
- Created a test script for document thumbnail generation, including document creation, file upload, webhook triggering, and preview verification.
2026-03-03 16:53:55 +00:00

209 lines
9.7 KiB
Python

"""EspoCRM Webhook - Document Create
Empfängt Create-Webhooks von EspoCRM für Documents.
Loggt detailliert alle Payload-Informationen für Analyse.
"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse
# Motia step configuration: registers the handler below as an HTTP-triggered
# webhook endpoint inside the "vmh-documents" flow.
config = {
    # Human-readable step name
    "name": "VMH Webhook Document Create",
    # German: "Receives create webhooks from EspoCRM for Document entities"
    "description": "Empfängt Create-Webhooks von EspoCRM für Document Entities",
    # Flow(s) this step belongs to
    "flows": ["vmh-documents"],
    # HTTP trigger: EspoCRM POSTs its webhook payload to this path
    "triggers": [
        http("POST", "/vmh/webhook/document/create")
    ],
    # Topic(s) this step is allowed to enqueue — presumably consumed by the
    # document sync event handler; verify against that step's subscriptions.
    "enqueues": ["vmh.document.create"],
}
async def handler(request: "ApiRequest", ctx: "FlowContext[Any]") -> "ApiResponse":
    """Webhook handler for Document creation in EspoCRM.

    Logs the full payload for analysis, extracts all unique document IDs
    (in payload order) and emits one ``vmh.document.create`` queue event
    per ID.  Always answers 200 unless an unexpected error occurs, so
    EspoCRM does not retry deliveries that simply carried no usable IDs.

    Args:
        request: Incoming HTTP request; ``request.body`` is the EspoCRM
            webhook payload (a list of entity dicts or a single dict).
        ctx: Motia flow context providing ``logger`` and ``enqueue``.

    Returns:
        ApiResponse with status 200 (summary body) or 500 on internal error.
    """
    try:
        payload = request.body or []

        # ═══════════════════════════════════════════════════════════════
        # DETAILED LOGGING FOR ANALYSIS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 EspoCRM DOCUMENT CREATE WEBHOOK EMPFANGEN")
        ctx.logger.info("=" * 80)

        _log_request_headers(request, ctx)

        ctx.logger.info(f"\n📦 PAYLOAD TYPE: {type(payload).__name__}")
        ctx.logger.info(f"📦 PAYLOAD LENGTH: {len(payload) if isinstance(payload, (list, dict)) else 'N/A'}")
        ctx.logger.info("\n📄 FULL PAYLOAD:")
        ctx.logger.info(json.dumps(payload, indent=2, ensure_ascii=False))

        # ═══════════════════════════════════════════════════════════════
        # PAYLOAD ANALYSIS & ID EXTRACTION
        # ═══════════════════════════════════════════════════════════════
        # Normalize to a list of dict entities so list- and single-dict
        # payloads share one code path (previously ~40 duplicated lines).
        if isinstance(payload, list):
            ctx.logger.info(f"\n✅ Payload ist LIST mit {len(payload)} Einträgen")
            entities = [e for e in payload if isinstance(e, dict)]
        elif isinstance(payload, dict):
            ctx.logger.info("\n✅ Payload ist SINGLE DICT")
            entities = [payload]
        else:
            ctx.logger.warning(f"⚠️ Unerwarteter Payload-Typ: {type(payload)}")
            entities = []

        # A dict keeps insertion order, so events fire and document_ids are
        # returned in payload order (the previous `set` had arbitrary order).
        entity_ids: dict = {}
        for idx, entity in enumerate(entities):
            _log_entity_details(entity, idx, ctx)
            entity_id = entity.get('id')
            if entity_id:
                entity_ids[entity_id] = None

        # ═══════════════════════════════════════════════════════════════
        # EMIT QUEUE EVENTS
        # ═══════════════════════════════════════════════════════════════
        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info(f"📊 ZUSAMMENFASSUNG: {len(entity_ids)} Document(s) gefunden")
        ctx.logger.info("=" * 80)

        if not entity_ids:
            ctx.logger.warning("⚠️ Keine Document-IDs im Payload gefunden!")
            return ApiResponse(
                status=200,
                body={
                    'status': 'received',
                    'action': 'create',
                    'ids_count': 0,
                    'warning': 'No document IDs found in payload'
                }
            )

        # Determine the entity type from the first entity that carries one.
        # (The previous code inspected only payload[0] and raised
        # AttributeError when that element was not a dict.)
        entity_type = 'CDokumente'  # default for VMH
        for entity in entities:
            found = entity.get('entityType') or entity.get('_scope')
            if found:
                entity_type = found
                break
        ctx.logger.info(f"📝 Entity-Type: {entity_type}")

        # Emit events for queue processing; deduplication across deliveries
        # happens in the event handler via lock.
        # NOTE(review): timestamp is naive local time — consider
        # datetime.datetime.now(datetime.timezone.utc) once downstream
        # consumers of this field are checked.
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.document.create',
                'data': {
                    'entity_id': entity_id,
                    'entity_type': entity_type,
                    'action': 'create',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })
            ctx.logger.info(f"✅ Event emittiert: vmh.document.create für ID {entity_id} (Type: {entity_type})")

        ctx.logger.info("\n" + "=" * 80)
        ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
        ctx.logger.info("=" * 80)

        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'create',
                'ids_count': len(entity_ids),
                'document_ids': list(entity_ids)
            }
        )
    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"❌ FEHLER beim Verarbeiten des Document Create Webhooks")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error Type: {type(e).__name__}")
        ctx.logger.error(f"Error Message: {str(e)}")
        # Log stack trace for diagnosis; import locally to keep the happy
        # path free of the dependency.
        import traceback
        ctx.logger.error(f"Stack Trace:\n{traceback.format_exc()}")
        return ApiResponse(
            status=500,
            body={
                'error': 'Internal server error',
                'error_type': type(e).__name__,
                'details': str(e)
            }
        )


def _log_request_headers(request: "ApiRequest", ctx: "FlowContext[Any]") -> None:
    """Log all HTTP request headers, or a placeholder if none are available."""
    ctx.logger.info("\n🔍 REQUEST HEADERS:")
    if hasattr(request, 'headers'):
        for key, value in request.headers.items():
            ctx.logger.info(f"  {key}: {value}")
    else:
        ctx.logger.info("  (keine Headers verfügbar)")


def _log_entity_details(entity: dict, idx: int, ctx: "FlowContext[Any]") -> None:
    """Log core fields plus xAI- and relationship-related fields of one entity."""
    ctx.logger.info(f"\n  📄 Document #{idx + 1}:")
    ctx.logger.info(f"     ID: {entity.get('id')}")
    ctx.logger.info(f"     Name: {entity.get('name', 'N/A')}")
    ctx.logger.info(f"     Type: {entity.get('type', 'N/A')}")
    ctx.logger.info(f"     Size: {entity.get('size', 'N/A')} bytes")
    ctx.logger.info(f"     Verfügbare Felder: {', '.join(entity.keys())}")

    # xAI-relevant fields (if present)
    xai_fields = {k: v for k, v in entity.items()
                  if 'xai' in k.lower() or 'collection' in k.lower()}
    if xai_fields:
        ctx.logger.info(f"     🤖 xAI-Felder: {json.dumps(xai_fields, ensure_ascii=False)}")

    # Parent/relationship fields
    rel_fields = {k: v for k, v in entity.items()
                  if 'parent' in k.lower() or 'related' in k.lower() or
                  'link' in k.lower() or k.endswith('Id') or k.endswith('Ids')}
    if rel_fields:
        ctx.logger.info(f"     🔗 Relationship-Felder: {json.dumps(rel_fields, ensure_ascii=False)}")