Compare commits

...

2 Commits

25 changed files with 244 additions and 92 deletions

View File

@@ -17,7 +17,7 @@ import pytz
from services.exceptions import LockAcquisitionError, SyncError, ValidationError
from services.redis_client import get_redis_client
from services.config import SYNC_CONFIG, get_lock_key, get_retry_delay_seconds
from services.logging_utils import get_logger
from services.logging_utils import get_service_logger
import redis
@@ -31,7 +31,7 @@ class BeteiligteSync:
def __init__(self, espocrm_api, redis_client: Optional[redis.Redis] = None, context=None):
self.espocrm = espocrm_api
self.context = context
self.logger = get_logger('beteiligte_sync', context)
self.logger = get_service_logger('beteiligte_sync', context)
# Use provided Redis client or get from factory
self.redis = redis_client or get_redis_client(strict=False)
@@ -46,6 +46,11 @@ class BeteiligteSync:
from services.notification_utils import NotificationManager
self.notification_manager = NotificationManager(espocrm_api=self.espocrm, context=context)
def _log(self, message: str, level: str = 'info') -> None:
"""Delegate logging to the logger with optional level"""
log_func = getattr(self.logger, level, self.logger.info)
log_func(message)
async def acquire_sync_lock(self, entity_id: str) -> bool:
"""
Atomic distributed lock via Redis + syncStatus update

View File

@@ -10,12 +10,9 @@ Hilfsfunktionen für Document-Synchronisation mit xAI:
from typing import Dict, Any, Optional, List, Tuple
from datetime import datetime, timedelta
import logging
from services.sync_utils_base import BaseSyncUtils
logger = logging.getLogger(__name__)
# Max retries before permanent failure
MAX_SYNC_RETRIES = 5

View File

@@ -5,6 +5,59 @@ Vereinheitlicht Logging über:
- Standard Python Logger
- Motia FlowContext Logger
- Structured Logging
Usage Guidelines:
=================
FOR SERVICES: Use get_service_logger('service_name', context)
-----------------------------------------------------------------
Example:
from services.logging_utils import get_service_logger
class XAIService:
def __init__(self, ctx=None):
self.logger = get_service_logger('xai', ctx)
def upload(self):
self.logger.info("Uploading file...")
FOR STEPS: Use ctx.logger directly (preferred)
-----------------------------------------------------------------
Steps already have ctx.logger available - use it directly:
async def handler(event_data, ctx: FlowContext):
ctx.logger.info("Processing event")
Alternative: Use get_step_logger() for additional loggers:
step_logger = get_step_logger('beteiligte_sync', ctx)
FOR SYNC UTILS: Inherit from BaseSyncUtils (provides self.logger)
-----------------------------------------------------------------
from services.sync_utils_base import BaseSyncUtils
class MySync(BaseSyncUtils):
def __init__(self, espocrm, redis, context):
super().__init__(espocrm, redis, context)
# self.logger is now available
def sync(self):
self._log("Syncing...", level='info')
FOR STANDALONE UTILITIES: Use get_logger()
-----------------------------------------------------------------
from services.logging_utils import get_logger
logger = get_logger('my_module', context)
logger.info("Processing...")
CONSISTENCY RULES:
==================
✅ Services: get_service_logger('service_name', ctx)
✅ Steps: ctx.logger (direct) or get_step_logger('step_name', ctx)
✅ Sync Utils: Inherit from BaseSyncUtils → use self._log() or self.logger
✅ Standalone: get_logger('module_name', ctx)
❌ DO NOT: Use module-level logging.getLogger(__name__)
❌ DO NOT: Mix get_logger() and get_service_logger() in same module
"""
import logging

View File

@@ -14,7 +14,7 @@ import pytz
from services.exceptions import RedisConnectionError, LockAcquisitionError
from services.redis_client import get_redis_client
from services.config import SYNC_CONFIG, get_lock_key
from services.logging_utils import get_logger
from services.logging_utils import get_service_logger
import redis
@@ -31,7 +31,7 @@ class BaseSyncUtils:
"""
self.espocrm = espocrm_api
self.context = context
self.logger = get_logger('sync_utils', context)
self.logger = get_service_logger('sync_utils', context)
# Use provided Redis client or get from factory
self.redis = redis_client or get_redis_client(strict=False)

View File

@@ -1,10 +1,8 @@
"""xAI Files & Collections Service"""
import os
import aiohttp
import logging
from typing import Optional, List
logger = logging.getLogger(__name__)
from services.logging_utils import get_service_logger
XAI_FILES_URL = "https://api.x.ai"
XAI_MANAGEMENT_URL = "https://management-api.x.ai"
@@ -23,6 +21,7 @@ class XAIService:
self.api_key = os.getenv('XAI_API_KEY', '')
self.management_key = os.getenv('XAI_MANAGEMENT_KEY', '')
self.ctx = ctx
self.logger = get_service_logger('xai', ctx)
self._session: Optional[aiohttp.ClientSession] = None
if not self.api_key:
@@ -31,10 +30,9 @@ class XAIService:
raise ValueError("XAI_MANAGEMENT_KEY not configured in environment")
def _log(self, msg: str, level: str = 'info') -> None:
if self.ctx:
getattr(self.ctx.logger, level, self.ctx.logger.info)(msg)
else:
getattr(logger, level, logger.info)(msg)
"""Delegate logging to service logger"""
log_func = getattr(self.logger, level, self.logger.info)
log_func(msg)
async def _get_session(self) -> aiohttp.ClientSession:
if self._session is None or self._session.closed:

View File

@@ -17,7 +17,7 @@ from calendar_sync_utils import (
import math
import time
from datetime import datetime
from typing import Any
from typing import Any, Dict
from motia import queue, FlowContext
from pydantic import BaseModel, Field
from services.advoware_service import AdvowareService
@@ -33,7 +33,7 @@ config = {
}
async def handler(input_data: dict, ctx: FlowContext):
async def handler(input_data: Dict[str, Any], ctx: FlowContext) -> None:
"""
Handler that fetches all employees, sorts by last sync time,
and emits calendar_sync_employee events for the oldest ones.

View File

@@ -9,6 +9,7 @@ from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent))
from calendar_sync_utils import log_operation
from typing import Dict, Any
from motia import cron, FlowContext
@@ -23,7 +24,7 @@ config = {
}
async def handler(input_data: dict, ctx: FlowContext):
async def handler(input_data: Dict[str, Any], ctx: FlowContext) -> None:
"""Cron handler that triggers the calendar sync cascade."""
try:
log_operation('info', "Calendar Sync Cron: Starting to emit sync-all event", context=ctx)
@@ -37,14 +38,6 @@ async def handler(input_data: dict, ctx: FlowContext):
})
log_operation('info', "Calendar Sync Cron: Emitted sync-all event", context=ctx)
return {
'status': 'completed',
'triggered_by': 'cron'
}
except Exception as e:
log_operation('error', f"Fehler beim Cron-Job: {e}", context=ctx)
return {
'status': 'error',
'error': str(e)
}

View File

@@ -14,6 +14,7 @@ import asyncio
import os
import datetime
from datetime import timedelta
from typing import Dict, Any
import pytz
import backoff
import time
@@ -945,14 +946,14 @@ config = {
}
async def handler(input_data: dict, ctx: FlowContext):
async def handler(input_data: Dict[str, Any], ctx: FlowContext) -> None:
"""Main event handler for calendar sync."""
start_time = time.time()
kuerzel = input_data.get('kuerzel')
if not kuerzel:
log_operation('error', "No kuerzel provided in event", context=ctx)
return {'status': 400, 'body': {'error': 'No kuerzel provided'}}
return
log_operation('info', f"Starting calendar sync for employee {kuerzel}", context=ctx)

View File

@@ -32,23 +32,33 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
body={'error': 'Endpoint required as query parameter'}
)
ctx.logger.info("=" * 80)
ctx.logger.info("🔄 ADVOWARE PROXY: DELETE REQUEST")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Endpoint: {endpoint}")
ctx.logger.info("=" * 80)
# Initialize Advoware client
advoware = AdvowareAPI(ctx)
# Forward all query params except 'endpoint'
params = {k: v for k, v in request.query_params.items() if k != 'endpoint'}
ctx.logger.info(f"Proxying DELETE request to Advoware: {endpoint}")
result = await advoware.api_call(
endpoint,
method='DELETE',
params=params
)
ctx.logger.info("✅ Proxy DELETE erfolgreich")
return ApiResponse(status=200, body={'result': result})
except Exception as e:
ctx.logger.error(f"Proxy error: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ ADVOWARE PROXY DELETE FEHLER")
ctx.logger.error(f"Endpoint: {request.query_params.get('endpoint', 'N/A')}")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}

View File

@@ -32,23 +32,33 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
body={'error': 'Endpoint required as query parameter'}
)
ctx.logger.info("=" * 80)
ctx.logger.info("🔄 ADVOWARE PROXY: GET REQUEST")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Endpoint: {endpoint}")
ctx.logger.info("=" * 80)
# Initialize Advoware client
advoware = AdvowareAPI(ctx)
# Forward all query params except 'endpoint'
params = {k: v for k, v in request.query_params.items() if k != 'endpoint'}
ctx.logger.info(f"Proxying GET request to Advoware: {endpoint}")
result = await advoware.api_call(
endpoint,
method='GET',
params=params
)
ctx.logger.info("✅ Proxy GET erfolgreich")
return ApiResponse(status=200, body={'result': result})
except Exception as e:
ctx.logger.error(f"Proxy error: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ ADVOWARE PROXY GET FEHLER")
ctx.logger.error(f"Endpoint: {request.query_params.get('endpoint', 'N/A')}")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}

View File

@@ -34,6 +34,12 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
body={'error': 'Endpoint required as query parameter'}
)
ctx.logger.info("=" * 80)
ctx.logger.info("🔄 ADVOWARE PROXY: POST REQUEST")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Endpoint: {endpoint}")
ctx.logger.info("=" * 80)
# Initialize Advoware client
advoware = AdvowareAPI(ctx)
@@ -43,7 +49,6 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
# Get request body
json_data = request.body
ctx.logger.info(f"Proxying POST request to Advoware: {endpoint}")
result = await advoware.api_call(
endpoint,
method='POST',
@@ -51,11 +56,17 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
json_data=json_data
)
ctx.logger.info("✅ Proxy POST erfolgreich")
return ApiResponse(status=200, body={'result': result})
except Exception as e:
ctx.logger.error(f"Proxy error: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ ADVOWARE PROXY POST FEHLER")
ctx.logger.error(f"Endpoint: {request.query_params.get('endpoint', 'N/A')}")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}
)
)

View File

@@ -34,6 +34,12 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
body={'error': 'Endpoint required as query parameter'}
)
ctx.logger.info("=" * 80)
ctx.logger.info("🔄 ADVOWARE PROXY: PUT REQUEST")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Endpoint: {endpoint}")
ctx.logger.info("=" * 80)
# Initialize Advoware client
advoware = AdvowareAPI(ctx)
@@ -43,7 +49,6 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
# Get request body
json_data = request.body
ctx.logger.info(f"Proxying PUT request to Advoware: {endpoint}")
result = await advoware.api_call(
endpoint,
method='PUT',
@@ -51,11 +56,17 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
json_data=json_data
)
ctx.logger.info("✅ Proxy PUT erfolgreich")
return ApiResponse(status=200, body={'result': result})
except Exception as e:
ctx.logger.error(f"Proxy error: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ ADVOWARE PROXY PUT FEHLER")
ctx.logger.error(f"Endpoint: {request.query_params.get('endpoint', 'N/A')}")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}
)
)

View File

@@ -34,7 +34,7 @@ config = {
}
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> None:
"""Zentraler Sync-Handler für Bankverbindungen"""
entity_id = event_data.get('entity_id')

View File

@@ -25,7 +25,7 @@ config = {
}
async def handler(input_data: Dict[str, Any], ctx: FlowContext):
async def handler(input_data: Dict[str, Any], ctx: FlowContext) -> None:
"""
Cron-Handler: Findet alle Beteiligte die Sync benötigen und emittiert Events
"""

View File

@@ -42,16 +42,13 @@ config = {
}
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> Optional[Dict[str, Any]]:
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> None:
"""
Zentraler Sync-Handler für Beteiligte
Args:
event_data: Event data mit entity_id, action, source
ctx: Motia FlowContext
Returns:
Optional result dict
"""
entity_id = event_data.get('entity_id')
action = event_data.get('action')
@@ -61,11 +58,13 @@ async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> Optional
if not entity_id:
step_logger.error("Keine entity_id im Event gefunden")
return None
return
step_logger.info(
f"🔄 Sync-Handler gestartet: {action.upper()} | Entity: {entity_id} | Source: {source}"
)
step_logger.info("=" * 80)
step_logger.info(f"🔄 BETEILIGTE SYNC HANDLER: {action.upper()}")
step_logger.info("=" * 80)
step_logger.info(f"Entity: {entity_id} | Source: {source}")
step_logger.info("=" * 80)
# Get shared Redis client (centralized)
redis_client = get_redis_client(strict=False)

View File

@@ -14,10 +14,9 @@ from motia import FlowContext
from services.espocrm import EspoCRMAPI
from services.document_sync_utils import DocumentSync
from services.xai_service import XAIService
from services.redis_client import get_redis_client
import hashlib
import json
import redis
import os
config = {
"name": "VMH Document Sync Handler",
@@ -32,7 +31,7 @@ config = {
}
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> None:
"""Zentraler Sync-Handler für Documents"""
entity_id = event_data.get('entity_id')
entity_type = event_data.get('entity_type', 'CDokumente') # Default: CDokumente
@@ -52,20 +51,11 @@ async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
ctx.logger.info(f"Source: {source}")
ctx.logger.info("=" * 80)
# Shared Redis client for distributed locking
redis_host = os.getenv('REDIS_HOST', 'localhost')
redis_port = int(os.getenv('REDIS_PORT', '6379'))
redis_db = int(os.getenv('REDIS_DB_ADVOWARE_CACHE', '1'))
# Shared Redis client for distributed locking (centralized factory)
redis_client = get_redis_client(strict=False)
redis_client = redis.Redis(
host=redis_host,
port=redis_port,
db=redis_db,
decode_responses=True
)
# APIs initialisieren
espocrm = EspoCRMAPI()
# APIs initialisieren (mit Context für besseres Logging)
espocrm = EspoCRMAPI(ctx)
sync_utils = DocumentSync(espocrm, redis_client, ctx)
xai_service = XAIService(ctx)

View File

@@ -23,8 +23,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Bankverbindungen Create empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: BANKVERBINDUNGEN CREATE")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
entity_ids = set()
@@ -50,7 +53,8 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info(f"VMH Create Webhook verarbeitet: {len(entity_ids)} Events emittiert")
ctx.logger.info("VMH Create Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
@@ -62,7 +66,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler beim Verarbeiten des VMH Create Webhooks: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: BANKVERBINDUNGEN CREATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}

View File

@@ -23,8 +23,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Bankverbindungen Delete empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: BANKVERBINDUNGEN DELETE")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs
entity_ids = set()
@@ -50,7 +53,8 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info(f"VMH Delete Webhook verarbeitet: {len(entity_ids)} Events emittiert")
ctx.logger.info("VMH Delete Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
@@ -62,7 +66,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler beim Verarbeiten des VMH Delete Webhooks: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: BANKVERBINDUNGEN DELETE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}

View File

@@ -23,8 +23,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Bankverbindungen Update empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: BANKVERBINDUNGEN UPDATE")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs
entity_ids = set()
@@ -50,7 +53,8 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info(f"VMH Update Webhook verarbeitet: {len(entity_ids)} Events emittiert")
ctx.logger.info("VMH Update Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
@@ -62,7 +66,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler beim Verarbeiten des VMH Update Webhooks: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: BANKVERBINDUNGEN UPDATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}

View File

@@ -26,8 +26,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Beteiligte Create empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: BETEILIGTE CREATE")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
entity_ids = set()
@@ -53,7 +56,8 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info(f"VMH Create Webhook verarbeitet: {len(entity_ids)} Events emittiert")
ctx.logger.info("VMH Create Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
@@ -65,7 +69,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler beim Verarbeiten des VMH Create Webhooks: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: VMH CREATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={

View File

@@ -23,8 +23,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Beteiligte Delete empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: BETEILIGTE DELETE")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
entity_ids = set()
@@ -50,7 +53,8 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info(f"VMH Delete Webhook verarbeitet: {len(entity_ids)} Events emittiert")
ctx.logger.info("VMH Delete Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
@@ -62,7 +66,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler beim Delete-Webhook: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: BETEILIGTE DELETE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={'error': 'Internal server error', 'details': str(e)}

View File

@@ -26,8 +26,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Beteiligte Update empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: BETEILIGTE UPDATE")
ctx.logger.info("=" * 80)
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
entity_ids = set()
@@ -53,7 +56,8 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info(f"VMH Update Webhook verarbeitet: {len(entity_ids)} Events emittiert")
ctx.logger.info("VMH Update Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
@@ -65,7 +69,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler beim Verarbeiten des VMH Update Webhooks: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: VMH UPDATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
body={

View File

@@ -25,17 +25,21 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Document Create empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: DOCUMENT CREATE")
ctx.logger.info("=" * 80)
ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
# Sammle alle IDs aus dem Batch
entity_ids = set()
entity_type = 'CDokumente' # Default
if isinstance(payload, list):
for entity in payload:
if isinstance(entity, dict) and 'id' in entity:
entity_ids.add(entity['id'])
# Extrahiere entityType falls vorhanden
# Take entityType from first entity if present
if entity_type == 'CDokumente':
entity_type = entity.get('entityType', 'CDokumente')
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
@@ -49,12 +53,15 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
'topic': 'vmh.document.create',
'data': {
'entity_id': entity_id,
'entity_type': entity_type if 'entity_type' in locals() else 'CDokumente',
'entity_type': entity_type,
'action': 'create',
'timestamp': payload[0].get('modifiedAt') if isinstance(payload, list) and payload else None
}
})
ctx.logger.info("✅ Document Create Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
body={
@@ -65,8 +72,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler im Document Create Webhook: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: DOCUMENT CREATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error(f"Payload: {request.body}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,

View File

@@ -25,16 +25,21 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Document Delete empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: DOCUMENT DELETE")
ctx.logger.info("=" * 80)
ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
# Sammle alle IDs aus dem Batch
entity_ids = set()
entity_type = 'CDokumente' # Default
if isinstance(payload, list):
for entity in payload:
if isinstance(entity, dict) and 'id' in entity:
entity_ids.add(entity['id'])
# Take entityType from first entity if present
if entity_type == 'CDokumente':
entity_type = entity.get('entityType', 'CDokumente')
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
@@ -48,12 +53,15 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
'topic': 'vmh.document.delete',
'data': {
'entity_id': entity_id,
'entity_type': entity_type if 'entity_type' in locals() else 'CDokumente',
'entity_type': entity_type,
'action': 'delete',
'timestamp': payload[0].get('deletedAt') if isinstance(payload, list) and payload else None
}
})
ctx.logger.info("✅ Document Delete Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
body={
@@ -64,8 +72,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler im Document Delete Webhook: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: DOCUMENT DELETE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error(f"Payload: {request.body}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,

View File

@@ -25,16 +25,21 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
try:
payload = request.body or []
ctx.logger.info("VMH Webhook Document Update empfangen")
ctx.logger.info("=" * 80)
ctx.logger.info("📥 VMH WEBHOOK: DOCUMENT UPDATE")
ctx.logger.info("=" * 80)
ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
# Sammle alle IDs aus dem Batch
entity_ids = set()
entity_type = 'CDokumente' # Default
if isinstance(payload, list):
for entity in payload:
if isinstance(entity, dict) and 'id' in entity:
entity_ids.add(entity['id'])
# Take entityType from first entity if present
if entity_type == 'CDokumente':
entity_type = entity.get('entityType', 'CDokumente')
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
@@ -48,12 +53,15 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
'topic': 'vmh.document.update',
'data': {
'entity_id': entity_id,
'entity_type': entity_type if 'entity_type' in locals() else 'CDokumente',
'entity_type': entity_type,
'action': 'update',
'timestamp': payload[0].get('modifiedAt') if isinstance(payload, list) and payload else None
}
})
ctx.logger.info("✅ Document Update Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
return ApiResponse(
status=200,
body={
@@ -64,8 +72,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
)
except Exception as e:
ctx.logger.error(f"Fehler im Document Update Webhook: {e}")
ctx.logger.error("=" * 80)
ctx.logger.error("❌ FEHLER: DOCUMENT UPDATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error(f"Payload: {request.body}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,