Refactor and enhance logging in webhook handlers and Redis client

- Translated comments and docstrings from German to English for better clarity.
- Improved logging consistency across various webhook handlers for create, delete, and update operations.
- Centralized logging functionality by utilizing a dedicated logger utility.
- Added new enums for file and XAI sync statuses in models.
- Updated Redis client factory to use a centralized logger and improved error handling.
- Updated API responses to use the `status_code` parameter (renamed from `status`) and more descriptive body messages.
This commit is contained in:
bsiggel
2026-03-08 21:50:34 +00:00
parent f392ec0f06
commit a0cf845877
17 changed files with 300 additions and 276 deletions

View File

@@ -7,7 +7,7 @@ from motia import FlowContext, http, ApiRequest, ApiResponse
config = {
"name": "VMH Webhook Bankverbindungen Create",
"description": "Empfängt Create-Webhooks von EspoCRM für Bankverbindungen",
"description": "Receives create webhooks from EspoCRM for Bankverbindungen",
"flows": ["vmh-bankverbindungen"],
"triggers": [
http("POST", "/vmh/webhook/bankverbindungen/create")
@@ -29,7 +29,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
# Collect all IDs from batch
entity_ids = set()
if isinstance(payload, list):
@@ -39,7 +39,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
ctx.logger.info(f"{len(entity_ids)} IDs zum Create-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} IDs found for create sync")
# Emit events
for entity_id in entity_ids:
@@ -53,11 +53,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ VMH Create Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ VMH Create Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'status': 'received',
'action': 'create',
@@ -67,10 +67,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: BANKVERBINDUNGEN CREATE WEBHOOK")
ctx.logger.error("ERROR: BANKVERBINDUNGEN CREATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={'error': 'Internal server error', 'details': str(e)}
)

View File

@@ -7,7 +7,7 @@ from motia import FlowContext, http, ApiRequest, ApiResponse
config = {
"name": "VMH Webhook Bankverbindungen Delete",
"description": "Empfängt Delete-Webhooks von EspoCRM für Bankverbindungen",
"description": "Receives delete webhooks from EspoCRM for Bankverbindungen",
"flows": ["vmh-bankverbindungen"],
"triggers": [
http("POST", "/vmh/webhook/bankverbindungen/delete")
@@ -29,7 +29,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs
# Collect all IDs
entity_ids = set()
if isinstance(payload, list):
@@ -39,7 +39,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
ctx.logger.info(f"{len(entity_ids)} IDs zum Delete-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} IDs found for delete sync")
# Emit events
for entity_id in entity_ids:
@@ -53,11 +53,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ VMH Delete Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ VMH Delete Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'status': 'received',
'action': 'delete',
@@ -67,10 +67,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: BANKVERBINDUNGEN DELETE WEBHOOK")
ctx.logger.error("ERROR: BANKVERBINDUNGEN DELETE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={'error': 'Internal server error', 'details': str(e)}
)

View File

@@ -7,7 +7,7 @@ from motia import FlowContext, http, ApiRequest, ApiResponse
config = {
"name": "VMH Webhook Bankverbindungen Update",
"description": "Empfängt Update-Webhooks von EspoCRM für Bankverbindungen",
"description": "Receives update webhooks from EspoCRM for Bankverbindungen",
"flows": ["vmh-bankverbindungen"],
"triggers": [
http("POST", "/vmh/webhook/bankverbindungen/update")
@@ -29,7 +29,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs
# Collect all IDs
entity_ids = set()
if isinstance(payload, list):
@@ -39,7 +39,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
ctx.logger.info(f"{len(entity_ids)} IDs zum Update-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} IDs found for update sync")
# Emit events
for entity_id in entity_ids:
@@ -53,11 +53,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ VMH Update Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ VMH Update Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'status': 'received',
'action': 'update',
@@ -67,10 +67,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: BANKVERBINDUNGEN UPDATE WEBHOOK")
ctx.logger.error("ERROR: BANKVERBINDUNGEN UPDATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={'error': 'Internal server error', 'details': str(e)}
)

View File

@@ -7,7 +7,7 @@ from motia import FlowContext, http, ApiRequest, ApiResponse
config = {
"name": "VMH Webhook Beteiligte Create",
"description": "Empfängt Create-Webhooks von EspoCRM für Beteiligte",
"description": "Receives create webhooks from EspoCRM for Beteiligte",
"flows": ["vmh-beteiligte"],
"triggers": [
http("POST", "/vmh/webhook/beteiligte/create")
@@ -32,7 +32,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
# Collect all IDs from batch
entity_ids = set()
if isinstance(payload, list):
@@ -42,9 +42,9 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
ctx.logger.info(f"{len(entity_ids)} IDs zum Create-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} IDs found for create sync")
# Emit events für Queue-Processing (Deduplizierung erfolgt im Event-Handler via Lock)
# Emit events for queue processing (deduplication via lock in event handler)
for entity_id in entity_ids:
await ctx.enqueue({
'topic': 'vmh.beteiligte.create',
@@ -56,11 +56,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ VMH Create Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ VMH Create Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'status': 'received',
'action': 'create',
@@ -70,11 +70,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: VMH CREATE WEBHOOK")
ctx.logger.error("ERROR: VMH CREATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={
'error': 'Internal server error',
'details': str(e)

View File

@@ -7,7 +7,7 @@ from motia import FlowContext, http, ApiRequest, ApiResponse
config = {
"name": "VMH Webhook Beteiligte Delete",
"description": "Empfängt Delete-Webhooks von EspoCRM für Beteiligte",
"description": "Receives delete webhooks from EspoCRM for Beteiligte",
"flows": ["vmh-beteiligte"],
"triggers": [
http("POST", "/vmh/webhook/beteiligte/delete")
@@ -29,7 +29,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
# Collect all IDs from batch
entity_ids = set()
if isinstance(payload, list):
@@ -39,9 +39,9 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
ctx.logger.info(f"{len(entity_ids)} IDs zum Delete-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} IDs found for delete sync")
# Emit events für Queue-Processing
# Emit events for queue processing
for entity_id in entity_ids:
await ctx.enqueue({
'topic': 'vmh.beteiligte.delete',
@@ -53,11 +53,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ VMH Delete Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ VMH Delete Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'status': 'received',
'action': 'delete',
@@ -67,10 +67,10 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: BETEILIGTE DELETE WEBHOOK")
ctx.logger.error("ERROR: BETEILIGTE DELETE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={'error': 'Internal server error', 'details': str(e)}
)

View File

@@ -7,7 +7,7 @@ from motia import FlowContext, http, ApiRequest, ApiResponse
config = {
"name": "VMH Webhook Beteiligte Update",
"description": "Empfängt Update-Webhooks von EspoCRM für Beteiligte",
"description": "Receives update webhooks from EspoCRM for Beteiligte",
"flows": ["vmh-beteiligte"],
"triggers": [
http("POST", "/vmh/webhook/beteiligte/update")
@@ -20,8 +20,8 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
"""
Webhook handler for Beteiligte updates in EspoCRM.
Note: Loop-Prevention ist auf EspoCRM-Seite implementiert.
rowId-Updates triggern keine Webhooks mehr, daher keine Filterung nötig.
Note: Loop prevention is implemented on EspoCRM side.
rowId updates no longer trigger webhooks, so no filtering needed.
"""
try:
payload = request.body or []
@@ -32,7 +32,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
ctx.logger.info("=" * 80)
# Sammle alle IDs aus dem Batch
# Collect all IDs from batch
entity_ids = set()
if isinstance(payload, list):
@@ -42,9 +42,9 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
elif isinstance(payload, dict) and 'id' in payload:
entity_ids.add(payload['id'])
ctx.logger.info(f"{len(entity_ids)} IDs zum Update-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} IDs found for update sync")
# Emit events für Queue-Processing
# Emit events for queue processing
for entity_id in entity_ids:
await ctx.enqueue({
'topic': 'vmh.beteiligte.update',
@@ -56,11 +56,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ VMH Update Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ VMH Update Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'status': 'received',
'action': 'update',
@@ -70,11 +70,11 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: VMH UPDATE WEBHOOK")
ctx.logger.error("ERROR: VMH UPDATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={
'error': 'Internal server error',
'details': str(e)

View File

@@ -30,7 +30,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info("=" * 80)
ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
# Sammle alle IDs aus dem Batch
# Collect all IDs from batch
entity_ids = set()
entity_type = 'CDokumente' # Default
@@ -45,9 +45,9 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
entity_ids.add(payload['id'])
entity_type = payload.get('entityType', 'CDokumente')
ctx.logger.info(f"{len(entity_ids)} Document IDs zum Create-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} document IDs found for create sync")
# Emit events für Queue-Processing (Deduplizierung erfolgt im Event-Handler via Lock)
# Emit events for queue processing (deduplication via lock in event handler)
for entity_id in entity_ids:
await ctx.enqueue({
'topic': 'vmh.document.create',
@@ -59,27 +59,27 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ Document Create Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ Document Create Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'success': True,
'message': f'{len(entity_ids)} Document(s) zum Sync enqueued',
'message': f'{len(entity_ids)} document(s) enqueued for sync',
'entity_ids': list(entity_ids)
}
)
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: DOCUMENT CREATE WEBHOOK")
ctx.logger.error("ERROR: DOCUMENT CREATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error(f"Payload: {request.body}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={
'success': False,
'error': str(e)

View File

@@ -30,7 +30,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info("=" * 80)
ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
# Sammle alle IDs aus dem Batch
# Collect all IDs from batch
entity_ids = set()
entity_type = 'CDokumente' # Default
@@ -45,9 +45,9 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
entity_ids.add(payload['id'])
entity_type = payload.get('entityType', 'CDokumente')
ctx.logger.info(f"{len(entity_ids)} Document IDs zum Delete-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} document IDs found for delete sync")
# Emit events für Queue-Processing
# Emit events for queue processing
for entity_id in entity_ids:
await ctx.enqueue({
'topic': 'vmh.document.delete',
@@ -59,27 +59,27 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ Document Delete Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ Document Delete Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'success': True,
'message': f'{len(entity_ids)} Document(s) zum Delete enqueued',
'message': f'{len(entity_ids)} document(s) enqueued for deletion',
'entity_ids': list(entity_ids)
}
)
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: DOCUMENT DELETE WEBHOOK")
ctx.logger.error("ERROR: DOCUMENT DELETE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error(f"Payload: {request.body}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={
'success': False,
'error': str(e)

View File

@@ -30,7 +30,7 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
ctx.logger.info("=" * 80)
ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
# Sammle alle IDs aus dem Batch
# Collect all IDs from batch
entity_ids = set()
entity_type = 'CDokumente' # Default
@@ -45,9 +45,9 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
entity_ids.add(payload['id'])
entity_type = payload.get('entityType', 'CDokumente')
ctx.logger.info(f"{len(entity_ids)} Document IDs zum Update-Sync gefunden")
ctx.logger.info(f"{len(entity_ids)} document IDs found for update sync")
# Emit events für Queue-Processing
# Emit events for queue processing
for entity_id in entity_ids:
await ctx.enqueue({
'topic': 'vmh.document.update',
@@ -59,27 +59,27 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
}
})
ctx.logger.info("✅ Document Update Webhook verarbeitet: "
f"{len(entity_ids)} Events emittiert")
ctx.logger.info("✅ Document Update Webhook processed: "
f"{len(entity_ids)} events emitted")
return ApiResponse(
status=200,
status_code=200,
body={
'success': True,
'message': f'{len(entity_ids)} Document(s) zum Sync enqueued',
'message': f'{len(entity_ids)} document(s) enqueued for sync',
'entity_ids': list(entity_ids)
}
)
except Exception as e:
ctx.logger.error("=" * 80)
ctx.logger.error("FEHLER: DOCUMENT UPDATE WEBHOOK")
ctx.logger.error("ERROR: DOCUMENT UPDATE WEBHOOK")
ctx.logger.error(f"Error: {e}")
ctx.logger.error(f"Payload: {request.body}")
ctx.logger.error("=" * 80)
return ApiResponse(
status=500,
status_code=500,
body={
'success': False,
'error': str(e)