Enhance EspoCRM API and Webhook Handling
- Improved logging for file uploads in EspoCRMAPI to include upload parameters and error details. - Updated cron job configurations for calendar sync and participant sync to trigger every 15 minutes on the first minute of the hour. - Enhanced document create, delete, and update webhook handlers to determine and log the entity type. - Refactored document sync event handler to include entity type in sync operations and logging. - Added a new test script for uploading preview images to EspoCRM and verifying the upload process. - Created a test script for document thumbnail generation, including document creation, file upload, webhook triggering, and preview verification.
This commit is contained in:
@@ -61,12 +61,13 @@ class DocumentSync:
|
|||||||
else:
|
else:
|
||||||
getattr(logger, level)(message)
|
getattr(logger, level)(message)
|
||||||
|
|
||||||
async def acquire_sync_lock(self, entity_id: str) -> bool:
|
async def acquire_sync_lock(self, entity_id: str, entity_type: str = 'CDokumente') -> bool:
|
||||||
"""
|
"""
|
||||||
Atomic distributed lock via Redis + syncStatus update
|
Atomic distributed lock via Redis + syncStatus update
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
entity_id: EspoCRM Document ID
|
entity_id: EspoCRM Document ID
|
||||||
|
entity_type: Entity-Type (CDokumente oder Document)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
True wenn Lock erfolgreich, False wenn bereits im Sync
|
True wenn Lock erfolgreich, False wenn bereits im Sync
|
||||||
@@ -78,19 +79,20 @@ class DocumentSync:
|
|||||||
acquired = self.redis.set(lock_key, "locked", nx=True, ex=LOCK_TTL_SECONDS)
|
acquired = self.redis.set(lock_key, "locked", nx=True, ex=LOCK_TTL_SECONDS)
|
||||||
|
|
||||||
if not acquired:
|
if not acquired:
|
||||||
self._log(f"Redis lock bereits aktiv für Document {entity_id}", level='warn')
|
self._log(f"Redis lock bereits aktiv für {entity_type} {entity_id}", level='warn')
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# STEP 2: Update syncStatus (für UI visibility) - falls Feld existiert
|
# STEP 2: Update syncStatus (für UI visibility) - nur bei Document Entity
|
||||||
# NOTE: Ggf. muss syncStatus bei Document Entity erst angelegt werden
|
# CDokumente hat dieses Feld nicht - überspringen
|
||||||
try:
|
if entity_type == 'Document':
|
||||||
await self.espocrm.update_entity('Document', entity_id, {
|
try:
|
||||||
'xaiSyncStatus': 'syncing'
|
await self.espocrm.update_entity(entity_type, entity_id, {
|
||||||
})
|
'xaiSyncStatus': 'syncing'
|
||||||
except Exception as e:
|
})
|
||||||
self._log(f"Konnte xaiSyncStatus nicht setzen (Feld existiert evtl. nicht): {e}", level='debug')
|
except Exception as e:
|
||||||
|
self._log(f"Konnte xaiSyncStatus nicht setzen: {e}", level='debug')
|
||||||
|
|
||||||
self._log(f"Sync-Lock für Document {entity_id} erworben")
|
self._log(f"Sync-Lock für {entity_type} {entity_id} erworben")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -109,7 +111,8 @@ class DocumentSync:
|
|||||||
entity_id: str,
|
entity_id: str,
|
||||||
success: bool = True,
|
success: bool = True,
|
||||||
error_message: Optional[str] = None,
|
error_message: Optional[str] = None,
|
||||||
extra_fields: Optional[Dict[str, Any]] = None
|
extra_fields: Optional[Dict[str, Any]] = None,
|
||||||
|
entity_type: str = 'CDokumente'
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""
|
||||||
Gibt Sync-Lock frei und setzt finalen Status
|
Gibt Sync-Lock frei und setzt finalen Status
|
||||||
@@ -119,29 +122,31 @@ class DocumentSync:
|
|||||||
success: Ob Sync erfolgreich war
|
success: Ob Sync erfolgreich war
|
||||||
error_message: Optional: Fehlermeldung
|
error_message: Optional: Fehlermeldung
|
||||||
extra_fields: Optional: Zusätzliche Felder (z.B. xaiFileId, xaiCollections)
|
extra_fields: Optional: Zusätzliche Felder (z.B. xaiFileId, xaiCollections)
|
||||||
|
entity_type: Entity-Type (CDokumente oder Document)
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
update_data = {}
|
update_data = {}
|
||||||
|
|
||||||
# Status-Feld (falls vorhanden)
|
# Status-Felder nur bei Document Entity (CDokumente hat diese Felder nicht)
|
||||||
try:
|
if entity_type == 'Document':
|
||||||
update_data['xaiSyncStatus'] = 'synced' if success else 'failed'
|
try:
|
||||||
|
update_data['xaiSyncStatus'] = 'synced' if success else 'failed'
|
||||||
|
|
||||||
if error_message:
|
if error_message:
|
||||||
update_data['xaiSyncError'] = error_message[:2000]
|
update_data['xaiSyncError'] = error_message[:2000]
|
||||||
else:
|
else:
|
||||||
update_data['xaiSyncError'] = None
|
update_data['xaiSyncError'] = None
|
||||||
except:
|
except:
|
||||||
pass # Felder existieren evtl. nicht
|
pass # Felder existieren evtl. nicht
|
||||||
|
|
||||||
# Merge extra fields (z.B. xaiFileId, xaiCollections)
|
# Merge extra fields (z.B. xaiFileId, xaiCollections)
|
||||||
if extra_fields:
|
if extra_fields:
|
||||||
update_data.update(extra_fields)
|
update_data.update(extra_fields)
|
||||||
|
|
||||||
if update_data:
|
if update_data:
|
||||||
await self.espocrm.update_entity('Document', entity_id, update_data)
|
await self.espocrm.update_entity(entity_type, entity_id, update_data)
|
||||||
|
|
||||||
self._log(f"Sync-Lock released: Document {entity_id} → {'success' if success else 'failed'}")
|
self._log(f"Sync-Lock released: {entity_type} {entity_id} → {'success' if success else 'failed'}")
|
||||||
|
|
||||||
# Release Redis lock
|
# Release Redis lock
|
||||||
if self.redis:
|
if self.redis:
|
||||||
@@ -322,12 +327,17 @@ class DocumentSync:
|
|||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
async def get_document_download_info(self, document_id: str) -> Optional[Dict[str, Any]]:
|
async def get_document_download_info(self, document_id: str, entity_type: str = 'CDokumente') -> Optional[Dict[str, Any]]:
|
||||||
"""
|
"""
|
||||||
Holt Download-Informationen für ein Document
|
Holt Download-Informationen für ein Document
|
||||||
|
|
||||||
|
Args:
|
||||||
|
document_id: ID des Documents
|
||||||
|
entity_type: Entity-Type (CDokumente oder Document)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict mit:
|
Dict mit:
|
||||||
|
- attachment_id: ID des Attachments
|
||||||
- download_url: URL zum Download
|
- download_url: URL zum Download
|
||||||
- filename: Dateiname
|
- filename: Dateiname
|
||||||
- mime_type: MIME-Type
|
- mime_type: MIME-Type
|
||||||
@@ -335,25 +345,49 @@ class DocumentSync:
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
# Hole vollständiges Document
|
# Hole vollständiges Document
|
||||||
doc = await self.espocrm.get_entity('Document', document_id)
|
doc = await self.espocrm.get_entity(entity_type, document_id)
|
||||||
|
|
||||||
# EspoCRM Document hat Attachments (Attachment ID in attachmentsIds)
|
# EspoCRM Documents können Files auf verschiedene Arten speichern:
|
||||||
attachment_ids = doc.get('attachmentsIds') or []
|
# CDokumente: dokumentId/dokumentName (Custom Entity)
|
||||||
|
# Document: fileId/fileName ODER attachmentsIds
|
||||||
|
|
||||||
if not attachment_ids:
|
attachment_id = None
|
||||||
self._log(f"⚠️ Document {document_id} hat keine Attachments", level='warn')
|
filename = None
|
||||||
|
|
||||||
|
# Prüfe zuerst dokumentId (CDokumente Custom Entity)
|
||||||
|
if doc.get('dokumentId'):
|
||||||
|
attachment_id = doc.get('dokumentId')
|
||||||
|
filename = doc.get('dokumentName')
|
||||||
|
self._log(f"📎 CDokumente verwendet dokumentId: {attachment_id}")
|
||||||
|
|
||||||
|
# Fallback: fileId (Standard Document Entity)
|
||||||
|
elif doc.get('fileId'):
|
||||||
|
attachment_id = doc.get('fileId')
|
||||||
|
filename = doc.get('fileName')
|
||||||
|
self._log(f"📎 Document verwendet fileId: {attachment_id}")
|
||||||
|
|
||||||
|
# Fallback 2: attachmentsIds (z.B. bei zusätzlichen Attachments)
|
||||||
|
elif doc.get('attachmentsIds'):
|
||||||
|
attachment_ids = doc.get('attachmentsIds')
|
||||||
|
if attachment_ids:
|
||||||
|
attachment_id = attachment_ids[0]
|
||||||
|
self._log(f"📎 Document verwendet attachmentsIds: {attachment_id}")
|
||||||
|
|
||||||
|
if not attachment_id:
|
||||||
|
self._log(f"⚠️ {entity_type} {document_id} hat weder dokumentId, fileId noch attachmentsIds", level='warn')
|
||||||
|
self._log(f" Verfügbare Felder: {list(doc.keys())}")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
# Nehme erstes Attachment (Documents haben normalerweise nur 1 File)
|
|
||||||
attachment_id = attachment_ids[0]
|
|
||||||
|
|
||||||
# Hole Attachment-Details
|
# Hole Attachment-Details
|
||||||
attachment = await self.espocrm.get_entity('Attachment', attachment_id)
|
attachment = await self.espocrm.get_entity('Attachment', attachment_id)
|
||||||
|
|
||||||
|
# Filename: Nutze dokumentName/fileName falls vorhanden, sonst aus Attachment
|
||||||
|
final_filename = filename or attachment.get('name', 'unknown')
|
||||||
|
|
||||||
return {
|
return {
|
||||||
'attachment_id': attachment_id,
|
'attachment_id': attachment_id,
|
||||||
'download_url': f"/api/v1/Attachment/file/{attachment_id}",
|
'download_url': f"/api/v1/Attachment/file/{attachment_id}",
|
||||||
'filename': attachment.get('name', 'unknown'),
|
'filename': final_filename,
|
||||||
'mime_type': attachment.get('type', 'application/octet-stream'),
|
'mime_type': attachment.get('type', 'application/octet-stream'),
|
||||||
'size': attachment.get('size', 0)
|
'size': attachment.get('size', 0)
|
||||||
}
|
}
|
||||||
@@ -476,7 +510,8 @@ class DocumentSync:
|
|||||||
xai_file_id: Optional[str] = None,
|
xai_file_id: Optional[str] = None,
|
||||||
collection_ids: Optional[List[str]] = None,
|
collection_ids: Optional[List[str]] = None,
|
||||||
file_hash: Optional[str] = None,
|
file_hash: Optional[str] = None,
|
||||||
preview_data: Optional[bytes] = None
|
preview_data: Optional[bytes] = None,
|
||||||
|
entity_type: str = 'CDokumente'
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""
|
||||||
Updated Document-Metadaten nach erfolgreichem xAI-Sync
|
Updated Document-Metadaten nach erfolgreichem xAI-Sync
|
||||||
@@ -487,20 +522,29 @@ class DocumentSync:
|
|||||||
collection_ids: Liste der xAI Collection IDs (optional)
|
collection_ids: Liste der xAI Collection IDs (optional)
|
||||||
file_hash: MD5/SHA Hash des gesyncten Files
|
file_hash: MD5/SHA Hash des gesyncten Files
|
||||||
preview_data: Vorschaubild (WebP) als bytes
|
preview_data: Vorschaubild (WebP) als bytes
|
||||||
|
entity_type: Entity-Type (CDokumente oder Document)
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
update_data = {}
|
update_data = {}
|
||||||
|
|
||||||
# Nur xAI-Felder updaten wenn vorhanden
|
# Nur xAI-Felder updaten wenn vorhanden
|
||||||
if xai_file_id:
|
if xai_file_id:
|
||||||
update_data['xaiFileId'] = xai_file_id
|
# CDokumente verwendet xaiId, Document verwendet xaiFileId
|
||||||
|
if entity_type == 'CDokumente':
|
||||||
|
update_data['xaiId'] = xai_file_id
|
||||||
|
else:
|
||||||
|
update_data['xaiFileId'] = xai_file_id
|
||||||
|
|
||||||
if collection_ids is not None:
|
if collection_ids is not None:
|
||||||
update_data['xaiCollections'] = collection_ids
|
update_data['xaiCollections'] = collection_ids
|
||||||
|
|
||||||
# Nur Status auf "Gesynct" setzen wenn xAI-File-ID vorhanden
|
# Nur Status auf "Gesynct" setzen wenn xAI-File-ID vorhanden
|
||||||
if xai_file_id:
|
if xai_file_id:
|
||||||
update_data['dateiStatus'] = 'Gesynct'
|
# CDokumente verwendet fileStatus, Document verwendet dateiStatus
|
||||||
|
if entity_type == 'CDokumente':
|
||||||
|
update_data['fileStatus'] = 'synced'
|
||||||
|
else:
|
||||||
|
update_data['dateiStatus'] = 'Gesynct'
|
||||||
|
|
||||||
# Hash speichern für zukünftige Change Detection
|
# Hash speichern für zukünftige Change Detection
|
||||||
if file_hash:
|
if file_hash:
|
||||||
@@ -508,40 +552,78 @@ class DocumentSync:
|
|||||||
|
|
||||||
# Preview als Attachment hochladen (falls vorhanden)
|
# Preview als Attachment hochladen (falls vorhanden)
|
||||||
if preview_data:
|
if preview_data:
|
||||||
await self._upload_preview_to_espocrm(document_id, preview_data)
|
await self._upload_preview_to_espocrm(document_id, preview_data, entity_type)
|
||||||
|
|
||||||
# Nur updaten wenn es etwas zu updaten gibt
|
# Nur updaten wenn es etwas zu updaten gibt
|
||||||
if update_data:
|
if update_data:
|
||||||
await self.espocrm.update_entity('Document', document_id, update_data)
|
await self.espocrm.update_entity(entity_type, document_id, update_data)
|
||||||
self._log(f"✅ Sync-Metadaten aktualisiert für Document {document_id}: {list(update_data.keys())}")
|
self._log(f"✅ Sync-Metadaten aktualisiert für {entity_type} {document_id}: {list(update_data.keys())}")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self._log(f"❌ Fehler beim Update von Sync-Metadaten: {e}", level='error')
|
self._log(f"❌ Fehler beim Update von Sync-Metadaten: {e}", level='error')
|
||||||
raise
|
raise
|
||||||
|
|
||||||
async def _upload_preview_to_espocrm(self, document_id: str, preview_data: bytes) -> None:
|
async def _upload_preview_to_espocrm(self, document_id: str, preview_data: bytes, entity_type: str = 'CDokumente') -> None:
|
||||||
"""
|
"""
|
||||||
Lädt Preview-Image als Attachment zu EspoCRM hoch
|
Lädt Preview-Image als Attachment zu EspoCRM hoch
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
document_id: Document ID
|
document_id: Document ID
|
||||||
preview_data: WebP Preview als bytes
|
preview_data: WebP Preview als bytes
|
||||||
|
entity_type: Entity-Type (CDokumente oder Document)
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
self._log(f"📤 Uploading preview image ({len(preview_data)} bytes)...")
|
self._log(f"📤 Uploading preview image to {entity_type} ({len(preview_data)} bytes)...")
|
||||||
|
|
||||||
# Upload via EspoCRM Attachment API
|
# EspoCRM erwartet base64-encoded file im Format: data:mime/type;base64,xxxxx
|
||||||
await self.espocrm.upload_attachment(
|
import base64
|
||||||
file_content=preview_data,
|
import aiohttp
|
||||||
filename='preview.webp',
|
|
||||||
parent_type='Document',
|
|
||||||
parent_id=document_id,
|
|
||||||
field='preview',
|
|
||||||
mime_type='image/webp',
|
|
||||||
role='Attachment'
|
|
||||||
)
|
|
||||||
|
|
||||||
self._log(f"✅ Preview erfolgreich hochgeladen")
|
# Base64-encode preview data
|
||||||
|
base64_data = base64.b64encode(preview_data).decode('ascii')
|
||||||
|
file_data_uri = f"data:image/webp;base64,{base64_data}"
|
||||||
|
|
||||||
|
# Upload via JSON POST mit base64-encoded file field
|
||||||
|
url = self.espocrm.api_base_url.rstrip('/') + '/Attachment'
|
||||||
|
headers = {
|
||||||
|
'X-Api-Key': self.espocrm.api_key,
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
|
||||||
|
payload = {
|
||||||
|
'name': 'preview.webp',
|
||||||
|
'type': 'image/webp',
|
||||||
|
'role': 'Attachment',
|
||||||
|
'field': 'preview',
|
||||||
|
'relatedType': entity_type,
|
||||||
|
'relatedId': document_id,
|
||||||
|
'file': file_data_uri
|
||||||
|
}
|
||||||
|
|
||||||
|
self._log(f"📤 Posting to {url} with base64-encoded file ({len(base64_data)} chars)")
|
||||||
|
self._log(f" relatedType={entity_type}, relatedId={document_id}, field=preview")
|
||||||
|
|
||||||
|
timeout = aiohttp.ClientTimeout(total=30)
|
||||||
|
async with aiohttp.ClientSession(timeout=timeout) as session:
|
||||||
|
async with session.post(url, headers=headers, json=payload) as response:
|
||||||
|
self._log(f"Upload response status: {response.status}")
|
||||||
|
|
||||||
|
if response.status >= 400:
|
||||||
|
error_text = await response.text()
|
||||||
|
self._log(f"❌ Upload failed: {error_text}", level='error')
|
||||||
|
raise Exception(f"Upload error {response.status}: {error_text}")
|
||||||
|
|
||||||
|
result = await response.json()
|
||||||
|
attachment_id = result.get('id')
|
||||||
|
self._log(f"✅ Preview Attachment created: {attachment_id}")
|
||||||
|
|
||||||
|
# Update Entity mit previewId
|
||||||
|
self._log(f"📝 Updating {entity_type} with previewId...")
|
||||||
|
await self.espocrm.update_entity(entity_type, document_id, {
|
||||||
|
'previewId': attachment_id,
|
||||||
|
'previewName': 'preview.webp'
|
||||||
|
})
|
||||||
|
self._log(f"✅ {entity_type} previewId/previewName aktualisiert")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self._log(f"❌ Fehler beim Preview-Upload: {e}", level='error')
|
self._log(f"❌ Fehler beim Preview-Upload: {e}", level='error')
|
||||||
|
|||||||
@@ -341,12 +341,14 @@ class EspoCRMAPI:
|
|||||||
form_data.add_field('role', role)
|
form_data.add_field('role', role)
|
||||||
form_data.add_field('name', filename)
|
form_data.add_field('name', filename)
|
||||||
|
|
||||||
|
self._log(f"Upload params: parentType={parent_type}, parentId={parent_id}, field={field}, role={role}")
|
||||||
|
|
||||||
effective_timeout = aiohttp.ClientTimeout(total=self.api_timeout_seconds)
|
effective_timeout = aiohttp.ClientTimeout(total=self.api_timeout_seconds)
|
||||||
|
|
||||||
async with aiohttp.ClientSession(timeout=effective_timeout) as session:
|
async with aiohttp.ClientSession(timeout=effective_timeout) as session:
|
||||||
try:
|
try:
|
||||||
async with session.post(url, headers=headers, data=form_data) as response:
|
async with session.post(url, headers=headers, data=form_data) as response:
|
||||||
self._log(f"Upload response status: {response.status}", level='debug')
|
self._log(f"Upload response status: {response.status}")
|
||||||
|
|
||||||
if response.status == 401:
|
if response.status == 401:
|
||||||
raise EspoCRMAuthError("Authentication failed - check API key")
|
raise EspoCRMAuthError("Authentication failed - check API key")
|
||||||
@@ -356,6 +358,7 @@ class EspoCRMAPI:
|
|||||||
raise EspoCRMError(f"Attachment endpoint not found")
|
raise EspoCRMError(f"Attachment endpoint not found")
|
||||||
elif response.status >= 400:
|
elif response.status >= 400:
|
||||||
error_text = await response.text()
|
error_text = await response.text()
|
||||||
|
self._log(f"❌ Upload failed with {response.status}. Response: {error_text}", level='error')
|
||||||
raise EspoCRMError(f"Upload error {response.status}: {error_text}")
|
raise EspoCRMError(f"Upload error {response.status}: {error_text}")
|
||||||
|
|
||||||
# Parse response
|
# Parse response
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ config = {
|
|||||||
'description': 'Runs calendar sync automatically every 15 minutes',
|
'description': 'Runs calendar sync automatically every 15 minutes',
|
||||||
'flows': ['advoware-calendar-sync'],
|
'flows': ['advoware-calendar-sync'],
|
||||||
'triggers': [
|
'triggers': [
|
||||||
cron("0 */15 * * * *") # Every 15 minutes at second 0 (6-field: sec min hour day month weekday)
|
cron("0 */15 1o * * *") # Every 15 minutes at second 0 (6-field: sec min hour day month weekday)
|
||||||
],
|
],
|
||||||
'enqueues': ['calendar_sync_all']
|
'enqueues': ['calendar_sync_all']
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -151,17 +151,27 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Emit events für Queue-Processing (Deduplizierung erfolgt im Event-Handler via Lock)
|
# Emit events für Queue-Processing (Deduplizierung erfolgt im Event-Handler via Lock)
|
||||||
|
# Versuche Entity-Type zu ermitteln
|
||||||
|
entity_type = 'CDokumente' # Default für VMH
|
||||||
|
if isinstance(payload, list) and payload:
|
||||||
|
entity_type = payload[0].get('entityType') or payload[0].get('_scope') or 'CDokumente'
|
||||||
|
elif isinstance(payload, dict):
|
||||||
|
entity_type = payload.get('entityType') or payload.get('_scope') or 'CDokumente'
|
||||||
|
|
||||||
|
ctx.logger.info(f"📝 Entity-Type: {entity_type}")
|
||||||
|
|
||||||
for entity_id in entity_ids:
|
for entity_id in entity_ids:
|
||||||
await ctx.enqueue({
|
await ctx.enqueue({
|
||||||
'topic': 'vmh.document.create',
|
'topic': 'vmh.document.create',
|
||||||
'data': {
|
'data': {
|
||||||
'entity_id': entity_id,
|
'entity_id': entity_id,
|
||||||
|
'entity_type': entity_type,
|
||||||
'action': 'create',
|
'action': 'create',
|
||||||
'source': 'webhook',
|
'source': 'webhook',
|
||||||
'timestamp': datetime.datetime.now().isoformat()
|
'timestamp': datetime.datetime.now().isoformat()
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
ctx.logger.info(f"✅ Event emittiert: vmh.document.create für ID {entity_id}")
|
ctx.logger.info(f"✅ Event emittiert: vmh.document.create für ID {entity_id} (Type: {entity_type})")
|
||||||
|
|
||||||
ctx.logger.info("\n" + "=" * 80)
|
ctx.logger.info("\n" + "=" * 80)
|
||||||
ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
|
ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
|
||||||
|
|||||||
@@ -128,17 +128,27 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Emit events für Queue-Processing
|
# Emit events für Queue-Processing
|
||||||
|
# Versuche Entity-Type zu ermitteln
|
||||||
|
entity_type = 'CDokumente' # Default für VMH
|
||||||
|
if isinstance(payload, list) and payload:
|
||||||
|
entity_type = payload[0].get('entityType') or payload[0].get('_scope') or 'CDokumente'
|
||||||
|
elif isinstance(payload, dict):
|
||||||
|
entity_type = payload.get('entityType') or payload.get('_scope') or 'CDokumente'
|
||||||
|
|
||||||
|
ctx.logger.info(f"📝 Entity-Type: {entity_type}")
|
||||||
|
|
||||||
for entity_id in entity_ids:
|
for entity_id in entity_ids:
|
||||||
await ctx.enqueue({
|
await ctx.enqueue({
|
||||||
'topic': 'vmh.document.delete',
|
'topic': 'vmh.document.delete',
|
||||||
'data': {
|
'data': {
|
||||||
'entity_id': entity_id,
|
'entity_id': entity_id,
|
||||||
|
'entity_type': entity_type,
|
||||||
'action': 'delete',
|
'action': 'delete',
|
||||||
'source': 'webhook',
|
'source': 'webhook',
|
||||||
'timestamp': datetime.datetime.now().isoformat()
|
'timestamp': datetime.datetime.now().isoformat()
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
ctx.logger.info(f"✅ Event emittiert: vmh.document.delete für ID {entity_id}")
|
ctx.logger.info(f"✅ Event emittiert: vmh.document.delete für ID {entity_id} (Type: {entity_type})")
|
||||||
|
|
||||||
ctx.logger.info("\n" + "=" * 80)
|
ctx.logger.info("\n" + "=" * 80)
|
||||||
ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
|
ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
|
||||||
|
|||||||
@@ -150,17 +150,27 @@ async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Emit events für Queue-Processing
|
# Emit events für Queue-Processing
|
||||||
|
# Versuche Entity-Type zu ermitteln
|
||||||
|
entity_type = 'CDokumente' # Default für VMH
|
||||||
|
if isinstance(payload, list) and payload:
|
||||||
|
entity_type = payload[0].get('entityType') or payload[0].get('_scope') or 'CDokumente'
|
||||||
|
elif isinstance(payload, dict):
|
||||||
|
entity_type = payload.get('entityType') or payload.get('_scope') or 'CDokumente'
|
||||||
|
|
||||||
|
ctx.logger.info(f"📝 Entity-Type: {entity_type}")
|
||||||
|
|
||||||
for entity_id in entity_ids:
|
for entity_id in entity_ids:
|
||||||
await ctx.enqueue({
|
await ctx.enqueue({
|
||||||
'topic': 'vmh.document.update',
|
'topic': 'vmh.document.update',
|
||||||
'data': {
|
'data': {
|
||||||
'entity_id': entity_id,
|
'entity_id': entity_id,
|
||||||
|
'entity_type': entity_type,
|
||||||
'action': 'update',
|
'action': 'update',
|
||||||
'source': 'webhook',
|
'source': 'webhook',
|
||||||
'timestamp': datetime.datetime.now().isoformat()
|
'timestamp': datetime.datetime.now().isoformat()
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
ctx.logger.info(f"✅ Event emittiert: vmh.document.update für ID {entity_id}")
|
ctx.logger.info(f"✅ Event emittiert: vmh.document.update für ID {entity_id} (Type: {entity_type})")
|
||||||
|
|
||||||
ctx.logger.info("\n" + "=" * 80)
|
ctx.logger.info("\n" + "=" * 80)
|
||||||
ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
|
ctx.logger.info(f"✅ WEBHOOK VERARBEITUNG ABGESCHLOSSEN")
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ config = {
|
|||||||
"description": "Prüft alle 15 Minuten welche Beteiligte synchronisiert werden müssen",
|
"description": "Prüft alle 15 Minuten welche Beteiligte synchronisiert werden müssen",
|
||||||
"flows": ["vmh-beteiligte"],
|
"flows": ["vmh-beteiligte"],
|
||||||
"triggers": [
|
"triggers": [
|
||||||
cron("0 */15 * * * *") # Alle 15 Minuten (6-field format!)
|
cron("0 */15 1 * * *") # Alle 15 Minuten (6-field format!)
|
||||||
],
|
],
|
||||||
"enqueues": ["vmh.beteiligte.sync_check"]
|
"enqueues": ["vmh.beteiligte.sync_check"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -33,6 +33,7 @@ config = {
|
|||||||
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
|
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
|
||||||
"""Zentraler Sync-Handler für Documents"""
|
"""Zentraler Sync-Handler für Documents"""
|
||||||
entity_id = event_data.get('entity_id')
|
entity_id = event_data.get('entity_id')
|
||||||
|
entity_type = event_data.get('entity_type', 'CDokumente') # Default: CDokumente
|
||||||
action = event_data.get('action')
|
action = event_data.get('action')
|
||||||
source = event_data.get('source')
|
source = event_data.get('source')
|
||||||
|
|
||||||
@@ -43,6 +44,7 @@ async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
|
|||||||
ctx.logger.info("=" * 80)
|
ctx.logger.info("=" * 80)
|
||||||
ctx.logger.info(f"🔄 DOCUMENT SYNC HANDLER GESTARTET")
|
ctx.logger.info(f"🔄 DOCUMENT SYNC HANDLER GESTARTET")
|
||||||
ctx.logger.info("=" * 80)
|
ctx.logger.info("=" * 80)
|
||||||
|
ctx.logger.info(f"Entity Type: {entity_type}")
|
||||||
ctx.logger.info(f"Action: {action.upper()}")
|
ctx.logger.info(f"Action: {action.upper()}")
|
||||||
ctx.logger.info(f"Document ID: {entity_id}")
|
ctx.logger.info(f"Document ID: {entity_id}")
|
||||||
ctx.logger.info(f"Source: {source}")
|
ctx.logger.info(f"Source: {source}")
|
||||||
@@ -70,39 +72,40 @@ async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# 1. ACQUIRE LOCK (verhindert parallele Syncs)
|
# 1. ACQUIRE LOCK (verhindert parallele Syncs)
|
||||||
lock_acquired = await sync_utils.acquire_sync_lock(entity_id)
|
lock_acquired = await sync_utils.acquire_sync_lock(entity_id, entity_type)
|
||||||
|
|
||||||
if not lock_acquired:
|
if not lock_acquired:
|
||||||
ctx.logger.warn(f"⏸️ Sync bereits aktiv für Document {entity_id}, überspringe")
|
ctx.logger.warn(f"⏸️ Sync bereits aktiv für {entity_type} {entity_id}, überspringe")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Lock erfolgreich acquired - MUSS im finally block released werden!
|
# Lock erfolgreich acquired - MUSS im finally block released werden!
|
||||||
try:
|
try:
|
||||||
# 2. FETCH VOLLSTÄNDIGES DOCUMENT VON ESPOCRM
|
# 2. FETCH VOLLSTÄNDIGES DOCUMENT VON ESPOCRM
|
||||||
try:
|
try:
|
||||||
document = await espocrm.get_entity('Document', entity_id)
|
document = await espocrm.get_entity(entity_type, entity_id)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
ctx.logger.error(f"❌ Fehler beim Laden von Document: {e}")
|
ctx.logger.error(f"❌ Fehler beim Laden von {entity_type}: {e}")
|
||||||
await sync_utils.release_sync_lock(entity_id, success=False, error_message=str(e))
|
await sync_utils.release_sync_lock(entity_id, success=False, error_message=str(e), entity_type=entity_type)
|
||||||
return
|
return
|
||||||
|
|
||||||
ctx.logger.info(f"📋 Document geladen:")
|
ctx.logger.info(f"📋 {entity_type} geladen:")
|
||||||
ctx.logger.info(f" Name: {document.get('name', 'N/A')}")
|
ctx.logger.info(f" Name: {document.get('name', 'N/A')}")
|
||||||
ctx.logger.info(f" Type: {document.get('type', 'N/A')}")
|
ctx.logger.info(f" Type: {document.get('type', 'N/A')}")
|
||||||
ctx.logger.info(f" xaiFileId: {document.get('xaiFileId', 'N/A')}")
|
ctx.logger.info(f" fileStatus: {document.get('fileStatus', 'N/A')}")
|
||||||
|
ctx.logger.info(f" xaiFileId: {document.get('xaiFileId') or document.get('xaiId', 'N/A')}")
|
||||||
ctx.logger.info(f" xaiCollections: {document.get('xaiCollections', [])}")
|
ctx.logger.info(f" xaiCollections: {document.get('xaiCollections', [])}")
|
||||||
|
|
||||||
# 3. BESTIMME SYNC-AKTION BASIEREND AUF ACTION
|
# 3. BESTIMME SYNC-AKTION BASIEREND AUF ACTION
|
||||||
|
|
||||||
if action == 'delete':
|
if action == 'delete':
|
||||||
await handle_delete(entity_id, document, sync_utils, ctx)
|
await handle_delete(entity_id, document, sync_utils, ctx, entity_type)
|
||||||
|
|
||||||
elif action in ['create', 'update']:
|
elif action in ['create', 'update']:
|
||||||
await handle_create_or_update(entity_id, document, sync_utils, ctx)
|
await handle_create_or_update(entity_id, document, sync_utils, ctx, entity_type)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
ctx.logger.warn(f"⚠️ Unbekannte Action: {action}")
|
ctx.logger.warn(f"⚠️ Unbekannte Action: {action}")
|
||||||
await sync_utils.release_sync_lock(entity_id, success=False, error_message=f"Unbekannte Action: {action}")
|
await sync_utils.release_sync_lock(entity_id, success=False, error_message=f"Unbekannte Action: {action}", entity_type=entity_type)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# Unerwarteter Fehler während Sync - GARANTIERE Lock-Release
|
# Unerwarteter Fehler während Sync - GARANTIERE Lock-Release
|
||||||
@@ -114,7 +117,8 @@ async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
|
|||||||
await sync_utils.release_sync_lock(
|
await sync_utils.release_sync_lock(
|
||||||
entity_id,
|
entity_id,
|
||||||
success=False,
|
success=False,
|
||||||
error_message=str(e)[:2000]
|
error_message=str(e)[:2000],
|
||||||
|
entity_type=entity_type
|
||||||
)
|
)
|
||||||
except Exception as release_error:
|
except Exception as release_error:
|
||||||
# Selbst Lock-Release failed - logge kritischen Fehler
|
# Selbst Lock-Release failed - logge kritischen Fehler
|
||||||
@@ -134,7 +138,7 @@ async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]):
|
|||||||
ctx.logger.error(traceback.format_exc())
|
ctx.logger.error(traceback.format_exc())
|
||||||
|
|
||||||
|
|
||||||
async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync_utils: DocumentSync, ctx: FlowContext[Any]):
|
async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync_utils: DocumentSync, ctx: FlowContext[Any], entity_type: str = 'CDokumente'):
|
||||||
"""
|
"""
|
||||||
Behandelt Create/Update von Documents
|
Behandelt Create/Update von Documents
|
||||||
|
|
||||||
@@ -146,15 +150,15 @@ async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync
|
|||||||
ctx.logger.info("🔍 ANALYSE: Braucht dieses Document xAI-Sync?")
|
ctx.logger.info("🔍 ANALYSE: Braucht dieses Document xAI-Sync?")
|
||||||
ctx.logger.info("=" * 80)
|
ctx.logger.info("=" * 80)
|
||||||
|
|
||||||
# Datei-Status für Preview-Generierung
|
# Datei-Status für Preview-Generierung (verschiedene Feld-Namen unterstützen)
|
||||||
datei_status = document.get('dateiStatus') or document.get('fileStatus')
|
datei_status = document.get('fileStatus') or document.get('dateiStatus')
|
||||||
|
|
||||||
# Entscheidungslogik: Soll dieses Document zu xAI?
|
# Entscheidungslogik: Soll dieses Document zu xAI?
|
||||||
needs_sync, collection_ids, reason = await sync_utils.should_sync_to_xai(document)
|
needs_sync, collection_ids, reason = await sync_utils.should_sync_to_xai(document)
|
||||||
|
|
||||||
ctx.logger.info(f"📊 Entscheidung: {'✅ SYNC NÖTIG' if needs_sync else '⏭️ KEIN SYNC NÖTIG'}")
|
ctx.logger.info(f"📊 Entscheidung: {'✅ SYNC NÖTIG' if needs_sync else '⏭️ KEIN SYNC NÖTIG'}")
|
||||||
ctx.logger.info(f" Grund: {reason}")
|
ctx.logger.info(f" Grund: {reason}")
|
||||||
ctx.logger.info(f" Datei-Status: {datei_status or 'N/A'}")
|
ctx.logger.info(f" File-Status: {datei_status or 'N/A'}")
|
||||||
|
|
||||||
if collection_ids:
|
if collection_ids:
|
||||||
ctx.logger.info(f" Collections: {collection_ids}")
|
ctx.logger.info(f" Collections: {collection_ids}")
|
||||||
@@ -163,7 +167,9 @@ async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync
|
|||||||
# PREVIEW-GENERIERUNG bei neuen/geänderten Dateien
|
# PREVIEW-GENERIERUNG bei neuen/geänderten Dateien
|
||||||
# ═══════════════════════════════════════════════════════════════
|
# ═══════════════════════════════════════════════════════════════
|
||||||
|
|
||||||
if datei_status in ['Neu', 'Geändert', 'neu', 'geändert', 'New', 'Changed']:
|
# Case-insensitive check für Datei-Status
|
||||||
|
datei_status_lower = (datei_status or '').lower()
|
||||||
|
if datei_status_lower in ['neu', 'geändert', 'new', 'changed']:
|
||||||
ctx.logger.info("")
|
ctx.logger.info("")
|
||||||
ctx.logger.info("=" * 80)
|
ctx.logger.info("=" * 80)
|
||||||
ctx.logger.info("🖼️ PREVIEW-GENERIERUNG STARTEN")
|
ctx.logger.info("🖼️ PREVIEW-GENERIERUNG STARTEN")
|
||||||
@@ -172,7 +178,7 @@ async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# 1. Hole Download-Informationen
|
# 1. Hole Download-Informationen
|
||||||
download_info = await sync_utils.get_document_download_info(entity_id)
|
download_info = await sync_utils.get_document_download_info(entity_id, entity_type)
|
||||||
|
|
||||||
if not download_info:
|
if not download_info:
|
||||||
ctx.logger.warn("⚠️ Keine Download-Info verfügbar - überspringe Preview")
|
ctx.logger.warn("⚠️ Keine Download-Info verfügbar - überspringe Preview")
|
||||||
@@ -213,7 +219,8 @@ async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync
|
|||||||
ctx.logger.info(f"📤 Uploading preview to EspoCRM...")
|
ctx.logger.info(f"📤 Uploading preview to EspoCRM...")
|
||||||
await sync_utils.update_sync_metadata(
|
await sync_utils.update_sync_metadata(
|
||||||
entity_id,
|
entity_id,
|
||||||
preview_data=preview_data
|
preview_data=preview_data,
|
||||||
|
entity_type=entity_type
|
||||||
# Keine xaiFileId/collections - nur Preview update
|
# Keine xaiFileId/collections - nur Preview update
|
||||||
)
|
)
|
||||||
ctx.logger.info(f"✅ Preview uploaded successfully")
|
ctx.logger.info(f"✅ Preview uploaded successfully")
|
||||||
@@ -244,7 +251,7 @@ async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync
|
|||||||
|
|
||||||
if not needs_sync:
|
if not needs_sync:
|
||||||
ctx.logger.info("✅ Kein xAI-Sync erforderlich, Lock wird released")
|
ctx.logger.info("✅ Kein xAI-Sync erforderlich, Lock wird released")
|
||||||
await sync_utils.release_sync_lock(entity_id, success=True)
|
await sync_utils.release_sync_lock(entity_id, success=True, entity_type=entity_type)
|
||||||
return
|
return
|
||||||
|
|
||||||
# ═══════════════════════════════════════════════════════════════
|
# ═══════════════════════════════════════════════════════════════
|
||||||
@@ -315,9 +322,9 @@ async def handle_create_or_update(entity_id: str, document: Dict[str, Any], sync
|
|||||||
await sync_utils.release_sync_lock(entity_id, success=False, error_message=str(e))
|
await sync_utils.release_sync_lock(entity_id, success=False, error_message=str(e))
|
||||||
|
|
||||||
|
|
||||||
async def handle_delete(entity_id: str, document: Dict[str, Any], sync_utils: DocumentSync, ctx: FlowContext[Any]):
|
async def handle_delete(entity_id: str, document: Dict[str, Any], sync_utils: DocumentSync, ctx: FlowContext[Any], entity_type: str = 'CDokumente'):
|
||||||
"""
|
"""
|
||||||
Behandelt Deletion von Documents
|
Behandelt Delete von Documents
|
||||||
|
|
||||||
Entfernt Document aus xAI Collections (aber löscht File nicht - kann in anderen Collections sein)
|
Entfernt Document aus xAI Collections (aber löscht File nicht - kann in anderen Collections sein)
|
||||||
"""
|
"""
|
||||||
@@ -327,12 +334,12 @@ async def handle_delete(entity_id: str, document: Dict[str, Any], sync_utils: Do
|
|||||||
ctx.logger.info("🗑️ DOCUMENT DELETE - xAI CLEANUP")
|
ctx.logger.info("🗑️ DOCUMENT DELETE - xAI CLEANUP")
|
||||||
ctx.logger.info("=" * 80)
|
ctx.logger.info("=" * 80)
|
||||||
|
|
||||||
xai_file_id = document.get('xaiFileId')
|
xai_file_id = document.get('xaiFileId') or document.get('xaiId')
|
||||||
xai_collections = document.get('xaiCollections') or []
|
xai_collections = document.get('xaiCollections') or []
|
||||||
|
|
||||||
if not xai_file_id or not xai_collections:
|
if not xai_file_id or not xai_collections:
|
||||||
ctx.logger.info("⏭️ Document war nicht in xAI gesynct, nichts zu tun")
|
ctx.logger.info("⏭️ Document war nicht in xAI gesynct, nichts zu tun")
|
||||||
await sync_utils.release_sync_lock(entity_id, success=True)
|
await sync_utils.release_sync_lock(entity_id, success=True, entity_type=entity_type)
|
||||||
return
|
return
|
||||||
|
|
||||||
ctx.logger.info(f"📋 Document Info:")
|
ctx.logger.info(f"📋 Document Info:")
|
||||||
@@ -354,7 +361,7 @@ async def handle_delete(entity_id: str, document: Dict[str, Any], sync_utils: Do
|
|||||||
#
|
#
|
||||||
# ctx.logger.info(f"✅ File aus {len(xai_collections)} Collection(s) entfernt")
|
# ctx.logger.info(f"✅ File aus {len(xai_collections)} Collection(s) entfernt")
|
||||||
|
|
||||||
await sync_utils.release_sync_lock(entity_id, success=True)
|
await sync_utils.release_sync_lock(entity_id, success=True, entity_type=entity_type)
|
||||||
|
|
||||||
ctx.logger.info("=" * 80)
|
ctx.logger.info("=" * 80)
|
||||||
ctx.logger.info("✅ DELETE ABGESCHLOSSEN (PLACEHOLDER)")
|
ctx.logger.info("✅ DELETE ABGESCHLOSSEN (PLACEHOLDER)")
|
||||||
@@ -364,4 +371,4 @@ async def handle_delete(entity_id: str, document: Dict[str, Any], sync_utils: Do
|
|||||||
ctx.logger.error(f"❌ Fehler bei Delete: {e}")
|
ctx.logger.error(f"❌ Fehler bei Delete: {e}")
|
||||||
import traceback
|
import traceback
|
||||||
ctx.logger.error(traceback.format_exc())
|
ctx.logger.error(traceback.format_exc())
|
||||||
await sync_utils.release_sync_lock(entity_id, success=False, error_message=str(e))
|
await sync_utils.release_sync_lock(entity_id, success=False, error_message=str(e), entity_type=entity_type)
|
||||||
|
|||||||
279
test_preview_upload.py
Executable file
279
test_preview_upload.py
Executable file
@@ -0,0 +1,279 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Test Script: Preview Image Upload zu EspoCRM
|
||||||
|
|
||||||
|
Testet das Hochladen eines Preview-Bildes (WebP) als Attachment
|
||||||
|
zu einem CDokumente Entity via EspoCRM API.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
python test_preview_upload.py <document_id>
|
||||||
|
|
||||||
|
Example:
|
||||||
|
python test_preview_upload.py 69a68906ac3d0fd25
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import aiohttp
|
||||||
|
import base64
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from io import BytesIO
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
|
||||||
|
# EspoCRM config (from environment, with hardcoded fallbacks for testing).
# Base URL must include the API prefix, e.g. https://host/api/v1
ESPOCRM_API_BASE_URL = os.getenv('ESPOCRM_API_BASE_URL', 'https://crm.bitbylaw.com/api/v1')
# API key is required at runtime; main() exits early when it is empty.
ESPOCRM_API_KEY = os.getenv('ESPOCRM_API_KEY', '')

# Test parameters: target entity type and the attachment field on that
# entity that the preview image gets linked to.
ENTITY_TYPE = 'CDokumente'
FIELD_NAME = 'preview'
||||||
|
|
||||||
|
|
||||||
|
def generate_test_webp(text: str = "TEST PREVIEW", size: tuple = (600, 800)) -> bytes:
    """
    Generate a simple WebP test image with centered text.

    Args:
        text: Text rendered in the middle of the image
        size: Image size as (width, height)

    Returns:
        WebP-encoded image as bytes
    """
    print(f"📐 Generating test image ({size[0]}x{size[1]})...")

    # Solid-color canvas; the text overlay below is best-effort only.
    img = Image.new('RGB', size, color='lightblue')

    try:
        from PIL import ImageDraw, ImageFont
        draw = ImageDraw.Draw(img)

        # Prefer a large TrueType font; fall back to PIL's built-in bitmap
        # font when DejaVu is not installed (e.g. minimal containers).
        try:
            font = ImageFont.truetype("/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf", 40)
        except OSError:  # was a bare `except:` — only font-loading errors are expected here
            font = ImageFont.load_default()

        # Center the text using its bounding box.
        bbox = draw.textbbox((0, 0), text, font=font)
        text_width = bbox[2] - bbox[0]
        text_height = bbox[3] - bbox[1]
        x = (size[0] - text_width) // 2
        y = (size[1] - text_height) // 2

        draw.text((x, y), text, fill='darkblue', font=font)
    except Exception as e:
        # Text rendering is optional; the plain image alone is still usable.
        print(f"⚠️ Text rendering failed: {e}")

    # Encode as lossy WebP.
    buffer = BytesIO()
    img.save(buffer, format='WEBP', quality=85)
    webp_bytes = buffer.getvalue()

    print(f"✅ Test image generated: {len(webp_bytes)} bytes")
    return webp_bytes
||||||
|
|
||||||
|
|
||||||
|
async def upload_preview_to_espocrm(
    document_id: str,
    preview_data: bytes,
    entity_type: str = 'CDokumente'
) -> dict:
    """
    Upload a preview image to the EspoCRM Attachment API.

    The image is sent base64-encoded as a data URI inside the JSON payload,
    linked to the target entity via relatedType/relatedId and the configured
    preview field.

    Args:
        document_id: ID of the CDokumente/Document entity
        preview_data: WebP image as bytes
        entity_type: Entity type (CDokumente or Document)

    Returns:
        Response dict containing the new Attachment ID

    Raises:
        Exception: on any HTTP status >= 400 (message includes response body)
    """
    print(f"\n📤 Uploading preview to {entity_type}/{document_id}...")
    print(f" Preview size: {len(preview_data)} bytes")

    # Base64-encode the raw bytes into a data URI, as expected by EspoCRM.
    base64_data = base64.b64encode(preview_data).decode('ascii')
    file_data_uri = f"data:image/webp;base64,{base64_data}"

    print(f" Base64 encoded: {len(base64_data)} chars")

    # API request against the generic Attachment endpoint.
    url = ESPOCRM_API_BASE_URL.rstrip('/') + '/Attachment'
    headers = {
        'X-Api-Key': ESPOCRM_API_KEY,
        'Content-Type': 'application/json'
    }

    payload = {
        'name': 'preview.webp',
        'type': 'image/webp',
        'role': 'Attachment',
        'field': FIELD_NAME,
        'relatedType': entity_type,
        'relatedId': document_id,
        'file': file_data_uri
    }

    # Verbose request logging (this is a diagnostic script); the API key is
    # truncated to its first 20 characters to avoid printing the full secret.
    print(f"\n🌐 POST {url}")
    print(f" Headers: X-Api-Key={ESPOCRM_API_KEY[:20]}...")
    print(f" Payload keys: {list(payload.keys())}")
    print(f" - name: {payload['name']}")
    print(f" - type: {payload['type']}")
    print(f" - role: {payload['role']}")
    print(f" - field: {payload['field']}")
    print(f" - relatedType: {payload['relatedType']}")
    print(f" - relatedId: {payload['relatedId']}")
    print(f" - file: data:image/webp;base64,... ({len(base64_data)} chars)")

    timeout = aiohttp.ClientTimeout(total=30)
    async with aiohttp.ClientSession(timeout=timeout) as session:
        async with session.post(url, headers=headers, json=payload) as response:
            print(f"\n📥 Response Status: {response.status}")
            print(f" Content-Type: {response.content_type}")

            response_text = await response.text()

            if response.status >= 400:
                print(f"\n❌ Upload FAILED!")
                print(f" Status: {response.status}")
                print(f" Response: {response_text}")
                raise Exception(f"Upload error {response.status}: {response_text}")

            # Parse JSON response and surface the created attachment's ID.
            result = await response.json()
            attachment_id = result.get('id')

            print(f"\n✅ Upload SUCCESSFUL!")
            print(f" Attachment ID: {attachment_id}")
            print(f" Full response: {result}")

            return result
||||||
|
|
||||||
|
|
||||||
|
async def update_entity_with_preview(
    document_id: str,
    attachment_id: str,
    entity_type: str = 'CDokumente'
) -> dict:
    """
    Link an uploaded attachment to the entity as its preview.

    Issues a PUT request that sets previewId/previewName on the entity.

    Args:
        document_id: Entity ID
        attachment_id: Attachment ID returned by the upload step
        entity_type: Entity type

    Returns:
        The updated entity data as returned by EspoCRM
    """
    print(f"\n📝 Updating {entity_type}/{document_id} with previewId...")

    endpoint = f"{ESPOCRM_API_BASE_URL.rstrip('/')}/{entity_type}/{document_id}"
    request_headers = {
        'X-Api-Key': ESPOCRM_API_KEY,
        'Content-Type': 'application/json'
    }
    body = {
        'previewId': attachment_id,
        'previewName': 'preview.webp'
    }

    print(f" PUT {endpoint}")
    print(f" Payload: {body}")

    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=30)) as http:
        async with http.put(endpoint, headers=request_headers, json=body) as resp:
            print(f" Response Status: {resp.status}")

            if resp.status >= 400:
                error_body = await resp.text()
                print(f"\n❌ Update FAILED!")
                print(f" Status: {resp.status}")
                print(f" Response: {error_body}")
                raise Exception(f"Update error {resp.status}: {error_body}")

            updated = await resp.json()
            print(f"\n✅ Entity updated successfully!")
            print(f" previewId: {updated.get('previewId')}")
            print(f" previewName: {updated.get('previewName')}")
            return updated
||||||
|
|
||||||
|
|
||||||
|
async def main():
    """Main test flow: generate image → upload attachment → link as preview."""
    print("=" * 80)
    print("🖼️ ESPOCRM PREVIEW UPLOAD TEST")
    print("=" * 80)

    # Check arguments: the target document ID is the only positional argument.
    if len(sys.argv) < 2:
        print("\n❌ Error: Document ID required!")
        print(f"\nUsage: {sys.argv[0]} <document_id>")
        print(f"Example: {sys.argv[0]} 69a68906ac3d0fd25")
        sys.exit(1)

    document_id = sys.argv[1]

    # Check API key — fail fast, every request needs it.
    if not ESPOCRM_API_KEY:
        print("\n❌ Error: ESPOCRM_API_KEY environment variable not set!")
        sys.exit(1)

    print(f"\n📋 Test Parameters:")
    print(f" API Base URL: {ESPOCRM_API_BASE_URL}")
    print(f" API Key: {ESPOCRM_API_KEY[:20]}...")
    print(f" Entity Type: {ENTITY_TYPE}")
    print(f" Document ID: {document_id}")
    print(f" Field: {FIELD_NAME}")

    try:
        # Step 1: Generate test image. The text embeds a document-ID prefix
        # so the uploaded preview is visually identifiable in the CRM.
        print("\n" + "=" * 80)
        print("STEP 1: Generate Test Image")
        print("=" * 80)
        preview_data = generate_test_webp(f"Preview Test\n{document_id[:8]}", size=(600, 800))

        # Step 2: Upload to EspoCRM as an Attachment.
        print("\n" + "=" * 80)
        print("STEP 2: Upload to EspoCRM Attachment API")
        print("=" * 80)
        result = await upload_preview_to_espocrm(document_id, preview_data, ENTITY_TYPE)
        attachment_id = result.get('id')

        # Step 3: Update entity so its preview field references the attachment.
        print("\n" + "=" * 80)
        print("STEP 3: Update Entity with Preview Reference")
        print("=" * 80)
        await update_entity_with_preview(document_id, attachment_id, ENTITY_TYPE)

        # Success summary, including a direct link into the CRM UI.
        print("\n" + "=" * 80)
        print("✅ TEST SUCCESSFUL!")
        print("=" * 80)
        print(f"\n📊 Summary:")
        print(f" - Attachment ID: {attachment_id}")
        print(f" - Entity: {ENTITY_TYPE}/{document_id}")
        print(f" - Preview Size: {len(preview_data)} bytes")
        print(f"\n🔗 View in EspoCRM:")
        print(f" {ESPOCRM_API_BASE_URL.replace('/api/v1', '')}/#CDokumente/view/{document_id}")

    except Exception as e:
        # Any failure aborts the script with a non-zero exit code.
        print("\n" + "=" * 80)
        print("❌ TEST FAILED!")
        print("=" * 80)
        print(f"\nError: {e}")
        import traceback
        traceback.print_exc()
        sys.exit(1)
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: run the async test flow.
if __name__ == '__main__':
    asyncio.run(main())
|
||||||
253
test_thumbnail_generation.py
Normal file
253
test_thumbnail_generation.py
Normal file
@@ -0,0 +1,253 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Test script for Document Thumbnail Generation
|
||||||
|
Tests the complete flow:
|
||||||
|
1. Create a test document in EspoCRM
|
||||||
|
2. Upload a file attachment
|
||||||
|
3. Trigger the webhook (or wait for automatic trigger)
|
||||||
|
4. Verify preview generation
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import aiohttp
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
from io import BytesIO
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
# Add bitbylaw to path
|
||||||
|
sys.path.insert(0, str(Path(__file__).parent))
|
||||||
|
|
||||||
|
from services.espocrm import EspoCRMAPI
|
||||||
|
|
||||||
|
|
||||||
|
async def create_test_image(width: int = 800, height: int = 600) -> bytes:
    """
    Create a simple PNG test image with shapes and text.

    Declared async for call-site symmetry with the other helpers; it does
    not await anything itself.

    Args:
        width: Image width in pixels
        height: Image height in pixels

    Returns:
        PNG-encoded image as bytes
    """
    img = Image.new('RGB', (width, height), color='lightblue')

    # Add shapes/text so the image is not just a solid color (makes the
    # generated thumbnail visually verifiable).
    from PIL import ImageDraw, ImageFont
    draw = ImageDraw.Draw(img)

    # Draw a border rectangle and an ellipse.
    draw.rectangle([50, 50, width-50, height-50], outline='darkblue', width=5)
    draw.ellipse([width//4, height//4, 3*width//4, 3*height//4], outline='red', width=3)

    # Add text; fall back to PIL's default font when DejaVu is unavailable.
    try:
        font = ImageFont.truetype("/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf", 48)
    except OSError:  # was a bare `except:` — only catch font-loading errors
        font = None

    text = "TEST IMAGE\nFor Thumbnail\nGeneration"
    draw.text((width//2, height//2), text, fill='black', anchor='mm', font=font, align='center')

    # Save to bytes
    buffer = BytesIO()
    img.save(buffer, format='PNG')
    return buffer.getvalue()
||||||
|
|
||||||
|
|
||||||
|
async def create_test_document(espocrm: EspoCRMAPI) -> str:
    """
    Create a test Document entity in EspoCRM.

    The document is created with dateiStatus="Neu", the state expected to
    trigger preview generation downstream.

    Args:
        espocrm: Configured EspoCRM API client

    Returns:
        ID of the newly created document
    """
    print("\n📄 Creating test document in EspoCRM...")

    document_data = {
        # Monotonic loop time gives the name a unique suffix per run.
        # get_running_loop() replaces the deprecated get_event_loop()
        # pattern inside a coroutine (same clock, same value).
        "name": f"Test Thumbnail Generation {asyncio.get_running_loop().time()}",
        "status": "Active",
        "dateiStatus": "Neu",  # This should trigger preview generation
        "type": "Image",
        "description": "Automated test document for thumbnail generation"
    }

    result = await espocrm.create_entity("Document", document_data)
    doc_id = result.get("id")

    print(f"✅ Document created: {doc_id}")
    print(f" Name: {result.get('name')}")
    print(f" Datei-Status: {result.get('dateiStatus')}")

    return doc_id
||||||
|
|
||||||
|
|
||||||
|
async def upload_test_file(espocrm: EspoCRMAPI, doc_id: str) -> str:
    """Generate a test PNG and attach it to the document's file field."""
    print(f"\n📤 Uploading test image to document {doc_id}...")

    # Render the fixture image in memory (no temp files needed).
    png_bytes = await create_test_image(1200, 900)
    print(f" Generated test image: {len(png_bytes)} bytes")

    # Hand the bytes to EspoCRM as an attachment on Document.file.
    upload_kwargs = dict(
        file_content=png_bytes,
        filename="test_image.png",
        parent_type="Document",
        parent_id=doc_id,
        field="file",
        mime_type="image/png",
        role="Attachment",
    )
    uploaded = await espocrm.upload_attachment(**upload_kwargs)

    new_attachment_id = uploaded.get("id")
    print(f"✅ File uploaded: {new_attachment_id}")
    print(f" Filename: {uploaded.get('name')}")
    print(f" Size: {uploaded.get('size')} bytes")

    return new_attachment_id
||||||
|
|
||||||
|
|
||||||
|
async def trigger_webhook(doc_id: str, action: str = "update"):
    """
    Manually fire the document webhook against the local service.

    Returns True when the endpoint answered with HTTP 200.
    """
    print(f"\n🔔 Triggering webhook for document {doc_id}...")

    target = f"http://localhost:7777/vmh/webhook/document/{action}"
    # Entity id is supplied both top-level and nested under data.entity.
    body = {
        "entityType": "Document",
        "entity": {"id": doc_id, "entityType": "Document"},
        "data": {"entity": {"id": doc_id}},
    }

    async with aiohttp.ClientSession() as http:
        async with http.post(target, json=body) as resp:
            code = resp.status
            reply = await resp.text()

    if code == 200:
        print(f"✅ Webhook triggered successfully")
        print(f" Response: {reply}")
    else:
        print(f"❌ Webhook failed: {code}")
        print(f" Response: {reply}")

    return code == 200
||||||
|
|
||||||
|
|
||||||
|
async def check_preview_generated(espocrm: EspoCRMAPI, doc_id: str, max_wait: int = 30):
    """
    Poll the document until a preview attachment appears.

    Polls once per second for up to max_wait seconds. As soon as previewId
    is set, the preview is downloaded and inspected (best-effort; a
    verification failure is logged but the check still counts as success).

    Args:
        espocrm: Configured EspoCRM API client
        doc_id: Document entity ID to poll
        max_wait: Maximum number of 1-second polling iterations

    Returns:
        True if a preview appeared within max_wait seconds, else False
    """
    print(f"\n🔍 Checking for preview generation (max {max_wait}s)...")

    for i in range(max_wait):
        await asyncio.sleep(1)

        # Get document — fresh read each iteration.
        doc = await espocrm.get_entity("Document", doc_id)

        # Check if preview field is populated.
        preview_id = doc.get("previewId")
        if preview_id:
            print(f"\n✅ Preview generated!")
            print(f" Preview Attachment ID: {preview_id}")
            print(f" Preview Name: {doc.get('previewName')}")
            print(f" Preview Type: {doc.get('previewType')}")

            # Try to download and inspect the preview; errors here are
            # reported but do not change the overall result.
            try:
                preview_data = await espocrm.download_attachment(preview_id)
                print(f" Preview Size: {len(preview_data)} bytes")

                # Verify it's a WebP image within the expected size bounds.
                from PIL import Image
                img = Image.open(BytesIO(preview_data))
                print(f" Preview Format: {img.format}")
                print(f" Preview Dimensions: {img.width}x{img.height}")

                if img.format == "WEBP":
                    print(" ✅ Format is WebP as expected")
                if img.width <= 600 and img.height <= 800:
                    print(" ✅ Dimensions within expected range")

            except Exception as e:
                print(f" ⚠️ Could not verify preview: {e}")

            return True

        # Progress heartbeat every 5 seconds.
        if (i + 1) % 5 == 0:
            print(f" Still waiting... ({i + 1}s)")

    print(f"\n❌ Preview not generated after {max_wait}s")
    return False
||||||
|
|
||||||
|
|
||||||
|
async def cleanup_test_document(espocrm: EspoCRMAPI, doc_id: str):
    """Best-effort deletion of the test document; failures are only logged."""
    print(f"\n🗑️ Cleaning up test document {doc_id}...")
    try:
        await espocrm.delete_entity("Document", doc_id)
    except Exception as delete_error:
        print(f"⚠️ Could not delete test document: {delete_error}")
    else:
        print("✅ Test document deleted")
||||||
|
|
||||||
|
|
||||||
|
async def main():
    """End-to-end thumbnail test: create doc → upload file → await preview."""
    print("=" * 80)
    print("THUMBNAIL GENERATION TEST")
    print("=" * 80)

    # Initialize EspoCRM API client.
    espocrm = EspoCRMAPI()

    # Track the created document so the except-branch can offer cleanup.
    doc_id = None
    try:
        # Step 1: Create test document
        doc_id = await create_test_document(espocrm)

        # Step 2: Upload test file
        attachment_id = await upload_test_file(espocrm, doc_id)

        # Step 3: Update document to trigger webhook (set dateiStatus to trigger sync)
        print(f"\n🔄 Updating document to trigger webhook...")
        await espocrm.update_entity("Document", doc_id, {
            "dateiStatus": "Neu"  # This should trigger the webhook
        })
        print("✅ Document updated")

        # Step 4: Wait a bit for webhook to be processed
        print("\n⏳ Waiting 3 seconds for webhook processing...")
        await asyncio.sleep(3)

        # Step 5: Check if preview was generated (polls up to 20s)
        success = await check_preview_generated(espocrm, doc_id, max_wait=20)

        # Summary
        print("\n" + "=" * 80)
        if success:
            print("✅ TEST PASSED - Preview generation successful!")
        else:
            print("❌ TEST FAILED - Preview was not generated")
            print("\nCheck logs with:")
            print(" sudo journalctl -u motia.service --since '2 minutes ago' | grep -E '(PREVIEW|Document)'")
        print("=" * 80)

        # Ask if we should clean up (interactive; default keeps the document)
        print(f"\nTest document ID: {doc_id}")
        cleanup = input("\nDelete test document? (y/N): ").strip().lower()
        if cleanup == 'y':
            await cleanup_test_document(espocrm, doc_id)
        else:
            print(f"ℹ️ Test document kept: {doc_id}")
            print(f" View in EspoCRM: https://crm.bitbylaw.com/#Document/view/{doc_id}")

    except Exception as e:
        print(f"\n❌ Test failed with error: {e}")
        import traceback
        traceback.print_exc()

        # Offer cleanup even after a failure, if the document was created.
        if doc_id:
            print(f"\nTest document ID: {doc_id}")
            cleanup = input("\nDelete test document? (y/N): ").strip().lower()
            if cleanup == 'y':
                await cleanup_test_document(espocrm, doc_id)
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: run the async test flow.
if __name__ == "__main__":
    asyncio.run(main())
|
||||||
Reference in New Issue
Block a user