feat(webhooks): Add webhook handlers for Beteiligte and Document entities
- Implemented create, update, and delete webhook handlers for Beteiligte.
- Implemented create, update, and delete webhook handlers for Document entities.
- Added logging and error handling for each webhook handler.
- Created a universal step for generating document previews.
- Ensured payload validation and entity ID extraction for batch processing.
src/steps/crm/__init__.py (new file, 0 lines)
src/steps/crm/akte/__init__.py (new file, 0 lines)
src/steps/crm/akte/akte_sync_cron_step.py (new file, 165 lines)
@@ -0,0 +1,165 @@
"""
Akte Sync - Cron Poller

Polls two Redis Sorted Sets every 10 seconds (10 s debounce each):

    advoware:pending_aktennummern – written by Windows Advoware Watcher
        { aktennummer → timestamp }
    akte:pending_entity_ids – written by EspoCRM webhook
        { akte_id → timestamp }

Eligibility (either flag triggers sync):
    syncSchalter AND aktivierungsstatus in valid list → Advoware sync
    aiAktivierungsstatus in valid list → xAI sync
"""

from motia import FlowContext, cron


config = {
    "name": "Akte Sync - Cron Poller",
    "description": "Poll Redis for pending Aktennummern and emit akte.sync events (10 s debounce)",
    "flows": ["akte-sync"],
    "triggers": [cron("*/10 * * * * *")],
    "enqueues": ["akte.sync"],
}

# Queue 1: written by Windows Advoware Watcher (keyed by Aktennummer)
PENDING_ADVO_KEY = "advoware:pending_aktennummern"
PROCESSING_ADVO_KEY = "advoware:processing_aktennummern"

# Queue 2: written by EspoCRM webhook (keyed by entity ID)
PENDING_ID_KEY = "akte:pending_entity_ids"
PROCESSING_ID_KEY = "akte:processing_entity_ids"

DEBOUNCE_SECS = 10

VALID_ADVOWARE_STATUSES = {'import', 'neu', 'new', 'aktiv', 'active'}
VALID_AI_STATUSES = {'new', 'neu', 'aktiv', 'active'}


async def handler(input_data: None, ctx: FlowContext) -> None:
    import time
    from services.redis_client import get_redis_client
    from services.espocrm import EspoCRMAPI

    ctx.logger.info("=" * 60)
    ctx.logger.info("⏰ AKTE CRON POLLER")

    redis_client = get_redis_client(strict=False)
    if not redis_client:
        ctx.logger.error("❌ Redis unavailable")
        ctx.logger.info("=" * 60)
        return

    espocrm = EspoCRMAPI(ctx)
    cutoff = time.time() - DEBOUNCE_SECS

    advo_pending = redis_client.zcard(PENDING_ADVO_KEY)
    id_pending = redis_client.zcard(PENDING_ID_KEY)
    ctx.logger.info(f"   Pending (aktennr) : {advo_pending}")
    ctx.logger.info(f"   Pending (akte_id) : {id_pending}")

    processed = False

    # ── Queue 1: Advoware Watcher (by Aktennummer) ─────────────────────
    advo_entries = redis_client.zrangebyscore(PENDING_ADVO_KEY, min=0, max=cutoff, start=0, num=1)
    if advo_entries:
        aktennr = advo_entries[0]
        if isinstance(aktennr, bytes):
            aktennr = aktennr.decode()

        score = redis_client.zscore(PENDING_ADVO_KEY, aktennr) or 0
        age = time.time() - score
        redis_client.zrem(PENDING_ADVO_KEY, aktennr)
        redis_client.sadd(PROCESSING_ADVO_KEY, aktennr)

        ctx.logger.info(f"📋 Aktennummer: {aktennr} (age={age:.1f}s)")
        processed = True

        try:
            result = await espocrm.list_entities(
                'CAkten',
                where=[{'type': 'equals', 'attribute': 'aktennummer', 'value': int(aktennr)}],
                max_size=1,
            )
            if not result or not result.get('list'):
                ctx.logger.warn(f"⚠️ No CAkten found for aktennummer={aktennr} – removing")
                redis_client.srem(PROCESSING_ADVO_KEY, aktennr)
            else:
                akte = result['list'][0]
                await _emit_if_eligible(akte, aktennr, ctx)
                redis_client.srem(PROCESSING_ADVO_KEY, aktennr)
        except Exception as e:
            ctx.logger.error(f"❌ Error (aktennr queue) {aktennr}: {e}")
            redis_client.zadd(PENDING_ADVO_KEY, {aktennr: time.time()})
            redis_client.srem(PROCESSING_ADVO_KEY, aktennr)
            raise

    # ── Queue 2: EspoCRM Webhook (by Entity ID) ────────────────────────
    id_entries = redis_client.zrangebyscore(PENDING_ID_KEY, min=0, max=cutoff, start=0, num=1)
    if id_entries:
        akte_id = id_entries[0]
        if isinstance(akte_id, bytes):
            akte_id = akte_id.decode()

        score = redis_client.zscore(PENDING_ID_KEY, akte_id) or 0
        age = time.time() - score
        redis_client.zrem(PENDING_ID_KEY, akte_id)
        redis_client.sadd(PROCESSING_ID_KEY, akte_id)

        ctx.logger.info(f"📋 Entity ID: {akte_id} (age={age:.1f}s)")
        processed = True

        try:
            akte = await espocrm.get_entity('CAkten', akte_id)
            if not akte:
                ctx.logger.warn(f"⚠️ No CAkten found for id={akte_id} – removing")
                redis_client.srem(PROCESSING_ID_KEY, akte_id)
            else:
                await _emit_if_eligible(akte, None, ctx)
                redis_client.srem(PROCESSING_ID_KEY, akte_id)
        except Exception as e:
            ctx.logger.error(f"❌ Error (entity-id queue) {akte_id}: {e}")
            redis_client.zadd(PENDING_ID_KEY, {akte_id: time.time()})
            redis_client.srem(PROCESSING_ID_KEY, akte_id)
            raise

    if not processed:
        if advo_pending > 0 or id_pending > 0:
            ctx.logger.info(f"⏸️ Entries pending but all too recent (< {DEBOUNCE_SECS}s)")
        else:
            ctx.logger.info("✓ Both queues empty")

    ctx.logger.info("=" * 60)


async def _emit_if_eligible(akte: dict, aktennr, ctx: FlowContext) -> None:
    """Check eligibility and emit akte.sync if applicable."""
    akte_id = akte['id']
    # Prefer aktennr from argument; fall back to entity field
    aktennummer = aktennr or akte.get('aktennummer')
    sync_schalter = akte.get('syncSchalter', False)
    aktivierungsstatus = str(akte.get('aktivierungsstatus') or '').lower()
    ai_status = str(akte.get('aiAktivierungsstatus') or '').lower()

    advoware_eligible = bool(aktennummer) and sync_schalter and aktivierungsstatus in VALID_ADVOWARE_STATUSES
    xai_eligible = ai_status in VALID_AI_STATUSES

    ctx.logger.info(f"   akte_id              : {akte_id}")
    ctx.logger.info(f"   aktennummer          : {aktennummer or '—'}")
    ctx.logger.info(f"   aktivierungsstatus   : {aktivierungsstatus} ({'✅' if advoware_eligible else '⏭️'})")
    ctx.logger.info(f"   aiAktivierungsstatus : {ai_status} ({'✅' if xai_eligible else '⏭️'})")

    if not advoware_eligible and not xai_eligible:
        ctx.logger.warn(f"⚠️ Akte {akte_id} not eligible for any sync")
        return

    await ctx.enqueue({
        'topic': 'akte.sync',
        'data': {
            'akte_id': akte_id,
            'aktennummer': aktennummer,  # may be None for xAI-only Akten
        },
    })
    ctx.logger.info(f"📤 akte.sync emitted (akte_id={akte_id}, aktennummer={aktennummer or '—'})")
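
A minimal sketch (not part of the commit) of how this debounce queue behaves, assuming a local Redis and the key name used by the poller above; the entity ID is made up:

    # Hedged sketch: debounce via a Redis sorted set, as the poller above expects.
    import time
    import redis

    r = redis.Redis()
    PENDING = "akte:pending_entity_ids"

    # Producer side (the webhook): repeated saves just refresh the score,
    # so an entity only becomes eligible 10 s after its *last* change.
    r.zadd(PENDING, {"akte-123": time.time()})

    # Consumer side (the cron): take at most one entry older than the window.
    cutoff = time.time() - 10
    due = r.zrangebyscore(PENDING, min=0, max=cutoff, start=0, num=1)
    print(due)  # [b'akte-123'] once 10 s have passed, [] before that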
src/steps/crm/akte/akte_sync_event_step.py (new file, 435 lines)
@@ -0,0 +1,435 @@
"""
Akte Sync - Event Handler

Unified sync for one CAkten entity across all configured backends:
  - Advoware (3-way merge: Windows ↔ EspoCRM ↔ History)
  - xAI (Blake3 hash-based upload to Collection)

Both run in the same event to keep CDokumente perfectly in sync.

Trigger:  akte.sync { akte_id, aktennummer }
Lock:     Redis per-Akte (30 min TTL, prevents double-sync of same Akte)
Parallel: Different Akten sync simultaneously.

Enqueues:
  - document.generate_preview (after CREATE / UPDATE_ESPO)
"""

from typing import Dict, Any
from datetime import datetime
from motia import FlowContext, queue


config = {
    "name": "Akte Sync - Event Handler",
    "description": "Unified sync for one Akte: Advoware 3-way merge + xAI upload",
    "flows": ["akte-sync"],
    "triggers": [queue("akte.sync")],
    "enqueues": ["document.generate_preview"],
}


# ─────────────────────────────────────────────────────────────────────────────
# Entry point
# ─────────────────────────────────────────────────────────────────────────────

async def handler(event_data: Dict[str, Any], ctx: FlowContext) -> None:
    akte_id = event_data.get('akte_id')
    aktennummer = event_data.get('aktennummer')

    ctx.logger.info("=" * 80)
    ctx.logger.info("🔄 AKTE SYNC STARTED")
    ctx.logger.info(f"   Aktennummer : {aktennummer}")
    ctx.logger.info(f"   EspoCRM ID  : {akte_id}")
    ctx.logger.info("=" * 80)

    from services.redis_client import get_redis_client
    from services.espocrm import EspoCRMAPI

    redis_client = get_redis_client(strict=False)
    if not redis_client:
        ctx.logger.error("❌ Redis unavailable")
        return

    lock_key = f"akte_sync:{akte_id}"
    lock_acquired = redis_client.set(lock_key, datetime.now().isoformat(), nx=True, ex=1800)
    if not lock_acquired:
        ctx.logger.warn(f"⏸️ Lock busy for Akte {akte_id} – requeueing")
        raise RuntimeError(f"Lock busy for akte_id={akte_id}")

    espocrm = EspoCRMAPI(ctx)

    try:
        # ── Load Akte ──────────────────────────────────────────────────────
        akte = await espocrm.get_entity('CAkten', akte_id)
        if not akte:
            ctx.logger.error(f"❌ Akte {akte_id} not found in EspoCRM")
            return

        # aktennummer can come from the event payload OR from the entity
        # (Akten without Advoware have no aktennummer)
        if not aktennummer:
            aktennummer = akte.get('aktennummer')

        sync_schalter = akte.get('syncSchalter', False)
        aktivierungsstatus = str(akte.get('aktivierungsstatus') or '').lower()
        ai_aktivierungsstatus = str(akte.get('aiAktivierungsstatus') or '').lower()

        ctx.logger.info(f"📋 Akte '{akte.get('name')}'")
        ctx.logger.info(f"   syncSchalter         : {sync_schalter}")
        ctx.logger.info(f"   aktivierungsstatus   : {aktivierungsstatus}")
        ctx.logger.info(f"   aiAktivierungsstatus : {ai_aktivierungsstatus}")

        # Advoware sync requires an aktennummer (Akten without Advoware won't have one)
        advoware_enabled = bool(aktennummer) and sync_schalter and aktivierungsstatus in ('import', 'neu', 'new', 'aktiv', 'active')
        xai_enabled = ai_aktivierungsstatus in ('new', 'neu', 'aktiv', 'active')

        ctx.logger.info(f"   Advoware sync : {'✅ ON' if advoware_enabled else '⏭️ OFF'}")
        ctx.logger.info(f"   xAI sync      : {'✅ ON' if xai_enabled else '⏭️ OFF'}")

        if not advoware_enabled and not xai_enabled:
            ctx.logger.info("⏭️ Both syncs disabled – nothing to do")
            return

        # ── ADVOWARE SYNC ──────────────────────────────────────────────────
        advoware_results = None
        if advoware_enabled:
            advoware_results = await _run_advoware_sync(akte, aktennummer, akte_id, espocrm, ctx)

        # ── xAI SYNC ──────────────────────────────────────────────────────
        if xai_enabled:
            await _run_xai_sync(akte, akte_id, espocrm, ctx)

        # ── Final Status ───────────────────────────────────────────────────
        now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        final_update: Dict[str, Any] = {'globalLastSync': now, 'globalSyncStatus': 'synced'}
        if advoware_enabled:
            final_update['syncStatus'] = 'synced'
            final_update['lastSync'] = now
            # 'import' = first sync → switch to 'aktiv' afterwards
            if aktivierungsstatus == 'import':
                final_update['aktivierungsstatus'] = 'aktiv'
                ctx.logger.info("🔄 aktivierungsstatus: import → aktiv")
        if xai_enabled:
            final_update['aiSyncStatus'] = 'synced'
            final_update['aiLastSync'] = now
            # 'new' = collection was just created for the first time → switch to 'aktiv'
            if ai_aktivierungsstatus == 'new':
                final_update['aiAktivierungsstatus'] = 'aktiv'
                ctx.logger.info("🔄 aiAktivierungsstatus: new → aktiv")

        await espocrm.update_entity('CAkten', akte_id, final_update)
        # Clean up processing sets (both queues may have triggered this sync)
        if aktennummer:
            redis_client.srem("advoware:processing_aktennummern", aktennummer)
        redis_client.srem("akte:processing_entity_ids", akte_id)

        ctx.logger.info("=" * 80)
        ctx.logger.info("✅ AKTE SYNC COMPLETE")
        if advoware_results:
            ctx.logger.info(f"   Advoware: created={advoware_results['created']} updated={advoware_results['updated']} deleted={advoware_results['deleted']} errors={advoware_results['errors']}")
        ctx.logger.info("=" * 80)

    except Exception as e:
        ctx.logger.error(f"❌ Sync failed: {e}")
        import traceback
        ctx.logger.error(traceback.format_exc())

        # Requeue for retry (into the appropriate queue(s))
        import time
        now_ts = time.time()
        if aktennummer:
            redis_client.zadd("advoware:pending_aktennummern", {aktennummer: now_ts})
        redis_client.zadd("akte:pending_entity_ids", {akte_id: now_ts})

        try:
            await espocrm.update_entity('CAkten', akte_id, {
                'syncStatus': 'failed',
                'globalSyncStatus': 'failed',
            })
        except Exception:
            pass
        raise

    finally:
        if lock_acquired and redis_client:
            redis_client.delete(lock_key)
            ctx.logger.info(f"🔓 Lock released for Akte {aktennummer}")


# ─────────────────────────────────────────────────────────────────────────────
# Advoware 3-way merge
# ─────────────────────────────────────────────────────────────────────────────

async def _run_advoware_sync(
    akte: Dict[str, Any],
    aktennummer: str,
    akte_id: str,
    espocrm,
    ctx: FlowContext,
) -> Dict[str, int]:
    from services.advoware_watcher_service import AdvowareWatcherService
    from services.advoware_history_service import AdvowareHistoryService
    from services.advoware_service import AdvowareService
    from services.advoware_document_sync_utils import AdvowareDocumentSyncUtils
    from services.blake3_utils import compute_blake3
    import mimetypes

    watcher = AdvowareWatcherService(ctx)
    history_service = AdvowareHistoryService(ctx)
    advoware_service = AdvowareService(ctx)
    sync_utils = AdvowareDocumentSyncUtils(ctx)

    results = {'created': 0, 'updated': 0, 'deleted': 0, 'skipped': 0, 'errors': 0}

    ctx.logger.info("")
    ctx.logger.info("─" * 60)
    ctx.logger.info("📂 ADVOWARE SYNC")
    ctx.logger.info("─" * 60)

    # ── Fetch from all 3 sources ───────────────────────────────────────
    espo_docs_result = await espocrm.list_related('CAkten', akte_id, 'dokumentes')
    espo_docs = espo_docs_result.get('list', [])

    try:
        windows_files = await watcher.get_akte_files(aktennummer)
    except Exception as e:
        ctx.logger.error(f"❌ Windows watcher failed: {e}")
        windows_files = []

    try:
        advo_history = await history_service.get_akte_history(aktennummer)
    except Exception as e:
        ctx.logger.error(f"❌ Advoware history failed: {e}")
        advo_history = []

    ctx.logger.info(f"   EspoCRM docs   : {len(espo_docs)}")
    ctx.logger.info(f"   Windows files  : {len(windows_files)}")
    ctx.logger.info(f"   History entries: {len(advo_history)}")

    # ── Cleanup Windows list (only files in History) ───────────────────
    windows_files = sync_utils.cleanup_file_list(windows_files, advo_history)

    # ── Build indexes by HNR (stable identifier from Advoware) ────────
    espo_by_hnr = {}
    for doc in espo_docs:
        if doc.get('hnr'):
            espo_by_hnr[doc['hnr']] = doc

    history_by_hnr = {}
    for entry in advo_history:
        if entry.get('hNr'):
            history_by_hnr[entry['hNr']] = entry

    windows_by_path = {f.get('path', '').lower(): f for f in windows_files}

    all_hnrs = set(espo_by_hnr.keys()) | set(history_by_hnr.keys())
    ctx.logger.info(f"   Unique HNRs    : {len(all_hnrs)}")

    # ── 3-way merge per HNR ───────────────────────────────────────────
    for hnr in all_hnrs:
        espo_doc = espo_by_hnr.get(hnr)
        history_entry = history_by_hnr.get(hnr)

        windows_file = None
        if history_entry and history_entry.get('datei'):
            windows_file = windows_by_path.get(history_entry['datei'].lower())

        if history_entry and history_entry.get('datei'):
            filename = history_entry['datei'].split('\\')[-1]
        elif espo_doc:
            filename = espo_doc.get('name', f'hnr_{hnr}')
        else:
            filename = f'hnr_{hnr}'

        try:
            action = sync_utils.merge_three_way(espo_doc, windows_file, history_entry)
            ctx.logger.info(f"   [{action.action:12s}] {filename} (hnr={hnr}) – {action.reason}")

            if action.action == 'SKIP':
                results['skipped'] += 1

            elif action.action == 'CREATE':
                if not windows_file:
                    ctx.logger.error(f"   ❌ CREATE: no Windows file for hnr {hnr}")
                    results['errors'] += 1
                    continue

                content = await watcher.download_file(aktennummer, windows_file.get('relative_path', filename))
                blake3_hash = compute_blake3(content)
                mime_type, _ = mimetypes.guess_type(filename)
                mime_type = mime_type or 'application/octet-stream'
                now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

                attachment = await espocrm.upload_attachment_for_file_field(
                    file_content=content,
                    filename=filename,
                    related_type='CDokumente',
                    field='dokument',
                    mime_type=mime_type,
                )
                new_doc = await espocrm.create_entity('CDokumente', {
                    'name': filename,
                    'dokumentId': attachment.get('id'),
                    'hnr': history_entry.get('hNr') if history_entry else None,
                    'advowareArt': (history_entry.get('art', 'Schreiben') or 'Schreiben')[:100] if history_entry else 'Schreiben',
                    'advowareBemerkung': (history_entry.get('text', '') or '')[:255] if history_entry else '',
                    'dateipfad': windows_file.get('path', ''),
                    'blake3hash': blake3_hash,
                    'syncedHash': blake3_hash,
                    'usn': windows_file.get('usn', 0),
                    'syncStatus': 'synced',
                    'lastSyncTimestamp': now,
                    'cAktenId': akte_id,  # Direct FK to CAkten
                })
                doc_id = new_doc.get('id')

                # Link to Akte
                await espocrm.link_entities('CAkten', akte_id, 'dokumentes', doc_id)
                results['created'] += 1

                # Trigger preview
                try:
                    await ctx.emit('document.generate_preview', {
                        'entity_id': doc_id,
                        'entity_type': 'CDokumente',
                    })
                except Exception as e:
                    ctx.logger.warn(f"   ⚠️ Preview trigger failed: {e}")

            elif action.action == 'UPDATE_ESPO':
                if not windows_file:
                    ctx.logger.error(f"   ❌ UPDATE_ESPO: no Windows file for hnr {hnr}")
                    results['errors'] += 1
                    continue

                content = await watcher.download_file(aktennummer, windows_file.get('relative_path', filename))
                blake3_hash = compute_blake3(content)
                mime_type, _ = mimetypes.guess_type(filename)
                mime_type = mime_type or 'application/octet-stream'
                now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

                update_data: Dict[str, Any] = {
                    'name': filename,
                    'blake3hash': blake3_hash,
                    'syncedHash': blake3_hash,
                    'usn': windows_file.get('usn', 0),
                    'dateipfad': windows_file.get('path', ''),
                    'syncStatus': 'synced',
                    'lastSyncTimestamp': now,
                }
                if history_entry:
                    update_data['hnr'] = history_entry.get('hNr')
                    update_data['advowareArt'] = (history_entry.get('art', 'Schreiben') or 'Schreiben')[:100]
                    update_data['advowareBemerkung'] = (history_entry.get('text', '') or '')[:255]

                await espocrm.update_entity('CDokumente', espo_doc['id'], update_data)
                results['updated'] += 1

                # Mark for re-sync to xAI (hash changed)
                if espo_doc.get('aiSyncStatus') == 'synced':
                    await espocrm.update_entity('CDokumente', espo_doc['id'], {
                        'aiSyncStatus': 'unclean',
                    })

                try:
                    await ctx.emit('document.generate_preview', {
                        'entity_id': espo_doc['id'],
                        'entity_type': 'CDokumente',
                    })
                except Exception as e:
                    ctx.logger.warn(f"   ⚠️ Preview trigger failed: {e}")

            elif action.action == 'DELETE':
                if espo_doc:
                    # Only delete if the HNR is genuinely absent from Advoware History
                    # (not just absent from Windows – avoids deleting docs whose file
                    # is temporarily unavailable on the Windows share)
                    if hnr in history_by_hnr:
                        ctx.logger.warn(f"   ⚠️ SKIP DELETE hnr={hnr}: still in Advoware History, only missing from Windows")
                        results['skipped'] += 1
                    else:
                        await espocrm.delete_entity('CDokumente', espo_doc['id'])
                        results['deleted'] += 1

        except Exception as e:
            ctx.logger.error(f"   ❌ Error for hnr {hnr} ({filename}): {e}")
            results['errors'] += 1

    # ── Ablage check + Rubrum sync ─────────────────────────────────────
    try:
        akte_details = await advoware_service.get_akte(aktennummer)
        if akte_details:
            espo_update: Dict[str, Any] = {}

            if akte_details.get('ablage') == 1:
                ctx.logger.info("📁 Akte marked as ablage → deactivating")
                espo_update['aktivierungsstatus'] = 'deaktiviert'

            rubrum = akte_details.get('rubrum')
            if rubrum and rubrum != akte.get('rubrum'):
                espo_update['rubrum'] = rubrum
                ctx.logger.info(f"📝 Rubrum synced: {rubrum[:80]}")

            if espo_update:
                await espocrm.update_entity('CAkten', akte_id, espo_update)
    except Exception as e:
        ctx.logger.warn(f"⚠️ Ablage/Rubrum check failed: {e}")

    return results


# ─────────────────────────────────────────────────────────────────────────────
# xAI sync
# ─────────────────────────────────────────────────────────────────────────────

async def _run_xai_sync(
    akte: Dict[str, Any],
    akte_id: str,
    espocrm,
    ctx: FlowContext,
) -> None:
    from services.xai_service import XAIService
    from services.xai_upload_utils import XAIUploadUtils

    xai = XAIService(ctx)
    upload_utils = XAIUploadUtils(ctx)

    ctx.logger.info("")
    ctx.logger.info("─" * 60)
    ctx.logger.info("🤖 xAI SYNC")
    ctx.logger.info("─" * 60)

    try:
        # ── Ensure collection exists ───────────────────────────────────
        collection_id = await upload_utils.ensure_collection(akte, xai, espocrm)
        if not collection_id:
            ctx.logger.error("❌ Could not obtain xAI collection – aborting xAI sync")
            await espocrm.update_entity('CAkten', akte_id, {'aiSyncStatus': 'failed'})
            return

        # ── Load all linked documents ──────────────────────────────────
        docs_result = await espocrm.list_related('CAkten', akte_id, 'dokumentes')
        docs = docs_result.get('list', [])
        ctx.logger.info(f"   Documents to check: {len(docs)}")

        synced = 0
        skipped = 0
        failed = 0

        for doc in docs:
            ok = await upload_utils.sync_document_to_xai(doc, collection_id, xai, espocrm)
            if ok:
                if doc.get('aiSyncStatus') == 'synced' and doc.get('aiSyncHash') == doc.get('blake3hash'):
                    skipped += 1
                else:
                    synced += 1
            else:
                failed += 1

        ctx.logger.info(f"   ✅ Synced : {synced}")
        ctx.logger.info(f"   ⏭️ Skipped : {skipped}")
        ctx.logger.info(f"   ❌ Failed : {failed}")

    finally:
        await xai.close()
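
The merge decision itself lives in AdvowareDocumentSyncUtils.merge_three_way, which is not part of this diff. For orientation, a hypothetical sketch of rules that would be consistent with how the caller above uses the result (action.action, action.reason, the usn field); every rule and name here is an illustrative assumption, not the shipped logic:

    from dataclasses import dataclass

    @dataclass
    class MergeAction:
        action: str  # SKIP | CREATE | UPDATE_ESPO | DELETE
        reason: str

    def merge_three_way(espo_doc, windows_file, history_entry) -> MergeAction:
        # Hedged sketch: plausible 3-way merge rules, not the real implementation.
        if espo_doc is None:
            # Known to Advoware but not yet in EspoCRM → import it
            return MergeAction("CREATE", "in history, missing in EspoCRM")
        if history_entry is None and windows_file is None:
            # Vanished from Advoware entirely → deletion candidate
            # (the caller double-checks against history before deleting)
            return MergeAction("DELETE", "absent from history and Windows share")
        if windows_file and windows_file.get("usn", 0) != espo_doc.get("usn", 0):
            # USN journal moved on → the file changed on the Windows side
            return MergeAction("UPDATE_ESPO", "usn changed on Windows side")
        return MergeAction("SKIP", "unchanged")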
src/steps/crm/akte/webhooks/__init__.py (new file, 0 lines)
src/steps/crm/akte/webhooks/akte_webhook_step.py (new file, 68 lines)
@@ -0,0 +1,68 @@
"""
Akte Sync - EspoCRM Webhook

Receives EspoCRM webhooks for CAkten (create / update / delete).
Writes the entity ID into the Redis queue `akte:pending_entity_ids`
with a 10-second debounce — the cron poller takes it from there.

Route:   POST /crm/akte/webhook/update
Payload: { "id": "..." } or [{ "id": "..." }, ...]
"""

import json
import time
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "Akte Webhook - EspoCRM",
    "description": "Empfängt EspoCRM-Webhooks für CAkten und queued Entity-IDs für den Sync",
    "flows": ["akte-sync"],
    "triggers": [http("POST", "/crm/akte/webhook/update")],
    "enqueues": [],
}

PENDING_KEY = "akte:pending_entity_ids"


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    try:
        payload = request.body or {}

        ctx.logger.info("=" * 60)
        ctx.logger.info("📥 AKTE WEBHOOK")
        ctx.logger.info(f"   Payload: {json.dumps(payload, ensure_ascii=False)[:200]}")

        # ── Collect entity IDs ─────────────────────────────────────
        entity_ids: set[str] = set()
        if isinstance(payload, list):
            for item in payload:
                if isinstance(item, dict) and 'id' in item:
                    entity_ids.add(item['id'])
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])

        if not entity_ids:
            ctx.logger.warn("⚠️ No entity IDs in payload")
            return ApiResponse(status_code=400, body={"error": "No entity ID found in payload"})

        # ── Push to Redis with current timestamp (debounce in cron) ─
        from services.redis_client import get_redis_client
        redis_client = get_redis_client(strict=False)
        if not redis_client:
            ctx.logger.error("❌ Redis unavailable")
            return ApiResponse(status_code=503, body={"error": "Service unavailable"})

        ts = time.time()
        mapping = {eid: ts for eid in entity_ids}
        redis_client.zadd(PENDING_KEY, mapping)

        ctx.logger.info(f"✅ Queued {len(entity_ids)} entity ID(s): {entity_ids}")
        ctx.logger.info("=" * 60)

        return ApiResponse(status_code=200, body={"queued": len(entity_ids)})

    except Exception as e:
        ctx.logger.error(f"❌ Webhook error: {e}")
        return ApiResponse(status_code=500, body={"error": str(e)})
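
A quick way to exercise the endpoint locally (a sketch; host, port, and the IDs are assumptions):

    import requests

    base = "http://localhost:3000"

    # Single-entity payload
    r = requests.post(f"{base}/crm/akte/webhook/update", json={"id": "abc123"})
    print(r.status_code, r.json())  # 200 {'queued': 1}

    # Batch payload — duplicate IDs collapse into one queue entry
    r = requests.post(
        f"{base}/crm/akte/webhook/update",
        json=[{"id": "abc123"}, {"id": "def456"}, {"id": "abc123"}],
    )
    print(r.status_code, r.json())  # 200 {'queued': 2}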
src/steps/crm/bankverbindungen/__init__.py (new file, 0 lines)
@@ -0,0 +1,254 @@
"""
VMH Bankverbindungen Sync Handler

Central sync handler for Bankverbindungen (webhooks + cron events)

Handles:
- vmh.bankverbindungen.create: New in EspoCRM → create in Advoware
- vmh.bankverbindungen.update: Changed in EspoCRM → notification (not supported)
- vmh.bankverbindungen.delete: Deleted in EspoCRM → notification (not supported)
- vmh.bankverbindungen.sync_check: Cron check → sync if needed
"""

from typing import Dict, Any, Optional
from motia import FlowContext, queue
from services.advoware import AdvowareAPI
from services.espocrm import EspoCRMAPI
from services.bankverbindungen_mapper import BankverbindungenMapper
from services.notification_utils import NotificationManager
from services.redis_client import get_redis_client
import json

config = {
    "name": "VMH Bankverbindungen Sync Handler",
    "description": "Zentraler Sync-Handler für Bankverbindungen (Webhooks + Cron Events)",
    "flows": ["vmh-bankverbindungen"],
    "triggers": [
        queue("vmh.bankverbindungen.create"),
        queue("vmh.bankverbindungen.update"),
        queue("vmh.bankverbindungen.delete"),
        queue("vmh.bankverbindungen.sync_check")
    ],
    "enqueues": []
}


async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> None:
    """Central sync handler for Bankverbindungen"""

    entity_id = event_data.get('entity_id')
    action = event_data.get('action', 'sync_check')
    source = event_data.get('source', 'unknown')

    if not entity_id:
        ctx.logger.error("Keine entity_id im Event gefunden")
        return

    ctx.logger.info(f"🔄 Bankverbindungen Sync gestartet: {action.upper()} | Entity: {entity_id} | Source: {source}")

    # Shared Redis client (centralized factory)
    redis_client = get_redis_client(strict=False)

    # Initialize APIs (with context for better logging)
    espocrm = EspoCRMAPI(ctx)
    advoware = AdvowareAPI(ctx)
    mapper = BankverbindungenMapper()
    notification_mgr = NotificationManager(espocrm_api=espocrm, context=ctx)

    try:
        # 1. ACQUIRE LOCK
        lock_key = f"sync_lock:cbankverbindungen:{entity_id}"
        acquired = redis_client.set(lock_key, "locked", nx=True, ex=900)  # 15 min TTL

        if not acquired:
            ctx.logger.warn(f"⏸️ Sync bereits aktiv für {entity_id}, überspringe")
            return

        # 2. FETCH ENTITY FROM ESPOCRM
        try:
            espo_entity = await espocrm.get_entity('CBankverbindungen', entity_id)
        except Exception as e:
            ctx.logger.error(f"❌ Fehler beim Laden von EspoCRM Entity: {e}")
            redis_client.delete(lock_key)
            return

        ctx.logger.info(f"📋 Entity geladen: {espo_entity.get('name', 'Unbenannt')} (IBAN: {espo_entity.get('iban', 'N/A')})")

        advoware_id = espo_entity.get('advowareId')
        beteiligte_id = espo_entity.get('cBeteiligteId')  # Parent Beteiligter

        if not beteiligte_id:
            ctx.logger.error(f"❌ Keine cBeteiligteId gefunden - Bankverbindung muss einem Beteiligten zugeordnet sein")
            redis_client.delete(lock_key)
            return

        # Fetch betNr from the parent
        parent = await espocrm.get_entity('CBeteiligte', beteiligte_id)
        betnr = parent.get('betnr')

        if not betnr:
            ctx.logger.error(f"❌ Parent Beteiligter {beteiligte_id} hat keine betNr")
            redis_client.delete(lock_key)
            return

        # 3. DETERMINE SYNC ACTION

        # CASE A: New (no advowareId) → CREATE in Advoware
        if not advoware_id and action in ['create', 'sync_check']:
            await handle_create(entity_id, betnr, espo_entity, espocrm, advoware, mapper, ctx, redis_client, lock_key)

        # CASE B: Exists (has advowareId) → UPDATE or CHECK (not supported!)
        elif advoware_id and action in ['update', 'sync_check']:
            await handle_update(entity_id, betnr, advoware_id, espo_entity, espocrm, notification_mgr, ctx, redis_client, lock_key)

        # CASE C: DELETE (not supported!)
        elif action == 'delete':
            await handle_delete(entity_id, betnr, advoware_id, espo_entity, espocrm, notification_mgr, ctx, redis_client, lock_key)

        else:
            ctx.logger.warn(f"⚠️ Unbekannte Kombination: action={action}, advowareId={advoware_id}")
            redis_client.delete(lock_key)

    except Exception as e:
        ctx.logger.error(f"❌ Unerwarteter Fehler im Sync-Handler: {e}")
        import traceback
        ctx.logger.error(traceback.format_exc())

        try:
            redis_client.delete(lock_key)
        except Exception:
            pass


async def handle_create(entity_id, betnr, espo_entity, espocrm, advoware, mapper, ctx, redis_client, lock_key) -> None:
    """Creates a new Bankverbindung in Advoware"""
    try:
        ctx.logger.info(f"🔨 CREATE Bankverbindung in Advoware für Beteiligter {betnr}...")

        advo_data = mapper.map_cbankverbindungen_to_advoware(espo_entity)

        ctx.logger.info(f"📤 Sende an Advoware: {json.dumps(advo_data, ensure_ascii=False)[:200]}...")

        # POST to Advoware (Beteiligte-specific endpoint!)
        result = await advoware.api_call(
            f'api/v1/advonet/Beteiligte/{betnr}/Bankverbindungen',
            method='POST',
            json_data=advo_data
        )

        # Extract ID and rowId
        if isinstance(result, list) and len(result) > 0:
            new_entity = result[0]
        elif isinstance(result, dict):
            new_entity = result
        else:
            raise Exception(f"Unexpected response format: {result}")

        new_id = new_entity.get('id')
        new_rowid = new_entity.get('rowId')

        if not new_id:
            raise Exception(f"Keine ID in Advoware Response: {result}")

        ctx.logger.info(f"✅ In Advoware erstellt: ID={new_id}, rowId={new_rowid[:20] if new_rowid else 'N/A'}...")

        # Write advowareId + rowId back
        await espocrm.update_entity('CBankverbindungen', entity_id, {
            'advowareId': new_id,
            'advowareRowId': new_rowid
        })

        redis_client.delete(lock_key)
        ctx.logger.info(f"✅ CREATE erfolgreich: {entity_id} → Advoware ID {new_id}")

    except Exception as e:
        ctx.logger.error(f"❌ CREATE fehlgeschlagen: {e}")
        redis_client.delete(lock_key)


async def handle_update(entity_id, betnr, advoware_id, espo_entity, espocrm, notification_mgr, ctx, redis_client, lock_key) -> None:
    """Update not possible - sends a notification to the user"""
    try:
        ctx.logger.warn(f"⚠️ UPDATE: Advoware API unterstützt kein PUT für Bankverbindungen")

        iban = espo_entity.get('iban', 'N/A')
        bank = espo_entity.get('bank', 'N/A')
        name = espo_entity.get('name', 'Unbenannt')

        # Send notification
        await notification_mgr.notify_manual_action_required(
            entity_type='CBankverbindungen',
            entity_id=entity_id,
            action_type='general_manual_action',
            details={
                'message': f'UPDATE nicht möglich für Bankverbindung: {name}',
                'description': (
                    f"Die Advoware API unterstützt keine Updates für Bankverbindungen.\n\n"
                    f"**Details:**\n"
                    f"- Bank: {bank}\n"
                    f"- IBAN: {iban}\n"
                    f"- Beteiligter betNr: {betnr}\n"
                    f"- Advoware ID: {advoware_id}\n\n"
                    f"**Workaround:**\n"
                    f"Löschen Sie die Bankverbindung in EspoCRM und erstellen Sie sie neu. "
                    f"Die neue Bankverbindung wird dann automatisch in Advoware angelegt."
                ),
                'entity_name': name,
                'priority': 'Normal'
            },
            create_task=True
        )

        ctx.logger.info(f"📧 Notification gesendet: Update-Limitation")
        redis_client.delete(lock_key)

    except Exception as e:
        ctx.logger.error(f"❌ UPDATE Notification fehlgeschlagen: {e}")
        import traceback
        ctx.logger.error(traceback.format_exc())
        redis_client.delete(lock_key)


async def handle_delete(entity_id, betnr, advoware_id, espo_entity, espocrm, notification_mgr, ctx, redis_client, lock_key) -> None:
    """Delete not possible - sends a notification to the user"""
    try:
        ctx.logger.warn(f"⚠️ DELETE: Advoware API unterstützt kein DELETE für Bankverbindungen")

        if not advoware_id:
            ctx.logger.info(f"ℹ️ Keine advowareId vorhanden, nur EspoCRM-seitiges Delete")
            redis_client.delete(lock_key)
            return

        iban = espo_entity.get('iban', 'N/A')
        bank = espo_entity.get('bank', 'N/A')
        name = espo_entity.get('name', 'Unbenannt')

        # Send notification
        await notification_mgr.notify_manual_action_required(
            entity_type='CBankverbindungen',
            entity_id=entity_id,
            action_type='general_manual_action',
            details={
                'message': f'DELETE erforderlich für Bankverbindung: {name}',
                'description': (
                    f"Die Advoware API unterstützt keine Löschungen für Bankverbindungen.\n\n"
                    f"**Bitte manuell in Advoware löschen:**\n"
                    f"- Bank: {bank}\n"
                    f"- IBAN: {iban}\n"
                    f"- Beteiligter betNr: {betnr}\n"
                    f"- Advoware ID: {advoware_id}\n\n"
                    f"Die Bankverbindung wurde in EspoCRM gelöscht, bleibt aber in Advoware "
                    f"bestehen bis zur manuellen Löschung."
                ),
                'entity_name': name,
                'priority': 'Normal'
            },
            create_task=True
        )

        ctx.logger.info(f"📧 Notification gesendet: Delete erforderlich")
        redis_client.delete(lock_key)

    except Exception as e:
        ctx.logger.error(f"❌ DELETE Notification fehlgeschlagen: {e}")
        redis_client.delete(lock_key)
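
The per-entity lock used here is the classic SET NX EX pattern. A minimal standalone sketch (assuming a local Redis; the key format and TTL are taken from the handler above, the entity ID is made up):

    import redis

    r = redis.Redis()
    lock_key = "sync_lock:cbankverbindungen:abc123"

    # nx=True → only set if absent; ex=900 → auto-expire after 15 min,
    # so a crashed worker cannot block the entity forever.
    if r.set(lock_key, "locked", nx=True, ex=900):
        try:
            pass  # run the sync here
        finally:
            r.delete(lock_key)
    else:
        print("sync already running for this entity")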
src/steps/crm/bankverbindungen/webhooks/__init__.py (new file, 0 lines)
@@ -0,0 +1,76 @@
"""VMH Webhook - Bankverbindungen Create"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Bankverbindungen Create",
    "description": "Receives create webhooks from EspoCRM for Bankverbindungen",
    "flows": ["vmh-bankverbindungen"],
    "triggers": [
        http("POST", "/crm/bankverbindungen/webhook/create")
    ],
    "enqueues": ["vmh.bankverbindungen.create"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Bankverbindungen creation in EspoCRM.
    """
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: BANKVERBINDUNGEN CREATE")
        ctx.logger.info("=" * 80)
        ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        ctx.logger.info("=" * 80)

        # Collect all IDs from batch
        entity_ids = set()

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])

        ctx.logger.info(f"{len(entity_ids)} IDs found for create sync")

        # Emit events
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.bankverbindungen.create',
                'data': {
                    'entity_id': entity_id,
                    'action': 'create',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })

        ctx.logger.info("✅ VMH Create Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'create',
                'ids_count': len(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: BANKVERBINDUNGEN CREATE WEBHOOK")
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error("=" * 80)
        return ApiResponse(
            status=500,
            body={'error': 'Internal server error', 'details': str(e)}
        )
@@ -0,0 +1,76 @@
"""VMH Webhook - Bankverbindungen Delete"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Bankverbindungen Delete",
    "description": "Receives delete webhooks from EspoCRM for Bankverbindungen",
    "flows": ["vmh-bankverbindungen"],
    "triggers": [
        http("POST", "/crm/bankverbindungen/webhook/delete")
    ],
    "enqueues": ["vmh.bankverbindungen.delete"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Bankverbindungen deletion in EspoCRM.
    """
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: BANKVERBINDUNGEN DELETE")
        ctx.logger.info("=" * 80)
        ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        ctx.logger.info("=" * 80)

        # Collect all IDs
        entity_ids = set()

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])

        ctx.logger.info(f"{len(entity_ids)} IDs found for delete sync")

        # Emit events
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.bankverbindungen.delete',
                'data': {
                    'entity_id': entity_id,
                    'action': 'delete',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })

        ctx.logger.info("✅ VMH Delete Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'delete',
                'ids_count': len(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: BANKVERBINDUNGEN DELETE WEBHOOK")
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error("=" * 80)
        return ApiResponse(
            status=500,
            body={'error': 'Internal server error', 'details': str(e)}
        )
@@ -0,0 +1,76 @@
"""VMH Webhook - Bankverbindungen Update"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Bankverbindungen Update",
    "description": "Receives update webhooks from EspoCRM for Bankverbindungen",
    "flows": ["vmh-bankverbindungen"],
    "triggers": [
        http("POST", "/crm/bankverbindungen/webhook/update")
    ],
    "enqueues": ["vmh.bankverbindungen.update"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Bankverbindungen updates in EspoCRM.
    """
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: BANKVERBINDUNGEN UPDATE")
        ctx.logger.info("=" * 80)
        ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        ctx.logger.info("=" * 80)

        # Collect all IDs
        entity_ids = set()

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])

        ctx.logger.info(f"{len(entity_ids)} IDs found for update sync")

        # Emit events
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.bankverbindungen.update',
                'data': {
                    'entity_id': entity_id,
                    'action': 'update',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })

        ctx.logger.info("✅ VMH Update Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'update',
                'ids_count': len(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: BANKVERBINDUNGEN UPDATE WEBHOOK")
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error("=" * 80)
        return ApiResponse(
            status=500,
            body={'error': 'Internal server error', 'details': str(e)}
        )
src/steps/crm/beteiligte/__init__.py (new file, 0 lines)
src/steps/crm/beteiligte/beteiligte_sync_cron_step.py (new file, 164 lines)
@@ -0,0 +1,164 @@
"""
Beteiligte Sync Cron Job

Runs every 15 minutes and emits sync events for Beteiligte that:
- are new (pending_sync)
- have been changed (dirty)
- have failed (failed → retry)
- have not been synced for a long time (clean but > 24 h old)
"""

import asyncio
from typing import Dict, Any
from motia import FlowContext, cron
from services.espocrm import EspoCRMAPI
import datetime

config = {
    "name": "VMH Beteiligte Sync Cron",
    "description": "Prüft alle 15 Minuten welche Beteiligte synchronisiert werden müssen",
    "flows": ["vmh-beteiligte"],
    "triggers": [
        cron("0 */15 * * * *")  # Every 15 minutes (6-field format!)
    ],
    "enqueues": ["vmh.beteiligte.sync_check"]
}


async def handler(input_data: Dict[str, Any], ctx: FlowContext) -> None:
    """
    Cron handler: finds all Beteiligte that need a sync and emits events
    """
    ctx.logger.info("🕐 Beteiligte Sync Cron gestartet")

    try:
        espocrm = EspoCRMAPI(ctx)

        # Compute the threshold for "stale" syncs (24 hours)
        threshold = datetime.datetime.now() - datetime.timedelta(hours=24)
        threshold_str = threshold.strftime('%Y-%m-%d %H:%M:%S')

        ctx.logger.info(f"📅 Suche Entities mit Sync-Bedarf (älter als {threshold_str})")

        # QUERY 1: entities with status pending_sync, dirty, or failed
        unclean_filter = {
            'where': [
                {
                    'type': 'or',
                    'value': [
                        {'type': 'equals', 'attribute': 'syncStatus', 'value': 'pending_sync'},
                        {'type': 'equals', 'attribute': 'syncStatus', 'value': 'dirty'},
                        {'type': 'equals', 'attribute': 'syncStatus', 'value': 'failed'},
                    ]
                }
            ]
        }

        unclean_result = await espocrm.list_entities('CBeteiligte', where=unclean_filter['where'], max_size=100)
        unclean_entities = unclean_result.get('list', [])

        ctx.logger.info(f"📊 Gefunden: {len(unclean_entities)} Entities mit Status pending/dirty/failed")

        # QUERY 1b: permanently_failed entities that have reached their auto-reset time
        permanently_failed_filter = {
            'where': [
                {
                    'type': 'and',
                    'value': [
                        {'type': 'equals', 'attribute': 'syncStatus', 'value': 'permanently_failed'},
                        {'type': 'isNotNull', 'attribute': 'syncAutoResetAt'},
                        {'type': 'before', 'attribute': 'syncAutoResetAt', 'value': threshold_str}
                    ]
                }
            ]
        }

        reset_result = await espocrm.list_entities('CBeteiligte', where=permanently_failed_filter['where'], max_size=50)
        reset_entities = reset_result.get('list', [])

        # Reset permanently_failed entities
        for entity in reset_entities:
            entity_id = entity['id']
            ctx.logger.info(f"🔄 Auto-Reset für permanently_failed Entity {entity_id}")

            # Reset status and retry count
            await espocrm.update_entity('CBeteiligte', entity_id, {
                'syncStatus': 'failed',  # back to 'failed' for the normal retry path
                'syncRetryCount': 0,
                'syncAutoResetAt': None,
                'syncErrorMessage': f"Auto-Reset nach 24h - vorheriger Fehler: {entity.get('syncErrorMessage', 'N/A')}"
            })

        ctx.logger.info(f"📊 Auto-Reset: {len(reset_entities)} permanently_failed Entities")

        # QUERY 2: clean entities that have not been synced for > 24 h
        stale_filter = {
            'where': [
                {
                    'type': 'and',
                    'value': [
                        {'type': 'equals', 'attribute': 'syncStatus', 'value': 'clean'},
                        {'type': 'isNotNull', 'attribute': 'betnr'},
                        {
                            'type': 'or',
                            'value': [
                                {'type': 'isNull', 'attribute': 'advowareLastSync'},
                                {'type': 'before', 'attribute': 'advowareLastSync', 'value': threshold_str}
                            ]
                        }
                    ]
                }
            ]
        }

        stale_result = await espocrm.list_entities('CBeteiligte', where=stale_filter['where'], max_size=50)
        stale_entities = stale_result.get('list', [])

        ctx.logger.info(f"📊 Gefunden: {len(stale_entities)} Entities mit veraltetem Sync (> 24h)")

        # COMBINE ALL (incl. reset_entities)
        all_entities = unclean_entities + stale_entities + reset_entities
        entity_ids = list(set([e['id'] for e in all_entities]))  # deduplicate

        ctx.logger.info(f"🎯 Total: {len(entity_ids)} eindeutige Entities zum Sync")

        if not entity_ids:
            ctx.logger.info("✅ Keine Entities benötigen Sync")
            return

        # Emit events in parallel
        ctx.logger.info(f"🚀 Emittiere {len(entity_ids)} Events parallel...")

        emit_tasks = [
            ctx.enqueue({
                'topic': 'vmh.beteiligte.sync_check',
                'data': {
                    'entity_id': entity_id,
                    'action': 'sync_check',
                    'source': 'cron',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })
            for entity_id in entity_ids
        ]

        # Parallel emit with error handling
        results = await asyncio.gather(*emit_tasks, return_exceptions=True)

        # Count successes and failures
        emitted_count = sum(1 for r in results if not isinstance(r, Exception))
        failed_count = sum(1 for r in results if isinstance(r, Exception))

        if failed_count > 0:
            ctx.logger.warn(f"⚠️ {failed_count} Events konnten nicht emittiert werden")
            # Log at most the first 5 errors (scan all results, not just the first 5)
            logged = 0
            for eid, result in zip(entity_ids, results):
                if isinstance(result, Exception):
                    ctx.logger.error(f"   Entity {eid}: {result}")
                    logged += 1
                    if logged >= 5:
                        break

        ctx.logger.info(f"✅ Cron fertig: {emitted_count}/{len(entity_ids)} Events emittiert")

    except Exception as e:
        ctx.logger.error(f"❌ Fehler im Sync Cron: {e}")
        import traceback
        ctx.logger.error(traceback.format_exc())
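
For reference, the 6-field cron format the comment above points at puts seconds first (a sketch, not part of the commit):

    # ┌ second   ┌ minute   ┌ hour   ┌ day-of-month   ┌ month   ┌ weekday
    #   0          */15       *        *                *          *
    # → fires at second 0 of every 15th minute: 00:00, 00:15, 00:30, ...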
src/steps/crm/beteiligte/beteiligte_sync_event_step.py (new file, 423 lines)
@@ -0,0 +1,423 @@
|
||||
"""
|
||||
VMH Beteiligte Sync Handler
|
||||
|
||||
Zentraler Sync-Handler für Beteiligte (Webhooks + Cron Events)
|
||||
|
||||
Verarbeitet:
|
||||
- vmh.beteiligte.create: Neu in EspoCRM → Create in Advoware
|
||||
- vmh.beteiligte.update: Geändert in EspoCRM → Update in Advoware
|
||||
- vmh.beteiligte.delete: Gelöscht in EspoCRM → Delete in Advoware (TODO)
|
||||
- vmh.beteiligte.sync_check: Cron-Check → Sync wenn nötig
|
||||
"""
|
||||
|
||||
from typing import Dict, Any, Optional
|
||||
from motia import FlowContext, queue
|
||||
from services.advoware import AdvowareAPI
|
||||
from services.advoware_service import AdvowareService
|
||||
from services.espocrm import EspoCRMAPI
|
||||
from services.espocrm_mapper import BeteiligteMapper
|
||||
from services.beteiligte_sync_utils import BeteiligteSync
|
||||
from services.redis_client import get_redis_client
|
||||
from services.exceptions import (
|
||||
AdvowareAPIError,
|
||||
EspoCRMAPIError,
|
||||
SyncError,
|
||||
RetryableError,
|
||||
is_retryable
|
||||
)
|
||||
from services.logging_utils import get_step_logger
|
||||
import json
|
||||
|
||||
config = {
|
||||
"name": "VMH Beteiligte Sync Handler",
|
||||
"description": "Zentraler Sync-Handler für Beteiligte (Webhooks + Cron Events)",
|
||||
"flows": ["vmh-beteiligte"],
|
||||
"triggers": [
|
||||
queue("vmh.beteiligte.create"),
|
||||
queue("vmh.beteiligte.update"),
|
||||
queue("vmh.beteiligte.delete"),
|
||||
queue("vmh.beteiligte.sync_check")
|
||||
],
|
||||
"enqueues": []
|
||||
}
|
||||
|
||||
|
||||
async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> None:
|
||||
"""
|
||||
Zentraler Sync-Handler für Beteiligte
|
||||
|
||||
Args:
|
||||
event_data: Event data mit entity_id, action, source
|
||||
ctx: Motia FlowContext
|
||||
"""
|
||||
entity_id = event_data.get('entity_id')
|
||||
action = event_data.get('action')
|
||||
source = event_data.get('source')
|
||||
|
||||
step_logger = get_step_logger('beteiligte_sync', ctx)
|
||||
|
||||
if not entity_id:
|
||||
step_logger.error("Keine entity_id im Event gefunden")
|
||||
return
|
||||
|
||||
step_logger.info("=" * 80)
|
||||
step_logger.info(f"🔄 BETEILIGTE SYNC HANDLER: {action.upper()}")
|
||||
step_logger.info("=" * 80)
|
||||
step_logger.info(f"Entity: {entity_id} | Source: {source}")
|
||||
step_logger.info("=" * 80)
|
||||
|
||||
# Get shared Redis client (centralized)
|
||||
redis_client = get_redis_client(strict=False)
|
||||
|
||||
# APIs initialisieren
|
||||
espocrm = EspoCRMAPI(ctx)
|
||||
advoware = AdvowareAPI(ctx)
|
||||
sync_utils = BeteiligteSync(espocrm, redis_client, ctx)
|
||||
mapper = BeteiligteMapper()
|
||||
|
||||
# NOTE: Kommunikation Sync Manager wird in zukünftiger Version hinzugefügt
|
||||
# wenn kommunikation_sync_utils.py migriert ist
|
||||
# advo_service = AdvowareService(ctx)
|
||||
# komm_sync = KommunikationSyncManager(advo_service, espocrm, ctx)
|
||||
|
||||
try:
|
||||
# 1. ACQUIRE LOCK (verhindert parallele Syncs)
|
||||
lock_acquired = await sync_utils.acquire_sync_lock(entity_id)
|
||||
|
||||
if not lock_acquired:
|
||||
ctx.logger.warn(f"⏸️ Sync bereits aktiv für {entity_id}, überspringe")
|
||||
return
|
||||
|
||||
# Lock erfolgreich acquired - MUSS im finally block released werden!
|
||||
try:
|
||||
# 2. FETCH ENTITY VON ESPOCRM
|
||||
try:
|
||||
espo_entity = await espocrm.get_entity('CBeteiligte', entity_id)
|
||||
except Exception as e:
|
||||
ctx.logger.error(f"❌ Fehler beim Laden von EspoCRM Entity: {e}")
|
||||
await sync_utils.release_sync_lock(entity_id, 'failed', str(e), increment_retry=True)
|
||||
return
|
||||
|
||||
ctx.logger.info(f"📋 Entity geladen: {espo_entity.get('name')} (betnr: {espo_entity.get('betnr')})")
|
||||
|
||||
betnr = espo_entity.get('betnr')
|
||||
sync_status = espo_entity.get('syncStatus', 'pending_sync')
|
||||
|
||||
# Check Retry-Backoff - überspringe wenn syncNextRetry noch nicht erreicht
|
||||
sync_next_retry = espo_entity.get('syncNextRetry')
|
||||
            if sync_next_retry and sync_status == 'failed':
                import datetime
                import pytz

                try:
                    next_retry_ts = datetime.datetime.strptime(sync_next_retry, '%Y-%m-%d %H:%M:%S')
                    next_retry_ts = pytz.UTC.localize(next_retry_ts)
                    now_utc = datetime.datetime.now(pytz.UTC)

                    if now_utc < next_retry_ts:
                        remaining_minutes = int((next_retry_ts - now_utc).total_seconds() / 60)
                        ctx.logger.info(f"⏸️ Retry backoff active: next attempt in {remaining_minutes} minutes")
                        await sync_utils.release_sync_lock(entity_id, sync_status)
                        return
                except Exception as e:
                    ctx.logger.warn(f"⚠️ Failed to parse syncNextRetry: {e}")

            # 3. DETERMINE SYNC ACTION

            # CASE A: new (no betnr) → CREATE in Advoware
            if not betnr and action in ['create', 'sync_check']:
                ctx.logger.info("🆕 New Beteiligter → CREATE in Advoware")
                await handle_create(entity_id, espo_entity, espocrm, advoware, sync_utils, mapper, ctx)

            # CASE B: exists (has betnr) → UPDATE or CHECK
            elif betnr:
                ctx.logger.info(f"♻️ Existing Beteiligter (betNr: {betnr}) → UPDATE/CHECK")
                await handle_update(entity_id, betnr, espo_entity, espocrm, advoware, sync_utils, mapper, ctx)

            # CASE C: DELETE (TODO: implement later)
            elif action == 'delete':
                ctx.logger.warn(f"🗑️ DELETE not implemented yet for {entity_id}")
                await sync_utils.release_sync_lock(entity_id, 'failed', 'Delete operation not implemented')

            else:
                ctx.logger.warn(f"⚠️ Unknown combination: action={action}, betnr={betnr}")
                await sync_utils.release_sync_lock(entity_id, 'failed', f'Unknown action: {action}')

        except Exception as e:
            # Unexpected error during sync - GUARANTEE the lock release
            ctx.logger.error(f"❌ Unexpected error in sync handler: {e}")
            import traceback
            ctx.logger.error(traceback.format_exc())

            try:
                await sync_utils.release_sync_lock(
                    entity_id,
                    'failed',
                    f'Unexpected error: {str(e)[:1900]}',
                    increment_retry=True
                )
            except Exception as release_error:
                # Even the lock release failed - log a critical error
                ctx.logger.critical(f"🚨 CRITICAL: lock release failed for {entity_id}: {release_error}")
                # Force Redis lock release
                try:
                    lock_key = f"sync_lock:cbeteiligte:{entity_id}"
                    redis_client.delete(lock_key)
                    ctx.logger.info(f"✅ Redis lock released manually: {lock_key}")
                except Exception:
                    pass

    except Exception as e:
        # Error BEFORE lock acquire - no lock release needed
        ctx.logger.error(f"❌ Error before lock acquire: {e}")
        import traceback
        ctx.logger.error(traceback.format_exc())


async def handle_create(entity_id, espo_entity, espocrm, advoware, sync_utils, mapper, ctx) -> None:
    """Creates a new Beteiligter in Advoware"""
    try:
        ctx.logger.info("🔨 CREATE in Advoware...")

        # Transform to Advoware format
        advo_data = mapper.map_cbeteiligte_to_advoware(espo_entity)

        ctx.logger.info(f"📤 Sending to Advoware: {json.dumps(advo_data, ensure_ascii=False)[:200]}...")

        # POST to Advoware
        result = await advoware.api_call(
            'api/v1/advonet/Beteiligte',
            method='POST',
            json_data=advo_data
        )

        # Extract betNr from the response (case-insensitive: betNr or betnr)
        new_betnr = None
        if isinstance(result, dict):
            new_betnr = result.get('betNr') or result.get('betnr')

        if not new_betnr:
            raise Exception(f"No betNr/betnr in Advoware response: {result}")

        ctx.logger.info(f"✅ Created in Advoware: betNr={new_betnr}")

        # Re-fetch the entity after the POST to obtain its rowId (IMPORTANT for change detection!)
        created_entity = await advoware.api_call(
            f'api/v1/advonet/Beteiligte/{new_betnr}',
            method='GET'
        )

        if isinstance(created_entity, list):
            new_rowid = created_entity[0].get('rowId') if created_entity else None
        else:
            new_rowid = created_entity.get('rowId')

        if not new_rowid:
            ctx.logger.warn("⚠️ No rowId after CREATE - change detection not possible!")

        # OPTIMIZED: combine release_lock + betnr + rowId update into a single API call
        await sync_utils.release_sync_lock(
            entity_id,
            'clean',
            error_message=None,
            extra_fields={
                'betnr': new_betnr,
                'advowareRowId': new_rowid  # IMPORTANT for change detection!
            }
        )

        ctx.logger.info(f"✅ CREATE successful: {entity_id} → betNr {new_betnr}, rowId {new_rowid[:20] if new_rowid else 'N/A'}...")

    except Exception as e:
        ctx.logger.error(f"❌ CREATE failed: {e}")
        await sync_utils.release_sync_lock(entity_id, 'failed', str(e), increment_retry=True)


async def handle_update(entity_id, betnr, espo_entity, espocrm, advoware, sync_utils, mapper, ctx) -> None:
    """Synchronizes an existing Beteiligter"""
    try:
        ctx.logger.info(f"🔍 Fetching from Advoware betNr={betnr}...")

        # Fetch from Advoware
        try:
            advo_result = await advoware.api_call(
                f'api/v1/advonet/Beteiligte/{betnr}',
                method='GET'
            )

            # Advoware sometimes returns lists
            if isinstance(advo_result, list):
                advo_entity = advo_result[0] if advo_result else None
            else:
                advo_entity = advo_result

            if not advo_entity:
                raise Exception(f"Beteiligter betNr={betnr} not found")

        except Exception as e:
            # 404 or other error → the Beteiligter was deleted in Advoware
            # ('nicht gefunden' is kept to also match German Advoware error texts)
            err = str(e).lower()
            if '404' in err or 'not found' in err or 'nicht gefunden' in err:
                ctx.logger.warn(f"🗑️ Beteiligter deleted in Advoware: betNr={betnr}")
                await sync_utils.handle_advoware_deleted(entity_id, str(e))
                return
            else:
                raise

        ctx.logger.info(f"📥 Loaded from Advoware: {advo_entity.get('name')}")

        # CHANGE DETECTION (primary: rowId, fallback: timestamps)
        comparison = sync_utils.compare_entities(espo_entity, advo_entity)
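        # compare_entities is expected to return one of
        # 'no_change' | 'espocrm_newer' | 'advoware_newer' | 'conflict'
        # (inferred from the branches handled below).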
ctx.logger.info(f"⏱️ Vergleich: {comparison}")
|
||||
|
||||
# KEIN STAMMDATEN-SYNC NÖTIG
|
||||
if comparison == 'no_change':
|
||||
ctx.logger.info(f"✅ Keine Stammdaten-Änderungen erkannt")
|
||||
|
||||
# NOTE: Kommunikation-Sync würde hier stattfinden
|
||||
# await run_kommunikation_sync(entity_id, betnr, komm_sync, ctx)
|
||||
|
||||
await sync_utils.release_sync_lock(entity_id, 'clean')
|
||||
return
|
||||
|
||||
# ESPOCRM NEUER → Update Advoware
|
||||
if comparison == 'espocrm_newer':
|
||||
ctx.logger.info(f"📤 EspoCRM ist neuer → Update Advoware STAMMDATEN")
|
||||
|
||||
# OPTIMIERT: Use merge utility
|
||||
merged_data = sync_utils.merge_for_advoware_put(advo_entity, espo_entity, mapper)
|
||||
|
||||
put_result = await advoware.api_call(
|
||||
f'api/v1/advonet/Beteiligte/{betnr}',
|
||||
method='PUT',
|
||||
json_data=merged_data
|
||||
)
|
||||
|
||||
# Extrahiere neue rowId aus PUT Response (spart extra GET!)
|
||||
new_rowid = None
|
||||
if isinstance(put_result, list) and len(put_result) > 0:
|
||||
new_rowid = put_result[0].get('rowId')
|
||||
elif isinstance(put_result, dict):
|
||||
new_rowid = put_result.get('rowId')
|
||||
|
||||
ctx.logger.info(f"✅ Advoware STAMMDATEN aktualisiert, rowId: {new_rowid[:20] if new_rowid else 'N/A'}...")
|
||||
|
||||
# Validiere Sync-Ergebnis
|
||||
validation_success, validation_error = await sync_utils.validate_sync_result(
|
||||
entity_id, betnr, mapper, direction='to_advoware'
|
||||
)
|
||||
|
||||
if not validation_success:
|
||||
ctx.logger.error(f"❌ Sync-Validation fehlgeschlagen: {validation_error}")
|
||||
await sync_utils.release_sync_lock(
|
||||
entity_id,
|
||||
'failed',
|
||||
error_message=f"Validation failed: {validation_error}",
|
||||
increment_retry=True
|
||||
)
|
||||
return
|
||||
|
||||
# NOTE: Kommunikation-Sync würde hier stattfinden
|
||||
# await run_kommunikation_sync(entity_id, betnr, komm_sync, ctx)
|
||||
|
||||
# Release Lock + Update rowId
|
||||
await sync_utils.release_sync_lock(
|
||||
entity_id,
|
||||
'clean',
|
||||
extra_fields={'advowareRowId': new_rowid}
|
||||
)
|
||||
|
||||
# ADVOWARE NEUER → Update EspoCRM
|
||||
elif comparison == 'advoware_newer':
|
||||
ctx.logger.info(f"📥 Advoware ist neuer → Update EspoCRM STAMMDATEN")
|
||||
|
||||
espo_data = mapper.map_advoware_to_cbeteiligte(advo_entity)
|
||||
await espocrm.update_entity('CBeteiligte', entity_id, espo_data)
|
||||
ctx.logger.info(f"✅ EspoCRM STAMMDATEN aktualisiert")
|
||||
|
||||
# Validiere Sync-Ergebnis
|
||||
validation_success, validation_error = await sync_utils.validate_sync_result(
|
||||
entity_id, betnr, mapper, direction='to_espocrm'
|
||||
)
|
||||
|
||||
if not validation_success:
|
||||
ctx.logger.error(f"❌ Sync-Validation fehlgeschlagen: {validation_error}")
|
||||
await sync_utils.release_sync_lock(
|
||||
entity_id,
|
||||
'failed',
|
||||
error_message=f"Validation failed: {validation_error}",
|
||||
increment_retry=True
|
||||
)
|
||||
return
|
||||
|
||||
# NOTE: Kommunikation-Sync würde hier stattfinden
|
||||
# await run_kommunikation_sync(entity_id, betnr, komm_sync, ctx)
|
||||
|
||||
# Release Lock + Update rowId
|
||||
await sync_utils.release_sync_lock(
|
||||
entity_id,
|
||||
'clean',
|
||||
extra_fields={'advowareRowId': advo_entity.get('rowId')}
|
||||
)
|
||||
|
||||
# KONFLIKT → EspoCRM WINS
|
||||
elif comparison == 'conflict':
|
||||
ctx.logger.warn(f"⚠️ KONFLIKT erkannt → EspoCRM WINS (STAMMDATEN)")
|
||||
|
||||
# OPTIMIERT: Use merge utility
|
||||
merged_data = sync_utils.merge_for_advoware_put(advo_entity, espo_entity, mapper)
|
||||
|
||||
put_result = await advoware.api_call(
|
||||
f'api/v1/advonet/Beteiligte/{betnr}',
|
||||
method='PUT',
|
||||
json_data=merged_data
|
||||
)
|
||||
|
||||
# Extrahiere neue rowId aus PUT Response
|
||||
new_rowid = None
|
||||
if isinstance(put_result, list) and len(put_result) > 0:
|
||||
new_rowid = put_result[0].get('rowId')
|
||||
elif isinstance(put_result, dict):
|
||||
new_rowid = put_result.get('rowId')
|
||||
|
||||
conflict_msg = (
|
||||
f"EspoCRM: {espo_entity.get('modifiedAt')}, "
|
||||
f"Advoware: {advo_entity.get('geaendertAm')}. "
|
||||
f"EspoCRM hat gewonnen."
|
||||
)
|
||||
|
||||
ctx.logger.info(f"✅ Konflikt gelöst (EspoCRM won), neue rowId: {new_rowid[:20] if new_rowid else 'N/A'}...")
|
||||
|
||||
# Validiere Sync-Ergebnis
|
||||
validation_success, validation_error = await sync_utils.validate_sync_result(
|
||||
entity_id, betnr, mapper, direction='to_advoware'
|
||||
)
|
||||
|
||||
if not validation_success:
|
||||
ctx.logger.error(f"❌ Conflict resolution validation fehlgeschlagen: {validation_error}")
|
||||
await sync_utils.release_sync_lock(
|
||||
entity_id,
|
||||
'failed',
|
||||
error_message=f"Conflict resolution validation failed: {validation_error}",
|
||||
increment_retry=True
|
||||
)
|
||||
return
|
||||
|
||||
await sync_utils.resolve_conflict_espocrm_wins(
|
||||
entity_id,
|
||||
espo_entity,
|
||||
advo_entity,
|
||||
conflict_msg,
|
||||
extra_fields={'advowareRowId': new_rowid}
|
||||
)
|
||||
|
||||
# NOTE: Kommunikation-Sync (nur EspoCRM→Advoware) würde hier stattfinden
|
||||
# await run_kommunikation_sync(entity_id, betnr, komm_sync, ctx, direction='to_advoware', force_espo_wins=True)
|
||||
|
||||
await sync_utils.release_sync_lock(entity_id, 'clean')
|
||||
|
||||
except Exception as e:
|
||||
ctx.logger.error(f"❌ UPDATE fehlgeschlagen: {e}")
|
||||
import traceback
|
||||
ctx.logger.error(traceback.format_exc())
|
||||
await sync_utils.release_sync_lock(entity_id, 'failed', str(e), increment_retry=True)
|
||||
0
src/steps/crm/beteiligte/webhooks/__init__.py
Normal file
@@ -0,0 +1,86 @@
"""VMH Webhook - Beteiligte Create"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Beteiligte Create",
    "description": "Receives create webhooks from EspoCRM for Beteiligte",
    "flows": ["vmh-beteiligte"],
    "triggers": [
        http("POST", "/crm/beteiligte/webhook/create")
    ],
    "enqueues": ["vmh.beteiligte.create"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Beteiligte creation in EspoCRM.

    Receives batch or single entity notifications and emits queue events
    for each entity ID to be synced to Advoware.
    """
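    # Illustrative payload shapes (assumed; EspoCRM webhooks may carry more
    # fields - only 'id' is used by the parsing below):
    #   batch:  [{"id": "abc123", ...}, {"id": "def456", ...}]
    #   single: {"id": "abc123", ...}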
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: BETEILIGTE CREATE")
        ctx.logger.info("=" * 80)
        ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        ctx.logger.info("=" * 80)

        # Collect all IDs from the batch
        entity_ids = set()

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])

        ctx.logger.info(f"{len(entity_ids)} IDs found for create sync")

        # Emit events for queue processing (deduplication via lock in the event handler)
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.beteiligte.create',
                'data': {
                    'entity_id': entity_id,
                    'action': 'create',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })

        ctx.logger.info("✅ VMH Create Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'create',
                'ids_count': len(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: VMH CREATE WEBHOOK")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error(f"Entity IDs attempted: {list(entity_ids) if 'entity_ids' in locals() else 'N/A'}")
        ctx.logger.error(f"Full Payload: {json.dumps(request.body, indent=2, ensure_ascii=False)}")
        ctx.logger.error(f"Timestamp: {datetime.datetime.now().isoformat()}")
        ctx.logger.error("=" * 80)
        return ApiResponse(
            status=500,
            body={
                'error': 'Internal server error',
                'details': str(e)
            }
        )
@@ -0,0 +1,76 @@
"""VMH Webhook - Beteiligte Delete"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Beteiligte Delete",
    "description": "Receives delete webhooks from EspoCRM for Beteiligte",
    "flows": ["vmh-beteiligte"],
    "triggers": [
        http("POST", "/crm/beteiligte/webhook/delete")
    ],
    "enqueues": ["vmh.beteiligte.delete"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Beteiligte deletion in EspoCRM.
    """
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: BETEILIGTE DELETE")
        ctx.logger.info("=" * 80)
        ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        ctx.logger.info("=" * 80)

        # Collect all IDs from the batch
        entity_ids = set()

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])

        ctx.logger.info(f"{len(entity_ids)} IDs found for delete sync")

        # Emit events for queue processing
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.beteiligte.delete',
                'data': {
                    'entity_id': entity_id,
                    'action': 'delete',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })

        ctx.logger.info("✅ VMH Delete Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'delete',
                'ids_count': len(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: BETEILIGTE DELETE WEBHOOK")
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error("=" * 80)
        return ApiResponse(
            status=500,
            body={'error': 'Internal server error', 'details': str(e)}
        )
@@ -0,0 +1,86 @@
"""VMH Webhook - Beteiligte Update"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Beteiligte Update",
    "description": "Receives update webhooks from EspoCRM for Beteiligte",
    "flows": ["vmh-beteiligte"],
    "triggers": [
        http("POST", "/crm/beteiligte/webhook/update")
    ],
    "enqueues": ["vmh.beteiligte.update"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Beteiligte updates in EspoCRM.

    Note: Loop prevention is implemented on the EspoCRM side.
    rowId updates no longer trigger webhooks, so no filtering is needed here.
    """
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: BETEILIGTE UPDATE")
        ctx.logger.info("=" * 80)
        ctx.logger.info(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")
        ctx.logger.info("=" * 80)

        # Collect all IDs from the batch
        entity_ids = set()

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])

        ctx.logger.info(f"{len(entity_ids)} IDs found for update sync")

        # Emit events for queue processing
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.beteiligte.update',
                'data': {
                    'entity_id': entity_id,
                    'action': 'update',
                    'source': 'webhook',
                    'timestamp': datetime.datetime.now().isoformat()
                }
            })

        ctx.logger.info("✅ VMH Update Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'status': 'received',
                'action': 'update',
                'ids_count': len(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: VMH UPDATE WEBHOOK")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error(f"Entity IDs attempted: {list(entity_ids) if 'entity_ids' in locals() else 'N/A'}")
        ctx.logger.error(f"Full Payload: {json.dumps(request.body, indent=2, ensure_ascii=False)}")
        ctx.logger.error(f"Timestamp: {datetime.datetime.now().isoformat()}")
        ctx.logger.error("=" * 80)
        return ApiResponse(
            status=500,
            body={
                'error': 'Internal server error',
                'details': str(e)
            }
        )
0
src/steps/crm/document/__init__.py
Normal file
130
src/steps/crm/document/generate_document_preview_step.py
Normal file
@@ -0,0 +1,130 @@
"""
Generate Document Preview Step

Universal step for generating document previews.
Can be triggered by any document sync flow.

Flow:
1. Load document from EspoCRM
2. Download file attachment
3. Generate preview (PDF, DOCX, Images → WebP)
4. Upload preview to EspoCRM
5. Update document metadata

Event: document.generate_preview
Input: entity_id, entity_type (default: 'CDokumente')
"""

from typing import Dict, Any
from motia import FlowContext, queue
import tempfile
import os


config = {
    "name": "Generate Document Preview",
    "description": "Generates a preview image for documents",
    "flows": ["document-preview"],
    "triggers": [queue("document.generate_preview")],
    "enqueues": [],
}


async def handler(event_data: Dict[str, Any], ctx: FlowContext[Any]) -> None:
    """
    Generate a preview for a document.

    Args:
        event_data: {
            'entity_id': str,    # Required: Document ID
            'entity_type': str,  # Optional: 'CDokumente' (default) or 'Document'
        }
    """
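    # A hedged usage sketch (doc_id is a placeholder): any flow can request a
    # preview by enqueueing the topic this step subscribes to, e.g.
    #   await ctx.enqueue({
    #       'topic': 'document.generate_preview',
    #       'data': {'entity_id': doc_id, 'entity_type': 'CDokumente'}
    #   })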
    from services.document_sync_utils import DocumentSync

    entity_id = event_data.get('entity_id')
    entity_type = event_data.get('entity_type', 'CDokumente')

    if not entity_id:
        ctx.logger.error("❌ Missing entity_id in event data")
        return

    ctx.logger.info("=" * 80)
    ctx.logger.info("🖼️ GENERATE DOCUMENT PREVIEW")
    ctx.logger.info("=" * 80)
    ctx.logger.info(f"Entity Type: {entity_type}")
    ctx.logger.info(f"Document ID: {entity_id}")
    ctx.logger.info("=" * 80)

    # Initialize sync utils
    sync_utils = DocumentSync(ctx)

    try:
        # Step 1: Get download info from EspoCRM
        ctx.logger.info("📥 Step 1: Getting download info from EspoCRM...")
        download_info = await sync_utils.get_document_download_info(entity_id, entity_type)

        if not download_info:
            ctx.logger.warn("⚠️ No download info available - skipping preview generation")
            return

        attachment_id = download_info['attachment_id']
        filename = download_info['filename']
        mime_type = download_info['mime_type']

        ctx.logger.info(f"  Filename: {filename}")
        ctx.logger.info(f"  MIME Type: {mime_type}")
        ctx.logger.info(f"  Attachment ID: {attachment_id}")

        # Step 2: Download file from EspoCRM
        ctx.logger.info("📥 Step 2: Downloading file from EspoCRM...")
        file_content = await sync_utils.espocrm.download_attachment(attachment_id)
        ctx.logger.info(f"  Downloaded: {len(file_content)} bytes")

        # Step 3: Save to a temporary file for preview generation
        ctx.logger.info("💾 Step 3: Saving to temporary file...")
        with tempfile.NamedTemporaryFile(mode='wb', delete=False, suffix=os.path.splitext(filename)[1]) as tmp_file:
            tmp_file.write(file_content)
            tmp_path = tmp_file.name

        try:
            # Step 4: Generate preview (600x800 WebP)
            ctx.logger.info("🖼️ Step 4: Generating preview (600x800 WebP)...")
            preview_data = await sync_utils.generate_thumbnail(
                tmp_path,
                mime_type,
                max_width=600,
                max_height=800
            )

            if preview_data:
                ctx.logger.info(f"✅ Preview generated: {len(preview_data)} bytes WebP")

                # Step 5: Upload preview to EspoCRM
                ctx.logger.info("📤 Step 5: Uploading preview to EspoCRM...")
                await sync_utils._upload_preview_to_espocrm(entity_id, preview_data, entity_type)
                ctx.logger.info("✅ Preview uploaded successfully")

                ctx.logger.info("=" * 80)
                ctx.logger.info("✅ PREVIEW GENERATION COMPLETE")
                ctx.logger.info("=" * 80)
            else:
                ctx.logger.warn("⚠️ Preview generation returned no data")
                ctx.logger.info("=" * 80)
                ctx.logger.info("⚠️ PREVIEW GENERATION FAILED")
                ctx.logger.info("=" * 80)

        finally:
            # Clean up the temporary file
            if os.path.exists(tmp_path):
                os.remove(tmp_path)
                ctx.logger.debug(f"🗑️ Removed temporary file: {tmp_path}")

    except Exception as e:
        ctx.logger.error(f"❌ Preview generation failed: {e}")
        ctx.logger.info("=" * 80)
        ctx.logger.info("❌ PREVIEW GENERATION ERROR")
        ctx.logger.info("=" * 80)
        import traceback
        ctx.logger.debug(traceback.format_exc())
        # Don't raise - preview generation is optional
0
src/steps/crm/document/webhooks/__init__.py
Normal file
@@ -0,0 +1,91 @@
"""VMH Webhook - AI Knowledge Update"""
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook AI Knowledge Update",
    "description": "Receives update webhooks from EspoCRM for CAIKnowledge entities",
    "flows": ["vmh-aiknowledge"],
    "triggers": [
        http("POST", "/crm/document/webhook/aiknowledge/update")
    ],
    "enqueues": ["aiknowledge.sync"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for CAIKnowledge updates in EspoCRM.

    Triggered when:
    - activationStatus changes
    - syncStatus changes (e.g., set to 'unclean')
    - Documents are linked/unlinked
    """
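    # Assumed payload shapes (derived from the validation below, not an
    # official schema):
    #   direct:        {"entity_id": "...", "entity_type": "CAIKnowledge", "action": "update"}
    #   EspoCRM-style: [{"id": "...", ...}]  # only the first item is processed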
    try:
        ctx.logger.info("=" * 80)
        ctx.logger.info("🔔 AI Knowledge Update Webhook")
        ctx.logger.info("=" * 80)

        # Extract payload
        payload = request.body

        # Handle the case where the payload is a list (e.g., from an array-based webhook)
        if isinstance(payload, list):
            if not payload:
                ctx.logger.error("❌ Empty payload list")
                return ApiResponse(
                    status=400,
                    body={'success': False, 'error': 'Empty payload'}
                )
            payload = payload[0]  # Take the first item

        # Ensure the payload is a dict
        if not isinstance(payload, dict):
            ctx.logger.error(f"❌ Invalid payload type: {type(payload)}")
            return ApiResponse(
                status=400,
                body={'success': False, 'error': f'Invalid payload type: {type(payload).__name__}'}
            )

        # Validate required fields
        knowledge_id = payload.get('entity_id') or payload.get('id')
        entity_type = payload.get('entity_type', 'CAIKnowledge')
        action = payload.get('action', 'update')

        if not knowledge_id:
            ctx.logger.error("❌ Missing entity_id in payload")
            return ApiResponse(
                status=400,
                body={'success': False, 'error': 'Missing entity_id'}
            )

        ctx.logger.info(f"📋 Entity Type: {entity_type}")
        ctx.logger.info(f"📋 Entity ID: {knowledge_id}")
        ctx.logger.info(f"📋 Action: {action}")

        # Enqueue sync event
        await ctx.enqueue({
            'topic': 'aiknowledge.sync',
            'data': {
                'knowledge_id': knowledge_id,
                'source': 'webhook',
                'action': action
            }
        })

        ctx.logger.info(f"✅ Sync event enqueued for {knowledge_id}")
        ctx.logger.info("=" * 80)

        return ApiResponse(
            status=200,
            body={'success': True, 'knowledge_id': knowledge_id}
        )

    except Exception as e:
        ctx.logger.error(f"❌ Webhook error: {e}")
        return ApiResponse(
            status=500,
            body={'success': False, 'error': str(e)}
        )
91
src/steps/crm/document/webhooks/document_create_api_step.py
Normal file
@@ -0,0 +1,91 @@
"""VMH Webhook - Document Create"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Document Create",
    "description": "Receives create webhooks from EspoCRM for Documents",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/crm/document/webhook/create")
    ],
    "enqueues": ["vmh.document.create"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document creation in EspoCRM.

    Receives batch or single entity notifications and emits queue events
    for each entity ID to be synced to xAI.
    """
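    # Assumed batch payload ('entityType' is optional and defaults to
    # 'CDokumente'; 'modifiedAt', if present, feeds the event timestamp below):
    #   [{"id": "doc1", "entityType": "CDokumente", "modifiedAt": "..."}]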
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: DOCUMENT CREATE")
        ctx.logger.info("=" * 80)
        ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")

        # Collect all IDs from the batch
        entity_ids = set()
        entity_type = 'CDokumente'  # Default

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
                    # Take entityType from the first entity if present
                    if entity_type == 'CDokumente':
                        entity_type = entity.get('entityType', 'CDokumente')
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])
            entity_type = payload.get('entityType', 'CDokumente')

        ctx.logger.info(f"{len(entity_ids)} document IDs found for create sync")

        # Emit events for queue processing (deduplication via lock in the event handler)
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.document.create',
                'data': {
                    'entity_id': entity_id,
                    'entity_type': entity_type,
                    'action': 'create',
                    # Also cover single-dict payloads, which may carry modifiedAt too
                    'timestamp': (payload[0].get('modifiedAt') if isinstance(payload, list) and payload
                                  else payload.get('modifiedAt') if isinstance(payload, dict) else None)
                }
            })

        ctx.logger.info("✅ Document Create Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'success': True,
                'message': f'{len(entity_ids)} document(s) enqueued for sync',
                'entity_ids': list(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: DOCUMENT CREATE WEBHOOK")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error(f"Entity IDs attempted: {list(entity_ids) if 'entity_ids' in locals() else 'N/A'}")
        ctx.logger.error(f"Full Payload: {json.dumps(request.body, indent=2, ensure_ascii=False)}")
        ctx.logger.error(f"Timestamp: {datetime.datetime.now().isoformat()}")
        ctx.logger.error("=" * 80)

        return ApiResponse(
            status=500,
            body={
                'success': False,
                'error': str(e)
            }
        )
91
src/steps/crm/document/webhooks/document_delete_api_step.py
Normal file
@@ -0,0 +1,91 @@
"""VMH Webhook - Document Delete"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Document Delete",
    "description": "Receives delete webhooks from EspoCRM for Documents",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/crm/document/webhook/delete")
    ],
    "enqueues": ["vmh.document.delete"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document deletion in EspoCRM.

    Receives batch or single entity notifications and emits queue events
    for each entity ID to be removed from xAI.
    """
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: DOCUMENT DELETE")
        ctx.logger.info("=" * 80)
        ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")

        # Collect all IDs from the batch
        entity_ids = set()
        entity_type = 'CDokumente'  # Default

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
                    # Take entityType from the first entity if present
                    if entity_type == 'CDokumente':
                        entity_type = entity.get('entityType', 'CDokumente')
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])
            entity_type = payload.get('entityType', 'CDokumente')

        ctx.logger.info(f"{len(entity_ids)} document IDs found for delete sync")

        # Emit events for queue processing
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.document.delete',
                'data': {
                    'entity_id': entity_id,
                    'entity_type': entity_type,
                    'action': 'delete',
                    # Also cover single-dict payloads, which may carry deletedAt too
                    'timestamp': (payload[0].get('deletedAt') if isinstance(payload, list) and payload
                                  else payload.get('deletedAt') if isinstance(payload, dict) else None)
                }
            })

        ctx.logger.info("✅ Document Delete Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'success': True,
                'message': f'{len(entity_ids)} document(s) enqueued for deletion',
                'entity_ids': list(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: DOCUMENT DELETE WEBHOOK")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error(f"Entity IDs attempted: {list(entity_ids) if 'entity_ids' in locals() else 'N/A'}")
        ctx.logger.error(f"Full Payload: {json.dumps(request.body, indent=2, ensure_ascii=False)}")
        ctx.logger.error(f"Timestamp: {datetime.datetime.now().isoformat()}")
        ctx.logger.error("=" * 80)

        return ApiResponse(
            status=500,
            body={
                'success': False,
                'error': str(e)
            }
        )
91
src/steps/crm/document/webhooks/document_update_api_step.py
Normal file
@@ -0,0 +1,91 @@
"""VMH Webhook - Document Update"""
import json
import datetime
from typing import Any
from motia import FlowContext, http, ApiRequest, ApiResponse


config = {
    "name": "VMH Webhook Document Update",
    "description": "Receives update webhooks from EspoCRM for Documents",
    "flows": ["vmh-documents"],
    "triggers": [
        http("POST", "/crm/document/webhook/update")
    ],
    "enqueues": ["vmh.document.update"],
}


async def handler(request: ApiRequest, ctx: FlowContext[Any]) -> ApiResponse:
    """
    Webhook handler for Document updates in EspoCRM.

    Receives batch or single entity notifications and emits queue events
    for each entity ID to be synced to xAI.
    """
    try:
        payload = request.body or []

        ctx.logger.info("=" * 80)
        ctx.logger.info("📥 VMH WEBHOOK: DOCUMENT UPDATE")
        ctx.logger.info("=" * 80)
        ctx.logger.debug(f"Payload: {json.dumps(payload, indent=2, ensure_ascii=False)}")

        # Collect all IDs from the batch
        entity_ids = set()
        entity_type = 'CDokumente'  # Default

        if isinstance(payload, list):
            for entity in payload:
                if isinstance(entity, dict) and 'id' in entity:
                    entity_ids.add(entity['id'])
                    # Take entityType from the first entity if present
                    if entity_type == 'CDokumente':
                        entity_type = entity.get('entityType', 'CDokumente')
        elif isinstance(payload, dict) and 'id' in payload:
            entity_ids.add(payload['id'])
            entity_type = payload.get('entityType', 'CDokumente')

        ctx.logger.info(f"{len(entity_ids)} document IDs found for update sync")

        # Emit events for queue processing
        for entity_id in entity_ids:
            await ctx.enqueue({
                'topic': 'vmh.document.update',
                'data': {
                    'entity_id': entity_id,
                    'entity_type': entity_type,
                    'action': 'update',
                    # Also cover single-dict payloads, which may carry modifiedAt too
                    'timestamp': (payload[0].get('modifiedAt') if isinstance(payload, list) and payload
                                  else payload.get('modifiedAt') if isinstance(payload, dict) else None)
                }
            })

        ctx.logger.info("✅ Document Update Webhook processed: "
                        f"{len(entity_ids)} events emitted")

        return ApiResponse(
            status=200,
            body={
                'success': True,
                'message': f'{len(entity_ids)} document(s) enqueued for sync',
                'entity_ids': list(entity_ids)
            }
        )

    except Exception as e:
        ctx.logger.error("=" * 80)
        ctx.logger.error("❌ ERROR: DOCUMENT UPDATE WEBHOOK")
        ctx.logger.error("=" * 80)
        ctx.logger.error(f"Error: {e}")
        ctx.logger.error(f"Entity IDs attempted: {list(entity_ids) if 'entity_ids' in locals() else 'N/A'}")
        ctx.logger.error(f"Full Payload: {json.dumps(request.body, indent=2, ensure_ascii=False)}")
        ctx.logger.error(f"Timestamp: {datetime.datetime.now().isoformat()}")
        ctx.logger.error("=" * 80)

        return ApiResponse(
            status=500,
            body={
                'success': False,
                'error': str(e)
            }
        )