feat: Add Advoware History and Watcher services for document synchronization

- Implement AdvowareHistoryService for fetching and creating history entries.
- Implement AdvowareWatcherService for file operations including listing, downloading, and uploading with Blake3 hash verification.
- Introduce Blake3 utility functions for hash computation and verification.
- Create document sync cron step to poll Redis for pending Aktennummern and emit sync events.
- Develop document sync event handler to manage 3-way merge synchronization for Akten, including metadata updates and error handling.
This commit is contained in:
bsiggel
2026-03-25 21:24:31 +00:00
parent 3c4c1dc852
commit 1ffc37b0b7
11 changed files with 2147 additions and 10 deletions

View File

@@ -0,0 +1,327 @@
"""
Advoware Document Sync Business Logic
Provides 3-way merge logic for document synchronization between:
- Windows filesystem (USN-tracked)
- EspoCRM (CRM database)
- Advoware History (document timeline)
"""
from typing import Dict, Any, List, Optional, Literal, Tuple
from dataclasses import dataclass
from datetime import datetime
from services.logging_utils import get_service_logger
@dataclass
class SyncAction:
    """
    Outcome of a 3-way merge decision.

    Attributes:
        action: Sync operation to perform.
        reason: Human-readable explanation of the decision.
        source: System treated as the source of truth.
        needs_upload: Whether the file must be uploaded to Windows.
        needs_download: Whether the file must be downloaded from Windows.
    """
    action: Literal['CREATE', 'UPDATE_ESPO', 'UPLOAD_WINDOWS', 'DELETE', 'SKIP']
    reason: str
    source: Literal['Windows', 'EspoCRM', 'None']
    needs_upload: bool
    needs_download: bool
class AdvowareDocumentSyncUtils:
    """
    Business logic for Advoware document sync.

    Decision layer of a 3-way merge between the Windows filesystem
    (USN-tracked), EspoCRM (CRM database) and Advoware History
    (document timeline).

    Provides methods for:
    - File list cleanup (filter by History)
    - 3-way merge decision logic
    - Conflict resolution
    - Metadata comparison
    """

    def __init__(self, ctx):
        """
        Initialize utils with context.

        Args:
            ctx: Motia context for logging
        """
        self.ctx = ctx
        self.logger = get_service_logger(__name__, ctx)
        self.logger.info("AdvowareDocumentSyncUtils initialized")

    def _log(self, message: str, level: str = 'info') -> None:
        """Helper for consistent logging with a service prefix."""
        getattr(self.logger, level)(f"[AdvowareDocumentSyncUtils] {message}")

    def cleanup_file_list(
        self,
        windows_files: List[Dict[str, Any]],
        advoware_history: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """
        Remove files from the Windows list that are not in Advoware History.

        Strategy: only sync files that have a History entry in Advoware.
        Files without History are ignored (may be temporary/system files).
        Matching is done on the full path, case-insensitively.

        Args:
            windows_files: List of files from Windows Watcher
            advoware_history: List of History entries from Advoware

        Returns:
            Filtered list of Windows files that have History entries
        """
        self._log(f"Cleaning file list: {len(windows_files)} Windows files, {len(advoware_history)} History entries")
        # Build set of full paths from History (normalized to lowercase)
        history_paths = set()
        history_file_details = []  # Track for logging
        for entry in advoware_history:
            datei = entry.get('datei', '')
            if datei:
                # Use full path for matching (case-insensitive)
                history_paths.add(datei.lower())
                history_file_details.append({'path': datei})
        self._log(f"📊 History has {len(history_paths)} unique file paths")
        # Log first 10 History paths for debugging
        for i, detail in enumerate(history_file_details[:10], 1):
            self._log(f"  {i}. {detail['path']}")
        # Filter Windows files by matching full path
        cleaned = []
        matches = []
        for win_file in windows_files:
            win_path = win_file.get('path', '').lower()
            if win_path in history_paths:
                cleaned.append(win_file)
                matches.append(win_path)
        self._log(f"After cleanup: {len(cleaned)} files with History entries")
        # Log matched paths (first 10 only)
        if matches:
            self._log(f"✅ Matched files (by full path):")
            for match in matches[:10]:
                self._log(f"  - {match}")
        return cleaned

    def merge_three_way(
        self,
        espo_doc: Optional[Dict[str, Any]],
        windows_file: Optional[Dict[str, Any]],
        advo_history: Optional[Dict[str, Any]]
    ) -> SyncAction:
        """
        Perform 3-way merge to determine sync action.

        Decision logic:
        1. If Windows USN != EspoCRM sync_usn -> Windows changed -> Download
        2. If blake3Hash != syncHash (EspoCRM) -> EspoCRM changed -> Upload
        3. If both changed -> Conflict -> Resolve by timestamp
        4. If neither changed -> Skip

        Args:
            espo_doc: Document from EspoCRM (can be None if not exists)
            windows_file: File info from Windows (can be None if not exists)
            advo_history: History entry from Advoware (can be None if not exists)

        Returns:
            SyncAction with decision
        """
        self._log("Performing 3-way merge")
        # Case 1: File only in Windows -> CREATE in EspoCRM
        if windows_file and not espo_doc:
            return SyncAction(
                action='CREATE',
                reason='File exists in Windows but not in EspoCRM',
                source='Windows',
                needs_upload=False,
                needs_download=True
            )
        # Case 2: File only in EspoCRM -> UPLOAD to Windows
        if espo_doc and not windows_file:
            return SyncAction(
                action='UPLOAD_WINDOWS',
                reason='File exists in EspoCRM but not in Windows',
                source='EspoCRM',
                needs_upload=True,
                needs_download=False
            )
        # Case 3: File in both -> Compare hashes and USNs
        if espo_doc and windows_file:
            # Extract comparison fields
            windows_usn = windows_file.get('usn', 0)
            windows_blake3 = windows_file.get('blake3Hash', '')
            espo_sync_usn = espo_doc.get('sync_usn', 0)
            espo_sync_hash = espo_doc.get('syncHash', '')
            # Windows changed when its USN moved past the last-synced USN
            windows_changed = windows_usn != espo_sync_usn
            # EspoCRM changed when both hashes are present and differ;
            # a missing hash on either side is treated as "no change"
            espo_changed = (
                windows_blake3 and
                espo_sync_hash and
                windows_blake3.lower() != espo_sync_hash.lower()
            )
            # Case 3a: Both changed -> Conflict
            if windows_changed and espo_changed:
                return self.resolve_conflict(espo_doc, windows_file)
            # Case 3b: Only Windows changed -> Download
            if windows_changed:
                return SyncAction(
                    action='UPDATE_ESPO',
                    # FIX: separator between old and new USN was missing
                    reason=f'Windows changed (USN: {espo_sync_usn} -> {windows_usn})',
                    source='Windows',
                    needs_upload=False,
                    needs_download=True
                )
            # Case 3c: Only EspoCRM changed -> Upload
            if espo_changed:
                return SyncAction(
                    action='UPLOAD_WINDOWS',
                    reason='EspoCRM changed (hash mismatch)',
                    source='EspoCRM',
                    needs_upload=True,
                    needs_download=False
                )
            # Case 3d: Neither changed -> Skip
            return SyncAction(
                action='SKIP',
                reason='No changes detected',
                source='None',
                needs_upload=False,
                needs_download=False
            )
        # Case 4: File in neither -> Skip
        return SyncAction(
            action='SKIP',
            reason='File does not exist in any system',
            source='None',
            needs_upload=False,
            needs_download=False
        )

    def resolve_conflict(
        self,
        espo_doc: Dict[str, Any],
        windows_file: Dict[str, Any]
    ) -> SyncAction:
        """
        Resolve conflict when both Windows and EspoCRM changed.

        Strategy: Newest timestamp wins. If timestamps cannot be parsed
        (or naive/aware datetimes cannot be compared), Windows wins as
        the safer default — filesystem data is preserved.

        Args:
            espo_doc: Document from EspoCRM
            windows_file: File info from Windows

        Returns:
            SyncAction with conflict resolution
        """
        self._log("⚠️ Conflict detected: Both Windows and EspoCRM changed", level='warning')
        # Get timestamps
        try:
            # EspoCRM modified timestamp (fall back to createdAt)
            espo_modified_str = espo_doc.get('modifiedAt', espo_doc.get('createdAt', ''))
            espo_modified = datetime.fromisoformat(espo_modified_str.replace('Z', '+00:00'))
            # Windows modified timestamp
            windows_modified_str = windows_file.get('modified', '')
            windows_modified = datetime.fromisoformat(windows_modified_str.replace('Z', '+00:00'))
            # Compare timestamps; may raise TypeError when one side is
            # naive and the other timezone-aware -> handled by except below
            if espo_modified > windows_modified:
                self._log(f"Conflict resolution: EspoCRM wins (newer: {espo_modified} > {windows_modified})")
                return SyncAction(
                    action='UPLOAD_WINDOWS',
                    reason=f'Conflict: EspoCRM newer ({espo_modified} > {windows_modified})',
                    source='EspoCRM',
                    needs_upload=True,
                    needs_download=False
                )
            else:
                self._log(f"Conflict resolution: Windows wins (newer: {windows_modified} >= {espo_modified})")
                return SyncAction(
                    action='UPDATE_ESPO',
                    reason=f'Conflict: Windows newer ({windows_modified} >= {espo_modified})',
                    source='Windows',
                    needs_upload=False,
                    needs_download=True
                )
        except Exception as e:
            self._log(f"Error parsing timestamps for conflict resolution: {e}", level='error')
            # Fallback: Windows wins (safer to preserve data on filesystem)
            return SyncAction(
                action='UPDATE_ESPO',
                reason='Conflict: Timestamp parse failed, defaulting to Windows',
                source='Windows',
                needs_upload=False,
                needs_download=True
            )

    def should_sync_metadata(
        self,
        espo_doc: Dict[str, Any],
        advo_history: Dict[str, Any]
    ) -> Tuple[bool, Dict[str, Any]]:
        """
        Check if metadata needs update in EspoCRM.

        Compares History metadata (text, art, dat) with EspoCRM fields.
        Empty History values never overwrite existing EspoCRM values.

        Args:
            espo_doc: Document from EspoCRM
            advo_history: History entry from Advoware

        Returns:
            (needs_update: bool, updates: Dict) - Updates to apply if needed
        """
        updates = {}
        # Map History fields to EspoCRM fields
        history_text = advo_history.get('text', '')
        history_art = advo_history.get('art', '')
        history_dat = advo_history.get('dat', '')
        espo_description = espo_doc.get('description', '')
        espo_type = espo_doc.get('type', '')
        espo_date = espo_doc.get('dateUploaded', '')
        # Only stage fields that are present in History and differ
        if history_text and history_text != espo_description:
            updates['description'] = history_text
        if history_art and history_art != espo_type:
            updates['type'] = history_art
        if history_dat and history_dat != espo_date:
            updates['dateUploaded'] = history_dat
        needs_update = len(updates) > 0
        if needs_update:
            self._log(f"Metadata needs update: {list(updates.keys())}")
        return needs_update, updates

View File

@@ -0,0 +1,153 @@
"""
Advoware History API Client
API client for Advoware History (document timeline) operations.
Provides methods to:
- Get History entries for Akte
- Create new History entry
"""
from typing import Dict, Any, List, Optional
from datetime import datetime
from services.advoware import AdvowareAPI
from services.logging_utils import get_service_logger
from services.exceptions import AdvowareAPIError
class AdvowareHistoryService:
    """
    Advoware History API client.

    Provides methods to:
    - Get History entries for Akte
    - Create new History entry
    """

    def __init__(self, ctx):
        """
        Initialize service with context.

        Args:
            ctx: Motia context for logging
        """
        self.ctx = ctx
        self.logger = get_service_logger(__name__, ctx)
        self.advoware = AdvowareAPI(ctx)  # Reuse existing auth
        self.logger.info("AdvowareHistoryService initialized")

    def _log(self, message: str, level: str = 'info') -> None:
        """Helper for consistent logging."""
        getattr(self.logger, level)(f"[AdvowareHistoryService] {message}")

    async def get_akte_history(self, akte_nr: str) -> List[Dict[str, Any]]:
        """
        Get all History entries for Akte.

        Args:
            akte_nr: Aktennummer (10-digit string, e.g., "2019001145")

        Returns:
            List of History entry dicts with fields:
            - dat: str (timestamp)
            - art: str (type, e.g., "Schreiben")
            - text: str (description)
            - datei: str (file path, e.g., "V:\\12345\\document.pdf")
            - benutzer: str (user)
            - versendeart: str
            - hnr: int (History entry ID)

        Raises:
            AdvowareAPIError: If API call fails (non-retryable)

        Note:
            Uses endpoint: GET /api/v1/advonet/History?nr={aktennummer}
        """
        self._log(f"Fetching History for Akte {akte_nr}")
        try:
            payload = await self.advoware.api_call(
                "api/v1/advonet/History",
                method='GET',
                params={'nr': akte_nr}
            )
        except Exception as exc:
            msg = str(exc)
            # Advoware server bug: "Nullable object must have a value" in
            # ConnectorFunctionsHistory.cs. This is a server-side bug we
            # cannot fix - return empty list and continue.
            if "Nullable object must have a value" in msg or "500" in msg:
                self._log(
                    f"⚠️ Advoware server error for Akte {akte_nr} (likely null reference bug): {exc}",
                    level='warning'
                )
                self._log(f"Continuing with empty History for Akte {akte_nr}", level='info')
                return []
            # Any other error is surfaced to the caller as before
            self._log(f"Failed to fetch History for Akte {akte_nr}: {exc}", level='error')
            raise AdvowareAPIError(f"History fetch failed: {exc}") from exc
        if not isinstance(payload, list):
            self._log(f"Unexpected History response format: {type(payload)}", level='warning')
            return []
        self._log(f"Successfully fetched {len(payload)} History entries for Akte {akte_nr}")
        return payload

    async def create_history_entry(
        self,
        akte_id: int,
        entry_data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """
        Create new History entry.

        Args:
            akte_id: Advoware Akte ID
            entry_data: History entry data with fields:
                - dat: str (timestamp, ISO format)
                - art: str (type, e.g., "Schreiben")
                - text: str (description)
                - datei: str (file path, e.g., "V:\\12345\\document.pdf")
                - benutzer: str (user, default: "AI")
                - versendeart: str (default: "Y")
                - visibleOnline: bool (default: True)
                - posteingang: int (default: 0)

        Returns:
            Created History entry

        Raises:
            AdvowareAPIError: If creation fails
        """
        self._log(f"Creating History entry for Akte {akte_id}")
        # Fill in required fields with sensible defaults
        timestamp = datetime.now().isoformat()
        payload = {
            "betNr": entry_data.get('betNr'),  # Can be null
            "dat": entry_data.get('dat', timestamp),
            "art": entry_data.get('art', 'Schreiben'),
            "text": entry_data.get('text', 'Document uploaded via Motia'),
            "datei": entry_data.get('datei', ''),
            "benutzer": entry_data.get('benutzer', 'AI'),
            "gelesen": entry_data.get('gelesen'),  # Can be null
            "modified": entry_data.get('modified', timestamp),
            "vorgelegt": entry_data.get('vorgelegt', ''),
            "posteingang": entry_data.get('posteingang', 0),
            "visibleOnline": entry_data.get('visibleOnline', True),
            "versendeart": entry_data.get('versendeart', 'Y')
        }
        try:
            result = await self.advoware.api_call(
                f"api/v1/advonet/Akten/{akte_id}/History",
                method='POST',
                json_data=payload
            )
        except Exception as exc:
            self._log(f"Failed to create History entry for Akte {akte_id}: {exc}", level='error')
            raise AdvowareAPIError(f"History entry creation failed: {exc}") from exc
        if result:
            self._log(f"Successfully created History entry for Akte {akte_id}")
        return result

View File

@@ -127,3 +127,35 @@ class AdvowareService:
# Expected: 403 Forbidden
self._log(f"[ADVO] DELETE not allowed (expected): {e}", level='warning')
return False
# ========== AKTEN ==========
async def get_akte(self, akte_id: int) -> Optional[Dict[str, Any]]:
    """
    Get Akte details including ablage status.

    Args:
        akte_id: Advoware Akte ID

    Returns:
        Akte details with fields:
        - ablage: int (0 or 1, archive status)
        - az: str (Aktenzeichen)
        - rubrum: str
        - referat: str
        - wegen: str
        Returns None if Akte not found
    """
    try:
        result = await self.api.api_call(f"api/v1/advonet/Akten/{akte_id}", method='GET')
    except Exception as e:
        self._log(f"[ADVO] Error loading Akte {akte_id}: {e}", level='error')
        return None
    if not result:
        # Empty/falsy response -> treat as not found
        return None
    self._log(f"[ADVO] ✅ Fetched Akte {akte_id}: {result.get('az', 'N/A')}")
    return result

View File

@@ -0,0 +1,275 @@
"""
Advoware Filesystem Watcher API Client
API client for Windows Watcher service that provides:
- File list retrieval with USN tracking
- File download from Windows
- File upload to Windows with Blake3 hash verification
"""
from typing import Dict, Any, List, Optional
import aiohttp
import asyncio
import os
from services.logging_utils import get_service_logger
from services.exceptions import ExternalAPIError
class AdvowareWatcherService:
    """
    API client for Advoware Filesystem Watcher.

    Provides methods to:
    - Get file list with USNs
    - Download files
    - Upload files with Blake3 verification
    """

    def __init__(self, ctx):
        """
        Initialize service with context.

        Args:
            ctx: Motia context for logging and config
        """
        self.ctx = ctx
        self.logger = get_service_logger(__name__, ctx)
        self.base_url = os.getenv('ADVOWARE_WATCHER_BASE_URL', 'http://192.168.1.12:8765')
        self.auth_token = os.getenv('ADVOWARE_WATCHER_AUTH_TOKEN', '')
        self.timeout = int(os.getenv('ADVOWARE_WATCHER_TIMEOUT_SECONDS', '30'))
        if not self.auth_token:
            self.logger.warning("⚠️ ADVOWARE_WATCHER_AUTH_TOKEN not configured")
        self._session: Optional[aiohttp.ClientSession] = None
        self.logger.info(f"AdvowareWatcherService initialized: {self.base_url}")

    async def _get_session(self) -> aiohttp.ClientSession:
        """Get or create HTTP session (bearer auth header when configured)."""
        if self._session is None or self._session.closed:
            headers = {}
            if self.auth_token:
                headers['Authorization'] = f'Bearer {self.auth_token}'
            self._session = aiohttp.ClientSession(headers=headers)
        return self._session

    async def close(self) -> None:
        """Close HTTP session"""
        if self._session and not self._session.closed:
            await self._session.close()

    def _log(self, message: str, level: str = 'info') -> None:
        """Helper for consistent logging"""
        getattr(self.logger, level)(f"[AdvowareWatcherService] {message}")

    async def get_akte_files(self, aktennummer: str) -> List[Dict[str, Any]]:
        """
        Get file list for Akte with USNs.

        Args:
            aktennummer: Akte number (e.g., "12345")

        Returns:
            List of file info dicts with:
            - filename: str
            - path: str (relative to V:\\)
            - usn: int (Windows USN)
            - size: int (bytes)
            - modified: str (ISO timestamp)
            - blake3Hash: str (hex)

        Raises:
            ExternalAPIError: If API call fails
        """
        self._log(f"Fetching file list for Akte {aktennummer}")
        try:
            session = await self._get_session()
            # Retry with exponential backoff
            for attempt in range(1, 4):  # 3 attempts
                try:
                    async with session.get(
                        f"{self.base_url}/akte-details",
                        params={'akte': aktennummer},
                        # FIX: honor configured timeout (was hardcoded 30s,
                        # which matches the env default)
                        timeout=aiohttp.ClientTimeout(total=self.timeout)
                    ) as response:
                        if response.status == 404:
                            self._log(f"Akte {aktennummer} not found on Windows", level='warning')
                            return []
                        response.raise_for_status()
                        data = await response.json()
                        files = data.get('files', [])
                        # Transform: Add 'filename' field (extracted from relative_path)
                        for file in files:
                            rel_path = file.get('relative_path', '')
                            if rel_path and 'filename' not in file:
                                # Extract filename from path (e.g., "subdir/doc.pdf" -> "doc.pdf")
                                file['filename'] = rel_path.split('/')[-1]  # Use / for cross-platform
                        self._log(f"Successfully fetched {len(files)} files for Akte {aktennummer}")
                        return files
                except asyncio.TimeoutError:
                    if attempt < 3:
                        delay = 2 ** attempt  # 2, 4 seconds
                        self._log(f"Timeout on attempt {attempt}, retrying in {delay}s...", level='warning')
                        await asyncio.sleep(delay)
                    else:
                        raise
                except aiohttp.ClientError as e:
                    if attempt < 3:
                        delay = 2 ** attempt
                        self._log(f"Network error on attempt {attempt}: {e}, retrying in {delay}s...", level='warning')
                        await asyncio.sleep(delay)
                    else:
                        raise
        except Exception as e:
            self._log(f"Failed to fetch file list for Akte {aktennummer}: {e}", level='error')
            raise ExternalAPIError(f"Watcher API error: {e}") from e

    async def download_file(self, aktennummer: str, filename: str) -> bytes:
        """
        Download file from Windows.

        Args:
            aktennummer: Akte number
            filename: Filename (e.g., "document.pdf")

        Returns:
            File content as bytes

        Raises:
            ExternalAPIError: If download fails
        """
        # FIX: restore {filename} in log/error strings (was garbled)
        self._log(f"Downloading file: {aktennummer}/{filename}")
        try:
            session = await self._get_session()
            # Retry with exponential backoff
            for attempt in range(1, 4):  # 3 attempts
                try:
                    async with session.get(
                        f"{self.base_url}/file",
                        params={
                            'akte': aktennummer,
                            'path': filename
                        },
                        timeout=aiohttp.ClientTimeout(total=60)  # Longer timeout for downloads
                    ) as response:
                        if response.status == 404:
                            raise ExternalAPIError(f"File not found: {aktennummer}/{filename}")
                        response.raise_for_status()
                        content = await response.read()
                        self._log(f"Successfully downloaded {len(content)} bytes from {aktennummer}/{filename}")
                        return content
                except asyncio.TimeoutError:
                    if attempt < 3:
                        delay = 2 ** attempt
                        self._log(f"Download timeout on attempt {attempt}, retrying in {delay}s...", level='warning')
                        await asyncio.sleep(delay)
                    else:
                        raise
                except aiohttp.ClientError as e:
                    if attempt < 3:
                        delay = 2 ** attempt
                        self._log(f"Download error on attempt {attempt}: {e}, retrying in {delay}s...", level='warning')
                        await asyncio.sleep(delay)
                    else:
                        raise
        except Exception as e:
            self._log(f"Failed to download file {aktennummer}/{filename}: {e}", level='error')
            raise ExternalAPIError(f"File download failed: {e}") from e

    async def upload_file(
        self,
        aktennummer: str,
        filename: str,
        content: bytes,
        blake3_hash: str
    ) -> Dict[str, Any]:
        """
        Upload file to Windows with Blake3 verification.

        Args:
            aktennummer: Akte number
            filename: Filename
            content: File content
            blake3_hash: Blake3 hash (hex) for verification

        Returns:
            Upload result dict with:
            - success: bool
            - message: str
            - usn: int (new USN)
            - blake3Hash: str (computed hash)

        Raises:
            ExternalAPIError: If upload fails
        """
        self._log(f"Uploading file: {aktennummer}/{filename} ({len(content)} bytes)")
        try:
            session = await self._get_session()
            # Build headers with Blake3 hash
            headers = {
                'X-Blake3-Hash': blake3_hash,
                'Content-Type': 'application/octet-stream'
            }
            # Retry with exponential backoff
            for attempt in range(1, 4):  # 3 attempts
                try:
                    async with session.put(
                        # FIX: URL was missing the filename segment
                        f"{self.base_url}/files/{aktennummer}/{filename}",
                        data=content,
                        headers=headers,
                        timeout=aiohttp.ClientTimeout(total=120)  # Long timeout for uploads
                    ) as response:
                        response.raise_for_status()
                        result = await response.json()
                        if not result.get('success'):
                            error_msg = result.get('message', 'Unknown error')
                            raise ExternalAPIError(f"Upload failed: {error_msg}")
                        self._log(f"Successfully uploaded {aktennummer}/{filename}, new USN: {result.get('usn')}")
                        return result
                except asyncio.TimeoutError:
                    if attempt < 3:
                        delay = 2 ** attempt
                        self._log(f"Upload timeout on attempt {attempt}, retrying in {delay}s...", level='warning')
                        await asyncio.sleep(delay)
                    else:
                        raise
                except aiohttp.ClientError as e:
                    if attempt < 3:
                        delay = 2 ** attempt
                        self._log(f"Upload error on attempt {attempt}: {e}, retrying in {delay}s...", level='warning')
                        await asyncio.sleep(delay)
                    else:
                        raise
        except Exception as e:
            self._log(f"Failed to upload file {aktennummer}/{filename}: {e}", level='error')
            raise ExternalAPIError(f"File upload failed: {e}") from e

47
services/blake3_utils.py Normal file
View File

@@ -0,0 +1,47 @@
"""
Blake3 Hash Utilities
Provides Blake3 hash computation for file integrity verification.
"""
from typing import Union
def compute_blake3(content: Union[bytes, bytearray, memoryview]) -> str:
    """
    Compute Blake3 hash of content.

    Args:
        content: File bytes (any bytes-like object accepted by hashers)

    Returns:
        Hex string (lowercase)

    Raises:
        ImportError: If blake3 module not installed
    """
    # Imported lazily so modules that never hash don't need blake3 installed
    try:
        import blake3
    except ImportError:
        # from None: the original ImportError adds no information here
        raise ImportError(
            "blake3 module not installed. Install with: pip install blake3"
        ) from None
    hasher = blake3.blake3()
    hasher.update(content)
    return hasher.hexdigest()
def verify_blake3(content: bytes, expected_hash: str) -> bool:
    """
    Verify Blake3 hash of content.

    Args:
        content: File bytes
        expected_hash: Expected hex hash (case-insensitive)

    Returns:
        True if hash matches, False otherwise
    """
    # Normalize both sides to lowercase before comparing
    return compute_blake3(content).lower() == expected_hash.lower()

View File

@@ -377,7 +377,37 @@ class EspoCRMAPI:
self._log(f"Updating {entity_type} with ID: {entity_id}")
return await self.api_call(f"/{entity_type}/{entity_id}", method='PUT', json_data=data)
async def delete_entity(self, entity_type: str, entity_id: str) -> bool:
async def link_entities(
    self,
    entity_type: str,
    entity_id: str,
    link: str,
    foreign_id: str
) -> bool:
    """
    Link two entities together (create relationship).

    Args:
        entity_type: Parent entity type
        entity_id: Parent entity ID
        link: Link name (relationship field)
        foreign_id: ID of entity to link

    Returns:
        True if successful (api_call raises on failure)

    Example:
        await espocrm.link_entities('CAdvowareAkten', 'akte123', 'dokumente', 'doc456')
    """
    # FIX: separators between id, link and foreign id were missing in the log
    self._log(f"Linking {entity_type}/{entity_id} -> {link} -> {foreign_id}")
    await self.api_call(
        f"/{entity_type}/{entity_id}/{link}",
        method='POST',
        json_data={"id": foreign_id}
    )
    return True
async def delete_entity(self, entity_type: str,entity_id: str) -> bool:
"""
Delete an entity.
@@ -494,6 +524,99 @@ class EspoCRMAPI:
self._log(f"Upload failed: {e}", level='error')
raise EspoCRMError(f"Upload request failed: {e}") from e
async def upload_attachment_for_file_field(
    self,
    file_content: bytes,
    filename: str,
    related_type: str,
    field: str,
    mime_type: str = 'application/octet-stream'
) -> Dict[str, Any]:
    """
    Upload an attachment for a File field (2-step process per EspoCRM API).

    This is Step 1: Upload the attachment without parent, specifying
    relatedType and field.
    Step 2 (caller's job): Create/update the entity with {field}Id set to
    the attachment ID.

    Args:
        file_content: File content as bytes
        filename: Name of the file
        related_type: Entity type that will contain this attachment (e.g., 'CDokumente')
        field: Field name in the entity (e.g., 'dokument')
        mime_type: MIME type of the file

    Returns:
        Attachment entity data with 'id' field

    Raises:
        EspoCRMAuthError: On 401 (bad API key)
        EspoCRMError: On any other HTTP or network failure

    Example:
        # Step 1: Upload attachment
        attachment = await espocrm.upload_attachment_for_file_field(
            file_content=file_bytes,
            filename="document.pdf",
            related_type="CDokumente",
            field="dokument",
            mime_type="application/pdf"
        )
        # Step 2: Create entity with dokumentId
        doc = await espocrm.create_entity('CDokumente', {
            'name': 'document.pdf',
            'dokumentId': attachment['id']
        })
    """
    import base64
    # FIX: restore {filename} in the log message (was garbled)
    self._log(f"Uploading attachment for File field: {filename} ({len(file_content)} bytes) -> {related_type}.{field}")
    # EspoCRM expects the file as a base64 data URI in the JSON payload
    file_base64 = base64.b64encode(file_content).decode('utf-8')
    data_uri = f"data:{mime_type};base64,{file_base64}"
    url = self.api_base_url.rstrip('/') + '/Attachment'
    headers = {
        'X-Api-Key': self.api_key,
        'Content-Type': 'application/json'
    }
    payload = {
        'name': filename,
        'type': mime_type,
        'role': 'Attachment',
        'relatedType': related_type,
        'field': field,
        'file': data_uri
    }
    self._log(f"Upload params: relatedType={related_type}, field={field}, role=Attachment")
    effective_timeout = aiohttp.ClientTimeout(total=self.api_timeout_seconds)
    session = await self._get_session()
    try:
        async with session.post(url, headers=headers, json=payload, timeout=effective_timeout) as response:
            self._log(f"Upload response status: {response.status}")
            if response.status == 401:
                raise EspoCRMAuthError("Authentication failed - check API key")
            elif response.status == 403:
                raise EspoCRMError("Access forbidden")
            elif response.status == 404:
                raise EspoCRMError("Attachment endpoint not found")
            elif response.status >= 400:
                error_text = await response.text()
                self._log(f"❌ Upload failed with {response.status}. Response: {error_text}", level='error')
                raise EspoCRMError(f"Upload error {response.status}: {error_text}")
            # Parse response
            result = await response.json()
            attachment_id = result.get('id')
            self._log(f"✅ Attachment uploaded successfully: {attachment_id}")
            return result
    except aiohttp.ClientError as e:
        self._log(f"Upload failed: {e}", level='error')
        raise EspoCRMError(f"Upload request failed: {e}") from e
async def download_attachment(self, attachment_id: str) -> bytes:
"""
Download an attachment from EspoCRM.

View File

@@ -77,6 +77,11 @@ class EspoCRMTimeoutError(EspoCRMAPIError):
pass
class ExternalAPIError(APIError):
    """Generic external API error (Watcher, etc.)"""
# ========== Sync Errors ==========
class SyncError(IntegrationError):

View File

@@ -85,6 +85,7 @@ class RedisClientFactory:
redis_host = os.getenv('REDIS_HOST', 'localhost')
redis_port = int(os.getenv('REDIS_PORT', '6379'))
redis_db = int(os.getenv('REDIS_DB_ADVOWARE_CACHE', '1'))
redis_password = os.getenv('REDIS_PASSWORD', None) # Optional password
redis_timeout = int(os.getenv('REDIS_TIMEOUT_SECONDS', '5'))
redis_max_connections = int(os.getenv('REDIS_MAX_CONNECTIONS', '50'))
@@ -95,15 +96,22 @@ class RedisClientFactory:
# Create connection pool
if cls._connection_pool is None:
cls._connection_pool = redis.ConnectionPool(
host=redis_host,
port=redis_port,
db=redis_db,
socket_timeout=redis_timeout,
socket_connect_timeout=redis_timeout,
max_connections=redis_max_connections,
decode_responses=True # Auto-decode bytes to strings
)
pool_kwargs = {
'host': redis_host,
'port': redis_port,
'db': redis_db,
'socket_timeout': redis_timeout,
'socket_connect_timeout': redis_timeout,
'max_connections': redis_max_connections,
'decode_responses': True # Auto-decode bytes to strings
}
# Add password if configured
if redis_password:
pool_kwargs['password'] = redis_password
logger.info("Redis authentication enabled")
cls._connection_pool = redis.ConnectionPool(**pool_kwargs)
# Create client from pool
client = redis.Redis(connection_pool=cls._connection_pool)