refactor(logging): enhance error handling and resource management in rate limiting and sync operations

This commit is contained in:
bsiggel
2026-03-08 22:47:05 +00:00
parent 6c3cf3ca91
commit a5a122b688
2 changed files with 23 additions and 3 deletions

View File

@@ -92,7 +92,7 @@ class BeteiligteSync:
            return True
        except Exception as e:
-           self._log(f"Fehler beim Acquire Lock: {e}", level='error')
+           self.logger.error(f"Fehler beim Acquire Lock: {e}")
            # Clean up Redis lock on error
            if self.redis:
                try:

View File

@@ -65,7 +65,8 @@ async def enforce_global_rate_limit(context=None):
        socket_timeout=int(os.getenv('REDIS_TIMEOUT_SECONDS', '5'))
    )
-   lua_script = """
+   try:
+       lua_script = """
    local key = KEYS[1]
    local current_time_ms = tonumber(ARGV[1])
    local max_tokens = tonumber(ARGV[2])
@@ -97,7 +98,6 @@ async def enforce_global_rate_limit(context=None):
    end
    """
-   try:
        script = redis_client.register_script(lua_script)
        while True:
@@ -121,6 +121,12 @@ async def enforce_global_rate_limit(context=None):
    except Exception as e:
        log_operation('error', f"Rate limiting failed: {e}. Proceeding without limit.", context=context)
+   finally:
+       # Always close Redis connection to prevent resource leaks
+       try:
+           redis_client.close()
+       except Exception:
+           pass
@backoff.on_exception(backoff.expo, HttpError, max_tries=4, base=3,
@@ -958,6 +964,7 @@ async def handler(input_data: Dict[str, Any], ctx: FlowContext) -> None:
    log_operation('info', f"Starting calendar sync for employee {kuerzel}", context=ctx)
    redis_client = get_redis_client(ctx)
+   service = None
    try:
        log_operation('debug', "Initializing Advoware service", context=ctx)
@@ -1048,6 +1055,19 @@ async def handler(input_data: Dict[str, Any], ctx: FlowContext) -> None:
        log_operation('info', f"Handler duration: {time.time() - start_time}", context=ctx)
        return {'status': 200, 'body': {'status': 'completed', 'kuerzel': kuerzel}}
+   finally:
+       # Always close resources to prevent memory leaks
+       if service is not None:
+           try:
+               service.close()
+           except Exception as e:
+               log_operation('debug', f"Error closing Google service: {e}", context=ctx)
+       try:
+           redis_client.close()
+       except Exception as e:
+           log_operation('debug', f"Error closing Redis client: {e}", context=ctx)
    except Exception as e:
        log_operation('error', f"Sync failed for {kuerzel}: {e}", context=ctx)