Fix Advoware time filtering pitfall: extend ranges (Advoware -1 to +9 years, Google -2 to +10 years) and document issue in README
This commit is contained in:
@@ -23,8 +23,10 @@ logger.addHandler(handler)
|
||||
BERLIN_TZ = pytz.timezone('Europe/Berlin')

# Constants for ranges (optimize fetch efficiency).
# The window is anchored to calendar-year boundaries so it is stable for a
# whole year instead of sliding day by day; Advoware is synced from the start
# of the previous year through the end of 9 years ahead.
# NOTE: the earlier timedelta-based FETCH_FROM/FETCH_TO assignments were dead
# code (immediately overwritten) and have been removed.
now = datetime.datetime.now(BERLIN_TZ)
current_year = now.year
FETCH_FROM = f"{current_year - 1}-01-01T00:00:00"  # Start of previous year
FETCH_TO = f"{current_year + 9}-12-31T23:59:59"  # End of 9 years ahead

# Lock key used to prevent concurrent sync runs.
CALENDAR_SYNC_LOCK_KEY = 'calendar_sync_lock'
||||
@@ -129,21 +131,29 @@ async def fetch_advoware_appointments(advoware, employee_kuerzel):
|
||||
async def fetch_google_events(service, calendar_id):
    """Fetch all non-cancelled Google Calendar events in the sync window.

    Queries events between the start of ``current_year - 2`` and the end of
    ``current_year + 10`` (UTC), following ``nextPageToken`` so calendars
    with more than one page of results are fetched completely.  Recurring
    events are expanded into individual instances.

    Args:
        service: Authorized Google Calendar API service object.
        calendar_id: ID of the calendar to read.

    Returns:
        List of event resource dicts whose status is not 'cancelled'.

    Raises:
        HttpError: Re-raised after logging when the Google API call fails.
    """
    try:
        # Google window is one year wider on each side than the Advoware
        # window (FETCH_FROM/FETCH_TO) so boundary events are not missed.
        time_min = f"{current_year - 2}-01-01T00:00:00Z"
        time_max = f"{current_year + 10}-12-31T23:59:59Z"

        all_events = []
        page_token = None
        while True:
            events_result = service.events().list(
                calendarId=calendar_id,
                timeMin=time_min,
                timeMax=time_max,
                singleEvents=True,  # Expand recurring
                orderBy='startTime',
                pageToken=page_token,
                maxResults=2500  # Max per page
            ).execute()
            events_page = events_result.get('items', [])
            all_events.extend(events_page)
            page_token = events_result.get('nextPageToken')
            if not page_token:
                break

        events = [evt for evt in all_events if evt.get('status') != 'cancelled']
        logger.info(f"Fetched {len(all_events)} total Google events ({len(events)} not cancelled) for calendar {calendar_id}")
        return events
    except HttpError as e:
        logger.error(f"Google API error fetching events: {e}")
        # Fix: previously fell through and returned None implicitly, which
        # breaks callers that iterate the result (e.g. reload_api_maps).
        # Re-raise, consistent with get_advoware_employees' error handling.
        raise
|
||||
@@ -553,6 +563,20 @@ async def get_advoware_employees(advoware):
|
||||
logger.error(f"Failed to fetch Advoware employees: {e}")
|
||||
raise
|
||||
|
||||
async def get_advoware_timestamp(advoware, frnr):
    """Fetch the last modified timestamp for an Advoware appointment.

    Args:
        advoware: Advoware API client exposing ``api_call``.
        frnr: Advoware appointment number (frNr) to look up.

    Returns:
        Timezone-aware datetime (Europe/Berlin) of the last modification,
        or None when no timestamp is present or the lookup fails.
    """
    try:
        result = await advoware.api_call('api/v1/advonet/Termine', method='GET', params={'frnr': frnr})
        if not isinstance(result, list) or not result:
            return None
        # Assuming the API returns a list with one item for a frNr lookup.
        timestamp_str = result[0].get('zuletztGeaendertAm')
        if not timestamp_str:
            return None
        # NOTE(review): localize() assumes the ISO string is naive (no UTC
        # offset); an offset-bearing value would raise and be swallowed below.
        return BERLIN_TZ.localize(datetime.datetime.fromisoformat(timestamp_str))
    except Exception as e:
        # Best-effort lookup: log and signal "unknown" rather than failing the sync.
        logger.error(f"Failed to fetch timestamp for Advoware frNr {frnr}: {e}")
        return None
|
||||
|
||||
async def handler(event, context):
|
||||
"""Main event handler for calendar sync."""
|
||||
logger.info("Starting calendar sync for all employees")
|
||||
@@ -576,7 +600,7 @@ async def handler(event, context):
|
||||
continue
|
||||
|
||||
# DEBUG: Nur für Nutzer AI syncen (für Test der Travel/Prep Zeit)
|
||||
if kuerzel != 'AI':
|
||||
if kuerzel != 'SB':
|
||||
logger.info(f"DEBUG: Überspringe {kuerzel}, nur AI wird gesynct")
|
||||
continue
|
||||
|
||||
@@ -623,6 +647,32 @@ async def handler(event, context):
|
||||
# For regular events, use the event_id directly
|
||||
db_google_index[row['google_event_id']] = row
|
||||
|
||||
async def reload_db_indexes():
    """Reload database indexes after DB changes in phases.

    Rebinds the enclosing handler's row cache and lookup indexes so a later
    sync phase sees rows inserted or updated by an earlier phase.
    """
    nonlocal rows, db_adv_index, db_google_index
    # Re-read all non-deleted sync rows for the current employee.
    rows = await conn.fetch(
        """
        SELECT * FROM calendar_sync
        WHERE employee_kuerzel = $1 AND deleted = FALSE
        """,
        kuerzel
    )
    # Index by stringified Advoware frNr; rows without one are skipped.
    db_adv_index = {str(row['advoware_frnr']): row for row in rows if row['advoware_frnr']}
    # Index by Google event id; rows without one are skipped.
    db_google_index = {}
    for row in rows:
        if row['google_event_id']:
            db_google_index[row['google_event_id']] = row
    logger.debug(f"Reloaded indexes: {len(rows)} rows, {len(db_adv_index)} adv, {len(db_google_index)} google")
|
||||
|
||||
async def reload_api_maps():
    """Reload API maps after creating new events in phases.

    Re-fetches both the Advoware appointments and the Google events so the
    next sync phase sees entries created by the previous one.
    """
    nonlocal adv_appointments, adv_map, google_events, google_map
    adv_appointments = await fetch_advoware_appointments(advoware, kuerzel)
    # Index by stringified frNr; appointments without one are skipped.
    adv_map = {str(app['frNr']): app for app in adv_appointments if app.get('frNr')}
    google_events = await fetch_google_events(service, calendar_id)
    google_map = {evt['id']: evt for evt in google_events}
    logger.debug(f"Reloaded API maps: {len(adv_map)} adv, {len(google_map)} google")
|
||||
|
||||
# Phase 1: New from Advoware => Google
|
||||
logger.info("Phase 1: Processing new appointments from Advoware")
|
||||
for frnr, app in adv_map.items():
|
||||
@@ -642,6 +692,11 @@ async def handler(event, context):
|
||||
except Exception as e:
|
||||
logger.warning(f"Phase 1: Failed to process new Advoware {frnr}: {e}")
|
||||
|
||||
# Reload indexes after Phase 1 changes
|
||||
await reload_db_indexes()
|
||||
# Reload API maps after Phase 1 changes
|
||||
await reload_api_maps()
|
||||
|
||||
# Phase 2: New from Google => Advoware
|
||||
logger.info("Phase 2: Processing new events from Google")
|
||||
for event_id, evt in google_map.items():
|
||||
@@ -668,6 +723,11 @@ async def handler(event, context):
|
||||
except Exception as e:
|
||||
logger.warning(f"Phase 2: Failed to process new Google {event_id}: {e}")
|
||||
|
||||
# Reload indexes after Phase 2 changes
|
||||
await reload_db_indexes()
|
||||
# Reload API maps after Phase 2 changes
|
||||
await reload_api_maps()
|
||||
|
||||
# Phase 3: Identify deleted entries
|
||||
logger.info("Phase 3: Processing deleted entries")
|
||||
for row in rows:
|
||||
@@ -793,6 +853,11 @@ async def handler(event, context):
|
||||
async with conn.transaction():
|
||||
await conn.execute("UPDATE calendar_sync SET sync_status = 'failed' WHERE sync_id = $1;", row['sync_id'])
|
||||
|
||||
# Reload indexes after Phase 3 changes
|
||||
await reload_db_indexes()
|
||||
# Reload API maps after Phase 3 changes
|
||||
await reload_api_maps()
|
||||
|
||||
# Phase 4: Update existing entries if changed
|
||||
logger.info("Phase 4: Processing updates for existing entries")
|
||||
# Track which master events we've already processed to avoid duplicate updates
|
||||
|
||||
Reference in New Issue
Block a user