#!/usr/bin/env python3
"""
EspoCRM Custom Entity Validator & Rebuild Tool

Runs comprehensive validations before the rebuild is executed.
"""

import json
import os
import re
import subprocess
import sys
from collections import defaultdict
from pathlib import Path
from typing import Dict, List, Set, Tuple


# ANSI Color Codes
class Colors:
    """ANSI escape sequences used to colorize terminal output."""
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    RED = '\033[91m'
    BLUE = '\033[94m'
    BOLD = '\033[1m'
    END = '\033[0m'  # reset all attributes


def print_header(text: str):
    """Print a section heading centered between bold blue '=' rules."""
    rule = '=' * 70
    color_on = f"{Colors.BOLD}{Colors.BLUE}"
    print(f"\n{color_on}{rule}{Colors.END}")
    print(f"{color_on}{text.center(70)}{Colors.END}")
    print(f"{color_on}{rule}{Colors.END}\n")

def print_success(text: str):
    """Print *text* prefixed with a green check mark."""
    mark = f"{Colors.GREEN}✓{Colors.END}"
    print(f"{mark} {text}")

def print_warning(text: str):
    """Print *text* prefixed with a yellow warning sign."""
    mark = f"{Colors.YELLOW}⚠{Colors.END}"
    print(f"{mark} {text}")

def print_error(text: str):
    """Print *text* prefixed with a red cross."""
    mark = f"{Colors.RED}✗{Colors.END}"
    print(f"{mark} {text}")

def print_info(text: str):
    """Print *text* prefixed with a blue info symbol."""
    mark = f"{Colors.BLUE}ℹ{Colors.END}"
    print(f"{mark} {text}")

class EntityValidator:
    """Validates EspoCRM custom-entity metadata and runs the rebuild.

    Collects critical problems in ``errors`` (these abort the rebuild) and
    non-critical findings in ``warnings``.
    """

    def __init__(self, base_path: str):
        """Set up all relevant paths below *base_path* (the EspoCRM root)."""
        self.base_path = Path(base_path)
        # Customizations live under custom/Espo/Custom/Resources
        self.custom_path = self.base_path / "custom" / "Espo" / "Custom" / "Resources"
        self.metadata_path = self.custom_path / "metadata"
        self.i18n_path = self.custom_path / "i18n"
        self.errors: List[str] = []        # critical findings -> rebuild aborts
        self.warnings: List[str] = []      # non-critical findings
        self.entity_defs: Dict[str, dict] = {}  # entity name -> parsed entityDefs JSON
        self.relationships = defaultdict(list)

def validate_json_syntax(self) -> bool:
|
||
"""Validiere JSON-Syntax aller Dateien im custom-Verzeichnis."""
|
||
print_header("1. JSON-SYNTAX VALIDIERUNG")
|
||
|
||
json_files = list(self.custom_path.rglob("*.json"))
|
||
if not json_files:
|
||
print_warning("Keine JSON-Dateien gefunden")
|
||
return True
|
||
|
||
invalid_files = []
|
||
for json_file in json_files:
|
||
try:
|
||
with open(json_file, 'r', encoding='utf-8') as f:
|
||
json.load(f)
|
||
except json.JSONDecodeError as e:
|
||
self.errors.append(f"JSON-Fehler in {json_file.relative_to(self.base_path)}: {e}")
|
||
invalid_files.append(str(json_file.relative_to(self.base_path)))
|
||
|
||
if invalid_files:
|
||
print_error(f"{len(invalid_files)} Datei(en) mit JSON-Fehlern gefunden:")
|
||
for f in invalid_files:
|
||
print(f" {Colors.RED}•{Colors.END} {f}")
|
||
return False
|
||
else:
|
||
print_success(f"Alle {len(json_files)} JSON-Dateien sind syntaktisch korrekt")
|
||
return True
|
||
|
||
def load_entity_defs(self):
|
||
"""Lade alle entityDefs für weitere Analysen."""
|
||
entity_defs_path = self.metadata_path / "entityDefs"
|
||
if not entity_defs_path.exists():
|
||
return
|
||
|
||
for json_file in entity_defs_path.glob("*.json"):
|
||
entity_name = json_file.stem
|
||
try:
|
||
with open(json_file, 'r', encoding='utf-8') as f:
|
||
self.entity_defs[entity_name] = json.load(f)
|
||
except Exception as e:
|
||
# Fehler wird bereits in JSON-Validierung gemeldet
|
||
pass
|
||
|
||
    def validate_relationships(self) -> bool:
        """Check link definitions between entities for mutual consistency.

        For every hasMany/hasOne link, verify that the target entity exists,
        that the foreign link on the target points back here, and that both
        sides agree on relationName (when both define it). Mismatches are
        critical: they are appended to self.errors and False is returned.
        """
        print_header("2. RELATIONSHIP-KONSISTENZ")

        if not self.entity_defs:
            print_warning("Keine entityDefs geladen")
            return True

        relationship_errors = []
        checked_pairs = set()  # each A<->B pair is validated only once

        # Links that are not checked (standard EspoCRM parent relationships)
        skip_foreign_links = {'parent', 'parents'}

        for entity_name, entity_def in self.entity_defs.items():
            links = entity_def.get('links', {})

            for link_name, link_def in links.items():
                link_type = link_def.get('type')
                target_entity = link_def.get('entity')
                foreign = link_def.get('foreign')
                relation_name = link_def.get('relationName')

                # Skip parent links (standard Activities relationship)
                if foreign in skip_foreign_links:
                    continue

                # Only check hasMany and hasOne (belongsTo is the other side)
                if link_type in ['hasMany', 'hasOne'] and target_entity and foreign:
                    # Canonical key so A->B and B->A count as the same pair.
                    pair_key = tuple(sorted([f"{entity_name}.{link_name}", f"{target_entity}.{foreign}"]))
                    if pair_key in checked_pairs:
                        continue
                    checked_pairs.add(pair_key)

                    # Does the target entity exist at all?
                    if target_entity not in self.entity_defs:
                        relationship_errors.append(
                            f"{entity_name}.{link_name}: Ziel-Entity '{target_entity}' existiert nicht"
                        )
                        continue

                    target_links = self.entity_defs[target_entity].get('links', {})

                    # Does the foreign link exist on the target entity?
                    if foreign not in target_links:
                        relationship_errors.append(
                            f"{entity_name}.{link_name} → {target_entity}: "
                            f"Foreign link '{foreign}' fehlt in {target_entity}"
                        )
                        continue

                    foreign_def = target_links[foreign]
                    foreign_foreign = foreign_def.get('foreign')
                    foreign_relation_name = foreign_def.get('relationName')

                    # Does the foreign link point back to this link?
                    if foreign_foreign != link_name:
                        relationship_errors.append(
                            f"{entity_name}.{link_name} ↔ {target_entity}.{foreign}: "
                            f"Foreign zeigt auf '{foreign_foreign}' statt auf '{link_name}'"
                        )

                    # relationName must match when both sides define it.
                    if relation_name and foreign_relation_name and relation_name != foreign_relation_name:
                        relationship_errors.append(
                            f"{entity_name}.{link_name} ↔ {target_entity}.{foreign}: "
                            f"relationName unterschiedlich ('{relation_name}' vs '{foreign_relation_name}')"
                        )

        if relationship_errors:
            print_error(f"{len(relationship_errors)} Relationship-Fehler gefunden:")
            for err in relationship_errors:
                print(f" {Colors.RED}•{Colors.END} {err}")
            self.errors.extend(relationship_errors)
            return False
        else:
            print_success(f"{len(checked_pairs)} Relationships geprüft - alle konsistent")
            return True

    def validate_formula_placement(self) -> bool:
        """Check that formula scripts live in /formula/, not in /entityDefs/.

        A 'formula' key inside an entityDefs file is a critical placement
        error (returns False). Formula files with empty or null scripts are
        recorded as warnings only.
        """
        print_header("3. FORMULA-SCRIPT PLATZIERUNG")

        misplaced_formulas = []

        # entityDefs must not carry a 'formula' key (wrong location)
        for entity_name, entity_def in self.entity_defs.items():
            if 'formula' in entity_def:
                misplaced_formulas.append(
                    f"entityDefs/{entity_name}.json enthält 'formula' - "
                    f"sollte in formula/{entity_name}.json sein"
                )

        # Check the formula files themselves, if the directory exists
        formula_path = self.metadata_path / "formula"
        formula_count = 0
        if formula_path.exists():
            for formula_file in formula_path.glob("*.json"):
                formula_count += 1
                try:
                    with open(formula_file, 'r', encoding='utf-8') as f:
                        formula_def = json.load(f)
                        # Warn about empty or null scripts
                        for key, value in formula_def.items():
                            if value == "" or value is None:
                                self.warnings.append(
                                    f"formula/{formula_file.name}: '{key}' ist leer oder null"
                                )
                except Exception:
                    pass  # JSON errors already reported by validate_json_syntax()

        if misplaced_formulas:
            print_error(f"{len(misplaced_formulas)} Formula-Platzierungsfehler:")
            for err in misplaced_formulas:
                print(f" {Colors.RED}•{Colors.END} {err}")
            self.errors.extend(misplaced_formulas)
            return False
        else:
            print_success(f"{formula_count} Formula-Definitionen korrekt platziert")
            return True

def validate_i18n_completeness(self) -> bool:
|
||
"""Prüfe i18n-Definitionen auf Vollständigkeit."""
|
||
print_header("4. i18n-VOLLSTÄNDIGKEIT")
|
||
|
||
if not self.entity_defs:
|
||
print_warning("Keine entityDefs zum Prüfen")
|
||
return True
|
||
|
||
missing_i18n = []
|
||
incomplete_i18n = []
|
||
|
||
languages = ['de_DE', 'en_US']
|
||
custom_entities = [name for name in self.entity_defs.keys()
|
||
if name.startswith('C') or name.startswith('CVmh')]
|
||
|
||
for entity_name in custom_entities:
|
||
entity_def = self.entity_defs[entity_name]
|
||
links = entity_def.get('links', {})
|
||
|
||
# Finde alle hasMany/hasOne Links die übersetzt werden sollten
|
||
custom_links = []
|
||
for link_name, link_def in links.items():
|
||
link_type = link_def.get('type')
|
||
if link_type in ['hasMany', 'hasOne']:
|
||
# Überspringe System-Links
|
||
if link_name not in ['createdBy', 'modifiedBy', 'assignedUser', 'teams']:
|
||
custom_links.append(link_name)
|
||
|
||
if not custom_links:
|
||
continue
|
||
|
||
for lang in languages:
|
||
i18n_file = self.i18n_path / lang / f"{entity_name}.json"
|
||
|
||
if not i18n_file.exists():
|
||
missing_i18n.append(f"{entity_name}: {lang} fehlt komplett")
|
||
continue
|
||
|
||
try:
|
||
with open(i18n_file, 'r', encoding='utf-8') as f:
|
||
i18n_def = json.load(f)
|
||
links_i18n = i18n_def.get('links', {})
|
||
|
||
# Prüfe ob alle custom Links übersetzt sind
|
||
for link_name in custom_links:
|
||
if link_name not in links_i18n:
|
||
incomplete_i18n.append(
|
||
f"{entity_name} ({lang}): Link '{link_name}' fehlt in i18n"
|
||
)
|
||
except Exception:
|
||
pass # JSON-Fehler bereits gemeldet
|
||
|
||
total_issues = len(missing_i18n) + len(incomplete_i18n)
|
||
|
||
if missing_i18n:
|
||
print_error(f"{len(missing_i18n)} komplett fehlende i18n-Dateien:")
|
||
for err in missing_i18n[:10]: # Max 10 anzeigen
|
||
print(f" {Colors.RED}•{Colors.END} {err}")
|
||
if len(missing_i18n) > 10:
|
||
print(f" {Colors.RED}...{Colors.END} und {len(missing_i18n) - 10} weitere")
|
||
|
||
if incomplete_i18n:
|
||
print_warning(f"{len(incomplete_i18n)} unvollständige i18n-Definitionen:")
|
||
for err in incomplete_i18n[:10]: # Max 10 anzeigen
|
||
print(f" {Colors.YELLOW}•{Colors.END} {err}")
|
||
if len(incomplete_i18n) > 10:
|
||
print(f" {Colors.YELLOW}...{Colors.END} und {len(incomplete_i18n) - 10} weitere")
|
||
|
||
if not missing_i18n and not incomplete_i18n:
|
||
print_success(f"i18n für {len(custom_entities)} Custom-Entities vollständig")
|
||
|
||
# i18n-Fehler sind nur Warnungen, kein Abbruch
|
||
self.warnings.extend(missing_i18n + incomplete_i18n)
|
||
return True
|
||
|
||
    def validate_layout_structure(self) -> bool:
        """Check clientDefs layout definitions for common mistakes.

        Currently flags unexpected literal ``false`` values inside
        ``bottomPanelsDetail`` entries. Findings are warnings only; always
        returns True.
        """
        print_header("5. LAYOUT-STRUKTUR VALIDIERUNG")

        layouts_path = self.metadata_path / "clientDefs"
        if not layouts_path.exists():
            print_warning("Keine clientDefs gefunden")
            return True

        layout_errors = []
        checked_layouts = 0

        for client_def_file in layouts_path.glob("*.json"):
            try:
                with open(client_def_file, 'r', encoding='utf-8') as f:
                    client_def = json.load(f)

                # Check for common layout mistakes in bottom panels
                bottom_panels = client_def.get('bottomPanelsDetail', {})
                for panel_key, panel_def in bottom_panels.items():
                    checked_layouts += 1

                    # Flag unexpected 'false' values (only disabled/sticked
                    # legitimately carry a boolean false here)
                    if isinstance(panel_def, dict):
                        for key, value in panel_def.items():
                            if value is False and key not in ['disabled', 'sticked']:
                                layout_errors.append(
                                    f"{client_def_file.stem}: bottomPanelsDetail.{panel_key}.{key} "
                                    f"sollte nicht 'false' sein"
                                )
            except Exception:
                pass  # JSON errors already reported by validate_json_syntax()

        if layout_errors:
            print_warning(f"{len(layout_errors)} Layout-Strukturprobleme:")
            for err in layout_errors[:5]:
                print(f" {Colors.YELLOW}•{Colors.END} {err}")
            if len(layout_errors) > 5:
                print(f" {Colors.YELLOW}...{Colors.END} und {len(layout_errors) - 5} weitere")
            self.warnings.extend(layout_errors)
        else:
            print_success(f"{checked_layouts} Layout-Definitionen geprüft")

        return True

    def check_file_permissions(self) -> bool:
        """Check (and try to fix) ownership/permissions in the custom directory.

        Files are expected to belong to www-data:www-data. Problems are
        reported and an automatic fix via sudo is attempted; failures never
        abort the run, so this method always returns True.
        """
        print_header("6. DATEIRECHTE-PRÜFUNG")

        try:
            # List everything NOT owned by user www-data or group www-data
            result = subprocess.run(
                ['find', str(self.custom_path), '!', '-user', 'www-data', '-o', '!', '-group', 'www-data'],
                capture_output=True,
                text=True
            )

            wrong_owner_files = [line for line in result.stdout.strip().split('\n') if line]

            if wrong_owner_files:
                print_warning(f"{len(wrong_owner_files)} Dateien mit falschen Rechten gefunden")
                print_info("Versuche automatische Korrektur...")

                # Try to fix ownership and permissions (requires sudo:
                # files -> 664, directories -> 775)
                try:
                    subprocess.run(
                        ['sudo', 'chown', '-R', 'www-data:www-data', str(self.custom_path)],
                        check=True,
                        capture_output=True
                    )
                    subprocess.run(
                        ['sudo', 'find', str(self.custom_path), '-type', 'f', '-exec', 'chmod', '664', '{}', ';'],
                        check=True,
                        capture_output=True
                    )
                    subprocess.run(
                        ['sudo', 'find', str(self.custom_path), '-type', 'd', '-exec', 'chmod', '775', '{}', ';'],
                        check=True,
                        capture_output=True
                    )
                    print_success("Dateirechte korrigiert")
                except subprocess.CalledProcessError:
                    print_warning("Konnte Dateirechte nicht automatisch korrigieren (sudo erforderlich)")
            else:
                print_success("Alle Dateirechte korrekt (www-data:www-data)")

            return True
        except Exception as e:
            # Non-critical — e.g. 'find' not available on this platform
            print_warning(f"Konnte Dateirechte nicht prüfen: {e}")
            return True

    def run_rebuild(self) -> bool:
        """Execute the EspoCRM rebuild (clear-cache, then rebuild).

        If the base path looks like a Docker volume, try to locate the EspoCRM
        container and run the commands via ``docker exec``; otherwise (or if
        no container is found / docker fails) fall back to running
        ``php command.php`` locally in the base path.

        Returns True on success, False on failure.
        """
        print_header("7. ESPOCRM REBUILD")

        # Heuristic: are we operating on a Docker volume?
        is_docker_volume = '/docker/volumes/' in str(self.base_path)

        if is_docker_volume:
            # Try to find the EspoCRM container among the running ones
            try:
                result = subprocess.run(
                    ['docker', 'ps', '--format', '{{.Names}}'],
                    capture_output=True,
                    text=True,
                    timeout=5
                )

                containers = result.stdout.strip().split('\n')
                espo_container = None

                # Prefer exact well-known names ("espocrm" without suffix)
                for container in containers:
                    if container.lower() in ['espocrm', 'espocrm-app']:
                        espo_container = container
                        break

                if not espo_container:
                    # Fallback: first container with "espo" in the name that is
                    # not the websocket/daemon/db sidecar
                    for container in containers:
                        if 'espo' in container.lower() and 'websocket' not in container.lower() and 'daemon' not in container.lower() and 'db' not in container.lower():
                            espo_container = container
                            break

                if espo_container:
                    print_info(f"Docker-Container erkannt: {espo_container}")

                    # Step 1: clear the cache (best effort)
                    print_info("Lösche Cache...")
                    cache_result = subprocess.run(
                        ['docker', 'exec', espo_container, 'php', 'command.php', 'clear-cache'],
                        capture_output=True,
                        text=True,
                        timeout=30
                    )

                    if cache_result.returncode == 0:
                        print_success("Cache erfolgreich gelöscht")
                    else:
                        print_warning("Cache-Löschung fehlgeschlagen, fahre trotzdem fort...")

                    # Step 2: rebuild inside the container
                    print_info("Starte Rebuild (kann 10-30 Sekunden dauern)...")
                    result = subprocess.run(
                        ['docker', 'exec', espo_container, 'php', 'command.php', 'rebuild'],
                        capture_output=True,
                        text=True,
                        timeout=60
                    )

                    if result.returncode == 0:
                        print_success("Rebuild erfolgreich abgeschlossen")
                        if result.stdout:
                            print(f" {result.stdout.strip()}")
                        return True
                    else:
                        print_error("Rebuild fehlgeschlagen:")
                        if result.stderr:
                            print(f"\n{result.stderr}")
                        return False
                else:
                    # No container found — fall through to the local rebuild
                    print_warning("Kein EspoCRM Docker-Container gefunden")
                    print_info("Versuche lokalen Rebuild...")
            except Exception as e:
                # Any docker failure (not installed, timeout, ...) also falls
                # through to the local rebuild
                print_warning(f"Docker-Erkennung fehlgeschlagen: {e}")
                print_info("Versuche lokalen Rebuild...")

        # Local rebuild (fallback); rebuild.php marks the EspoCRM root
        rebuild_script = self.base_path / "rebuild.php"
        if not rebuild_script.exists():
            print_error(f"rebuild.php nicht gefunden in {self.base_path}")
            return False

        try:
            # Step 1: clear the cache (best effort)
            print_info("Lösche Cache...")
            cache_result = subprocess.run(
                ['php', 'command.php', 'clear-cache'],
                cwd=str(self.base_path),
                capture_output=True,
                text=True,
                timeout=30
            )

            if cache_result.returncode == 0:
                print_success("Cache erfolgreich gelöscht")
            else:
                print_warning("Cache-Löschung fehlgeschlagen, fahre trotzdem fort...")

            # Step 2: rebuild
            print_info("Starte lokalen Rebuild (kann 10-30 Sekunden dauern)...")
            result = subprocess.run(
                ['php', 'command.php', 'rebuild'],
                cwd=str(self.base_path),
                capture_output=True,
                text=True,
                timeout=60
            )

            if result.returncode == 0:
                print_success("Rebuild erfolgreich abgeschlossen")
                return True
            else:
                print_error("Rebuild fehlgeschlagen:")
                if result.stderr:
                    print(f"\n{result.stderr}")
                return False
        except subprocess.TimeoutExpired:
            print_error("Rebuild-Timeout (>60 Sekunden)")
            return False
        except Exception as e:
            print_error(f"Rebuild-Fehler: {e}")
            return False

def print_summary(self):
|
||
"""Drucke Zusammenfassung aller Ergebnisse."""
|
||
print_header("ZUSAMMENFASSUNG")
|
||
|
||
if self.errors:
|
||
print(f"\n{Colors.RED}{Colors.BOLD}FEHLER: {len(self.errors)}{Colors.END}")
|
||
for err in self.errors:
|
||
print(f" {Colors.RED}✗{Colors.END} {err}")
|
||
|
||
if self.warnings:
|
||
print(f"\n{Colors.YELLOW}{Colors.BOLD}WARNUNGEN: {len(self.warnings)}{Colors.END}")
|
||
for warn in self.warnings[:10]:
|
||
print(f" {Colors.YELLOW}⚠{Colors.END} {warn}")
|
||
if len(self.warnings) > 10:
|
||
print(f" {Colors.YELLOW}...{Colors.END} und {len(self.warnings) - 10} weitere Warnungen")
|
||
|
||
if not self.errors and not self.warnings:
|
||
print(f"\n{Colors.GREEN}{Colors.BOLD}✓ ALLE PRÜFUNGEN BESTANDEN{Colors.END}")
|
||
|
||
print()
|
||
|
||
def validate_all(self) -> bool:
|
||
"""Führe alle Validierungen durch."""
|
||
all_valid = True
|
||
|
||
# 1. JSON-Syntax (kritisch)
|
||
if not self.validate_json_syntax():
|
||
all_valid = False
|
||
print_error("\nAbbruch: JSON-Syntax-Fehler müssen behoben werden!\n")
|
||
return False
|
||
|
||
# Lade entityDefs für weitere Checks
|
||
self.load_entity_defs()
|
||
|
||
# 2. Relationships (kritisch)
|
||
if not self.validate_relationships():
|
||
all_valid = False
|
||
|
||
# 3. Formula-Platzierung (kritisch)
|
||
if not self.validate_formula_placement():
|
||
all_valid = False
|
||
|
||
# 4. i18n-Vollständigkeit (nur Warnung)
|
||
self.validate_i18n_completeness()
|
||
|
||
# 5. Layout-Struktur (nur Warnung)
|
||
self.validate_layout_structure()
|
||
|
||
# 6. Dateirechte (nicht kritisch für Rebuild)
|
||
self.check_file_permissions()
|
||
|
||
return all_valid
|
||
|
||
def main():
    """CLI entry point: parse arguments, validate, and optionally rebuild."""
    import argparse

    parser = argparse.ArgumentParser(
        description='EspoCRM Custom Entity Validator & Rebuild Tool'
    )
    parser.add_argument(
        '--dry-run',
        action='store_true',
        help='Nur Validierungen durchführen, kein Rebuild'
    )
    parser.add_argument(
        '--no-rebuild',
        action='store_true',
        help='Synonym für --dry-run'
    )
    args = parser.parse_args()

    dry_run = args.dry_run or args.no_rebuild

    # Locate the EspoCRM root directory
    # (assumes this script lives three directories below the root — TODO confirm)
    script_dir = Path(__file__).parent.parent.parent

    # rebuild.php marks the EspoCRM root; refuse to run anywhere else
    if not (script_dir / "rebuild.php").exists():
        print_error("Fehler: Nicht im EspoCRM-Root-Verzeichnis!")
        print_info(f"Aktueller Pfad: {script_dir}")
        sys.exit(1)

    print(f"{Colors.BOLD}EspoCRM Custom Entity Validator & Rebuild Tool{Colors.END}")
    print(f"Arbeitsverzeichnis: {script_dir}")
    if dry_run:
        print(f"{Colors.YELLOW}Modus: DRY-RUN (kein Rebuild){Colors.END}")
    print()

    validator = EntityValidator(str(script_dir))

    # Run all validations
    all_valid = validator.validate_all()

    # Print the summary
    validator.print_summary()

    # Decide whether the rebuild may run
    if not all_valid:
        print_error("REBUILD ABGEBROCHEN: Kritische Fehler müssen behoben werden!")
        sys.exit(1)

    if dry_run:
        print_info("Dry-Run Modus: Rebuild übersprungen")
        print(f"\n{Colors.GREEN}{Colors.BOLD}✓ VALIDIERUNGEN ABGESCHLOSSEN{Colors.END}\n")
        sys.exit(0)

    # Warnings alone do not block the rebuild
    if validator.warnings:
        print_warning(
            f"Es gibt {len(validator.warnings)} Warnungen, aber keine kritischen Fehler."
        )
        print_info("Rebuild wird trotzdem durchgeführt...\n")

    # Execute the rebuild
    if validator.run_rebuild():
        print(f"\n{Colors.GREEN}{Colors.BOLD}✓ ERFOLGREICH ABGESCHLOSSEN{Colors.END}\n")
        sys.exit(0)
    else:
        print(f"\n{Colors.RED}{Colors.BOLD}✗ REBUILD FEHLGESCHLAGEN{Colors.END}\n")
        sys.exit(1)

# Script entry point
if __name__ == "__main__":
    main()