1004 lines
43 KiB
Python
Executable File
1004 lines
43 KiB
Python
Executable File
#!/usr/bin/env python3
|
||
"""
|
||
EspoCRM Custom Entity Validator & Rebuild Tool
|
||
Führt umfassende Validierungen durch bevor der Rebuild ausgeführt wird.
|
||
"""
|
||
|
||
import json
|
||
import os
|
||
import sys
|
||
import subprocess
|
||
import re
|
||
from pathlib import Path
|
||
from typing import Dict, List, Tuple, Set
|
||
from collections import defaultdict
|
||
|
||
# ANSI Color Codes
|
||
class Colors:
    """ANSI escape sequences for coloured terminal output."""

    END = '\033[0m'
    BOLD = '\033[1m'
    RED = '\033[91m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    BLUE = '\033[94m'
|
||
|
||
def print_header(text: str):
    """Print *text* as a centred section banner framed by '=' rules."""
    rule = '=' * 70
    style = f"{Colors.BOLD}{Colors.BLUE}"
    print(f"\n{style}{rule}{Colors.END}")
    print(f"{style}{text.center(70)}{Colors.END}")
    print(f"{style}{rule}{Colors.END}\n")
|
||
|
||
def print_success(text: str):
    """Print *text* prefixed with a green check mark."""
    mark = f"{Colors.GREEN}✓{Colors.END}"
    print(f"{mark} {text}")
|
||
|
||
def print_warning(text: str):
    """Print *text* prefixed with a yellow warning sign."""
    mark = f"{Colors.YELLOW}⚠{Colors.END}"
    print(f"{mark} {text}")
|
||
|
||
def print_error(text: str):
    """Print *text* prefixed with a red cross."""
    mark = f"{Colors.RED}✗{Colors.END}"
    print(f"{mark} {text}")
|
||
|
||
def print_info(text: str):
    """Print *text* prefixed with a blue info symbol."""
    mark = f"{Colors.BLUE}ℹ{Colors.END}"
    print(f"{mark} {text}")
|
||
|
||
class EntityValidator:
    """Validates EspoCRM customisation files and drives the rebuild."""

    def __init__(self, base_path: str):
        """Set up derived paths and empty result collections.

        Args:
            base_path: EspoCRM root directory.
        """
        root = Path(base_path)
        self.base_path = root
        self.custom_path = root / "custom" / "Espo" / "Custom" / "Resources"
        self.metadata_path = self.custom_path / "metadata"
        self.i18n_path = self.custom_path / "i18n"
        self.client_custom_path = root / "client" / "custom"
        # Collected findings: errors abort the rebuild, warnings do not.
        self.errors = []
        self.warnings = []
        # entityDefs JSON keyed by entity name, filled by load_entity_defs().
        self.entity_defs = {}
        self.relationships = defaultdict(list)
        # Set externally (via --skip-e2e) to suppress the E2E test run.
        self.skip_e2e_tests = False
|
||
|
||
def validate_json_syntax(self) -> bool:
    """Check every JSON file below the custom directory for syntax errors.

    Returns False (and records the problems in self.errors) when at
    least one file does not parse; True otherwise.
    """
    print_header("1. JSON-SYNTAX VALIDIERUNG")

    json_files = list(self.custom_path.rglob("*.json"))
    if not json_files:
        print_warning("Keine JSON-Dateien gefunden")
        return True

    invalid_files = []
    for path in json_files:
        rel = path.relative_to(self.base_path)
        try:
            with open(path, 'r', encoding='utf-8') as handle:
                json.load(handle)
        except json.JSONDecodeError as exc:
            self.errors.append(f"JSON-Fehler in {rel}: {exc}")
            invalid_files.append(str(rel))

    if not invalid_files:
        print_success(f"Alle {len(json_files)} JSON-Dateien sind syntaktisch korrekt")
        return True

    print_error(f"{len(invalid_files)} Datei(en) mit JSON-Fehlern gefunden:")
    for name in invalid_files:
        print(f" {Colors.RED}•{Colors.END} {name}")
    return False
|
||
|
||
def load_entity_defs(self):
    """Load every entityDefs JSON file into ``self.entity_defs``.

    Files that fail to parse are skipped silently, because their syntax
    errors have already been reported by ``validate_json_syntax``.
    """
    entity_defs_path = self.metadata_path / "entityDefs"
    if not entity_defs_path.exists():
        return

    for json_file in entity_defs_path.glob("*.json"):
        entity_name = json_file.stem
        try:
            with open(json_file, 'r', encoding='utf-8') as f:
                self.entity_defs[entity_name] = json.load(f)
        except Exception:
            # Parse errors were already reported during JSON validation;
            # original code bound the exception to an unused variable.
            continue
|
||
|
||
def validate_relationships(self) -> bool:
    """Validate link definitions between entities.

    For every hasMany/hasOne link this checks that the target entity
    exists, that it declares the expected foreign link, that the foreign
    link points back to this link, and that relationName (when defined
    on both sides) matches. Returns False when any inconsistency is
    found; all problems are also appended to self.errors.
    """
    print_header("2. RELATIONSHIP-KONSISTENZ")

    if not self.entity_defs:
        print_warning("Keine entityDefs geladen")
        return True

    relationship_errors = []
    # Symmetric pairs already verified, to avoid reporting both directions.
    checked_pairs = set()

    # Links that are never checked (standard EspoCRM parent relationships).
    skip_foreign_links = {'parent', 'parents'}

    for entity_name, entity_def in self.entity_defs.items():
        links = entity_def.get('links', {})

        for link_name, link_def in links.items():
            link_type = link_def.get('type')
            target_entity = link_def.get('entity')
            foreign = link_def.get('foreign')
            relation_name = link_def.get('relationName')

            # Skip parent links (standard Activities relationship).
            if foreign in skip_foreign_links:
                continue

            # Only check hasMany and hasOne (belongsTo is the other side).
            if link_type in ['hasMany', 'hasOne'] and target_entity and foreign:
                pair_key = tuple(sorted([f"{entity_name}.{link_name}", f"{target_entity}.{foreign}"]))
                if pair_key in checked_pairs:
                    continue
                checked_pairs.add(pair_key)

                # Does the target entity exist at all?
                if target_entity not in self.entity_defs:
                    relationship_errors.append(
                        f"{entity_name}.{link_name}: Ziel-Entity '{target_entity}' existiert nicht"
                    )
                    continue

                target_links = self.entity_defs[target_entity].get('links', {})

                # Does the foreign link exist on the target entity?
                if foreign not in target_links:
                    relationship_errors.append(
                        f"{entity_name}.{link_name} → {target_entity}: "
                        f"Foreign link '{foreign}' fehlt in {target_entity}"
                    )
                    continue

                foreign_def = target_links[foreign]
                foreign_foreign = foreign_def.get('foreign')
                foreign_relation_name = foreign_def.get('relationName')

                # Does the foreign link point back to this link?
                if foreign_foreign != link_name:
                    relationship_errors.append(
                        f"{entity_name}.{link_name} ↔ {target_entity}.{foreign}: "
                        f"Foreign zeigt auf '{foreign_foreign}' statt auf '{link_name}'"
                    )

                # relationName must agree when both sides define it.
                if relation_name and foreign_relation_name and relation_name != foreign_relation_name:
                    relationship_errors.append(
                        f"{entity_name}.{link_name} ↔ {target_entity}.{foreign}: "
                        f"relationName unterschiedlich ('{relation_name}' vs '{foreign_relation_name}')"
                    )

    if relationship_errors:
        print_error(f"{len(relationship_errors)} Relationship-Fehler gefunden:")
        for err in relationship_errors:
            print(f" {Colors.RED}•{Colors.END} {err}")
        self.errors.extend(relationship_errors)
        return False
    else:
        print_success(f"{len(checked_pairs)} Relationships geprüft - alle konsistent")
        return True
|
||
|
||
def validate_formula_placement(self) -> bool:
    """Ensure formula scripts live in metadata/formula/, not in entityDefs.

    A 'formula' key inside an entityDefs file is a critical error; empty
    or null scripts inside metadata/formula/ are only warnings.
    """
    print_header("3. FORMULA-SCRIPT PLATZIERUNG")

    # Any entityDefs file that carries a 'formula' key is misplaced.
    misplaced_formulas = [
        f"entityDefs/{name}.json enthält 'formula' - "
        f"sollte in formula/{name}.json sein"
        for name, definition in self.entity_defs.items()
        if 'formula' in definition
    ]

    # Inspect the dedicated formula files and flag empty/null scripts.
    formula_path = self.metadata_path / "formula"
    formula_count = 0
    if formula_path.exists():
        for formula_file in formula_path.glob("*.json"):
            formula_count += 1
            try:
                with open(formula_file, 'r', encoding='utf-8') as handle:
                    formula_def = json.load(handle)
                for key, value in formula_def.items():
                    if value == "" or value is None:
                        self.warnings.append(
                            f"formula/{formula_file.name}: '{key}' ist leer oder null"
                        )
            except Exception:
                pass  # syntax errors are reported by the JSON check

    if not misplaced_formulas:
        print_success(f"{formula_count} Formula-Definitionen korrekt platziert")
        return True

    print_error(f"{len(misplaced_formulas)} Formula-Platzierungsfehler:")
    for err in misplaced_formulas:
        print(f" {Colors.RED}•{Colors.END} {err}")
    self.errors.extend(misplaced_formulas)
    return False
|
||
|
||
def validate_i18n_completeness(self) -> bool:
    """Check that custom links of custom entities have i18n entries.

    Missing or incomplete i18n files are recorded as warnings only, so
    this check never blocks the rebuild and always returns True.
    """
    print_header("4. i18n-VOLLSTÄNDIGKEIT")

    if not self.entity_defs:
        print_warning("Keine entityDefs zum Prüfen")
        return True

    missing_i18n = []
    incomplete_i18n = []

    languages = ['de_DE', 'en_US']
    # Custom entities use a 'C' prefix. (This also covers 'CVmh...'
    # names; the former extra startswith('CVmh') test was redundant.)
    custom_entities = [name for name in self.entity_defs.keys()
                       if name.startswith('C')]

    for entity_name in custom_entities:
        entity_def = self.entity_defs[entity_name]
        links = entity_def.get('links', {})

        # Collect all hasMany/hasOne links that should carry a translation.
        custom_links = []
        for link_name, link_def in links.items():
            link_type = link_def.get('type')
            if link_type in ['hasMany', 'hasOne']:
                # Skip standard system links.
                if link_name not in ['createdBy', 'modifiedBy', 'assignedUser', 'teams']:
                    custom_links.append(link_name)

        if not custom_links:
            continue

        for lang in languages:
            i18n_file = self.i18n_path / lang / f"{entity_name}.json"

            if not i18n_file.exists():
                missing_i18n.append(f"{entity_name}: {lang} fehlt komplett")
                continue

            try:
                with open(i18n_file, 'r', encoding='utf-8') as f:
                    i18n_def = json.load(f)
                links_i18n = i18n_def.get('links', {})

                # Every custom link needs a translated label.
                for link_name in custom_links:
                    if link_name not in links_i18n:
                        incomplete_i18n.append(
                            f"{entity_name} ({lang}): Link '{link_name}' fehlt in i18n"
                        )
            except Exception:
                pass  # JSON errors already reported

    if missing_i18n:
        print_error(f"{len(missing_i18n)} komplett fehlende i18n-Dateien:")
        for err in missing_i18n[:10]:  # show at most 10
            print(f" {Colors.RED}•{Colors.END} {err}")
        if len(missing_i18n) > 10:
            print(f" {Colors.RED}...{Colors.END} und {len(missing_i18n) - 10} weitere")

    if incomplete_i18n:
        print_warning(f"{len(incomplete_i18n)} unvollständige i18n-Definitionen:")
        for err in incomplete_i18n[:10]:  # show at most 10
            print(f" {Colors.YELLOW}•{Colors.END} {err}")
        if len(incomplete_i18n) > 10:
            print(f" {Colors.YELLOW}...{Colors.END} und {len(incomplete_i18n) - 10} weitere")

    if not missing_i18n and not incomplete_i18n:
        print_success(f"i18n für {len(custom_entities)} Custom-Entities vollständig")

    # i18n problems are warnings only; never abort the run because of them.
    self.warnings.extend(missing_i18n + incomplete_i18n)
    return True
|
||
|
||
def validate_layout_structure(self) -> bool:
    """Check layout files for common structural errors.

    Critical problems (array instead of object in bottomPanelsDetail,
    deprecated 'false' cell placeholders in detail layouts) go to
    self.errors and make the check fail; everything else is a warning.
    """
    print_header("5. LAYOUT-STRUKTUR VALIDIERUNG")

    layouts_base = self.custom_path / "layouts"
    layout_errors = []
    layout_warnings = []
    # NOTE(review): only bottomPanelsDetail.json files are counted here;
    # the detail.json files checked below are not included in this number.
    checked_layouts = 0

    # 1. Check bottomPanelsDetail.json files (CRITICAL: must be an object, not an array!)
    if layouts_base.exists():
        for bottom_panel_file in layouts_base.rglob("bottomPanelsDetail.json"):
            try:
                with open(bottom_panel_file, 'r', encoding='utf-8') as f:
                    content = json.load(f)

                checked_layouts += 1
                # Layout files live in a per-entity directory.
                entity_name = bottom_panel_file.parent.name

                # CRITICAL CHECK: bottomPanelsDetail.json MUST be an object (not an array)!
                if isinstance(content, list):
                    layout_errors.append(
                        f"{entity_name}/bottomPanelsDetail.json: FEHLER - Ist Array statt Objekt! "
                        f"EspoCRM 7.x erfordert Objekt-Format mit Keys wie 'contacts', '_tabBreak_0', etc."
                    )
                elif isinstance(content, dict):
                    # Flag 'false' values outside the keys where they are legal.
                    for key, value in content.items():
                        if isinstance(value, dict):
                            for subkey, subvalue in value.items():
                                if subvalue is False and subkey not in ['disabled', 'sticked']:
                                    layout_warnings.append(
                                        f"{entity_name}/bottomPanelsDetail.json: {key}.{subkey} "
                                        f"sollte nicht 'false' sein"
                                    )
            except Exception:
                pass  # JSON errors already reported by validate_json_syntax

    # 2. Check detail.json files for deprecated 'false' placeholders
    if layouts_base.exists():
        for detail_file in layouts_base.rglob("detail.json"):
            try:
                with open(detail_file, 'r', encoding='utf-8') as f:
                    content = json.load(f)

                entity_name = detail_file.parent.name

                # A detail layout is expected to be an array of panel objects.
                if isinstance(content, list):
                    for panel_idx, panel in enumerate(content):
                        if not isinstance(panel, dict):
                            continue

                        rows = panel.get('rows', [])
                        for row_idx, row in enumerate(rows):
                            if not isinstance(row, list):
                                continue

                            for cell_idx, cell in enumerate(row):
                                # CRITICAL: 'false' as placeholder is deprecated in EspoCRM 7.x!
                                if cell is False:
                                    layout_errors.append(
                                        f"{entity_name}/detail.json: Panel {panel_idx}, Row {row_idx}, "
                                        f"Cell {cell_idx} verwendet 'false' als Platzhalter. "
                                        f"In EspoCRM 7.x muss '{{}}' (leeres Objekt) verwendet werden!"
                                    )
            except Exception:
                pass  # JSON errors already reported

    # Report the results
    if layout_errors:
        print_error(f"{len(layout_errors)} KRITISCHE Layout-Fehler gefunden:")
        for err in layout_errors:
            print(f" {Colors.RED}✗{Colors.END} {err}")
        self.errors.extend(layout_errors)
        return False

    if layout_warnings:
        print_warning(f"{len(layout_warnings)} Layout-Warnungen:")
        for warn in layout_warnings[:5]:
            print(f" {Colors.YELLOW}⚠{Colors.END} {warn}")
        if len(layout_warnings) > 5:
            print(f" {Colors.YELLOW}...{Colors.END} und {len(layout_warnings) - 5} weitere")
        self.warnings.extend(layout_warnings)

    if not layout_errors and not layout_warnings:
        print_success(f"{checked_layouts} Layout-Dateien geprüft, keine Fehler")
    elif not layout_errors:
        print_success(f"{checked_layouts} Layout-Dateien geprüft, keine kritischen Fehler")

    return True
|
||
|
||
def check_file_permissions(self) -> bool:
    """Check (and try to repair) ownership/permissions below custom/.

    Permission problems never fail the validation; this method always
    returns True and only prints its findings.
    """
    print_header("6. DATEIRECHTE-PRÜFUNG")

    try:
        # List everything that is not owned by www-data:www-data.
        result = subprocess.run(
            ['find', str(self.custom_path), '!', '-user', 'www-data', '-o', '!', '-group', 'www-data'],
            capture_output=True,
            text=True
        )

        offenders = [line for line in result.stdout.strip().split('\n') if line]

        if not offenders:
            print_success("Alle Dateirechte korrekt (www-data:www-data)")
            return True

        print_warning(f"{len(offenders)} Dateien mit falschen Rechten gefunden")
        print_info("Versuche automatische Korrektur...")

        # Ownership first, then file and directory modes.
        fix_commands = [
            ['sudo', 'chown', '-R', 'www-data:www-data', str(self.custom_path)],
            ['sudo', 'find', str(self.custom_path), '-type', 'f', '-exec', 'chmod', '664', '{}', ';'],
            ['sudo', 'find', str(self.custom_path), '-type', 'd', '-exec', 'chmod', '775', '{}', ';'],
        ]
        try:
            for command in fix_commands:
                subprocess.run(command, check=True, capture_output=True)
            print_success("Dateirechte korrigiert")
        except subprocess.CalledProcessError:
            print_warning("Konnte Dateirechte nicht automatisch korrigieren (sudo erforderlich)")

        return True
    except Exception as e:
        print_warning(f"Konnte Dateirechte nicht prüfen: {e}")
        return True
|
||
|
||
def validate_css_files(self) -> bool:
    """Validate custom CSS files, preferring csslint when available.

    Falls back to a simple brace-balance check when csslint is not
    installed. csslint 'Error' findings fail the check; its warnings
    only go to self.warnings.
    """
    print_header("7. CSS-VALIDIERUNG")

    css_files = []
    if self.client_custom_path.exists():
        css_files = list((self.client_custom_path / "css").rglob("*.css")) if (self.client_custom_path / "css").exists() else []

    if not css_files:
        print_info("Keine CSS-Dateien gefunden")
        return True

    # Check whether csslint is available on this system.
    try:
        subprocess.run(['csslint', '--version'], capture_output=True, timeout=5)
        use_csslint = True
    except (FileNotFoundError, subprocess.TimeoutExpired):
        use_csslint = False
        print_warning("csslint nicht gefunden, verwende Basis-Validierung")

    invalid_files = []
    for css_file in css_files:
        if use_csslint:
            # Use csslint for a thorough validation.
            try:
                result = subprocess.run(
                    ['csslint', '--format=compact', '--quiet', str(css_file)],
                    capture_output=True,
                    text=True,
                    timeout=10
                )

                if result.returncode != 0 and result.stdout.strip():
                    # Parse the csslint output.
                    errors = []
                    for line in result.stdout.strip().split('\n'):
                        if 'Error' in line or 'Warning' in line:
                            # Keep only the message, drop the file path prefix.
                            parts = line.split(': ', 2)
                            if len(parts) >= 3:
                                errors.append(parts[2])
                            else:
                                errors.append(line)

                    if errors:
                        # Only genuine errors are treated as critical.
                        if any('Error' in err for err in errors):
                            self.errors.append(f"CSS-Fehler in {css_file.relative_to(self.base_path)}")
                            invalid_files.append((str(css_file.relative_to(self.base_path)), errors))
                        else:
                            # Warnings only.
                            for err in errors:
                                self.warnings.append(f"{css_file.relative_to(self.base_path)}: {err}")

            except subprocess.TimeoutExpired:
                self.warnings.append(f"CSS-Validierung timeout für {css_file.relative_to(self.base_path)}")
            except Exception as e:
                self.warnings.append(f"Konnte {css_file.relative_to(self.base_path)} nicht validieren: {e}")
        else:
            # Fallback: basic validation (brace counting only).
            try:
                with open(css_file, 'r', encoding='utf-8') as f:
                    content = f.read()

                errors = []

                # Braces must be balanced.
                open_braces = content.count('{')
                close_braces = content.count('}')
                if open_braces != close_braces:
                    errors.append(f"Ungleiche Klammern: {open_braces} {{ vs {close_braces} }}")

                if errors:
                    self.errors.append(f"CSS-Fehler in {css_file.relative_to(self.base_path)}")
                    invalid_files.append((str(css_file.relative_to(self.base_path)), errors))

            except Exception as e:
                self.errors.append(f"Konnte {css_file.relative_to(self.base_path)} nicht lesen: {e}")
                invalid_files.append((str(css_file.relative_to(self.base_path)), [str(e)]))

    if invalid_files:
        print_error(f"{len(invalid_files)} CSS-Datei(en) mit Fehlern:")
        for filepath, errors in invalid_files:
            print(f" {Colors.RED}•{Colors.END} {filepath}")
            for err in errors[:5]:  # show at most 5 errors per file
                print(f" {Colors.RED}→{Colors.END} {err}")
            if len(errors) > 5:
                print(f" {Colors.RED}...{Colors.END} und {len(errors) - 5} weitere Fehler")
        return False
    else:
        print_success(f"Alle {len(css_files)} CSS-Dateien sind syntaktisch korrekt")
        return True
|
||
|
||
def validate_js_files(self) -> bool:
    """Validate custom JavaScript files, preferring jshint when available.

    Falls back to bracket-balance checks when jshint is not installed.
    Any reported problem fails the check.
    """
    print_header("8. JAVASCRIPT-VALIDIERUNG")

    js_files = []
    if self.client_custom_path.exists():
        src_path = self.client_custom_path / "src"
        js_files = list(src_path.rglob("*.js")) if src_path.exists() else []

    if not js_files:
        print_info("Keine JavaScript-Dateien gefunden")
        return True

    # Check whether jshint is available on this system.
    try:
        subprocess.run(['jshint', '--version'], capture_output=True, timeout=5)
        use_jshint = True
    except (FileNotFoundError, subprocess.TimeoutExpired):
        use_jshint = False
        print_warning("jshint nicht gefunden, verwende Basis-Validierung")

    invalid_files = []
    for js_file in js_files:
        if use_jshint:
            # Use jshint for a thorough validation.
            try:
                result = subprocess.run(
                    ['jshint', '--config=/dev/null', str(js_file)],
                    capture_output=True,
                    text=True,
                    timeout=10
                )

                if result.returncode != 0 and result.stdout.strip():
                    errors = []
                    for line in result.stdout.strip().split('\n'):
                        if line and not line.startswith('Lint'):
                            # Keep the jshint finding verbatim.
                            errors.append(line)

                    if errors:
                        self.errors.append(f"JavaScript-Fehler in {js_file.relative_to(self.base_path)}")
                        invalid_files.append((str(js_file.relative_to(self.base_path)), errors))

            except subprocess.TimeoutExpired:
                self.warnings.append(f"JavaScript-Validierung timeout für {js_file.relative_to(self.base_path)}")
            except Exception as e:
                self.warnings.append(f"Konnte {js_file.relative_to(self.base_path)} nicht validieren: {e}")
        else:
            # Fallback: basic validation (bracket counting only).
            try:
                with open(js_file, 'r', encoding='utf-8') as f:
                    content = f.read()

                errors = []

                # Parentheses must be balanced.
                open_paren = content.count('(')
                close_paren = content.count(')')
                if open_paren != close_paren:
                    errors.append(f"Ungleiche runde Klammern: {open_paren} ( vs {close_paren} )")

                open_braces = content.count('{')
                close_braces = content.count('}')
                if open_braces != close_braces:
                    errors.append(f"Ungleiche geschweifte Klammern: {open_braces} {{ vs {close_braces} }}")

                open_brackets = content.count('[')
                close_brackets = content.count(']')
                if open_brackets != close_brackets:
                    errors.append(f"Ungleiche eckige Klammern: {open_brackets} [ vs {close_brackets} ]")

                if errors:
                    self.errors.append(f"JavaScript-Fehler in {js_file.relative_to(self.base_path)}")
                    invalid_files.append((str(js_file.relative_to(self.base_path)), errors))

            except Exception as e:
                self.errors.append(f"Konnte {js_file.relative_to(self.base_path)} nicht lesen: {e}")
                invalid_files.append((str(js_file.relative_to(self.base_path)), [str(e)]))

    if invalid_files:
        print_error(f"{len(invalid_files)} JavaScript-Datei(en) mit Fehlern:")
        for filepath, errors in invalid_files:
            print(f" {Colors.RED}•{Colors.END} {filepath}")
            for err in errors[:5]:  # show at most 5 errors per file
                print(f" {Colors.RED}→{Colors.END} {err}")
            if len(errors) > 5:
                print(f" {Colors.RED}...{Colors.END} und {len(errors) - 5} weitere Fehler")
        return False
    else:
        print_success(f"Alle {len(js_files)} JavaScript-Dateien sind syntaktisch korrekt")
        return True
|
||
|
||
def validate_php_files(self) -> bool:
    """Validate custom PHP files via ``php -l`` (syntax lint).

    Skipped entirely (returning True) when the PHP CLI is unavailable.
    """
    print_header("9. PHP-VALIDIERUNG")

    php_files = []
    custom_espo_path = self.base_path / "custom" / "Espo"
    if custom_espo_path.exists():
        php_files = list(custom_espo_path.rglob("*.php"))

    if not php_files:
        print_info("Keine PHP-Dateien gefunden")
        return True

    # Check whether the php CLI is available on this system.
    try:
        subprocess.run(['php', '--version'], capture_output=True, timeout=5)
    except (FileNotFoundError, subprocess.TimeoutExpired):
        print_warning("PHP-CLI nicht gefunden, überspringe PHP-Validierung")
        return True

    invalid_files = []
    for php_file in php_files:
        try:
            # `php -l` performs a pure syntax check.
            result = subprocess.run(
                ['php', '-l', str(php_file)],
                capture_output=True,
                text=True,
                timeout=5
            )

            if result.returncode != 0:
                error_lines = []
                # php -l writes to stderr or stdout depending on version.
                output = result.stderr.strip() or result.stdout.strip()

                for line in output.split('\n'):
                    # Keep only the relevant error lines.
                    if line and not line.startswith('No syntax errors'):
                        # Strip the file path from the message for readability.
                        clean_line = re.sub(r'^.*?in\s+.*?on\s+', '', line)
                        if clean_line != line:  # substitution happened
                            error_lines.append(clean_line)
                        else:
                            error_lines.append(line)

                if error_lines:
                    self.errors.append(f"PHP-Syntax-Fehler in {php_file.relative_to(self.base_path)}")
                    invalid_files.append((str(php_file.relative_to(self.base_path)), error_lines))

        except subprocess.TimeoutExpired:
            self.warnings.append(f"PHP-Validierung timeout für {php_file.relative_to(self.base_path)}")
        except Exception as e:
            self.warnings.append(f"Konnte {php_file.relative_to(self.base_path)} nicht validieren: {e}")

    if invalid_files:
        print_error(f"{len(invalid_files)} PHP-Datei(en) mit Syntax-Fehlern:")
        for filepath, errors in invalid_files:
            print(f" {Colors.RED}•{Colors.END} {filepath}")
            for err in errors[:3]:  # show at most 3 errors per file
                print(f" {Colors.RED}→{Colors.END} {err}")
            if len(errors) > 3:
                print(f" {Colors.RED}...{Colors.END} und {len(errors) - 3} weitere Fehler")
        return False
    else:
        print_success(f"Alle {len(php_files)} PHP-Dateien sind syntaktisch korrekt")
        return True
|
||
|
||
def run_rebuild(self) -> bool:
    """Run the EspoCRM rebuild (clear-cache followed by rebuild).

    Prefers executing inside a detected Docker container when the base
    path looks like a Docker volume; otherwise falls back to running
    ``php command.php`` locally. On success, the E2E tests are
    triggered. Returns True when the rebuild succeeded.
    """
    print_header("10. ESPOCRM REBUILD")

    # Heuristic: are we operating directly on a Docker volume?
    is_docker_volume = '/docker/volumes/' in str(self.base_path)

    if is_docker_volume:
        # Try to locate the running EspoCRM container.
        try:
            result = subprocess.run(
                ['docker', 'ps', '--format', '{{.Names}}'],
                capture_output=True,
                text=True,
                timeout=5
            )

            containers = result.stdout.strip().split('\n')
            espo_container = None

            # Look for the EspoCRM container (usually "espocrm" without suffix).
            for container in containers:
                if container.lower() in ['espocrm', 'espocrm-app']:
                    espo_container = container
                    break

            if not espo_container:
                # Fallback: first container with "espo" in its name,
                # excluding the websocket/daemon/db side containers.
                for container in containers:
                    if 'espo' in container.lower() and 'websocket' not in container.lower() and 'daemon' not in container.lower() and 'db' not in container.lower():
                        espo_container = container
                        break

            if espo_container:
                print_info(f"Docker-Container erkannt: {espo_container}")

                # Step 1: clear the cache.
                print_info("Lösche Cache...")
                cache_result = subprocess.run(
                    ['docker', 'exec', espo_container, 'php', 'command.php', 'clear-cache'],
                    capture_output=True,
                    text=True,
                    timeout=30
                )

                if cache_result.returncode == 0:
                    print_success("Cache erfolgreich gelöscht")
                else:
                    print_warning("Cache-Löschung fehlgeschlagen, fahre trotzdem fort...")

                # Step 2: rebuild.
                print_info("Starte Rebuild (kann 10-30 Sekunden dauern)...")
                result = subprocess.run(
                    ['docker', 'exec', espo_container, 'php', 'command.php', 'rebuild'],
                    capture_output=True,
                    text=True,
                    timeout=60
                )

                if result.returncode == 0:
                    print_success("Rebuild erfolgreich abgeschlossen")
                    if result.stdout:
                        print(f" {result.stdout.strip()}")

                    # Run the E2E tests after a successful rebuild.
                    self.run_e2e_tests()

                    return True
                else:
                    print_error("Rebuild fehlgeschlagen:")
                    if result.stderr:
                        print(f"\n{result.stderr}")
                    return False
            else:
                # No container found; fall through to the local rebuild.
                print_warning("Kein EspoCRM Docker-Container gefunden")
                print_info("Versuche lokalen Rebuild...")
        except Exception as e:
            print_warning(f"Docker-Erkennung fehlgeschlagen: {e}")
            print_info("Versuche lokalen Rebuild...")

    # Local rebuild (fallback path).
    rebuild_script = self.base_path / "rebuild.php"
    if not rebuild_script.exists():
        print_error(f"rebuild.php nicht gefunden in {self.base_path}")
        return False

    try:
        # Step 1: clear the cache.
        print_info("Lösche Cache...")
        cache_result = subprocess.run(
            ['php', 'command.php', 'clear-cache'],
            cwd=str(self.base_path),
            capture_output=True,
            text=True,
            timeout=30
        )

        if cache_result.returncode == 0:
            print_success("Cache erfolgreich gelöscht")
        else:
            print_warning("Cache-Löschung fehlgeschlagen, fahre trotzdem fort...")

        # Step 2: rebuild.
        print_info("Starte lokalen Rebuild (kann 10-30 Sekunden dauern)...")
        result = subprocess.run(
            ['php', 'command.php', 'rebuild'],
            cwd=str(self.base_path),
            capture_output=True,
            text=True,
            timeout=60
        )

        if result.returncode == 0:
            print_success("Rebuild erfolgreich abgeschlossen")

            # Run the E2E tests after a successful rebuild.
            self.run_e2e_tests()

            return True
        else:
            print_error("Rebuild fehlgeschlagen:")
            if result.stderr:
                print(f"\n{result.stderr}")
            return False
    except subprocess.TimeoutExpired:
        print_error("Rebuild-Timeout (>60 Sekunden)")
        return False
    except Exception as e:
        print_error(f"Rebuild-Fehler: {e}")
        return False
|
||
def run_e2e_tests(self) -> bool:
    """Run end-to-end tests after a successful rebuild.

    E2E problems are reported but never treated as fatal, so this
    method always returns True.
    """

    # Skip when requested via --skip-e2e.
    if self.skip_e2e_tests:
        print_info("\nE2E-Tests wurden übersprungen (--skip-e2e)")
        return True

    print_header("11. END-TO-END TESTS")

    # The test script is optional; skip quietly when it is absent.
    e2e_script = self.base_path / "custom" / "scripts" / "e2e_tests.py"
    if not e2e_script.exists():
        print_warning("E2E-Test Skript nicht gefunden, überspringe Tests")
        return True

    print_info("Starte automatisierte End-to-End Tests...")
    print_info("Dies validiert CRUD-Operationen für Custom Entities\n")

    try:
        result = subprocess.run(
            ['python3', 'e2e_tests.py'],
            cwd=str(e2e_script.parent),
            capture_output=True,
            text=True,
            timeout=120
        )

        # Forward the test runner's output.
        if result.stdout:
            print(result.stdout)

        if result.returncode == 0:
            print_success("E2E-Tests erfolgreich abgeschlossen")
            return True
        else:
            print_warning("E2E-Tests haben Fehler gemeldet")
            if result.stderr:
                print(f"\n{Colors.YELLOW}{result.stderr}{Colors.END}")
            # Message grammar fixed (was "Dies ist keine kritische Fehler").
            print_info("Dies ist kein kritischer Fehler - der Rebuild war erfolgreich")
            return True  # never treated as fatal

    except subprocess.TimeoutExpired:
        print_warning("E2E-Tests Timeout (>120 Sekunden)")
        return True  # never treated as fatal
    except Exception as e:
        print_warning(f"E2E-Tests konnten nicht ausgeführt werden: {e}")
        return True  # never treated as fatal
|
||
def print_summary(self):
    """Print a final summary of all collected errors and warnings."""
    print_header("ZUSAMMENFASSUNG")

    if self.errors:
        print(f"\n{Colors.RED}{Colors.BOLD}FEHLER: {len(self.errors)}{Colors.END}")
        for err in self.errors:
            print(f" {Colors.RED}✗{Colors.END} {err}")

    if self.warnings:
        print(f"\n{Colors.YELLOW}{Colors.BOLD}WARNUNGEN: {len(self.warnings)}{Colors.END}")
        # Cap the listing at ten warnings to keep the summary short.
        for warn in self.warnings[:10]:
            print(f" {Colors.YELLOW}⚠{Colors.END} {warn}")
        hidden = len(self.warnings) - 10
        if hidden > 0:
            print(f" {Colors.YELLOW}...{Colors.END} und {hidden} weitere Warnungen")

    if not self.errors and not self.warnings:
        print(f"\n{Colors.GREEN}{Colors.BOLD}✓ ALLE PRÜFUNGEN BESTANDEN{Colors.END}")

    print()
|
||
|
||
def validate_all(self) -> bool:
    """Run all validation steps in order.

    Returns True only when every critical check passed. The i18n,
    layout and permission checks contribute warnings but never fail
    the run.
    """
    all_valid = True

    # 1. JSON syntax (critical): abort immediately, because the later
    # checks would otherwise operate on unparseable data.
    # (A dead `all_valid = False` before this early return was removed.)
    if not self.validate_json_syntax():
        print_error("\nAbbruch: JSON-Syntax-Fehler müssen behoben werden!\n")
        return False

    # Load entityDefs for the subsequent checks.
    self.load_entity_defs()

    # 2. Relationships (critical)
    if not self.validate_relationships():
        all_valid = False

    # 3. Formula placement (critical)
    if not self.validate_formula_placement():
        all_valid = False

    # 4. i18n completeness (warnings only)
    self.validate_i18n_completeness()

    # 5. Layout structure (warnings only)
    self.validate_layout_structure()

    # 6. File permissions (not critical for the rebuild)
    self.check_file_permissions()

    # 7. CSS validation (critical)
    if not self.validate_css_files():
        all_valid = False

    # 8. JavaScript validation (critical)
    if not self.validate_js_files():
        all_valid = False

    # 9. PHP validation (critical)
    if not self.validate_php_files():
        all_valid = False

    return all_valid
|
||
|
||
def main():
    """Command-line entry point: parse flags, validate, then rebuild."""
    import argparse

    parser = argparse.ArgumentParser(
        description='EspoCRM Custom Entity Validator & Rebuild Tool'
    )
    flags = [
        ('--dry-run', 'Nur Validierungen durchführen, kein Rebuild'),
        ('--no-rebuild', 'Synonym für --dry-run'),
        ('--skip-e2e', 'Überspringe E2E-Tests nach Rebuild'),
    ]
    for flag, help_text in flags:
        parser.add_argument(flag, action='store_true', help=help_text)
    args = parser.parse_args()

    dry_run = args.dry_run or args.no_rebuild
    skip_e2e = args.skip_e2e

    # The script lives in custom/scripts/, so the EspoCRM root is
    # three levels up from this file.
    script_dir = Path(__file__).parent.parent.parent

    if not (script_dir / "rebuild.php").exists():
        print_error("Fehler: Nicht im EspoCRM-Root-Verzeichnis!")
        print_info(f"Aktueller Pfad: {script_dir}")
        sys.exit(1)

    print(f"{Colors.BOLD}EspoCRM Custom Entity Validator & Rebuild Tool{Colors.END}")
    print(f"Arbeitsverzeichnis: {script_dir}")
    if dry_run:
        print(f"{Colors.YELLOW}Modus: DRY-RUN (kein Rebuild){Colors.END}")
    if skip_e2e:
        print(f"{Colors.YELLOW}E2E-Tests werden übersprungen{Colors.END}")
    print()

    validator = EntityValidator(str(script_dir))
    validator.skip_e2e_tests = skip_e2e

    # Run every validation step, then print the aggregated results.
    all_valid = validator.validate_all()
    validator.print_summary()

    # Critical findings veto the rebuild.
    if not all_valid:
        print_error("REBUILD ABGEBROCHEN: Kritische Fehler müssen behoben werden!")
        sys.exit(1)

    if dry_run:
        print_info("Dry-Run Modus: Rebuild übersprungen")
        print(f"\n{Colors.GREEN}{Colors.BOLD}✓ VALIDIERUNGEN ABGESCHLOSSEN{Colors.END}\n")
        sys.exit(0)

    if validator.warnings:
        print_warning(
            f"Es gibt {len(validator.warnings)} Warnungen, aber keine kritischen Fehler."
        )
        print_info("Rebuild wird trotzdem durchgeführt...\n")

    # Perform the rebuild and exit with a matching status code.
    if validator.run_rebuild():
        print(f"\n{Colors.GREEN}{Colors.BOLD}✓ ERFOLGREICH ABGESCHLOSSEN{Colors.END}\n")
        sys.exit(0)
    else:
        print(f"\n{Colors.RED}{Colors.BOLD}✗ REBUILD FEHLGESCHLAGEN{Colors.END}\n")
        sys.exit(1)
|
||
|
||
# Script entry point: run the validator CLI when executed directly.
if __name__ == "__main__":
    main()
|