#!/usr/bin/env python3
"""
Lightweight repository sanity checks suitable for GitHub Actions.
This is intentionally NOT a compiler for MQL5 (MetaEditor isn't available on CI).
"""
from __future__ import annotations
import os
import sys
import time
from pathlib import Path
# Repository root: two levels above this script (scripts/ -> repo root).
REPO_ROOT = Path(__file__).resolve().parents[1]
# Directory tree holding the MQL5 sources that the checks below scan.
MQL5_DIR = REPO_ROOT / "mt5" / "MQL5"
def fail(msg: str) -> None:
    """Report *msg* on stderr and abort the process with exit status 1."""
    sys.stderr.write(f"ERROR: {msg}\n")
    raise SystemExit(1)
def iter_source_files() -> list[tuple[Path, os.stat_result]]:
    """Collect every .mq5/.mqh file under MQL5_DIR with its stat result.

    Recurses with os.scandir rather than Path.rglob: DirEntry caches type
    and stat information from the directory scan itself (notably on
    Windows), avoiding an extra system call per file.

    Returns:
        List of (path, stat_result) tuples, sorted by path.

    Raises:
        SystemExit: via fail() when MQL5_DIR is missing or no sources exist.
    """
    if not MQL5_DIR.exists():
        fail(f"Missing directory: {MQL5_DIR}")
    files: list[tuple[Path, os.stat_result]] = []

    def walk_dir(path: str) -> None:
        try:
            with os.scandir(path) as it:
                for entry in it:
                    if entry.is_file():
                        if entry.name.lower().endswith((".mq5", ".mqh")):
                            files.append((Path(entry.path), entry.stat()))
                    # follow_symlinks=False: the original followed directory
                    # symlinks, so a symlink cycle inside the tree caused
                    # unbounded recursion and hung the CI job.
                    elif entry.is_dir(follow_symlinks=False):
                        walk_dir(entry.path)
        except PermissionError:
            # Unreadable directories are skipped (best-effort scan), not fatal.
            pass

    walk_dir(str(MQL5_DIR))
    if not files:
        fail(f"No .mq5/.mqh files found under {MQL5_DIR}")
    return sorted(files, key=lambda item: item[0])
def validate_files(files: list[tuple[Path, os.stat_result]]) -> None:
    """Run sanity checks on each source file; aborts via fail() on a problem.

    Per file:
      * the size (from the stat captured during discovery, so no extra
        filesystem call is made here) must not exceed 5 MB;
      * the content must contain no NUL bytes — read in 64 KiB chunks to
        keep memory flat; a NUL is a single byte, so chunking cannot miss it.
    """
    size_limit = 5_000_000
    chunk_size = 65536
    for path, st in files:
        if st.st_size > size_limit:
            fail(f"Unexpectedly large source file (>5MB): {path.relative_to(REPO_ROOT)} ({st.st_size} bytes)")
        with path.open("rb") as handle:
            chunk = handle.read(chunk_size)
            while chunk:
                if b"\x00" in chunk:
                    fail(f"NUL byte found in {path.relative_to(REPO_ROOT)}")
                chunk = handle.read(chunk_size)
def main() -> int:
    """Entry point: discover sources, validate them, print a timed summary.

    Returns 0 on success; any failed check exits earlier through fail(),
    which raises SystemExit(1).
    """
    started = time.perf_counter()
    sources = iter_source_files()
    validate_files(sources)
    elapsed = time.perf_counter() - started
    names = [str(path.relative_to(REPO_ROOT)) for path, _ in sources]
    print(f"OK: found {len(names)} source files in {elapsed:.4f}s:")
    for name in names:
        print(f"- {name}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())