mirror of
https://github.com/A6-9V/MQL5-Google-Onedrive.git
synced 2026-04-11 08:10:56 +00:00
This commit optimizes the `scripts/ci_validate_repo.py` script to improve its performance and memory efficiency during CI runs. Key changes: - Combined the size check and NUL byte check into a single-pass `validate_files` function. - Implemented an early exit for the size check using `stat().st_size`, preventing unnecessary processing of large accidental artifacts. - Replaced `read_bytes()` with chunked binary reading (64KB chunks) for NUL byte detection, significantly reducing the memory footprint for larger source files. - Reduced the number of iterations over the file list and the number of system calls. These optimizations ensure that the repository validation remains fast and resource-efficient, even as the codebase grows.
72 lines
2.1 KiB
Python
Executable file
72 lines
2.1 KiB
Python
Executable file
#!/usr/bin/env python3
|
|
"""
|
|
Lightweight repository sanity checks suitable for GitHub Actions.
|
|
This is intentionally NOT a compiler for MQL5 (MetaEditor isn't available on CI).
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
|
|
# Repository root: two levels up from this script (scripts/ -> repo root).
REPO_ROOT = Path(__file__).resolve().parents[1]
# Directory containing the MQL5 sources that the checks below validate.
MQL5_DIR = REPO_ROOT / "mt5" / "MQL5"
|
|
|
|
|
|
def fail(msg: str) -> None:
    """Report *msg* on stderr as an error and abort with exit status 1."""
    banner = f"ERROR: {msg}"
    print(banner, file=sys.stderr)
    raise SystemExit(1)
|
|
|
|
|
|
def iter_source_files() -> list[Path]:
    """Return every .mq5/.mqh file under MQL5_DIR, sorted.

    Aborts (via fail) when the MQL5 directory is missing or contains no
    source files at all — an empty result would make the CI check vacuous.
    """
    if not MQL5_DIR.exists():
        fail(f"Missing directory: {MQL5_DIR}")
    wanted_suffixes = {".mq5", ".mqh"}
    found = [
        candidate
        for candidate in MQL5_DIR.rglob("*")
        if candidate.is_file() and candidate.suffix.lower() in wanted_suffixes
    ]
    if not found:
        fail(f"No .mq5/.mqh files found under {MQL5_DIR}")
    return sorted(found)
|
|
|
|
|
|
def validate_files(
    files: list[Path],
    *,
    max_bytes: int = 5_000_000,
    chunk_size: int = 65536,
) -> None:
    """Validate source files in a single pass: size cap and NUL-byte scan.

    For each file the size is checked first via stat() so oversized
    accidental artifacts are rejected without reading them; the content is
    then scanned for NUL bytes in fixed-size chunks, keeping the memory
    footprint flat regardless of file size.

    Args:
        files: Paths to check (expected to live under REPO_ROOT).
        max_bytes: Maximum allowed file size; larger files abort the run.
            Defaults to the previous hard-coded 5 MB limit.
        chunk_size: Read granularity for the NUL scan, in bytes.
            Defaults to the previous hard-coded 64 KiB.

    Aborts via fail() (SystemExit) on the first violation found.
    """
    for p in files:
        # Early exit on size before reading any content into memory.
        sz = p.stat().st_size
        if sz > max_bytes:
            fail(f"Unexpectedly large source file (>5MB): {p.relative_to(REPO_ROOT)} ({sz} bytes)")

        # Chunked binary scan: NUL bytes indicate a binary/corrupted file.
        with open(p, "rb") as f:
            while chunk := f.read(chunk_size):
                if b"\x00" in chunk:
                    fail(f"NUL byte found in {p.relative_to(REPO_ROOT)}")
|
|
|
|
|
|
def main() -> int:
    """Run all repository sanity checks; return 0 on success."""
    files = iter_source_files()

    # Size + NUL-byte checks happen in one pass over the file list.
    validate_files(files)

    print("OK: found source files:")
    for source in files:
        print(f"- {source.relative_to(REPO_ROOT)}")
    return 0
|
|
|
|
|
|
if __name__ == "__main__":
    # sys.exit raises SystemExit with main()'s return code, same as before.
    sys.exit(main())
|