# MQL5-Google-Onedrive/scripts/ci_validate_repo.py
#!/usr/bin/env python3
"""
Lightweight repository sanity checks suitable for GitHub Actions.
This is intentionally NOT a compiler for MQL5 (MetaEditor isn't available on CI).
"""
from __future__ import annotations
import os
import re
import sys
from pathlib import Path

# Repository root: this file lives in <root>/scripts/, so one parent up.
REPO_ROOT = Path(__file__).resolve().parents[1]
# Directory that must contain the .mq5/.mqh sources being validated.
MQL5_DIR = REPO_ROOT / "mt5" / "MQL5"
def fail(msg: str) -> None:
    """Report *msg* on stderr and abort the process with exit status 1."""
    sys.stderr.write(f"ERROR: {msg}\n")
    raise SystemExit(1)
def validate_and_collect_files() -> list[Path]:
    """
    Walk the MQL5 source tree, reject oversized or binary-looking files,
    and return the surviving .mq5/.mqh paths in sorted order.

    Aborts via fail() on the first offending file, on a missing source
    directory, or when no source files are found at all.
    """
    if not MQL5_DIR.exists():
        fail(f"Missing directory: {MQL5_DIR}")

    collected: list[Path] = []
    # os.walk over the string path keeps traversal cheap on large trees.
    for dirpath, _dirnames, names in os.walk(str(MQL5_DIR)):
        for name in names:
            if not name.lower().endswith((".mq5", ".mqh")):
                continue
            full = os.path.join(dirpath, name)

            # Size gate first: never open a file we are going to reject anyway.
            try:
                size = os.path.getsize(full)
            except OSError:
                continue
            if size > 5_000_000:
                rel = Path(full).relative_to(REPO_ROOT)
                fail(f"Unexpectedly large source file (>5MB): {rel} ({size} bytes)")

            # A NUL byte marks the file as binary, not source. Scan in 64 KiB
            # chunks so even large files never sit fully in memory; a single
            # byte cannot straddle a chunk boundary, so nothing is missed.
            try:
                with open(full, "rb") as handle:
                    while block := handle.read(65536):
                        if b"\x00" in block:
                            rel = Path(full).relative_to(REPO_ROOT)
                            fail(f"NUL byte found in {rel}")
            except Exception as exc:
                rel = Path(full).relative_to(REPO_ROOT)
                fail(f"Failed to read file {rel}: {exc}")

            collected.append(Path(full))

    if not collected:
        fail(f"No .mq5/.mqh files found under {MQL5_DIR}")
    return sorted(collected)
def scan_for_secrets() -> None:
    """
    Best-effort sweep of tracked text files for credential-shaped strings.

    Aborts via fail() with a summary of findings when anything suspicious
    turns up; returns silently otherwise.
    """
    # One combined regex is tried on every line; the per-kind patterns below
    # are consulted only for the rare line that matches, keeping the common
    # "nothing here" case cheap.
    quick_rx = re.compile(
        r"\b(\d{8,}:[A-Za-z0-9_-]{20,}|"
        r"github_pat_[a-z0-9_]{20,}|"
        r"ghp_[a-z0-9]{30,}|"
        r"ghs_[a-z0-9]{30,}|"
        r"AKIA[0-9A-Z]{16}|"
        r"AIza[0-9A-Za-z\-_]{30,})\b",
        re.IGNORECASE,
    )
    # (label, pattern) pairs used to name the kind of secret once the quick
    # scan has flagged a line.
    named_rxs = [
        ("telegram_bot_token", re.compile(r"\b\d{8,}:[A-Za-z0-9_-]{20,}\b")),
        ("github_pat", re.compile(r"\bgithub_pat_[a-z0-9_]{20,}\b", re.IGNORECASE)),
        ("github_classic_pat", re.compile(r"\bghp_[a-z0-9]{30,}\b", re.IGNORECASE)),
        ("github_actions_token", re.compile(r"\bghs_[a-z0-9]{30,}\b", re.IGNORECASE)),
        ("aws_access_key_id", re.compile(r"\bAKIA[0-9A-Z]{16}\b")),
        ("gcp_api_key", re.compile(r"\bAIza[0-9A-Za-z\-_]{30,}\b")),
    ]

    text_suffixes = {
        ".md", ".txt", ".json", ".yml", ".yaml", ".toml", ".ini", ".cfg",
        ".py", ".ps1", ".sh", ".bat",
        ".mq5", ".mqh",
        ".html", ".js", ".css",
    }
    special_names = {"Dockerfile", "docker-compose.yml", "docker-compose.dev.yml"}
    skip_dirs = {
        ".git", "dist", "logs", "data", "__pycache__", "venv", "env", ".venv", "node_modules",
    }

    hits: list[tuple[str, Path, int]] = []
    for dirpath, subdirs, names in os.walk(str(REPO_ROOT)):
        # Prune excluded directories in place so os.walk never descends.
        subdirs[:] = [d for d in subdirs if d not in skip_dirs]
        for name in names:
            suffix = os.path.splitext(name)[1].lower()
            if name not in special_names and suffix not in text_suffixes:
                continue
            full = os.path.join(dirpath, name)
            try:
                # Anything over 2 MB is not a plausible config/source file.
                if os.path.getsize(full) > 2_000_000:
                    continue
            except OSError:
                continue
            try:
                with open(full, "r", encoding="utf-8", errors="ignore") as handle:
                    text = handle.read()
            except Exception:
                continue
            for line_no, line in enumerate(text.splitlines(), start=1):
                if not quick_rx.search(line):
                    continue
                for label, rx in named_rxs:
                    if rx.search(line):
                        hits.append((label, Path(full), line_no))
                        break

    if hits:
        report = ["Potential secret(s) detected in tracked files:"]
        for label, path, line_no in hits[:25]:
            report.append(f"- {label}: {path.relative_to(REPO_ROOT)}:{line_no}")
        if len(hits) > 25:
            report.append(f"... and {len(hits) - 25} more")
        report.append("Remove the credential from the repository and rotate/revoke it.")
        fail("\n".join(report))
def main() -> int:
    """Run all repo checks, list the validated sources, and return 0."""
    sources = validate_and_collect_files()
    scan_for_secrets()
    print("OK: found source files:")
    for source in sources:
        print(f"- {source.relative_to(REPO_ROOT)}")
    return 0
if __name__ == "__main__":
    # sys.exit propagates main()'s return value as the process exit status.
    sys.exit(main())