Size: 2821 bytes.


  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
#!/usr/bin/env python3
# cs/devtools/parse_logs.py
import argparse
import json
import os
import re
import sys
from typing import List, Dict

# Header line of a record: "[ISO-8601 timestamp] [LEVEL] [file:line] message".
# Note: "$" matches before the trailing "\n", so the "msg" group never
# contains a newline.
REC_RE = re.compile(
    r"^\[(?P<timestamp>\d{4}-\d{2}-\d{2}T[\d:.]+Z)\]\s"
    r"\[(?P<level>[A-Z]+)\]\s"
    r"\[(?P<file>.+?):(?P<line>\d+)\]\s?"
    r"(?P<msg>.*)$"
)


def parse_log(lines: List[str]) -> List[Dict]:
    """Parse raw log lines into structured records.

    Each record begins with a line matching REC_RE; any following lines
    that do not match are continuation lines appended to that record's
    message. Lines before the first header line are discarded.

    Args:
        lines: raw lines, typically from file.readlines() (newlines kept).

    Returns:
        A list of dicts with keys: timestamp, level, file, line (int),
        message (str, trailing newlines stripped).
    """
    records: List[Dict] = []
    cur = None
    msg_lines: List[str] = []

    def flush():
        # Finalize the in-progress record, joining its message lines.
        nonlocal cur, msg_lines
        if cur is not None:
            cur["message"] = "".join(msg_lines).rstrip("\n")
            records.append(cur)
            cur = None
            msg_lines = []

    for raw in lines:
        m = REC_RE.match(raw)
        if m:
            flush()
            cur = {
                "timestamp": m.group("timestamp"),
                "level": m.group("level"),
                "file": m.group("file"),
                "line": int(m.group("line")),
                "message": "",  # filled on flush
            }
            # BUG FIX: the msg group never carries the trailing newline
            # (REC_RE's "$" matches before it), so a newline must ALWAYS
            # be appended here. The previous inverted check added it only
            # when raw did NOT end in "\n", gluing the header message to
            # the first continuation line ("hello"+"world" -> "helloworld").
            # For single-line records the extra "\n" is removed by flush().
            msg_lines = [m.group("msg") + "\n"]
        else:
            if cur is None:
                # Preamble before the first header line: ignore.
                continue
            msg_lines.append(raw if raw.endswith("\n") else raw + "\n")

    flush()
    return records


def ts_to_filename(ts: str) -> str:
    # Make a safe, stable filename derived from the timestamp
    # Replace ':' to keep cross-platform friendliness; allow digits/letters/TZ._-
    safe = ts.replace(":", "-")
    safe = re.sub(r"[^0-9A-Za-zTZ._-]+", "_", safe)
    return f"{safe}.json"


def main():
    """Parse a log file and write each record to its own JSON file.

    The output filename is derived from the record's timestamp, so a
    record whose file already exists is skipped with a warning. NOTE:
    this also means records sharing one timestamp collide — only the
    first is written.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--input", "-i", default="app.log", help="input log file path")
    ap.add_argument(
        "--outdir",
        "-d",
        default="app.log.d",
        help="directory to write per-entry JSON files",
    )
    args = ap.parse_args()

    os.makedirs(args.outdir, exist_ok=True)

    # errors="replace" keeps parsing alive on malformed bytes in the log.
    with open(args.input, "r", encoding="utf-8", errors="replace") as f:
        lines = f.readlines()

    recs = parse_log(lines)

    already_exists = 0
    written_count = 0
    for rec in recs:
        ts = rec["timestamp"]
        path = os.path.join(args.outdir, ts_to_filename(ts))
        if os.path.exists(path):
            # FIX: this warning was dead code behind "if False:"; restore
            # it so silently-dropped duplicates are visible on stderr.
            print(
                f"[parse_logs] duplicate timestamp: {ts} -> {path} (skipping)",
                file=sys.stderr,
            )
            already_exists += 1
            continue
        with open(path, "w", encoding="utf-8") as out:
            json.dump(rec, out, ensure_ascii=False, indent=2)
        written_count += 1
    # FIX: the old summary claimed "Checked N files" — it checked N parsed
    # records, not files — and used the wrong tense ("writing/skipping").
    print(
        f"[parse_logs] Checked {len(recs)} records, wrote {written_count} files, "
        f"skipped {already_exists} duplicates."
    )


if __name__ == "__main__":
    main()
v0 (commit) © 2025 @p13i.io | Load balancer proxied to: cs-code-viewer-1:8080 in 5ms.