File size: 7,595 Bytes
f631cce
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
# memory_usage_reporter.py
"""
Background memory usage reporter.

- Logs process RSS, VMS, GC counts, and optional tracemalloc stats

- Writes to logs/memory.log via a dedicated "memory" logger; propagation to
  the root logger is disabled, so records stay out of run.log and the console

- Designed to be lightweight and safe in GUI apps

"""
import os
import sys
import time
import threading
import logging
import gc
from logging.handlers import RotatingFileHandler

try:
    import psutil
except Exception:
    psutil = None

# Module-level singletons shared by start/stop and the background worker.
_GLOBAL_THREAD = None  # the background reporter thread, or None when not running
_GLOBAL_STOP = threading.Event()  # set to request the worker loop to exit


def _ensure_logs_dir() -> str:
    # Prefer explicit override from main app
    try:
        env_dir = os.environ.get("GLOSSARION_LOG_DIR")
        if env_dir:
            dir_path = os.path.expanduser(env_dir)
            os.makedirs(dir_path, exist_ok=True)
            return dir_path
    except Exception:
        pass

    def _can_write(p: str) -> bool:
        try:
            os.makedirs(p, exist_ok=True)
            test_file = os.path.join(p, ".write_test")
            with open(test_file, "w", encoding="utf-8") as f:
                f.write("ok")
            os.remove(test_file)
            return True
        except Exception:
            return False

    # Frozen exe: try next to the executable first
    try:
        if getattr(sys, 'frozen', False) and hasattr(sys, 'executable'):
            exe_dir = os.path.dirname(sys.executable)
            candidate = os.path.join(exe_dir, "logs")
            if _can_write(candidate):
                return candidate
    except Exception:
        pass

    # User-local app data (persistent and writable)
    try:
        base = os.environ.get('LOCALAPPDATA') or os.environ.get('APPDATA') or os.path.expanduser('~')
        candidate = os.path.join(base, 'Glossarion', 'logs')
        if _can_write(candidate):
            return candidate
    except Exception:
        pass

    # Development fallback: next to this file
    try:
        base_dir = os.path.abspath(os.path.dirname(__file__))
        candidate = os.path.join(base_dir, "logs")
        if _can_write(candidate):
            return candidate
    except Exception:
        pass

    # Final fallback: CWD
    fallback = os.path.join(os.getcwd(), "logs")
    os.makedirs(fallback, exist_ok=True)
    return fallback


def _make_logger() -> logging.Logger:
    """Return the dedicated "memory" logger, attaching its rotating file
    handler on first use.

    Records go only to logs/memory.log; propagation is disabled so memory
    chatter stays out of the console and the root log file.
    """
    logger = logging.getLogger("memory")
    logger.setLevel(logging.INFO)

    # Attach the file handler only once, even if this is called repeatedly.
    already_attached = any(
        isinstance(handler, RotatingFileHandler) for handler in logger.handlers
    )
    if not already_attached:
        target = os.path.join(_ensure_logs_dir(), "memory.log")
        handler = RotatingFileHandler(
            target, maxBytes=2 * 1024 * 1024, backupCount=3, encoding="utf-8"
        )
        handler.setFormatter(logging.Formatter(
            fmt="%(asctime)s %(levelname)s [%(process)d:%(threadName)s] %(name)s: %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        ))
        logger.addHandler(handler)

    # Do NOT propagate to root; keep memory logs out of console / run.log.
    logger.propagate = False
    return logger


def _get_process() -> "psutil.Process | None":
    """Return a psutil handle for the current process.

    Returns None when psutil is not installed (module-level fallback bound
    it to None) or when the handle cannot be created.
    """
    if psutil is None:
        return None
    try:
        return psutil.Process()
    except Exception:
        # e.g. AccessDenied in restricted environments
        return None


def _format_bytes(num: int) -> str:
    try:
        for unit in ["B", "KB", "MB", "GB", "TB"]:
            if num < 1024.0:
                return f"{num:,.1f}{unit}"
            num /= 1024.0
        return f"{num:,.1f}PB"
    except Exception:
        return str(num)


def _collect_stats(proc) -> dict:
    stats = {}
    try:
        if proc is not None:
            mi = proc.memory_info()
            stats["rss"] = mi.rss
            stats["vms"] = getattr(mi, "vms", 0)
            # Peak RSS on Windows via psutil.Process.memory_info() may expose peak_wset in private API; skip for portability
        else:
            stats["rss"] = 0
            stats["vms"] = 0
    except Exception:
        stats["rss"] = stats.get("rss", 0)
        stats["vms"] = stats.get("vms", 0)

    # GC stats
    try:
        counts = gc.get_count()
        stats["gc"] = counts
    except Exception:
        stats["gc"] = (0, 0, 0)

    return stats


def _worker(interval_sec: float, include_tracemalloc: bool):
    """Background-thread body: periodically log memory/GC statistics.

    Loops until the module-level stop event is set, waiting interval_sec
    seconds between records. Exceptions never escape the thread.
    """
    try:
        log = _make_logger()
        proc = _get_process()

        # tracemalloc is optional and only started when explicitly requested.
        tm_enabled = False
        if include_tracemalloc:
            try:
                import tracemalloc
                if not tracemalloc.is_tracing():
                    tracemalloc.start()
                tm_enabled = True
            except Exception:
                tm_enabled = False
    except Exception:
        # Initialization failed; let the thread die quietly.
        return

    # Main monitoring loop.
    while not _GLOBAL_STOP.is_set():
        try:
            snapshot = _collect_stats(proc)
            counts = snapshot.get("gc", (0, 0, 0))
            parts = [
                f"RSS={_format_bytes(snapshot.get('rss', 0))}",
                f"VMS={_format_bytes(snapshot.get('vms', 0))}",
                f"GC={counts[0]}/{counts[1]}/{counts[2]}",
            ]

            if tm_enabled:
                try:
                    import tracemalloc
                    current, peak = tracemalloc.get_traced_memory()
                    parts.append(f"TM_CUR={_format_bytes(current)}")
                    parts.append(f"TM_PEAK={_format_bytes(peak)}")
                except Exception:
                    pass

            log.info(" ".join(parts))
        except Exception as exc:
            try:
                log.warning("memory reporter error: %s", exc)
            except Exception:
                pass
        finally:
            # Event.wait doubles as an interruptible sleep: setting the stop
            # event wakes this immediately instead of burning the full interval.
            try:
                _GLOBAL_STOP.wait(timeout=interval_sec)
            except Exception:
                # Fallback to a plain sleep if the wait itself fails.
                time.sleep(interval_sec)


# Guards thread creation below. A module-level lock is required: a lock
# created inside the function would be a brand-new object on every call and
# would provide no mutual exclusion at all.
_START_LOCK = threading.Lock()


def start_global_memory_logger(interval_sec: float = 3.0, include_tracemalloc: bool = False) -> None:
    """Start the background memory logger once per process.

    Safe to call repeatedly: calls while the worker thread is alive are
    no-ops, and a stopped logger can be restarted. Never raises, so GUI
    startup cannot be broken by logging failures.

    Args:
        interval_sec: how often to log, in seconds.
        include_tracemalloc: if True, also log tracemalloc current/peak.
    """
    global _GLOBAL_THREAD

    # BUG FIX: the original used `with threading.Lock():`, which constructs a
    # fresh lock per call and therefore never blocks concurrent callers —
    # two threads could both pass the is_alive() check and start two workers.
    # Serialize on the shared module-level lock instead.
    with _START_LOCK:
        if _GLOBAL_THREAD and _GLOBAL_THREAD.is_alive():
            return  # already running

        # Clear stop event so a previously-stopped logger can restart.
        _GLOBAL_STOP.clear()

        try:
            t = threading.Thread(
                target=_worker,
                args=(interval_sec, include_tracemalloc),
                name="mem-logger",
                daemon=True,  # never keep the process alive on exit
            )
            t.start()
            _GLOBAL_THREAD = t
        except Exception:
            # Do not raise to avoid breaking GUI startup
            _GLOBAL_THREAD = None


def stop_global_memory_logger() -> None:
    """Signal the background memory logger to exit and briefly wait for it.

    Best-effort: never raises, and waits at most two seconds for the worker
    thread to finish (it is a daemon thread, so a slow exit is harmless).
    """
    try:
        _GLOBAL_STOP.set()
        worker = _GLOBAL_THREAD
        if worker and worker.is_alive():
            # Give it a moment to notice the stop event and exit.
            worker.join(timeout=2.0)
    except Exception:
        pass