Coverage for src / tracekit / core / memory_limits.py: 96%

115 statements  

« prev     ^ index     » next       coverage.py v7.13.1, created at 2026-01-11 23:04 +0000

1"""Per-operation memory limits for TraceKit. 

2 

3This module provides fine-grained memory control for individual operations 

4with automatic parameter adjustment to fit memory constraints. 

5 

6 

7Example: 

8 >>> from tracekit.core.memory_limits import apply_memory_limit 

9 >>> params = apply_memory_limit('spectrogram', samples=1e9, max_memory='512MB') 

10 >>> print(f"Adjusted nperseg: {params['nperseg']}") 

11 

12References: 

13 See tracekit.config.memory for global memory configuration. 

14""" 

15 

16from __future__ import annotations 

17 

18import warnings 

19from typing import Any 

20 

21from tracekit.config.memory import get_memory_config 

22from tracekit.utils.memory import estimate_memory 

23 

24 

25def parse_memory_limit(limit: int | str | None) -> int | None: 

26 """Parse memory limit from various formats. 

27 

28 Args: 

29 limit: Memory limit as bytes (int), string ("4GB", "512MB"), or None. 

30 

31 Returns: 

32 Memory limit in bytes, or None for no limit. 

33 

34 Raises: 

35 ValueError: If format is invalid. 

36 

37 Example: 

38 >>> parse_memory_limit("4GB") 

39 4000000000 

40 >>> parse_memory_limit(512 * 1024**2) 

41 536870912 

42 >>> parse_memory_limit(None) is None 

43 True 

44 """ 

45 if limit is None: 

46 return None 

47 

48 if isinstance(limit, str): 

49 limit_upper = limit.upper().strip() 

50 try: 

51 if limit_upper.endswith("GB"): 

52 return int(float(limit_upper[:-2]) * 1e9) 

53 elif limit_upper.endswith("MB"): 

54 return int(float(limit_upper[:-2]) * 1e6) 

55 elif limit_upper.endswith("KB"): 

56 return int(float(limit_upper[:-2]) * 1e3) 

57 elif limit_upper.endswith("GIB"): 

58 return int(float(limit_upper[:-3]) * 1024**3) 

59 elif limit_upper.endswith("MIB"): 

60 return int(float(limit_upper[:-3]) * 1024**2) 

61 elif limit_upper.endswith("KIB"): 

62 return int(float(limit_upper[:-3]) * 1024) 

63 else: 

64 return int(float(limit_upper)) 

65 except ValueError as e: 

66 raise ValueError(f"Invalid memory limit format: {limit}") from e 

67 

68 return int(limit) 

69 

70 

def apply_memory_limit(
    operation: str,
    samples: int | float,
    *,
    max_memory: int | str | None = None,
    **params: Any,
) -> dict[str, Any]:
    """Apply memory limit and adjust parameters to fit.

    Args:
        operation: Operation name (fft, psd, spectrogram, etc.).
        samples: Number of samples to process.
        max_memory: Maximum memory limit (overrides global config if provided).
        **params: Operation parameters to adjust.

    Returns:
        Adjusted parameters dictionary that fits within memory limit.

    Example:
        >>> params = apply_memory_limit('spectrogram', samples=1e9, max_memory='512MB', nperseg=8192)
        >>> print(f"Adjusted to nperseg={params['nperseg']} to fit 512MB")

    Note:
        If parameters cannot be adjusted to fit memory, a warning is issued
        and the original parameters are returned.
    """
    # Resolve effective limit: explicit argument wins over the global config.
    limit_bytes = parse_memory_limit(max_memory)
    if limit_bytes is None:
        # Use global config
        config = get_memory_config()
        limit_bytes = config.max_memory
        if limit_bytes is None:
            # No limit, return params unchanged
            return params

    samples = int(samples)

    # Estimate with current parameters
    current_estimate = estimate_memory(operation, samples, **params)

    if current_estimate.total <= limit_bytes:
        # Already within limit
        return params

    # Need to adjust parameters; work on a copy so the caller's kwargs stay untouched.
    adjusted_params = params.copy()

    if operation in ("fft", "psd"):
        # Reduce nfft if specified
        if "nfft" in adjusted_params:
            # Try reducing nfft
            original_nfft = adjusted_params["nfft"]
            # Binary search for suitable nfft
            nfft = _find_max_nfft(operation, samples, limit_bytes, **adjusted_params)
            if nfft < original_nfft:
                adjusted_params["nfft"] = nfft
                warnings.warn(
                    f"Reduced nfft from {original_nfft} to {nfft} to fit {limit_bytes / 1e6:.1f} MB limit",
                    UserWarning,
                    stacklevel=2,
                )

    elif operation == "spectrogram":
        # Adjust nperseg and/or nfft
        original_nperseg = adjusted_params.get("nperseg", 256)
        original_nfft = adjusted_params.get("nfft", original_nperseg)

        # Try reducing nperseg first
        nperseg = _find_max_nperseg(samples, limit_bytes, noverlap=adjusted_params.get("noverlap"))
        if nperseg < original_nperseg:
            adjusted_params["nperseg"] = nperseg
            # Adjust noverlap proportionally so the overlap fraction is preserved
            if "noverlap" in adjusted_params:
                overlap_ratio = adjusted_params["noverlap"] / original_nperseg
                adjusted_params["noverlap"] = int(nperseg * overlap_ratio)
            warnings.warn(
                f"Reduced nperseg from {original_nperseg} to {nperseg} to fit {limit_bytes / 1e6:.1f} MB limit",
                UserWarning,
                stacklevel=2,
            )

        # Also reduce nfft if needed (nfft should not exceed the segment length)
        if "nfft" in adjusted_params and adjusted_params["nfft"] > nperseg:
            adjusted_params["nfft"] = nperseg

    elif operation == "eye_diagram":
        # Reduce samples_per_ui or num_uis
        if "num_uis" in adjusted_params:
            original_num_uis = adjusted_params["num_uis"]
            # Calculate max num_uis that fits
            samples_per_ui = adjusted_params.get("samples_per_ui", 100)
            max_num_uis = _find_max_num_uis(limit_bytes, samples_per_ui)
            if max_num_uis < original_num_uis:
                adjusted_params["num_uis"] = max_num_uis
                warnings.warn(
                    f"Reduced num_uis from {original_num_uis} to {max_num_uis} to fit {limit_bytes / 1e6:.1f} MB limit",
                    UserWarning,
                    stacklevel=2,
                )

    # Verify final estimate; if still over budget, warn rather than fail.
    final_estimate = estimate_memory(operation, samples, **adjusted_params)
    if final_estimate.total > limit_bytes:
        warnings.warn(
            f"Could not adjust parameters to fit {limit_bytes / 1e6:.1f} MB limit. "
            f"Operation requires {final_estimate.total / 1e6:.1f} MB. "
            "Consider using chunked processing or increasing memory limit.",
            UserWarning,
            stacklevel=2,
        )

    return adjusted_params

185 

186 

def _find_max_nfft(operation: str, samples: int, limit_bytes: int, **params: Any) -> int:
    """Binary search for maximum nfft that fits memory limit.

    Args:
        operation: Operation name.
        samples: Number of samples.
        limit_bytes: Memory limit in bytes.
        **params: Additional parameters.

    Returns:
        Maximum nfft that fits within limit.
    """
    lo = 64
    hi = params.get("nfft", 8192)

    # Upper-biased bisection: probe the midpoint and keep the half of the
    # range whose memory estimate still satisfies the budget.
    while lo < hi:
        candidate = (lo + hi + 1) // 2
        probe = dict(params)
        probe["nfft"] = candidate
        estimate = estimate_memory(operation, samples, **probe)
        if estimate.total <= limit_bytes:
            lo = candidate
        else:
            hi = candidate - 1

    return lo

214 

215 

216def _find_max_nperseg(samples: int, limit_bytes: int, noverlap: int | None = None) -> int: 

217 """Binary search for maximum nperseg that fits memory limit. 

218 

219 Args: 

220 samples: Number of samples. 

221 limit_bytes: Memory limit in bytes. 

222 noverlap: Overlap samples (if specified). 

223 

224 Returns: 

225 Maximum nperseg that fits within limit. 

226 """ 

227 min_nperseg = 64 

228 max_nperseg = min(8192, samples // 4) 

229 

230 # Binary search 

231 while min_nperseg < max_nperseg: 

232 mid_nperseg = (min_nperseg + max_nperseg + 1) // 2 

233 

234 # Calculate memory for this nperseg 

235 hop = mid_nperseg - (noverlap or mid_nperseg // 2) 

236 num_segments = max(1, (samples - (noverlap or mid_nperseg // 2)) // hop) 

237 

238 # Estimate memory 

239 bytes_per_sample = 8 # float64 

240 data_mem = samples * bytes_per_sample 

241 intermediate_mem = mid_nperseg * bytes_per_sample * 2 + mid_nperseg * bytes_per_sample * 2 

242 output_mem = (mid_nperseg // 2 + 1) * num_segments * bytes_per_sample * 2 

243 

244 total_mem = data_mem + intermediate_mem + output_mem 

245 

246 if total_mem <= limit_bytes: 

247 min_nperseg = mid_nperseg 

248 else: 

249 max_nperseg = mid_nperseg - 1 

250 

251 return min_nperseg 

252 

253 

254def _find_max_num_uis(limit_bytes: int, samples_per_ui: int) -> int: 

255 """Find maximum num_uis that fits memory limit for eye diagrams. 

256 

257 Args: 

258 limit_bytes: Memory limit in bytes. 

259 samples_per_ui: Samples per unit interval. 

260 

261 Returns: 

262 Maximum num_uis that fits. 

263 """ 

264 bytes_per_sample = 8 # float64 

265 # Eye diagram memory: samples_per_ui * num_uis * bytes_per_sample 

266 max_num_uis = limit_bytes // (samples_per_ui * bytes_per_sample * 2) 

267 return max(100, int(max_num_uis)) # At least 100 UIs 

268 

269 

def get_operation_memory_limit(
    operation: str,
    max_memory: int | str | None = None,
) -> int:
    """Get effective memory limit for an operation.

    Resolution order: the explicit override, then the global memory config,
    then 80% of currently available system memory.

    Args:
        operation: Operation name.
        max_memory: Override limit (or None for global config).

    Returns:
        Memory limit in bytes.

    Example:
        >>> limit = get_operation_memory_limit('spectrogram', max_memory='512MB')
        >>> print(f"Limit: {limit / 1e6:.1f} MB")
    """
    # An explicit per-call override takes precedence over everything.
    override = parse_memory_limit(max_memory)
    if override is not None:
        return override

    # Next, the globally configured ceiling, if one is set.
    configured = get_memory_config().max_memory
    if configured is not None:
        return configured

    # Default: 80% of available
    from tracekit.utils.memory import get_available_memory

    return int(get_available_memory() * 0.8)

301 

302 

def check_operation_fits(
    operation: str,
    samples: int | float,
    *,
    max_memory: int | str | None = None,
    **params: Any,
) -> bool:
    """Check if operation with given parameters fits within memory limit.

    Args:
        operation: Operation name.
        samples: Number of samples.
        max_memory: Memory limit (or None for global config).
        **params: Operation parameters.

    Returns:
        True if operation fits within limit.

    Example:
        >>> fits = check_operation_fits('fft', samples=1e9, max_memory='4GB', nfft=8192)
        >>> if not fits:
        ...     print("FFT too large for 4GB limit")
    """
    budget = get_operation_memory_limit(operation, max_memory)
    required = estimate_memory(operation, samples, **params).total
    return required <= budget

329 

330 

# Public API of this module (kept alphabetical).
__all__ = [
    "apply_memory_limit",
    "check_operation_fits",
    "get_operation_memory_limit",
    "parse_memory_limit",
]