Coverage for src / tracekit / exploratory / legacy.py: 90%

281 statements  

« prev     ^ index     » next       coverage.py v7.13.1, created at 2026-01-11 23:04 +0000

1"""Legacy system signal analysis. 

2 

3This module provides analysis tools for legacy RTL/TTL systems with 

4mixed logic families and multi-voltage domains. 

5 

6 

7Example: 

8 >>> from tracekit.exploratory.legacy import detect_logic_families_multi_channel 

9 >>> families = detect_logic_families_multi_channel(channels) 

10 >>> for ch, result in families.items(): 

11 ... print(f"Channel {ch}: {result['family']} (confidence={result['confidence']:.2f})") 

12""" 

13 

14from __future__ import annotations 

15 

16from dataclasses import dataclass 

17from typing import TYPE_CHECKING, Any, Literal 

18 

19import numpy as np 

20 

21if TYPE_CHECKING: 

22 from numpy.typing import NDArray 

23 

24 from tracekit.core.types import WaveformTrace 

25 

# Logic family specifications per IEEE/JEDEC standards.
#
# Each entry maps a family name to its DC voltage thresholds:
#   vil_max: maximum voltage accepted as a logic-low input
#   vih_min: minimum voltage accepted as a logic-high input
#   vol_max: maximum voltage a driver may output for logic low
#   voh_min: minimum voltage a driver may output for logic high
#            (None when it depends on external circuitry, e.g. a pullup)
#   vcc:     nominal supply rail
LOGIC_FAMILY_SPECS = {
    # Classic 5 V bipolar TTL.
    "TTL": {
        "vil_max": 0.8,
        "vih_min": 2.0,
        "vol_max": 0.4,
        "voh_min": 2.4,
        "vcc": 5.0,
    },
    # 5 V CMOS (rail-to-rail swing, wide noise margins).
    "CMOS_5V": {
        "vil_max": 1.5,
        "vih_min": 3.5,
        "vol_max": 0.5,
        "voh_min": 4.5,
        "vcc": 5.0,
    },
    # 3.3 V parts with TTL-compatible input thresholds.
    "LVTTL": {
        "vil_max": 0.8,
        "vih_min": 2.0,
        "vol_max": 0.4,
        "voh_min": 2.4,
        "vcc": 3.3,
    },
    "LVCMOS_3V3": {
        "vil_max": 0.8,
        "vih_min": 2.0,
        "vol_max": 0.4,
        "voh_min": 2.4,
        "vcc": 3.3,
    },
    "LVCMOS_2V5": {
        "vil_max": 0.7,
        "vih_min": 1.7,
        "vol_max": 0.4,
        "voh_min": 2.0,
        "vcc": 2.5,
    },
    # 1.8 V CMOS input thresholds are specified as fractions of VCC.
    "LVCMOS_1V8": {
        "vil_max": 0.35 * 1.8,
        "vih_min": 0.65 * 1.8,
        "vol_max": 0.4,
        "voh_min": 1.4,
        "vcc": 1.8,
    },
    # Emitter-coupled logic: negative rail, small swing.
    "ECL": {
        "vil_max": -1.475,
        "vih_min": -1.105,
        "vol_max": -1.65,
        "voh_min": -0.98,
        "vcc": -5.2,
    },
    # Positive ECL: ECL levels shifted up to a +5 V supply.
    "PECL": {
        "vil_max": 3.4,
        "vih_min": 4.0,
        "vol_max": 3.2,
        "voh_min": 4.4,
        "vcc": 5.0,
    },
    "OPEN_COLLECTOR": {
        "vil_max": 0.8,
        "vih_min": 2.0,
        "vol_max": 0.4,
        "voh_min": None,  # Depends on pullup
        "vcc": 5.0,
    },
}

92 

93 

@dataclass
class LogicFamilyResult:
    """Result of logic family detection for one channel.

    Attributes:
        family: Detected logic family name (a key of LOGIC_FAMILY_SPECS,
            or "UNKNOWN" when nothing matched).
        confidence: Confidence score (0.0 to 1.0).
        v_low: Measured low voltage level (10th percentile of the data).
        v_high: Measured high voltage level (90th percentile of the data).
        alternatives: Alternative (family, score) candidates whose score
            was close to the best match.
        degradation_warning: Optional warning about signal degradation
            (e.g. V_high more than 10% below the family's VOH spec).
        deviation_pct: Deviation of v_high from the spec VOH, in percent.
    """

    family: str
    confidence: float
    v_low: float
    v_high: float
    alternatives: list[tuple[str, float]]
    degradation_warning: str | None = None
    deviation_pct: float = 0.0

115 

116 

def detect_logic_families_multi_channel(
    channels: list[WaveformTrace] | dict[int, WaveformTrace],
    *,
    confidence_thresholds: dict[str, float] | None = None,
    warn_on_degradation: bool = True,
    voltage_tolerance: float = 0.20,
    min_edges_for_detection: int = 10,
) -> dict[int, LogicFamilyResult]:
    """Detect logic family for each channel independently.

    Analyzes voltage distribution per channel and maps to logic family specs.

    Args:
        channels: List or dict of WaveformTrace objects.
        confidence_thresholds: Thresholds for high/medium confidence.
            Default: {'high': 0.9, 'medium': 0.7}
        warn_on_degradation: If True, warn on degraded signals.
        voltage_tolerance: Tolerance for spec matching (default 20%).
        min_edges_for_detection: Minimum edges required per channel;
            channels with fewer transitions get their confidence halved.

    Returns:
        Dictionary mapping channel ID to LogicFamilyResult.

    Example:
        >>> channels = [trace.get_channel(i) for i in range(8)]
        >>> families = detect_logic_families_multi_channel(channels)
        >>> for ch_id, result in families.items():
        ...     print(f"Channel {ch_id}: {result.family} (confidence={result.confidence:.2f})")

    References:
        LEGACY-001: Multi-Channel Logic Family Auto-Detection
        IEEE 1164: Standard for Logic Families
        JEDEC: Logic Family Specifications
    """
    if confidence_thresholds is None:
        confidence_thresholds = {"high": 0.9, "medium": 0.7}

    # Normalize the input to {channel_id: trace}.
    if isinstance(channels, list):
        channels = dict(enumerate(channels))

    results = {}

    for ch_id, trace in channels.items():
        data = trace.data

        # Robust level estimates: 10th/90th percentiles ignore the
        # occasional glitch that min/max would pick up.
        v_low = float(np.percentile(data, 10))
        v_high = float(np.percentile(data, 90))

        # Count threshold crossings for the confidence heuristic.
        # NOTE: np.diff on a boolean array raises TypeError (boolean
        # subtract is unsupported in NumPy), so compare adjacent states.
        threshold = (v_low + v_high) / 2
        states = data > threshold
        edges = int(np.count_nonzero(states[1:] != states[:-1]))

        # Score each logic family against the measured levels.
        candidates = []
        for family_name, specs in LOGIC_FAMILY_SPECS.items():
            score = _score_logic_family(v_low, v_high, specs, voltage_tolerance)  # type: ignore[arg-type]
            if score > 0:
                candidates.append((family_name, score))

        # Best match first.
        candidates.sort(key=lambda x: x[1], reverse=True)

        if not candidates:
            # No family matched within tolerance.
            result = LogicFamilyResult(
                family="UNKNOWN",
                confidence=0.0,
                v_low=v_low,
                v_high=v_high,
                alternatives=[],
                degradation_warning="No matching logic family found",
            )
        else:
            best_family, best_score = candidates[0]
            confidence = min(1.0, best_score)

            # Too few transitions means the level statistics are weak.
            if edges < min_edges_for_detection:
                confidence *= 0.5

            # Ambiguity check: runners-up within 0.2 of the best score.
            alternatives = [
                (name, score) for name, score in candidates[1:4] if best_score - score < 0.2
            ]

            # Degradation check against the winning family's VOH spec.
            degradation_warning = None
            deviation_pct = 0.0

            if warn_on_degradation:
                specs = LOGIC_FAMILY_SPECS[best_family]
                if specs["voh_min"] is not None:  # type: ignore[index]
                    expected_voh = specs["voh_min"]  # type: ignore[index]
                    if v_high < expected_voh:
                        deviation_pct = 100 * (expected_voh - v_high) / expected_voh
                        if deviation_pct > 10:
                            degradation_warning = (
                                f"V_high below spec (expected >= {expected_voh:.3f}V)"
                            )

            result = LogicFamilyResult(
                family=best_family,
                confidence=confidence,
                v_low=v_low,
                v_high=v_high,
                alternatives=alternatives,
                degradation_warning=degradation_warning,
                deviation_pct=deviation_pct,
            )

        results[ch_id] = result

    return results

239 

240 

241def _score_logic_family( 

242 v_low: float, 

243 v_high: float, 

244 specs: dict[str, float | None], 

245 tolerance: float, 

246) -> float: 

247 """Score how well voltage levels match a logic family. 

248 

249 Args: 

250 v_low: Measured low voltage. 

251 v_high: Measured high voltage. 

252 specs: Logic family specifications. 

253 tolerance: Tolerance for matching. 

254 

255 Returns: 

256 Score from 0.0 to 1.0. 

257 """ 

258 score = 1.0 

259 

260 # Check VOL (output low) 

261 vol_max = specs["vol_max"] 

262 if vol_max is not None: 262 ↛ 271line 262 didn't jump to line 271 because the condition on line 262 was always true

263 if v_low <= vol_max: 

264 score *= 1.0 # Exact match 

265 elif v_low <= vol_max * (1 + tolerance): 

266 score *= 0.85 # Within tolerance 

267 else: 

268 score *= 0.0 # Outside tolerance 

269 

270 # Check VOH (output high) 

271 voh_min = specs["voh_min"] 

272 if voh_min is not None: 

273 if v_high >= voh_min: 

274 score *= 1.0 

275 elif v_high >= voh_min * (1 - tolerance): 

276 score *= 0.85 

277 else: 

278 score *= 0.0 

279 

280 return score 

281 

282 

@dataclass
class CrossCorrelationResult:
    """Result of multi-reference cross-correlation.

    Attributes:
        correlation: Pearson correlation coefficient of the normalized,
            offset-corrected signals.
        confidence: Overall confidence in the result (correlation scaled
            down by the magnitude of the reference offset).
        ref_offset_mv: Reference voltage offset in mV (signal2's ground
            relative to signal1's).
        offset_uncertainty_mv: Uncertainty in the offset measurement.
        lag_samples: Time lag in samples (argmax of the cross-correlation).
        lag_ns: Time lag in nanoseconds.
        drift_detected: True if reference drift was detected.
        drift_rate: Drift rate in V/ms, if detected; otherwise None.
        normalized_signal1: Normalized first signal.
        normalized_signal2: Normalized, offset-corrected second signal.
    """

    correlation: float
    confidence: float
    ref_offset_mv: float
    offset_uncertainty_mv: float
    lag_samples: int
    lag_ns: float
    drift_detected: bool = False
    drift_rate: float | None = None
    normalized_signal1: NDArray[np.float64] | None = None
    normalized_signal2: NDArray[np.float64] | None = None

310 

311 

def cross_correlate_multi_reference(
    signal1: WaveformTrace,
    signal2: WaveformTrace,
    *,
    detect_drift: bool = False,
    drift_window_ms: float = 10.0,
) -> CrossCorrelationResult:
    """Correlate signals with different voltage references.

    Normalizes signals to [0, 1] using per-signal logic levels before
    computing correlation, enabling comparison of signals with different
    ground references.

    Args:
        signal1: First signal trace.
        signal2: Second signal trace.
        detect_drift: If True, detect time-varying reference drift.
        drift_window_ms: Window size for drift detection in ms.

    Returns:
        CrossCorrelationResult with correlation and offset information.

    Example:
        >>> ttl = trace.get_channel(0)  # 5V TTL
        >>> cmos = trace.get_channel(1)  # 3.3V CMOS
        >>> result = cross_correlate_multi_reference(ttl, cmos)
        >>> print(f"Correlation: {result.correlation:.3f}")
        >>> print(f"Reference offset: {result.ref_offset_mv:.1f} mV")

    References:
        LEGACY-002: Multi-Reference Voltage Signal Correlation
    """
    data1 = signal1.data
    data2 = signal2.data

    # Normalize each signal to [0, 1] so differing swings are comparable.
    norm1 = _normalize_to_logic_levels(data1)
    norm2 = _normalize_to_logic_levels(data2)

    # Remove the residual DC offset between the normalized signals.
    dc_offset = np.mean(norm1) - np.mean(norm2)
    norm2_corrected = norm2 + dc_offset

    # Trim both signals to a common length before correlating.
    min_len = min(len(norm1), len(norm2_corrected))
    norm1 = norm1[:min_len]
    norm2_corrected = norm2_corrected[:min_len]

    correlation = np.corrcoef(norm1, norm2_corrected)[0, 1]

    # Lag via full cross-correlation of the mean-removed signals; in
    # 'full' mode the zero-lag term sits at index len(norm1) - 1.
    xcorr = np.correlate(
        norm1 - np.mean(norm1), norm2_corrected - np.mean(norm2_corrected), mode="full"
    )
    lag_samples = xcorr.argmax() - (len(norm1) - 1)

    # Convert lag to nanoseconds.
    sample_rate = signal1.metadata.sample_rate
    lag_ns = lag_samples / sample_rate * 1e9

    # Reference offset: difference in ground levels (signal2 relative to
    # signal1), estimated from the raw minima.
    v1_min = np.min(data1)
    v2_min = np.min(data2)
    ref_offset_mv = (v2_min - v1_min) * 1000

    # Confidence decreases with larger reference offsets (>= 1 V -> 0).
    offset_uncertainty_mv = abs(ref_offset_mv) * 0.1  # 10% uncertainty
    confidence = abs(correlation) * (1 - min(abs(ref_offset_mv) / 1000, 1.0))

    # Drift detection: compare per-window DC offsets over time.
    drift_detected = False
    drift_rate = None

    if detect_drift:
        window_samples = int(drift_window_ms * 1e-3 * sample_rate)
        # Guard: a window shorter than one sample (tiny window or low
        # sample rate) would otherwise raise ZeroDivisionError.
        n_windows = min_len // window_samples if window_samples > 0 else 0

        if n_windows >= 2:
            offsets = []
            for i in range(n_windows):
                start = i * window_samples
                end = start + window_samples
                win_offset = np.mean(data1[start:end]) - np.mean(data2[start:end])
                offsets.append(win_offset)

            # Net change between first and last window gives the drift.
            offset_change = abs(offsets[-1] - offsets[0])
            drift_rate_val = offset_change / (n_windows * drift_window_ms)

            if drift_rate_val > 0.1:  # V/ms threshold
                drift_detected = True
                drift_rate = drift_rate_val

    return CrossCorrelationResult(
        correlation=float(correlation),
        confidence=float(confidence),
        ref_offset_mv=float(ref_offset_mv),
        offset_uncertainty_mv=float(offset_uncertainty_mv),
        lag_samples=int(lag_samples),
        lag_ns=float(lag_ns),
        drift_detected=drift_detected,
        drift_rate=drift_rate,
        normalized_signal1=norm1,
        normalized_signal2=norm2_corrected,
    )

423 

424 

425def _normalize_to_logic_levels(data: NDArray[np.float64]) -> NDArray[np.float64]: 

426 """Normalize signal to [0, 1] based on logic levels. 

427 

428 Args: 

429 data: Signal data. 

430 

431 Returns: 

432 Normalized signal. 

433 """ 

434 v_min = float(np.percentile(data, 5)) 

435 v_max = float(np.percentile(data, 95)) 

436 v_range = v_max - v_min 

437 

438 if v_range < 1e-6: 438 ↛ 439line 438 didn't jump to line 439 because the condition on line 438 was never true

439 return np.zeros_like(data) 

440 

441 return (data - v_min) / v_range 

442 

443 

@dataclass
class SignalQualityResult:
    """Result of signal quality assessment against logic-family specs.

    Attributes:
        status: 'OK', 'WARNING', or 'CRITICAL', based on the worst-case
            margin to spec.
        violation_count: Total number of spec violations.
        total_samples: Total samples analyzed.
        min_margin_mv: Minimum margin to spec in mV.
        violations: List of violation detail dicts (timestamp, type,
            voltage, spec limit).
        vil_violations: Count of VIL violations.
        vih_violations: Count of VIH violations.
        vol_violations: Count of VOL violations.
        voh_violations: Count of VOH violations.
        vil_rate: Rate of VIL violations (0.0 to 1.0).
        vih_rate: Rate of VIH violations (0.0 to 1.0).
        vol_rate: Rate of VOL violations among low samples.
        voh_rate: Rate of VOH violations among high samples.
        failure_diagnosis: Suggested failure mode, if degradation found.
        time_to_failure_s: Estimated time to failure, if extrapolatable.
        drift_rate_mv_per_s: Voltage drift rate, if aging analysis ran.
    """

    status: Literal["OK", "WARNING", "CRITICAL"]
    violation_count: int
    total_samples: int
    min_margin_mv: float
    violations: list[dict[str, Any]]
    vil_violations: int = 0
    vih_violations: int = 0
    vol_violations: int = 0
    voh_violations: int = 0
    vil_rate: float = 0.0
    vih_rate: float = 0.0
    vol_rate: float = 0.0
    voh_rate: float = 0.0
    failure_diagnosis: str | None = None
    time_to_failure_s: float | None = None
    drift_rate_mv_per_s: float | None = None

480 

def assess_signal_quality(
    signal: WaveformTrace,
    logic_family: str,
    *,
    check_aging: bool = False,
    time_window_s: float = 1.0,
) -> SignalQualityResult:
    """Assess signal quality against logic family specs.

    Checks voltage compliance with specifications and detects degraded
    signal levels that may indicate aging or failing components.

    Args:
        signal: Signal trace to assess.
        logic_family: Logic family name (e.g., 'TTL', 'CMOS_5V').
            Unknown names fall back to 'TTL'.
        check_aging: If True, analyze for aging/degradation.
        time_window_s: Window for drift analysis.

    Returns:
        SignalQualityResult with compliance status and violations.

    Example:
        >>> result = assess_signal_quality(signal, logic_family='TTL')
        >>> print(f"Status: {result.status}")
        >>> print(f"Violations: {result.violation_count}")

    References:
        LEGACY-003: Logic Level Compliance Checking
        JEDEC Standard No. 8C
    """
    if logic_family not in LOGIC_FAMILY_SPECS:
        logic_family = "TTL"  # Default fallback

    specs = LOGIC_FAMILY_SPECS[logic_family]
    data = signal.data
    sample_rate = signal.metadata.sample_rate
    n_samples = len(data)

    # Midpoint between the input thresholds classifies each sample.
    threshold = (specs["vil_max"] + specs["vih_min"]) / 2  # type: ignore[index]

    # Classify samples
    is_high = data > threshold
    is_low = ~is_high

    high_samples = data[is_high]
    low_samples = data[is_low]

    voh_min = specs["voh_min"]  # type: ignore[index]
    vol_max = specs["vol_max"]  # type: ignore[index]

    voh_violations = 0
    vol_violations = 0
    violations: list[dict[str, Any]] = []

    # Check VOH violations (high samples below spec).
    if voh_min is not None and len(high_samples) > 0:
        voh_mask = high_samples < voh_min
        # Cast to plain int: np.sum yields np.int64, the field is typed int.
        voh_violations = int(np.sum(voh_mask))
        if voh_violations > 0:
            violation_indices = np.where(is_high)[0][voh_mask]
            for idx in violation_indices[:10]:  # Record only the first 10
                violations.append(
                    {
                        "timestamp_us": float(idx / sample_rate * 1e6),
                        "type": "VOH",
                        "voltage": float(data[idx]),
                        "spec_limit": voh_min,
                    }
                )

    # Check VOL violations (low samples above spec).
    if vol_max is not None and len(low_samples) > 0:
        vol_mask = low_samples > vol_max
        vol_violations = int(np.sum(vol_mask))
        if vol_violations > 0:
            violation_indices = np.where(is_low)[0][vol_mask]
            for idx in violation_indices[:10]:
                violations.append(
                    {
                        "timestamp_us": float(idx / sample_rate * 1e6),
                        "type": "VOL",
                        "voltage": float(data[idx]),
                        "spec_limit": vol_max,
                    }
                )

    total_violations = voh_violations + vol_violations

    # Margins to spec in mV (positive = compliant, negative = violating).
    margins = []
    if len(high_samples) > 0 and voh_min is not None:
        margins.extend((high_samples - voh_min) * 1000)
    if len(low_samples) > 0 and vol_max is not None:
        margins.extend((vol_max - low_samples) * 1000)

    min_margin_mv = float(min(margins)) if margins else 0.0

    # Determine status from the worst-case margin.
    if min_margin_mv < 100:
        status: Literal["OK", "WARNING", "CRITICAL"] = "CRITICAL"
    elif min_margin_mv < 200:
        status = "WARNING"
    else:
        status = "OK"

    # Violation rates, guarded against empty classes.
    n_high = len(high_samples)
    n_low = len(low_samples)
    voh_rate = voh_violations / n_high if n_high > 0 else 0.0
    vol_rate = vol_violations / n_low if n_low > 0 else 0.0

    # Aging analysis
    failure_diagnosis = None
    time_to_failure_s = None
    drift_rate_mv_per_s = None

    if check_aging and n_samples > 1000:
        window_samples = int(time_window_s * sample_rate)
        # Guard: a window shorter than one sample would divide by zero.
        n_windows = n_samples // window_samples if window_samples > 0 else 0

        if n_windows >= 2:
            window_means = [
                np.mean(data[i * window_samples : (i + 1) * window_samples])
                for i in range(n_windows)
            ]

            drift = window_means[-1] - window_means[0]
            drift_rate_mv_per_s = float(drift * 1000 / (n_windows * time_window_s))

            if abs(drift_rate_mv_per_s) > 0.1:  # Significant drift
                # Estimate time to failure by extrapolating a downward
                # drift until the mean high level crosses VOH.
                if voh_min is not None and drift_rate_mv_per_s < 0 and n_high > 0:
                    current_margin = float(np.mean(high_samples)) - voh_min
                    if current_margin > 0:
                        time_to_failure_s = current_margin * 1000 / abs(drift_rate_mv_per_s)

                # Diagnose failure mode from the violation mix.
                if voh_violations > vol_violations:
                    failure_diagnosis = "Degraded output driver (weak high)"
                elif vol_violations > voh_violations:
                    failure_diagnosis = "Degraded output driver (weak low)"
                else:
                    failure_diagnosis = "General signal degradation"

    return SignalQualityResult(
        status=status,
        violation_count=total_violations,
        total_samples=n_samples,
        min_margin_mv=min_margin_mv,
        violations=violations,
        voh_violations=voh_violations,
        vol_violations=vol_violations,
        voh_rate=voh_rate,
        vol_rate=vol_rate,
        failure_diagnosis=failure_diagnosis,
        time_to_failure_s=time_to_failure_s,
        drift_rate_mv_per_s=drift_rate_mv_per_s,
    )

642 

643 

@dataclass
class TestPointCharacterization:
    """Characterization of a single test point.

    Attributes:
        channel_id: Channel identifier.
        v_low: Low voltage level (10th percentile of the data).
        v_high: High voltage level (90th percentile of the data).
        v_swing: Voltage swing (v_high - v_low).
        logic_family: Detected logic family ("UNKNOWN" if undetermined).
        confidence: Detection confidence (0.0 to 1.0).
        is_digital: True if the signal appears digital (bimodal histogram
            with a swing above 0.5 V).
        is_clock: True if the signal appears to be a clock (dominant
            spectral peak with roughly 50% duty cycle).
        frequency: Estimated frequency if periodic, else None.
    """

    channel_id: int
    v_low: float
    v_high: float
    v_swing: float
    logic_family: str
    confidence: float
    is_digital: bool
    is_clock: bool
    frequency: float | None

670 

def characterize_test_points(
    channels: list[WaveformTrace] | dict[int, WaveformTrace],
    *,
    sample_rate: float | None = None,
) -> dict[int, TestPointCharacterization]:
    """Batch characterize multiple test points.

    Analyzes 8-16 test points to build a voltage level map of an
    unknown board: per-channel voltage levels, detected logic family,
    digital/analog classification, and clock detection.

    Args:
        channels: List or dict of WaveformTrace objects.
        sample_rate: Sample rate in Hz (uses metadata if not specified).

    Returns:
        Dictionary mapping channel ID to TestPointCharacterization.

    Example:
        >>> channels = [trace.get_channel(i) for i in range(8)]
        >>> chars = characterize_test_points(channels)
        >>> for ch_id, char in chars.items():
        ...     print(f"CH{ch_id}: {char.logic_family} ({char.v_low:.2f}V - {char.v_high:.2f}V)")

    References:
        LEGACY-004: Multi-Channel Voltage Characterization
    """
    if isinstance(channels, list):
        channels = dict(enumerate(channels))

    # Run logic-family detection once, up front, for every channel.
    family_map = detect_logic_families_multi_channel(channels)

    characterizations: dict[int, TestPointCharacterization] = {}

    for ch_id, trace in channels.items():
        samples = trace.data
        fs = sample_rate or trace.metadata.sample_rate

        # Robust voltage statistics.
        lo = float(np.percentile(samples, 10))
        hi = float(np.percentile(samples, 90))
        swing = hi - lo

        # Fall back to an UNKNOWN result if detection produced nothing
        # for this channel.
        family_info = family_map.get(
            ch_id,
            LogicFamilyResult(
                family="UNKNOWN",
                confidence=0.0,
                v_low=lo,
                v_high=hi,
                alternatives=[],
            ),
        )

        # Digital = meaningful swing plus a bimodal voltage histogram.
        digital = swing > 0.5 and _is_bimodal(samples)

        clock = False
        freq = None

        if digital and fs is not None:
            # Look for a dominant spectral peak via Welch's method.
            from scipy import signal as sp_signal

            f, psd = sp_signal.welch(samples, fs=fs, nperseg=min(1024, len(samples)))
            peak_idx = np.argmax(psd[1:]) + 1  # Skip the DC bin
            if psd[peak_idx] > 10 * np.mean(psd):  # Strong peak
                freq = f[peak_idx]
                # A clock should sit near 50% duty cycle.
                midpoint = (lo + hi) / 2
                duty = np.mean(samples > midpoint)
                if 0.4 <= duty <= 0.6:
                    clock = True

        characterizations[ch_id] = TestPointCharacterization(
            channel_id=ch_id,
            v_low=lo,
            v_high=hi,
            v_swing=swing,
            logic_family=family_info.family,
            confidence=family_info.confidence,
            is_digital=digital,
            is_clock=clock,
            frequency=freq,
        )

    return characterizations

761 

762def _is_bimodal(data: NDArray[np.float64], bins: int = 50) -> bool: 

763 """Check if data has bimodal distribution. 

764 

765 Args: 

766 data: Signal data. 

767 bins: Number of histogram bins. 

768 

769 Returns: 

770 True if distribution appears bimodal (digital signal). 

771 False for analog signals (sine waves have many peaks). 

772 """ 

773 hist, bin_edges = np.histogram(data, bins=bins) 

774 centers = (bin_edges[:-1] + bin_edges[1:]) / 2 

775 

776 # Find peaks (including edge bins for perfect bimodal signals) 

777 threshold = 0.1 * np.max(hist) 

778 peaks = [] 

779 

780 # Check first bin (only needs to be > right neighbor) 

781 if len(hist) > 1 and hist[0] > hist[1] and hist[0] > threshold: 

782 peaks.append((0, hist[0], centers[0])) 

783 

784 # Check middle bins (need to be > both neighbors) 

785 for i in range(1, len(hist) - 1): 

786 if hist[i] > hist[i - 1] and hist[i] > hist[i + 1] and hist[i] > threshold: 

787 peaks.append((i, hist[i], centers[i])) 

788 

789 # Check last bin (only needs to be > left neighbor) 

790 if len(hist) > 1 and hist[-1] > hist[-2] and hist[-1] > threshold: 790 ↛ 794line 790 didn't jump to line 794 because the condition on line 790 was always true

791 peaks.append((len(hist) - 1, hist[-1], centers[-1])) 

792 

793 # Too many peaks suggests analog signal (e.g., sine wave) 

794 if len(peaks) >= 4: 

795 return False 

796 

797 # Bimodal if exactly 2-3 significant peaks that are well-separated 

798 if len(peaks) == 2 or len(peaks) == 3: 798 ↛ 818line 798 didn't jump to line 818 because the condition on line 798 was always true

799 peaks.sort(key=lambda x: x[1], reverse=True) 

800 

801 # Check if peaks are well-separated (digital signals have peaks at extremes) 

802 v_min, v_max = np.min(data), np.max(data) 

803 v_range = v_max - v_min 

804 if v_range == 0: 804 ↛ 805line 804 didn't jump to line 805 because the condition on line 804 was never true

805 return False 

806 

807 # Normalize peak positions 

808 peak_positions = [(p[2] - v_min) / v_range for p in peaks[:2]] 

809 

810 # Digital signals have one peak < 0.4 and one peak > 0.6 

811 has_low_peak = any(p < 0.4 for p in peak_positions) 

812 has_high_peak = any(p > 0.6 for p in peak_positions) 

813 

814 # Second peak should be significant 

815 if has_low_peak and has_high_peak and peaks[1][1] > 0.3 * peaks[0][1]: 815 ↛ 818line 815 didn't jump to line 818 because the condition on line 815 was always true

816 return True 

817 

818 return False 

819 

820 

# Public API of this module: the spec table, the result dataclasses, and
# the four analysis entry points (constants first, then alphabetical).
__all__ = [
    "LOGIC_FAMILY_SPECS",
    "CrossCorrelationResult",
    "LogicFamilyResult",
    "SignalQualityResult",
    "TestPointCharacterization",
    "assess_signal_quality",
    "characterize_test_points",
    "cross_correlate_multi_reference",
    "detect_logic_families_multi_channel",
]
831]