Coverage for src / tracekit / extensibility / validation.py: 76%
194 statements
« prev ^ index » next coverage.py v7.13.1, created at 2026-01-11 23:04 +0000
1"""Extension validation system for TraceKit plugins and custom decoders.
3This module provides comprehensive validation of extensions including metadata
4validation, interface compliance checking, dependency verification, and
5security checks.
8Example:
9 >>> from tracekit.extensibility.validation import validate_extension
10 >>> from pathlib import Path
11 >>>
12 >>> # Validate a plugin directory
13 >>> result = validate_extension(Path("my_plugin/"))
14 >>> if result.is_valid:
15 ... print("Plugin is valid!")
16 >>> else:
17 ... for error in result.errors:
18 ... print(f"Error: {error}")
19"""
21from __future__ import annotations
23import ast
24import inspect
25import logging
26from dataclasses import dataclass, field
27from typing import TYPE_CHECKING, Any
29if TYPE_CHECKING:
30 from collections.abc import Callable
31 from pathlib import Path
33logger = logging.getLogger(__name__)
@dataclass
class ValidationIssue:
    """A single validation issue found during extension validation.

    Attributes:
        severity: Severity level ("error", "warning", "info")
        message: Human-readable description of the issue
        location: Optional location information (file path, line number)
        fix_hint: Optional suggestion for fixing the issue
    """

    # One of "error", "warning", or "info"; set by the ValidationResult.add_* helpers.
    severity: str
    # Human-readable description of what was found.
    message: str
    # Where the issue was found; empty string when not applicable.
    location: str = ""
    # Actionable suggestion for resolving the issue; empty string when none.
    fix_hint: str = ""
@dataclass
class ValidationResult:
    """Outcome of validating an extension.

    Attributes:
        is_valid: True while no error-level issue has been recorded
        errors: Error-level issues (any entry forces is_valid to False)
        warnings: Warning-level issues (do not affect is_valid)
        info: Informational messages
        metadata: Metadata extracted from the extension during validation
    """

    is_valid: bool = True
    errors: list[ValidationIssue] = field(default_factory=list)
    warnings: list[ValidationIssue] = field(default_factory=list)
    info: list[ValidationIssue] = field(default_factory=list)
    metadata: dict[str, Any] = field(default_factory=dict)

    def add_error(self, message: str, location: str = "", fix_hint: str = "") -> None:
        """Record an error-level issue and mark the result invalid.

        Args:
            message: Error message
            location: Optional location
            fix_hint: Optional fix suggestion
        """
        issue = ValidationIssue(
            severity="error", message=message, location=location, fix_hint=fix_hint
        )
        self.errors.append(issue)
        # A single error invalidates the whole extension.
        self.is_valid = False

    def add_warning(self, message: str, location: str = "", fix_hint: str = "") -> None:
        """Record a warning-level issue; does not affect is_valid.

        Args:
            message: Warning message
            location: Optional location
            fix_hint: Optional fix suggestion
        """
        issue = ValidationIssue(
            severity="warning", message=message, location=location, fix_hint=fix_hint
        )
        self.warnings.append(issue)

    def add_info(self, message: str, location: str = "") -> None:
        """Record an informational message.

        Args:
            message: Info message
            location: Optional location
        """
        self.info.append(ValidationIssue(severity="info", message=message, location=location))

    @property
    def all_issues(self) -> list[ValidationIssue]:
        """All recorded issues: errors first, then warnings, then info."""
        return [*self.errors, *self.warnings, *self.info]
def validate_extension(
    extension_path: Path,
    *,
    check_dependencies: bool = True,
    check_security: bool = True,
    strict: bool = False,
) -> ValidationResult:
    """Validate an extension (plugin, decoder, etc.) at the given path.

    Runs the full validation pipeline:
    - Metadata validation (pyproject.toml or plugin.yaml)
    - Directory-structure checks
    - Entry-point validation
    - Implementation (syntax) checks
    - Dependency verification (optional)
    - Security heuristics (optional)

    Args:
        extension_path: Path to extension directory
        check_dependencies: Verify dependencies are satisfied
        check_security: Perform security checks
        strict: Enable strict validation (warnings become errors)

    Returns:
        ValidationResult with validation outcome

    Example:
        >>> from pathlib import Path
        >>> result = validate_extension(Path("plugins/my_decoder/"))
        >>> if not result.is_valid:
        ...     for error in result.errors:
        ...         print(f"Error: {error.message}")
        ...         if error.fix_hint:
        ...             print(f"  Fix: {error.fix_hint}")

    References:
        EXT-005: Extension Validation
    """
    result = ValidationResult()

    # Fail fast on unusable paths — nothing else can be checked.
    if not extension_path.exists():
        result.add_error(
            f"Extension path does not exist: {extension_path}",
            fix_hint="Check the path is correct",
        )
        return result

    if not extension_path.is_dir():
        result.add_error(
            f"Extension path is not a directory: {extension_path}",
            fix_hint="Provide path to extension directory",
        )
        return result

    result.add_info(f"Validating extension at: {extension_path}")

    # Core checks always run, in order: metadata, structure, entry points,
    # implementation syntax.
    for check in (
        _validate_metadata,
        _validate_structure,
        _validate_entry_points,
        _validate_implementation,
    ):
        check(extension_path, result)

    if check_dependencies:
        _check_dependencies(extension_path, result)

    if check_security:
        _check_security(extension_path, result)

    # Strict mode promotes every warning to an error and clears the
    # warning list afterwards.
    if strict and result.warnings:
        for w in result.warnings:
            result.add_error(
                f"Strict mode: {w.message}",
                location=w.location,
                fix_hint=w.fix_hint,
            )
        result.warnings = []

    return result
def validate_decoder_interface(
    decoder_class: type,
) -> ValidationResult:
    """Validate that a decoder class implements the required interface.

    Checks for:
    - Required methods (decode, get_metadata) — missing ones are errors
    - Optional methods (configure, reset, validate_config) if present
    - Method signatures (declared parameters; mismatches are warnings,
      since decorators and ``*args``/``**kwargs`` wrappers can hide the
      true signature)
    - A non-empty class docstring (required by EXT-006)

    Args:
        decoder_class: Decoder class to validate

    Returns:
        ValidationResult with validation outcome

    Example:
        >>> class MyDecoder:
        ...     '''Decode example traces.'''
        ...     def decode(self, trace):
        ...         return []
        ...     def get_metadata(self):
        ...         return {"name": "my_decoder"}
        >>> result = validate_decoder_interface(MyDecoder)
        >>> assert result.is_valid

    References:
        EXT-006: Custom Decoder Registration
    """
    result = ValidationResult()

    # Interface specification: expected parameter names and documented
    # return types for each method.
    required_methods = {
        "decode": {
            "params": ["self", "trace"],
            "returns": "list",
        },
        "get_metadata": {
            "params": ["self"],
            "returns": "dict",
        },
    }

    optional_methods = {
        "configure": {"params": ["self"], "returns": None},
        "reset": {"params": ["self"], "returns": None},
        "validate_config": {"params": ["self", "config"], "returns": "bool"},
    }

    # Required methods: must exist, be callable, and have the expected
    # parameters.
    for method_name, spec in required_methods.items():
        if not hasattr(decoder_class, method_name):
            result.add_error(
                f"Missing required method: {method_name}",
                location=f"{decoder_class.__name__}",
                fix_hint=f"Add method: def {method_name}(self, ...): ...",
            )
            continue

        method = getattr(decoder_class, method_name)
        if not callable(method):
            result.add_error(
                f"Method {method_name} is not callable",
                location=f"{decoder_class.__name__}.{method_name}",
            )
            continue

        _warn_on_signature_mismatch(decoder_class, method_name, spec["params"], result)

    # Optional methods: only checked when present.
    for method_name, spec in optional_methods.items():
        if hasattr(decoder_class, method_name):
            method = getattr(decoder_class, method_name)
            if not callable(method):
                result.add_warning(
                    f"Optional method {method_name} exists but is not callable",
                    location=f"{decoder_class.__name__}.{method_name}",
                )
            else:
                _warn_on_signature_mismatch(
                    decoder_class, method_name, spec["params"], result
                )

    # Check documentation requirements (EXT-006): a missing or
    # whitespace-only docstring is an error.
    if not decoder_class.__doc__ or not decoder_class.__doc__.strip():
        result.add_error(
            "Decoder class must have a docstring documenting its purpose and usage",
            location=f"{decoder_class.__name__}",
            fix_hint='Add docstring: """Decoder for XYZ protocol."""',
        )

    # Extract metadata describing the checked interface.
    result.metadata = {
        "class_name": decoder_class.__name__,
        "module": decoder_class.__module__,
        "required_methods": list(required_methods.keys()),
        "optional_methods": list(optional_methods.keys()),
        "has_docstring": decoder_class.__doc__ is not None,
    }

    if result.is_valid:
        result.add_info(f"Decoder interface validation passed for {decoder_class.__name__}")

    return result


def _warn_on_signature_mismatch(
    decoder_class: type,
    method_name: str,
    expected_params: list[str],
    result: ValidationResult,
) -> None:
    """Warn when a method's declared parameters diverge from the interface spec."""
    method = getattr(decoder_class, method_name)
    try:
        signature = inspect.signature(method)
    except (TypeError, ValueError):
        # Builtins and some C-implemented callables are not introspectable.
        return

    params = signature.parameters
    # *args/**kwargs can legitimately absorb the expected parameters.
    accepts_anything = any(
        p.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
        for p in params.values()
    )
    # self/cls are excluded so static/class methods don't trigger spurious warnings.
    missing = [
        name
        for name in expected_params
        if name not in params and name not in ("self", "cls")
    ]
    if missing and not accepts_anything:
        result.add_warning(
            f"Method {method_name} missing expected parameter(s): {', '.join(missing)}",
            location=f"{decoder_class.__name__}.{method_name}",
            fix_hint=f"Expected: def {method_name}({', '.join(expected_params)}): ...",
        )
def validate_hook_function(
    func: Callable[[Any], Any],
) -> ValidationResult:
    """Validate that a function is suitable for use as a hook.

    Checks:
    - Function signature accepts HookContext
    - Function returns HookContext
    - Function has docstring
    - Function handles exceptions

    Args:
        func: Hook function to validate

    Returns:
        ValidationResult with validation outcome

    Example:
        >>> def my_hook(context):
        ...     '''Validate context.'''
        ...     return context
        >>> result = validate_hook_function(my_hook)
        >>> assert result.is_valid

    References:
        EXT-005: Hook System
    """
    result = ValidationResult()

    # Non-callables cannot be hooks at all; report and bail out immediately.
    if not callable(func):
        result.add_error(  # type: ignore[unreachable]
            "Hook must be callable",
            fix_hint="Provide a function or callable object",
        )
        return result

    parameter_names = list(inspect.signature(func).parameters)

    # A hook receives the HookContext as its first argument.
    if not parameter_names:
        result.add_error(
            "Hook function must accept at least one parameter (context)",
            location=func.__name__,
            fix_hint="Add parameter: def hook(context): ...",
        )

    # Documentation is recommended but not mandatory for hooks.
    if not func.__doc__:
        result.add_warning(
            "Hook function should have a docstring",
            location=func.__name__,
            fix_hint='Add docstring: """Hook description."""',
        )

    result.metadata = {
        "name": func.__name__,
        "params": parameter_names,
        "has_docstring": func.__doc__ is not None,
    }

    if result.is_valid:
        result.add_info(f"Hook function validation passed for {func.__name__}")

    return result
387def _validate_metadata(extension_path: Path, result: ValidationResult) -> None:
388 """Validate extension metadata (pyproject.toml or plugin.yaml).
390 Args:
391 extension_path: Path to extension directory
392 result: ValidationResult to append issues to
393 """
394 pyproject = extension_path / "pyproject.toml"
395 plugin_yaml = extension_path / "plugin.yaml"
397 if not pyproject.exists() and not plugin_yaml.exists():
398 result.add_error(
399 "No metadata file found (pyproject.toml or plugin.yaml)",
400 location=str(extension_path),
401 fix_hint="Create pyproject.toml with [project] section",
402 )
403 return
405 if pyproject.exists(): 405 ↛ exitline 405 didn't return from function '_validate_metadata' because the condition on line 405 was always true
406 try:
407 import tomllib
409 with open(pyproject, "rb") as f:
410 data = tomllib.load(f)
412 # Check required project fields
413 if "project" not in data: 413 ↛ 414line 413 didn't jump to line 414 because the condition on line 413 was never true
414 result.add_error(
415 "pyproject.toml missing [project] section",
416 location=str(pyproject),
417 )
418 else:
419 project = data["project"]
420 required = ["name", "version", "description"]
421 for field in required:
422 if field not in project: 422 ↛ 423line 422 didn't jump to line 423 because the condition on line 422 was never true
423 result.add_error(
424 f"pyproject.toml missing required field: {field}",
425 location="[project]",
426 fix_hint=f'Add: {field} = "..."',
427 )
429 result.metadata.update(
430 {
431 "name": project.get("name", ""),
432 "version": project.get("version", ""),
433 "description": project.get("description", ""),
434 }
435 )
437 except Exception as e:
438 result.add_error(
439 f"Failed to parse pyproject.toml: {e}",
440 location=str(pyproject),
441 )
444def _validate_structure(extension_path: Path, result: ValidationResult) -> None:
445 """Validate extension directory structure.
447 Args:
448 extension_path: Path to extension directory
449 result: ValidationResult to append issues to
450 """
451 # Check for __init__.py
452 init_py = extension_path / "__init__.py"
453 if not init_py.exists():
454 result.add_warning(
455 "No __init__.py found",
456 location=str(extension_path),
457 fix_hint="Add __init__.py to make it a Python package",
458 )
460 # Check for tests directory
461 tests_dir = extension_path / "tests"
462 if not tests_dir.exists(): 462 ↛ 470line 462 didn't jump to line 470 because the condition on line 462 was always true
463 result.add_warning(
464 "No tests/ directory found",
465 location=str(extension_path),
466 fix_hint="Add tests/ directory with unit tests",
467 )
468 else:
469 # Check for test files
470 test_files = list(tests_dir.glob("test_*.py"))
471 if not test_files:
472 result.add_warning(
473 "No test files found in tests/",
474 location=str(tests_dir),
475 fix_hint="Add test_*.py files",
476 )
478 # Check for README
479 readme_files = list(extension_path.glob("README.*"))
480 if not readme_files: 480 ↛ exitline 480 didn't return from function '_validate_structure' because the condition on line 480 was always true
481 result.add_warning(
482 "No README file found",
483 location=str(extension_path),
484 fix_hint="Add README.md with usage documentation",
485 )
488def _validate_entry_points(extension_path: Path, result: ValidationResult) -> None:
489 """Validate entry points configuration.
491 Args:
492 extension_path: Path to extension directory
493 result: ValidationResult to append issues to
494 """
495 pyproject = extension_path / "pyproject.toml"
496 if not pyproject.exists():
497 return
499 try:
500 import tomllib
502 with open(pyproject, "rb") as f:
503 data = tomllib.load(f)
505 # Check for entry points
506 if "project" not in data or "entry-points" not in data["project"]: 506 ↛ 513line 506 didn't jump to line 513 because the condition on line 506 was always true
507 result.add_info(
508 "No entry points defined (plugin may be used as library)",
509 location=str(pyproject),
510 )
511 return
513 entry_points = data["project"]["entry-points"]
514 tracekit_groups = [k for k in entry_points if k.startswith("tracekit.")]
516 if not tracekit_groups:
517 result.add_warning(
518 "No TraceKit entry points found",
519 location="[project.entry-points]",
520 fix_hint="Add entry point like: tracekit.decoders = ...",
521 )
522 else:
523 result.metadata["entry_points"] = tracekit_groups
524 result.add_info(f"Found entry point groups: {', '.join(tracekit_groups)}")
526 except Exception as e:
527 result.add_warning(f"Failed to validate entry points: {e}")
530def _validate_implementation(extension_path: Path, result: ValidationResult) -> None:
531 """Validate extension implementation files.
533 Args:
534 extension_path: Path to extension directory
535 result: ValidationResult to append issues to
536 """
537 # Find Python files
538 py_files = list(extension_path.glob("*.py"))
539 py_files = [f for f in py_files if f.name != "__init__.py"]
541 if not py_files:
542 result.add_warning(
543 "No implementation files found",
544 location=str(extension_path),
545 fix_hint="Add Python module with implementation",
546 )
547 return
549 # Basic syntax check
550 for py_file in py_files:
551 try:
552 with open(py_file, encoding="utf-8") as f:
553 source = f.read()
554 ast.parse(source)
555 result.add_info(f"Syntax check passed: {py_file.name}")
556 except SyntaxError as e:
557 result.add_error(
558 f"Syntax error in {py_file.name}: {e}",
559 location=f"{py_file.name}:{e.lineno}",
560 fix_hint="Fix syntax error",
561 )
564def _check_dependencies(extension_path: Path, result: ValidationResult) -> None:
565 """Check extension dependencies are satisfied.
567 Args:
568 extension_path: Path to extension directory
569 result: ValidationResult to append issues to
570 """
571 pyproject = extension_path / "pyproject.toml"
572 if not pyproject.exists():
573 return
575 try:
576 import tomllib
578 with open(pyproject, "rb") as f:
579 data = tomllib.load(f)
581 if "project" not in data or "dependencies" not in data["project"]:
582 result.add_info("No dependencies declared")
583 return
585 dependencies = data["project"]["dependencies"]
586 result.metadata["dependencies"] = dependencies
588 # Check if tracekit is in dependencies
589 tracekit_deps = [d for d in dependencies if "tracekit" in d.lower()]
590 if not tracekit_deps:
591 result.add_warning(
592 "TraceKit not listed in dependencies",
593 location="[project.dependencies]",
594 fix_hint='Add: "tracekit>=0.1.0"',
595 )
597 except Exception as e:
598 result.add_warning(f"Failed to check dependencies: {e}")
601def _check_security(extension_path: Path, result: ValidationResult) -> None:
602 """Perform basic security checks on extension.
604 Args:
605 extension_path: Path to extension directory
606 result: ValidationResult to append issues to
607 """
608 # Check for common security issues
609 py_files = list(extension_path.rglob("*.py"))
611 dangerous_imports = ["pickle", "eval", "exec", "compile", "__import__"]
612 dangerous_calls = ["eval(", "exec(", "compile(", "__import__("]
614 for py_file in py_files:
615 try:
616 with open(py_file, encoding="utf-8") as f:
617 source = f.read()
619 # Check for dangerous imports
620 tree = ast.parse(source)
621 for node in ast.walk(tree):
622 if isinstance(node, ast.Import):
623 for alias in node.names:
624 if alias.name in dangerous_imports: 624 ↛ 623line 624 didn't jump to line 623 because the condition on line 624 was always true
625 result.add_warning(
626 f"Potentially unsafe import: {alias.name}",
627 location=f"{py_file.name}:{node.lineno}",
628 fix_hint="Consider safer alternatives",
629 )
631 # Check for dangerous function calls
632 for call in dangerous_calls:
633 if call in source: 633 ↛ 634line 633 didn't jump to line 634 because the condition on line 633 was never true
634 result.add_warning(
635 f"Potentially unsafe call: {call}",
636 location=py_file.name,
637 fix_hint="Avoid eval/exec for security",
638 )
640 except Exception:
641 # Ignore parse errors, already caught in implementation validation
642 pass
# Public API of this module; only these names are exported by star-imports.
__all__ = [
    "ValidationIssue",
    "ValidationResult",
    "validate_decoder_interface",
    "validate_extension",
    "validate_hook_function",
]