Coverage for pattern_lens/consts.py: 100%
23 statements
« prev ^ index » next — coverage.py v7.6.12, created at 2025-04-06 15:09 -0600
1"""implements some constants and types"""
3import re
4from typing import Literal
6import numpy as np
7import torch
8from jaxtyping import Float
# jaxtyping shape annotation over a numpy array: a square (n_ctx x n_ctx)
# attention pattern for a single head
AttentionMatrix = Float[np.ndarray, "n_ctx n_ctx"]
"type alias for attention matrix"
ActivationCacheNp = dict[str, np.ndarray]
"type alias for a cache of activations, like a transformer_lens.ActivationCache"

ActivationCacheTorch = dict[str, torch.Tensor]
"type alias for a cache of activations, like a transformer_lens.ActivationCache but without the extras. useful for when loading from an npz file"

DATA_DIR: str = "attn_data"
"default directory for attention data"

# parameterized as `re.Pattern[str]` (matches `str`, not `bytes`);
# capture group 1 is the layer index
ATTN_PATTERN_REGEX: re.Pattern[str] = re.compile(r"blocks\.(\d+)\.attn\.hook_pattern")
"regex for finding attention patterns in model state dicts"

# precise parameterization instead of bare `dict`
SPINNER_KWARGS: dict[str, dict[str, str]] = dict(
    config=dict(success="✔️ "),
)
"default kwargs for `muutils.spinner.Spinner`"

DIVIDER_S1: str = "=" * 70
"divider string for separating sections"

DIVIDER_S2: str = "-" * 50
"divider string for separating subsections"

# `None` means "do not return a cache"; the strings select the array backend
ReturnCache = Literal[None, "numpy", "torch"]
"return type for a cache of activations"