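"""Package-level exports.

Re-exports the BitTransformerLM model together with its telemetry, bit-level
I/O, compression, distributed, quantization, safety, and training utilities so
they can be imported directly from the package root.
"""

# Core model, layers, and inference entry points.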
from .model import (
    PositionalEncoding,
    BitTransformerLM,
    ReversibleLoggingTransformerEncoderLayer,
    example_usage,
    example_training_step,
    infer_long_sequence,
    diffusion_inference,
)
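# Telemetry, dashboards, submodel collapse/distillation, and safety helpers.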
from .telemetry import TelemetrySynthesizer, detect_metric_drift
from .dashboard import plot_telemetry
from .dashboard_app import run_dashboard
from .collapse import collapse_submodel, save_distilled_model
from .safety import hil_safe_inference, demo_hil_safety, safe_sample_with_retry
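# Bit-level text I/O, parity enforcement, and bit-stream compression.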
from .bit_io import (
    text_to_bits,
    bits_to_text,
    infer_text,
)
from .parity import enforce_parity
from .compression import (
    compress_bits,
    decompress_bits,
    model_output_decompress,
    pack_bits,
    unpack_bits,
)
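# Distributed wrappers, optimization, scaling, distillation, quantization,
# training, model persistence, and checkpointing utilities.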
from .distributed import wrap_fsdp, make_pipeline
from .optimization import configure_optimizer, adjust_learning_rate
from .scale import expand_model
from .distil import distill_step, TelemetryLog
from .quantization import (
    quantize_dynamic,
    prepare_qat_fx,
    convert_qat_fx,
)
from .training import train_loop
from .utils import save_model, load_model, set_dropout
from .hf_checkpoint import hf_login, save_checkpoint, download_checkpoint
from .torch_utils import cpu_autocast

__all__ = [
    "PositionalEncoding",
    "BitTransformerLM",
    "ReversibleLoggingTransformerEncoderLayer",
    "example_usage",
    "example_training_step",
    "TelemetrySynthesizer",
    "detect_metric_drift",
    "collapse_submodel",
    "save_distilled_model",
    "hil_safe_inference",
    "demo_hil_safety",
    "safe_sample_with_retry",
    "text_to_bits",
    "bits_to_text",
    "infer_text",
    "enforce_parity",
    "plot_telemetry",
    "run_dashboard",
    "configure_optimizer",
    "adjust_learning_rate",
    "expand_model",
    "distill_step",
    "TelemetryLog",
    "quantize_dynamic",
    "prepare_qat_fx",
    "convert_qat_fx",
    "train_loop",
    "wrap_fsdp",
    "make_pipeline",
    "compress_bits",
    "decompress_bits",
    "model_output_decompress",
    "pack_bits",
    "unpack_bits",
    "infer_long_sequence",
    "diffusion_inference",
    "save_model",
    "load_model",
    "set_dropout",
    "hf_login",
    "save_checkpoint",
    "download_checkpoint",
    "cpu_autocast",
]