"""Package exports for BitTransformerLM: model, telemetry, safety, compression,
distillation, quantization, and training utilities."""

from .model import (
    PositionalEncoding,
    BitTransformerLM,
    ReversibleLoggingTransformerEncoderLayer,
    example_usage,
    example_training_step,
    infer_long_sequence,
    diffusion_inference,
)
from .telemetry import TelemetrySynthesizer, detect_metric_drift
from .dashboard import plot_telemetry
from .dashboard_app import run_dashboard
from .collapse import collapse_submodel, save_distilled_model
from .safety import hil_safe_inference, demo_hil_safety, safe_sample_with_retry
from .bit_io import (
    text_to_bits,
    bits_to_text,
    infer_text,
)
from .parity import enforce_parity
from .compression import (
    compress_bits,
    decompress_bits,
    model_output_decompress,
    pack_bits,
    unpack_bits,
)
from .distributed import wrap_fsdp, make_pipeline
from .optimization import configure_optimizer, adjust_learning_rate
from .scale import expand_model
from .distil import distill_step, TelemetryLog
from .quantization import (
    quantize_dynamic,
    prepare_qat_fx,
    convert_qat_fx,
)
from .training import train_loop
from .utils import save_model, load_model, set_dropout
from .hf_checkpoint import hf_login, save_checkpoint, download_checkpoint
from .torch_utils import cpu_autocast

__all__ = [
    "PositionalEncoding",
    "BitTransformerLM",
    "ReversibleLoggingTransformerEncoderLayer",
    "example_usage",
    "example_training_step",
    "TelemetrySynthesizer",
    "detect_metric_drift",
    "collapse_submodel",
    "save_distilled_model",
    "hil_safe_inference",
    "demo_hil_safety",
    "safe_sample_with_retry",
    "text_to_bits",
    "bits_to_text",
    "infer_text",
    "enforce_parity",
    "plot_telemetry",
    "run_dashboard",
    "configure_optimizer",
    "adjust_learning_rate",
    "expand_model",
    "distill_step",
    "TelemetryLog",
    "quantize_dynamic",
    "prepare_qat_fx",
    "convert_qat_fx",
    "train_loop",
    "wrap_fsdp",
    "make_pipeline",
    "compress_bits",
    "decompress_bits",
    "model_output_decompress",
    "pack_bits",
    "unpack_bits",
    "infer_long_sequence",
    "diffusion_inference",
    "save_model",
    "load_model",
    "set_dropout",
    "hf_login",
    "save_checkpoint",
    "download_checkpoint",
    "cpu_autocast",
]