{
  "_name_or_path": "dar-tau/toy-autoencoder-4L-1L",
  "architectures": [
    "ToyAutoEncoder"
  ],
  "emb_structure": "all_onehot",
  "hidden_dim": 54,
  "intermediate_ratio": 4,
  "max_pos": 4,
  "model_type": "toy_autoencoder",
  "num_attention_heads": 1,
  "num_decoder_layers": 1,
  "num_encoder_layers": 2,
  "shared": true,
  "torch_dtype": "float32",
  "transformers_version": "4.37.2",
  "vocab_size": 50
}