A small aesthetic classifier that runs on the mid-block activations of the Stable Diffusion 2.1-base UNet. Example usage:

# Imports and device setup
import torch
from torch import nn
from diffusers import StableDiffusionPipeline, LMSDiscreteScheduler
from huggingface_hub import hf_hub_download

device = "cuda" if torch.cuda.is_available() else "cpu"

# Download the model weights
model_path = hf_hub_download(repo_id="johnowhitaker/sac_midu_mini",
                             filename="midu_model_aesthetic_classifier.pt")

# Load the aesthetic classifier
m = nn.Sequential(
    nn.Conv2d(1280, 256, kernel_size=3, padding=1), nn.ReLU(),
    nn.MaxPool2d(2, 2),
    nn.Conv2d(256, 128, kernel_size=3, padding=1), nn.ReLU(),
    nn.AdaptiveAvgPool2d(output_size=(2, 2)), nn.Flatten(),
    nn.Linear(128*4, 64), nn.ReLU(), nn.Linear(64, 10)).to(device)
m.load_state_dict(torch.load(model_path, map_location=device));
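
# (Optional sanity check, not from the original card: the classifier expects
# 1280-channel feature maps, and the adaptive pooling means any spatial size
# works; the 8x8 resolution below is just what SD 2.1-base produces for
# 512x512 images and is only illustrative.)
dummy_feats = torch.randn(1, 1280, 8, 8, device=device)  # (batch, channels, h, w)
with torch.no_grad():
    print(m(dummy_feats).shape)  # torch.Size([1, 10])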

# Load the SD pipeline and add a hook
pipe = StableDiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-2-1-base").to(device)
pipe.scheduler = LMSDiscreteScheduler(beta_start=0.00085, beta_end=0.012, beta_schedule="scaled_linear", num_train_timesteps=1000)
pipe.scheduler.set_timesteps(30)
def hook_fn(module, input, output):
    module.output = output 
pipe.unet.mid_block.register_forward_hook(hook_fn);

# Once the UNet's forward pass has run, the hook has stored the mid-block features, and you can score them with
preds = m(pipe.unet.mid_block.output)
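
# Putting it together (an illustrative sketch, not from the original card): run a
# normal generation, then score the mid-block features captured by the hook on the
# final UNet call. The prompt and step count are arbitrary; with the default
# guidance_scale the batch contains the unconditional and conditional halves.
image = pipe("a watercolor painting of a mountain lake", num_inference_steps=30).images[0]
with torch.no_grad():
    feats = pipe.unet.mid_block.output   # e.g. (2, 1280, 8, 8) for a 512x512 image
    preds = m(feats)                     # 10 outputs per batch element
print(preds.shape)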