EchoFlow / lifm / FMiT-B4-4f4 / config.json
{
  "_class_name": "SegDiTTransformer2DModel",
  "_diffusers_version": "0.30.3",
  "_name_or_path": "experiments/lifm_DiT-B_4f4_all_sv/checkpoint-770000/denoiser_ema",
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 64,
  "decay": 0.9999,
  "dropout": 0.0,
  "in_channels": 5,
  "inv_gamma": 1.0,
  "min_decay": 0.0,
  "norm_elementwise_affine": false,
  "norm_eps": 1e-05,
  "norm_num_groups": 32,
  "norm_type": "ada_norm_zero",
  "num_attention_heads": 12,
  "num_embeds_ada_norm": 1000,
  "num_layers": 12,
  "optimization_step": 230001,
  "out_channels": 4,
  "patch_size": 4,
  "power": 0.6666666666666666,
  "sample_size": 28,
  "upcast_attention": false,
  "update_after_step": 0,
  "use_ema_warmup": false
}
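
For context, this config describes a DiT-B-scale transformer (12 layers, 12 attention heads of dimension 64, i.e. a 768-dim hidden size) operating on a 28x28 latent grid with a patch size of 4, taking 5 input channels and producing 4 output channels; the EMA-related keys (decay, inv_gamma, min_decay, power, update_after_step, use_ema_warmup, optimization_step) look like bookkeeping carried over from the saved EMA denoiser checkpoint. A minimal loading sketch follows, assuming SegDiTTransformer2DModel is the custom diffusers ModelMixin subclass defined in the EchoFlow codebase (the import path below is hypothetical):

import torch
# SegDiTTransformer2DModel is not part of stock diffusers; it is assumed to be
# provided by the EchoFlow codebase. The import path here is hypothetical.
from echoflow.models import SegDiTTransformer2DModel

# Load the EMA denoiser described by this config (repo id and subfolder taken
# from the file path above).
model = SegDiTTransformer2DModel.from_pretrained(
    "EchoFlow/lifm",
    subfolder="FMiT-B4-4f4",
)

# Sanity check against the config: 5 input channels, 4 output channels,
# 28x28 latent sample size, 4x4 patches (7x7 = 49 tokens per frame).
print(model.config.in_channels, model.config.out_channels, model.config.sample_size)

With norm_type "ada_norm_zero" and num_embeds_ada_norm set to 1000, the forward pass presumably also expects class_labels, and the "Seg" prefix suggests a segmentation conditioning channel folded into the 5 input channels; the exact call signature should be checked against the EchoFlow code rather than assumed from stock diffusers.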