{
  "_class_name": "SegDiTTransformer2DModel",
  "_diffusers_version": "0.30.3",
  "_name_or_path": "experiments/lifm_DiT-L_16f8_all_sv/checkpoint-900000/denoiser_ema",
  "activation_fn": "gelu-approximate",
  "attention_bias": true,
  "attention_head_dim": 64,
  "decay": 0.9999,
  "dropout": 0.0,
  "in_channels": 17,
  "inv_gamma": 1.0,
  "min_decay": 0.0,
  "norm_elementwise_affine": false,
  "norm_eps": 1e-05,
  "norm_num_groups": 32,
  "norm_type": "ada_norm_zero",
  "num_attention_heads": 16,
  "num_embeds_ada_norm": 1000,
  "num_layers": 24,
  "optimization_step": 100001,
  "out_channels": 16,
  "patch_size": 2,
  "power": 0.6666666666666666,
  "sample_size": 14,
  "upcast_attention": false,
  "update_after_step": 0,
  "use_ema_warmup": false
}
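
For reference, here is a minimal sketch of how the architecture fields above could be mapped onto the stock `DiTTransformer2DModel` in diffusers. This is an assumption: the config's actual class, `SegDiTTransformer2DModel`, is custom to this project and is not part of stock diffusers, and the EMA bookkeeping keys (`decay`, `inv_gamma`, `min_decay`, `optimization_step`, `power`, `update_after_step`, `use_ema_warmup`) belong to the saved EMA state rather than to the transformer's constructor.

```python
# Sketch only: maps the architecture keys of this config onto the stock
# diffusers DiTTransformer2DModel. The repo's SegDiTTransformer2DModel is a
# custom class, so treat this as an approximation, not the project's loader.
from diffusers import DiTTransformer2DModel

model = DiTTransformer2DModel(
    num_attention_heads=16,
    attention_head_dim=64,
    in_channels=17,
    out_channels=16,
    num_layers=24,
    dropout=0.0,
    norm_num_groups=32,
    attention_bias=True,
    sample_size=14,                    # spatial size of the latent input
    patch_size=2,
    activation_fn="gelu-approximate",
    num_embeds_ada_norm=1000,
    upcast_attention=False,
    norm_type="ada_norm_zero",
    norm_elementwise_affine=False,
    norm_eps=1e-5,
)

# The EMA-related keys in the config (decay, inv_gamma, min_decay,
# optimization_step, power, update_after_step, use_ema_warmup) are metadata
# saved alongside the denoiser_ema weights, not constructor arguments.
```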