ChenDY committed
Commit bc2e792 · 1 Parent(s): 6c3ac85

width n height

Files changed (1)
  1. app.py +32 -6
app.py CHANGED
@@ -2,6 +2,7 @@ import random
 import os
 
 import spaces
+import numpy as np
 import torch
 from PIL import Image
 import huggingface_hub
@@ -11,6 +12,10 @@ from src.pipeline_flux_nag import NAGFluxPipeline
 from src.transformer_flux import NAGFluxTransformer2DModel
 
 
+MAX_SEED = np.iinfo(np.int32).max
+MAX_IMAGE_SIZE = 2048
+
+
 theme = gr.themes.Base(
     font=[gr.themes.GoogleFont('Libre Franklin'), gr.themes.GoogleFont('Public Sans'), 'system-ui', 'sans-serif'],
 )
@@ -31,8 +36,7 @@ pipe = pipe.to(device)
 
 examples = [
     ["Portrait of AI researcher.", "Glasses.", 5],
-    ["A beautiful cyborg.", "Robot.", 5],
-    ["Minimalist abstract line drawing: face portrait of a girl with long hair.", "Complex, detail.", 7],
+    ["Portrait of AI researcher.", "Male.", 5],
     ["A baby phoenix made of fire and flames is born from the smoking ashes.", "Low resolution, blurry, lack of details, illustration, cartoon, painting.", 5],
     ["A tiny astronaut hatching from an egg on the moon.", "Low resolution, blurry, lack of details, illustration, cartoon, painting.", 9]
 ]
@@ -42,6 +46,7 @@ def sample(
     prompt,
     negative_prompt=None, guidance_scale=3.5,
     nag_negative_prompt=None, nag_scale=5.0,
+    width=1024, height=1024,
     num_inference_steps=25,
     seed=2025, randomize_seed=False,
     compare=True,
@@ -49,10 +54,11 @@ def sample(
     prompt = prompt.strip()
     negative_prompt = negative_prompt.strip() if negative_prompt and negative_prompt.strip() else None
     guidance_scale = float(guidance_scale)
+    width, height = int(width), int(height)
     num_inference_steps = int(num_inference_steps)
 
     if (randomize_seed):
-        seed = random.randint(0, 9007199254740991)
+        seed = random.randint(0, MAX_SEED)
     else:
         seed = int(seed)
 
@@ -64,6 +70,8 @@ def sample(
         nag_negative_prompt=nag_negative_prompt,
         nag_scale=nag_scale,
         generator=generator,
+        width=width,
+        height=height,
         num_inference_steps=num_inference_steps,
     ).images[0]
 
@@ -74,6 +82,8 @@ def sample(
             negative_prompt=negative_prompt,
             guidance_scale=guidance_scale,
             generator=generator,
+            width=width,
+            height=height,
             num_inference_steps=num_inference_steps,
         ).images[0]
     else:
@@ -92,7 +102,7 @@ def sample_example(
         nag_negative_prompt=nag_negative_prompt,
         nag_scale=nag_scale,
     )
-    return outputs, 3.5, 25, seed, True
+    return outputs, 3.5, 1024, 1024, 25, seed, True
 
 
 css = '''
@@ -124,8 +134,23 @@ with gr.Blocks(css=css, theme=theme) as demo:
         with gr.Accordion("Advanced Settings", open=False):
             negative_prompt = gr.Textbox(label="Negative Prompt", value=None, visible=False)
             guidance_scale = gr.Slider(label="Guidance Scale", minimum=1., maximum=15., step=0.1, value=3.5)
+            with gr.Row():
+                width = gr.Slider(
+                    label="Width",
+                    minimum=256,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=32,
+                    value=1024,
+                )
+                height = gr.Slider(
+                    label="Height",
+                    minimum=256,
+                    maximum=MAX_IMAGE_SIZE,
+                    step=32,
+                    value=1024,
+                )
             num_inference_steps = gr.Slider(label="Inference Steps", minimum=1, maximum=50, step=1, value=25)
-            seed = gr.Slider(label="Seed", minimum=1, maximum=9007199254740991, step=1, randomize=True)
+            seed = gr.Slider(label="Seed", minimum=1, maximum=MAX_SEED, step=1, randomize=True)
            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
 
         gr.Examples(
@@ -136,7 +161,7 @@ with gr.Blocks(css=css, theme=theme) as demo:
                 nag_negative_prompt,
                 nag_scale,
             ],
-            outputs=[output, guidance_scale, num_inference_steps, seed, compare],
+            outputs=[output, guidance_scale, width, height, num_inference_steps, seed, compare],
             cache_examples="lazy",
         )
 
@@ -150,6 +175,7 @@ with gr.Blocks(css=css, theme=theme) as demo:
             prompt,
             negative_prompt, guidance_scale,
            nag_negative_prompt, nag_scale,
+            width, height,
            num_inference_steps,
            seed, randomize_seed,
            compare,
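
In summary, the commit threads user-selectable width and height through the Gradio UI and into both pipe() calls, capped at MAX_IMAGE_SIZE = 2048 with a step of 32, and lowers the random-seed ceiling from 9007199254740991 (2**53 - 1) to np.iinfo(np.int32).max. A minimal sketch of the new input coercion is below; the standalone helper name prepare_inputs and the torch.Generator construction are illustrative assumptions, not code taken from app.py.

import random

import numpy as np
import torch

MAX_SEED = np.iinfo(np.int32).max   # 2_147_483_647, as introduced by this commit
MAX_IMAGE_SIZE = 2048               # ceiling shared by the Width/Height sliders


def prepare_inputs(width=1024, height=1024, seed=2025, randomize_seed=False):
    # Hypothetical helper mirroring the coercion now done inside sample():
    # Gradio slider values may arrive as floats, so cast them to int first.
    width, height = int(width), int(height)
    if randomize_seed:
        # Fresh seed drawn from [0, MAX_SEED] rather than the old 2**53 - 1 ceiling.
        seed = random.randint(0, MAX_SEED)
    else:
        seed = int(seed)
    # Assumed: a torch.Generator seeded with this value drives the pipeline,
    # and width/height are forwarded as pipe(width=width, height=height, ...).
    generator = torch.Generator("cpu").manual_seed(seed)
    return width, height, seed, generator

Because width and height now appear in the gr.Examples outputs list, sample_example() also returns the two 1024 defaults so that cached examples reset the sliders along with the guidance scale, step count, seed, and compare flag.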