Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -14,190 +14,10 @@ import tempfile
 import io
 import logging
 
-# -----------------------------
-# Google Gemini API related
-# -----------------------------
-import google.generativeai as genai
-import google.generativeai.types as genai_types
-
 logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
 
 ###############################################################################
-# 1.
-###############################################################################
-
-def maybe_translate_to_english(text: str) -> str:
-    """
-    If the text contains Korean, convert it to English using simple substitution rules.
-    """
-    translations = {
-        "안녕하세요": "Hello",
-        "환영합니다": "Welcome",
-        "안녕": "Hello",
-        "배너": "Banner",
-        # Add more as needed
-    }
-    for kr, en in translations.items():
-        if kr in text:
-            text = text.replace(kr, en)
-    return text
-
-###############################################################################
-# 2. Preparation for calling the Gemini API
-###############################################################################
-
-def save_binary_file(file_name, data):
-    """ Helper function that saves binary data to a file """
-    with open(file_name, "wb") as f:
-        f.write(data)
-
-def generate_by_google_genai(text, file_name, model="gemini-2.0-flash-exp"):
-    """
-    Call the Google Gemini API to perform text-based image editing/generation.
-    file_name: original image, uploaded temporarily and passed to the API
-    text: the text instruction to apply
-    """
-    api_key = os.getenv("GAPI_TOKEN")
-    if not api_key:
-        raise ValueError("GAPI_TOKEN is missing. Please set an API key.")
-
-    # Configure Gemini API authentication
-    genai.configure(api_key=api_key)
-
-    # Load the image file
-    with open(file_name, "rb") as f:
-        image_data = f.read()
-
-    # Build the request to match the current Gemini API structure
-    model = genai.GenerativeModel(model)
-
-    # Build the content passed to the API
-    image_parts = [
-        {
-            "mime_type": "image/png",
-            "data": image_data
-        },
-        text
-    ]
-
-    # Generation (editing) settings
-    generation_config = {
-        "temperature": 1,
-        "top_p": 0.95,
-        "top_k": 40,
-        "max_output_tokens": 8192,
-    }
-
-    text_response = ""   # accumulated text returned by the API
-    image_path = None    # local path of the image file returned by the API
-
-    # Save the edited image to a temporary file
-    with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp:
-        temp_path = tmp.name
-
-    try:
-        # Receive the response as a stream
-        response = model.generate_content(
-            contents=image_parts,
-            generation_config=generation_config,
-            stream=True
-        )
-
-        # Extract image and text from the streamed chunks
-        for chunk in response:
-            # Check whether the chunk contains image data
-            if hasattr(chunk, 'parts'):
-                for part in chunk.parts:
-                    if hasattr(part, 'inline_data') and part.inline_data:
-                        save_binary_file(temp_path, part.inline_data.data)
-                        image_path = temp_path
-                    elif hasattr(part, 'text'):
-                        text_response += part.text
-
-    except Exception as e:
-        logging.error(f"Gemini API error: {e}")
-        return None, str(e)
-
-    return image_path, text_response
-
-###############################################################################
-# 3. Function that inserts/edits text in an image (two Gemini API calls)
-###############################################################################
-
-def change_text_in_image_two_times(original_image, instruction):
-    """
-    Call the Gemini API twice to generate two versions.
-    """
-    import numpy as np
-
-    # If the image is a numpy.ndarray, convert it to a PIL image
-    if isinstance(original_image, np.ndarray):
-        original_image = Image.fromarray(original_image)
-
-    results = []
-    for version_tag in ["(A)", "(B)"]:
-        mod_instruction = f"{instruction} {version_tag}"
-        try:
-            with tempfile.NamedTemporaryFile(suffix=".png", delete=False) as tmp:
-                original_path = tmp.name
-                if isinstance(original_image, Image.Image):
-                    original_image.save(original_path, format="PNG")
-                    logging.debug(f"[DEBUG] Saved image to temporary file: {original_path}")
-                else:
-                    raise gr.Error(f"Expected a PIL Image but received type {type(original_image)}.")
-            # Call the Gemini API
-            image_path, text_response = generate_by_google_genai(
-                text=mod_instruction,
-                file_name=original_path
-            )
-            if image_path:
-                # Load the returned image
-                try:
-                    with open(image_path, "rb") as f:
-                        image_data = f.read()
-                    new_img = Image.open(io.BytesIO(image_data))
-                    results.append(new_img)
-                except Exception as img_err:
-                    logging.error(f"[ERROR] Failed to process Gemini image: {img_err}")
-                    results.append(original_image)
-            else:
-                logging.warning(f"[WARNING] No image was returned. Text response: {text_response}")
-                results.append(original_image)
-        except Exception as e:
-            logging.exception(f"Text modification error: {e}")
-            results.append(original_image)
-    return results
-
-###############################################################################
-# 4. Functions for text rendering (inserting characters into the image)
-###############################################################################
-
-def gemini_text_rendering(image, rendering_text):
-    """
-    Apply text rendering to the given image via the Gemini API.
-    """
-    rendering_text_en = maybe_translate_to_english(rendering_text)
-    instruction = (
-        f"Render the following text on the image in a clear, visually appealing manner: "
-        f"{rendering_text_en}."
-    )
-    # Insert text into the image (two A/B versions are generated); only the first one is returned here
-    rendered_images = change_text_in_image_two_times(image, instruction)
-    if rendered_images and len(rendered_images) > 0:
-        return rendered_images[0]
-    return image
-
-def apply_text_rendering(image, rendering_text):
-    """
-    If rendering_text is present, apply text insertion via the Gemini API.
-    Otherwise, return the original image unchanged.
-    """
-    if rendering_text and rendering_text.strip():
-        return gemini_text_rendering(image, rendering_text)
-    return image
-
-###############################################################################
-# 5. Diffusion Pipeline loading and basic setup
 ###############################################################################
 
 SAVE_DIR = "saved_images"
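For reference, the removed helpers above composed as follows: maybe_translate_to_english rewrites known Korean phrases, gemini_text_rendering builds the instruction and calls change_text_in_image_two_times, and apply_text_rendering returns the first of the two Gemini variants (or the input image on failure). A minimal usage sketch, assuming GAPI_TOKEN is set and a local banner.png exists (both illustrative, not part of the Space):

    from PIL import Image

    img = Image.open("banner.png")                  # any local PIL image (illustrative path)
    out = apply_text_rendering(img, "안녕하세요")    # phrase is translated to "Hello" before the API call
    if out is not img:                              # the helpers fall back to the input image on failure
        out.save("banner_with_text.png")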
@@ -307,33 +127,91 @@ def inference(
         error_img = Image.new('RGB', (width, height), color='red')
         return error_img, seed, load_generated_images()
 
-
-
-    """
-
-    Function used to automatically fill in the text rendering input field
-    """
-
-
-    double_quotes = re.findall(r'"([^"]*)"', prompt)
-
-
-
-
-
 
 ###############################################################################
-#
 ###############################################################################
 
 examples = [
     "Ghibli style futuristic stormtrooper with glossy white armor and a sleek helmet, standing heroically on a lush alien planet, vibrant flowers blooming around, soft sunlight illuminating the scene, a gentle breeze rustling the leaves. The armor reflects the pink and purple hues of the alien sunset, creating an ethereal glow around the figure. [trigger]",
-    "Ghibli style
     "Ghibli style ancient forest guardian robot, covered in moss and flowering vines, sitting peacefully in a crystal-clear lake. Its gentle eyes glow with soft blue light, while bioluminescent dragonflies dance around its weathered metal frame. Ancient tech symbols on its surface pulse with a gentle rhythm. [trigger]",
-    "Ghibli style
     "Ghibli style cyber-shrine maiden with flowing holographic robes, performing a ritual dance among floating lanterns and digital cherry blossoms. Her traditional headdress emits soft light patterns, while spirit-like AI constructs swirl around her in elegant patterns. The scene is set in a modern shrine with both ancient wood and sleek chrome elements. [trigger]",
-    "Ghibli style robot farmer tending to floating rice paddies in the sky, wearing a traditional straw hat with advanced sensors
 ]
 
 css = """
@@ -534,6 +412,12 @@ footer {
     color: var(--primary-color) !important;
     margin-bottom: 1rem !important;
 }
 """
 
 with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
@@ -545,98 +429,155 @@ with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
     </div>
     ''')
 
-    with gr.
-
-
-
-
-
-
-
-
-
-                    label="Text Rendering (Multilingual: English, Korean...)",
-                    placeholder="Man saying '안녕' in 'speech bubble'",
-                    lines=1
-                )
-
-                with gr.Row():
-                    run_button = gr.Button("✨ Generate Image", elem_classes="primary")
-                    clear_button = gr.Button("Clear", elem_classes="secondary")
-
-                with gr.Accordion("Advanced Settings", open=False, elem_classes="settings-accordion"):
-                    with gr.Row():
-                        seed = gr.Slider(
-                            label="Seed",
-                            minimum=0,
-                            maximum=MAX_SEED,
-                            step=1,
-                            value=42,
-                        )
-                        randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
-                    with gr.Row():
-                        width = gr.Slider(
-                            label="Width",
-                            minimum=256,
-                            maximum=MAX_IMAGE_SIZE,
-                            step=32,
-                            value=1024,
-                        )
-                        height = gr.Slider(
-                            label="Height",
-                            minimum=256,
-                            maximum=MAX_IMAGE_SIZE,
-                            step=32,
-                            value=768,
-                        )
-                    with gr.Row():
-                        guidance_scale = gr.Slider(
-                            label="Guidance scale",
-                            minimum=0.0,
-                            maximum=10.0,
-                            step=0.1,
-                            value=3.5,
-                        )
-                    with gr.Row():
-                        num_inference_steps = gr.Slider(
-                            label="Steps",
-                            minimum=1,
-                            maximum=50,
-                            step=1,
-                            value=30,
-                        )
-                        lora_scale = gr.Slider(
-                            label="LoRA scale",
-                            minimum=0.0,
-                            maximum=1.0,
-                            step=0.1,
-                            value=1.0,
                         )
-
-
-
-
 
-
-
-            generation_status = gr.HTML('<div class="status-complete">Ready to generate</div>')
-            result = gr.Image(label="Generated Image", elem_id="result-image")
-            seed_text = gr.Number(label="Used Seed", value=42)
-
-    with gr.Tabs(elem_classes="tabs") as tabs:
-        with gr.TabItem("Gallery"):
-            with gr.Group(elem_classes="container"):
-                gallery_header = gr.Markdown("### 🖼️ Your Generated Masterpieces")
                 with gr.Row():
-
-
-
-
-
-
-
 
 ###########################################################################
 # Gradio Helper Functions
@@ -645,13 +586,22 @@ with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
         return load_generated_images()
 
     def clear_output():
-        return "", gr.update(value=None), seed, '<div class="status-complete">Ready to generate</div>'
 
     def before_generate():
         return '<div class="status-generating">Generating image...</div>'
 
    def after_generate(image, seed_num, gallery):
         return image, seed_num, gallery, '<div class="status-complete">Generation complete!</div>'
 
 ###########################################################################
 # Gradio Event Wiring
@@ -662,25 +612,17 @@ with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
         outputs=generated_gallery,
     )
 
     clear_button.click(
         fn=clear_output,
         inputs=None,
-        outputs=[prompt, result, seed_text, generation_status
     )
 
-    # 1) Show the status
-    # 2) Extract the text
-    # 3) Generate the image
-    # 4) Update the status
-    # 5) Text rendering (if any)
     run_button.click(
         fn=before_generate,
         inputs=None,
         outputs=generation_status,
-    ).then(
-        fn=extract_text_for_rendering,  # Added: text extraction function
-        inputs=prompt,
-        outputs=text_rendering,  # update the text-rendering field with the extracted text
     ).then(
         fn=inference,
         inputs=[
@@ -698,21 +640,12 @@ with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
         fn=after_generate,
         inputs=[result, seed_text, generated_gallery],
         outputs=[result, seed_text, generated_gallery, generation_status],
-    ).then(
-        fn=apply_text_rendering,
-        inputs=[result, text_rendering],
-        outputs=result
     )
 
-    # Run the same chain when the prompt is submitted
     prompt.submit(
         fn=before_generate,
         inputs=None,
         outputs=generation_status,
-    ).then(
-        fn=extract_text_for_rendering,  # Added: text extraction function
-        inputs=prompt,
-        outputs=text_rendering,  # update the text-rendering field with the extracted text
     ).then(
         fn=inference,
         inputs=[
@@ -730,10 +663,35 @@ with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
         fn=after_generate,
         inputs=[result, seed_text, generated_gallery],
         outputs=[result, seed_text, generated_gallery, generation_status],
     ).then(
-        fn=
-        inputs=[
-        outputs=
     )
 
     # JS: automatically fill the prompt when an example is clicked
@@ -756,7 +714,7 @@ with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
     """)
 
 ###############################################################################
-#
 ###############################################################################
 try:
     demo.queue(concurrency_count=1, max_size=20)
 import io
 import logging
 
 logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
 
 ###############################################################################
+# 1. Diffusion Pipeline loading and basic setup
 ###############################################################################
 
 SAVE_DIR = "saved_images"
         error_img = Image.new('RGB', (width, height), color='red')
         return error_img, seed, load_generated_images()
 
+###############################################################################
+# 2. Added: image upload processing function
+###############################################################################
+
+def process_uploaded_image(
+    image,
+    prompt: str,
+    seed: int,
+    randomize_seed: bool,
+    guidance_scale: float,
+    num_inference_steps: int,
+    lora_scale: float,
+    progress: gr.Progress = gr.Progress(track_tqdm=True),
+):
     """
+    Convert an uploaded image to Ghibli style.
     """
+    if image is None:
+        return None, seed, load_generated_images()
 
+    if randomize_seed:
+        seed = random.randint(0, MAX_SEED)
+    generator = torch.Generator(device=device).manual_seed(seed)
+
+    try:
+        # Preprocess the image
+        if isinstance(image, np.ndarray):
+            image_pil = Image.fromarray(image)
+        else:
+            image_pil = image
+
+        # Resize the image (keep the original aspect ratio)
+        width, height = image_pil.size
+        max_size = 768
+        if width > height:
+            if width > max_size:
+                ratio = max_size / width
+                new_width = max_size
+                new_height = int(height * ratio)
+        else:
+            if height > max_size:
+                ratio = max_size / height
+                new_height = max_size
+                new_width = int(width * ratio)
+
+        if width > max_size or height > max_size:
+            image_pil = image_pil.resize((new_width, new_height), Image.LANCZOS)
+
+        # If the prompt is empty, use a default prompt
+        if not prompt or prompt.strip() == "":
+            prompt = "Ghibli style artwork, high quality, detailed"
+        elif "ghibli" not in prompt.lower() and "studio ghibli" not in prompt.lower():
+            prompt = f"Ghibli style {prompt}, high quality, detailed"
+
+        # Image generation (img2img)
+        # Note: adjust this part depending on whether the actual FLUX model supports img2img
+        image = pipeline(
+            prompt=prompt,
+            image=image_pil,  # image input
+            guidance_scale=guidance_scale,
+            num_inference_steps=num_inference_steps,
+            generator=generator,
+            joint_attention_kwargs={"scale": lora_scale},
+            strength=0.75,  # strength of the transformation (0: keep the original, 1: generate a completely new image)
+        ).images[0]
+
+        filepath = save_generated_image(image, f"Uploaded image with prompt: {prompt}")
+        return image, seed, load_generated_images()
+
+    except Exception as e:
+        logging.error(f"Error during image processing: {e}")
+        return image, seed, load_generated_images()
+
 
 ###############################################################################
+# 3. Gradio UI
 ###############################################################################
 
 examples = [
     "Ghibli style futuristic stormtrooper with glossy white armor and a sleek helmet, standing heroically on a lush alien planet, vibrant flowers blooming around, soft sunlight illuminating the scene, a gentle breeze rustling the leaves. The armor reflects the pink and purple hues of the alien sunset, creating an ethereal glow around the figure. [trigger]",
+    "Ghibli style wonder woman, she saying \"hello\" in 'speech bubble', her blue costume shining under the sun, surrounded by floating islands with waterfalls, magical sparkles in the air. [trigger]",
     "Ghibli style ancient forest guardian robot, covered in moss and flowering vines, sitting peacefully in a crystal-clear lake. Its gentle eyes glow with soft blue light, while bioluminescent dragonflies dance around its weathered metal frame. Ancient tech symbols on its surface pulse with a gentle rhythm. [trigger]",
+    "Ghibli style superhero girl saying \"I will protect you\" in a 'speech bubble', her cape flowing in the wind as she stands on a rooftop at sunset, with city lights beginning to twinkle below. [trigger]",
     "Ghibli style cyber-shrine maiden with flowing holographic robes, performing a ritual dance among floating lanterns and digital cherry blossoms. Her traditional headdress emits soft light patterns, while spirit-like AI constructs swirl around her in elegant patterns. The scene is set in a modern shrine with both ancient wood and sleek chrome elements. [trigger]",
+    "Ghibli style robot farmer saying \"Harvest time!\" in 'speech bubble', tending to floating rice paddies in the sky, wearing a traditional straw hat with advanced sensors, surrounded by flying fish that leave trails of sparkles. [trigger]"
 ]
 
 css = """
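The added process_uploaded_image function reuses the existing text-to-image pipeline object with an image argument, and its own comment says to adjust that call depending on whether the loaded FLUX model supports img2img. A minimal sketch of one way that adjustment could look with diffusers' dedicated image-to-image pipeline follows; the model id, LoRA path, and parameter values are illustrative assumptions, not taken from this Space's code.

    # Sketch only: assumes diffusers >= 0.30 (FluxImg2ImgPipeline) and a CUDA device;
    # "black-forest-labs/FLUX.1-dev" and the LoRA path are illustrative.
    import torch
    from diffusers import FluxImg2ImgPipeline
    from PIL import Image

    img2img_pipe = FluxImg2ImgPipeline.from_pretrained(
        "black-forest-labs/FLUX.1-dev", torch_dtype=torch.bfloat16
    ).to("cuda")
    # img2img_pipe.load_lora_weights("path/to/ghibli-lora")  # hypothetical LoRA location

    source = Image.open("photo.png").convert("RGB")
    result = img2img_pipe(
        prompt="Ghibli style artwork, high quality, detailed",
        image=source,
        strength=0.75,              # same meaning as in process_uploaded_image above
        guidance_scale=3.5,
        num_inference_steps=30,
        generator=torch.Generator("cuda").manual_seed(42),
    ).images[0]
    result.save("ghibli_photo.png")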
     color: var(--primary-color) !important;
     margin-bottom: 1rem !important;
 }
+.tab-nav {
+    margin-bottom: 1rem !important;
+    background-color: var(--panel-background) !important;
+    border-radius: var(--border-radius) !important;
+    overflow: hidden !important;
+}
 """
 
 with gr.Blocks(css=css, analytics_enabled=False, theme="soft") as demo:
     </div>
     ''')
 
+    with gr.Tabs() as mode_tabs:
+        # Text-to-image generation tab
+        with gr.TabItem("Text-to-Image") as text_to_image_tab:
+            with gr.Row():
+                with gr.Column(scale=3):
+                    with gr.Group(elem_classes="container"):
+                        prompt = gr.Textbox(
+                            label="Enter your imagination",
+                            placeholder="Describe your Ghibli-style image here...",
+                            lines=3
                         )
+
+                        with gr.Row():
+                            run_button = gr.Button("✨ Generate Image", elem_classes="primary")
+                            clear_button = gr.Button("Clear", elem_classes="secondary")
+
+                        with gr.Accordion("Advanced Settings", open=False, elem_classes="settings-accordion"):
+                            with gr.Row():
+                                seed = gr.Slider(
+                                    label="Seed",
+                                    minimum=0,
+                                    maximum=MAX_SEED,
+                                    step=1,
+                                    value=42,
+                                )
+                                randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+                            with gr.Row():
+                                width = gr.Slider(
+                                    label="Width",
+                                    minimum=256,
+                                    maximum=MAX_IMAGE_SIZE,
+                                    step=32,
+                                    value=1024,
+                                )
+                                height = gr.Slider(
+                                    label="Height",
+                                    minimum=256,
+                                    maximum=MAX_IMAGE_SIZE,
+                                    step=32,
+                                    value=768,
+                                )
+                            with gr.Row():
+                                guidance_scale = gr.Slider(
+                                    label="Guidance scale",
+                                    minimum=0.0,
+                                    maximum=10.0,
+                                    step=0.1,
+                                    value=3.5,
+                                )
+                            with gr.Row():
+                                num_inference_steps = gr.Slider(
+                                    label="Steps",
+                                    minimum=1,
+                                    maximum=50,
+                                    step=1,
+                                    value=30,
+                                )
+                                lora_scale = gr.Slider(
+                                    label="LoRA scale",
+                                    minimum=0.0,
+                                    maximum=1.0,
+                                    step=0.1,
+                                    value=1.0,
+                                )
+
+                    with gr.Group(elem_classes="container"):
+                        gr.Markdown("### ✨ Example Prompts")
+                        examples_html = '\n'.join([f'<div class="example-prompt">{ex}</div>' for ex in examples])
+                        example_container = gr.HTML(examples_html)
+
+                with gr.Column(scale=4):
+                    with gr.Group(elem_classes="container"):
+                        generation_status = gr.HTML('<div class="status-complete">Ready to generate</div>')
+                        result = gr.Image(label="Generated Image", elem_id="result-image")
+                        seed_text = gr.Number(label="Used Seed", value=42)
 
+        # Image-to-image conversion tab
+        with gr.TabItem("Image-to-Image") as image_to_image_tab:
             with gr.Row():
+                with gr.Column(scale=3):
+                    with gr.Group(elem_classes="container"):
+                        upload_image = gr.Image(
+                            label="Upload your image",
+                            type="pil",
+                            elem_id="upload-image"
+                        )
+                        img2img_prompt = gr.Textbox(
+                            label="Optional: Describe additional details",
+                            placeholder="Add details or leave empty for default Ghibli style...",
+                            lines=2
+                        )
+
+                        with gr.Row():
+                            process_button = gr.Button("🎨 Process Image", elem_classes="primary")
+                            clear_img_button = gr.Button("Clear", elem_classes="secondary")
+
+                        with gr.Accordion("Advanced Settings", open=False, elem_classes="settings-accordion"):
+                            with gr.Row():
+                                img2img_seed = gr.Slider(
+                                    label="Seed",
+                                    minimum=0,
+                                    maximum=MAX_SEED,
+                                    step=1,
+                                    value=42,
+                                )
+                                img2img_random_seed = gr.Checkbox(label="Randomize seed", value=True)
+                            with gr.Row():
+                                img2img_guidance_scale = gr.Slider(
+                                    label="Guidance scale",
+                                    minimum=0.0,
+                                    maximum=10.0,
+                                    step=0.1,
+                                    value=3.5,
+                                )
+                            with gr.Row():
+                                img2img_steps = gr.Slider(
+                                    label="Steps",
+                                    minimum=1,
+                                    maximum=50,
+                                    step=1,
+                                    value=30,
+                                )
+                                img2img_lora_scale = gr.Slider(
+                                    label="LoRA scale",
+                                    minimum=0.0,
+                                    maximum=1.0,
+                                    step=0.1,
+                                    value=1.0,
+                                )
+
+                with gr.Column(scale=4):
+                    with gr.Group(elem_classes="container"):
+                        img2img_status = gr.HTML('<div class="status-complete">Ready to process</div>')
+                        img2img_result = gr.Image(label="Processed Image", elem_id="img2img-result")
+                        img2img_seed_text = gr.Number(label="Used Seed", value=42)
+
+    with gr.Group(elem_classes="container"):
+        with gr.Tabs(elem_classes="tabs") as gallery_tabs:
+            with gr.TabItem("Gallery"):
+                gallery_header = gr.Markdown("### 🖼️ Your Generated Masterpieces")
+                with gr.Row():
+                    refresh_btn = gr.Button("🔄 Refresh Gallery", elem_classes="secondary")
+                generated_gallery = gr.Gallery(
+                    label="Generated Images",
+                    columns=3,
+                    value=load_generated_images(),
+                    height="500px",
+                    elem_classes="gallery-item"
+                )
 
 ###########################################################################
 # Gradio Helper Functions
         return load_generated_images()
 
     def clear_output():
+        return "", gr.update(value=None), seed, '<div class="status-complete">Ready to generate</div>'
+
+    def clear_img2img_output():
+        return None, "", img2img_seed, '<div class="status-complete">Ready to process</div>', None
 
     def before_generate():
         return '<div class="status-generating">Generating image...</div>'
 
+    def before_process_image():
+        return '<div class="status-generating">Processing image...</div>'
+
     def after_generate(image, seed_num, gallery):
         return image, seed_num, gallery, '<div class="status-complete">Generation complete!</div>'
+
+    def after_process_image(image, seed_num, gallery):
+        return image, seed_num, gallery, '<div class="status-complete">Processing complete!</div>'
 
 ###########################################################################
 # Gradio Event Wiring
 
         outputs=generated_gallery,
     )
 
+    # Text-to-image generation events
     clear_button.click(
         fn=clear_output,
         inputs=None,
+        outputs=[prompt, result, seed_text, generation_status]
     )
 
     run_button.click(
         fn=before_generate,
         inputs=None,
         outputs=generation_status,
     ).then(
         fn=inference,
         inputs=[
 
         fn=after_generate,
         inputs=[result, seed_text, generated_gallery],
         outputs=[result, seed_text, generated_gallery, generation_status],
     )
 
     prompt.submit(
         fn=before_generate,
         inputs=None,
         outputs=generation_status,
     ).then(
         fn=inference,
         inputs=[
 
         fn=after_generate,
         inputs=[result, seed_text, generated_gallery],
         outputs=[result, seed_text, generated_gallery, generation_status],
+    )
+
+    # Image-to-image conversion events
+    clear_img_button.click(
+        fn=clear_img2img_output,
+        inputs=None,
+        outputs=[upload_image, img2img_prompt, img2img_seed_text, img2img_status, img2img_result]
+    )
+
+    process_button.click(
+        fn=before_process_image,
+        inputs=None,
+        outputs=img2img_status,
+    ).then(
+        fn=process_uploaded_image,
+        inputs=[
+            upload_image,
+            img2img_prompt,
+            img2img_seed,
+            img2img_random_seed,
+            img2img_guidance_scale,
+            img2img_steps,
+            img2img_lora_scale,
+        ],
+        outputs=[img2img_result, img2img_seed_text, generated_gallery],
     ).then(
+        fn=after_process_image,
+        inputs=[img2img_result, img2img_seed_text, generated_gallery],
+        outputs=[img2img_result, img2img_seed_text, generated_gallery, img2img_status],
     )
 
     # JS: automatically fill the prompt when an example is clicked
     """)
 
 ###############################################################################
+# 4. Run
 ###############################################################################
 try:
     demo.queue(concurrency_count=1, max_size=20)