Update app.py

app.py CHANGED

@@ -59,10 +59,11 @@ class calculateDuration:
 @spaces.GPU(duration=120)
 @torch.inference_mode()
 def generate_image(prompt, steps, seed, cfg_scale, width, height, progress):
-
-    generator = torch.Generator(device=device).manual_seed(seed)
+
     with calculateDuration("Generating image"):
         # Generate image
+        pipe.to(device)
+        generator = torch.Generator(device=device).manual_seed(seed)
         print(prompt, steps, seed, cfg_scale, width, height)
         generated_image = pipe(
             prompt=prompt,
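
Note on the hunk above: the device transfer (pipe.to(device)) and the seeded torch.Generator are now created inside the calculateDuration("Generating image") block, so both are included in the reported generation time. calculateDuration itself is defined elsewhere in app.py and is not part of this diff; the following is only a minimal sketch of a timing context manager consistent with how it is used here, and the printed message format is an assumption:

import time

class calculateDuration:
    def __init__(self, activity_name=""):
        # label shown next to the measured time, e.g. "Generating image"
        self.activity_name = activity_name

    def __enter__(self):
        self.start_time = time.time()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        elapsed = time.time() - self.start_time
        print(f"{self.activity_name} took {elapsed:.2f} seconds")
        return False  # let exceptions from the timed block propagate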

@@ -100,6 +101,7 @@ def upload_image_to_r2(image, account_id, access_key, secret_key, bucket_name):
     return image_file

 def run_lora(prompt, lora_strings_json, cfg_scale, steps, randomize_seed, seed, width, height, upload_to_r2, account_id, access_key, secret_key, bucket, progress=gr.Progress(track_tqdm=True)):
+    print("run_lora", prompt, lora_strings_json, cfg_scale, steps, width, height)

     # Load LoRA weights
     if lora_strings_json:
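
The hunk header above references upload_image_to_r2(image, account_id, access_key, secret_key, bucket_name), whose body is unchanged and therefore not shown. For orientation, a hypothetical sketch of such a helper using boto3's S3-compatible client against Cloudflare R2 follows; only the signature and the returned image_file value come from the diff, while the key naming and PNG encoding are assumptions:

import io
import uuid
import boto3

def upload_image_to_r2(image, account_id, access_key, secret_key, bucket_name):
    # Cloudflare R2 exposes an S3-compatible endpoint per account ID
    s3 = boto3.client(
        "s3",
        endpoint_url=f"https://{account_id}.r2.cloudflarestorage.com",
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
    )
    image_file = f"generated_images/{uuid.uuid4()}.png"  # hypothetical key scheme
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")  # PIL image returned by the diffusers pipeline
    buffer.seek(0)
    s3.upload_fileobj(buffer, bucket_name, image_file)
    return image_file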

@@ -131,9 +133,11 @@ def run_lora(prompt, lora_strings_json, cfg_scale, steps, randomize_seed, seed,
         seed = random.randint(0, MAX_SEED)

     # Generate image
+    error_message = ""
     try:
         final_image = generate_image(prompt, steps, seed, cfg_scale, width, height, progress)
-    except:
+    except Exception as e:
+        error_message = str(e)
         final_image = None

     if final_image:

@@ -144,7 +148,7 @@ def run_lora(prompt, lora_strings_json, cfg_scale, steps, randomize_seed, seed,
         else:
             result = {"status": "success", "message": "Image generated but not uploaded"}
     else:
-        result = {"status": "failed", "message":
+        result = {"status": "failed", "message": error_message}

     progress(100, "Completed!")

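
The two hunks above replace a bare except (which discarded the failure reason) with except Exception as e and thread the captured message into the failure result. A small self-contained illustration of the same pattern:

def run_task(task):
    # Capture the exception text instead of swallowing it, then surface it
    # in the returned status dict, mirroring the change to run_lora above.
    error_message = ""
    try:
        output = task()
    except Exception as e:
        error_message = str(e)
        output = None

    if output is not None:
        return {"status": "success", "message": "ok"}
    return {"status": "failed", "message": error_message}

print(run_task(lambda: 1 / 0))  # {'status': 'failed', 'message': 'division by zero'}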

@@ -165,8 +169,8 @@ with gr.Blocks(css=css) as demo:

     with gr.Column():

-        prompt = gr.Text(label="Prompt", placeholder="Enter prompt", lines=
-        lora_strings_json = gr.Text(label="LoRA
+        prompt = gr.Text(label="Prompt", placeholder="Enter prompt", lines=10)
+        lora_strings_json = gr.Text(label="LoRA Configs (JSON List String)", placeholder='[{"repo": "lora_repo1", "weights": "weights1", "adapter_name": "adapter_name1", "adapter_weight": 1}, {"repo": "lora_repo2", "weights": "weights2", "adapter_name": "adapter_name2", "adapter_weight": 1}]', lines=5)

         run_button = gr.Button("Run", scale=0)

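
The new lora_strings_json placeholder documents the expected input: a JSON list of objects with repo, weights, adapter_name, and adapter_weight keys. The code that consumes this value sits outside the diff; a hedged sketch of how such a list could be parsed and applied with diffusers' LoRA API (load_lora_weights / set_adapters), assuming a globally available pipe, might look like this:

import json

def apply_lora_configs(pipe, lora_strings_json):
    try:
        lora_configs = json.loads(lora_strings_json)
    except json.JSONDecodeError:
        return  # malformed JSON: skip LoRA loading rather than crash

    adapter_names, adapter_weights = [], []
    for cfg in lora_configs:
        # each entry: {"repo": ..., "weights": ..., "adapter_name": ..., "adapter_weight": ...}
        pipe.load_lora_weights(
            cfg["repo"],
            weight_name=cfg.get("weights"),
            adapter_name=cfg["adapter_name"],
        )
        adapter_names.append(cfg["adapter_name"])
        adapter_weights.append(cfg.get("adapter_weight", 1.0))

    if adapter_names:
        pipe.set_adapters(adapter_names, adapter_weights=adapter_weights)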