|
@@ -108,7 +108,7 @@


 @torch.no_grad()
-def worker(input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf):
+def worker(input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf, resolution):
     total_latent_sections = (total_second_length * 30) / (latent_window_size * 4)
     total_latent_sections = int(max(round(total_latent_sections), 1))

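Downstream of this signature change, worker has to actually consume the new resolution value. The upstream demo buckets the input image with a hard-coded resolution=640; below is a minimal sketch of how the slider value would presumably be threaded through instead, assuming the stock find_nearest_bucket / resize_and_center_crop helpers and a bucket table that covers the chosen resolution (this part is not in the hunks shown here):

# Upstream helpers already imported at the top of demo_gradio.py.
from diffusers_helper.bucket_tools import find_nearest_bucket
from diffusers_helper.utils import resize_and_center_crop

# Inside worker(), where the input image is prepared (sketch, not the exact diff):
H, W, C = input_image.shape
# Previously hard-coded as resolution=640; the slider value is assumed to flow through here.
height, width = find_nearest_bucket(H, W, resolution=resolution)
input_image_np = resize_and_center_crop(input_image, target_width=width, target_height=height)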
|
@@ -328,14 +328,14 @@ def callback(d):
     return


-def process(input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf):
+def process(input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf, resolution):
     global stream

     yield None, None, '', '', gr.update(interactive=False), gr.update(interactive=True)

     stream = AsyncStream()

-    async_run(worker, input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf)
+    async_run(worker, input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf, resolution)

     output_filename = None

|
@@ -372,7 +372,8 @@ def end_process():
     gr.Markdown('# FramePack')
     with gr.Row():
         with gr.Column():
-            input_image = gr.Image(sources='upload', type="numpy", label="Image", height=320)
+            input_image = gr.Image(sources='upload', type="numpy", label="Image", height=320)
+            resolution = gr.Slider(label="Resolution", minimum=240, maximum=720, value=640, step=16)
             prompt = gr.Textbox(label="Prompt", value='')
             example_quick_prompts = gr.Dataset(samples=quick_prompts, label='Quick List', samples_per_page=1000, components=[prompt])
             example_quick_prompts.click(lambda x: x[0], inputs=[example_quick_prompts], outputs=prompt, show_progress=False, queue=False)
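For the new slider to have any effect, its component also has to be appended to the input list that the start button passes to process. A minimal sketch of that wiring, using the component names from the stock demo_gradio.py (ips, start_button, result_video, and the other outputs are assumptions about the surrounding file, not part of the hunks shown here):

# Further down the file: forward the slider value to process() (assumed wiring, not shown in this diff).
ips = [input_image, prompt, n_prompt, seed, total_second_length, latent_window_size, steps, cfg, gs, rs, gpu_memory_preservation, use_teacache, mp4_crf, resolution]
start_button.click(fn=process, inputs=ips, outputs=[result_video, preview_image, progress_desc, progress_bar, start_button, end_button])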
|