Update app.py
app.py CHANGED
@@ -49,10 +49,9 @@ with open('loras.json', 'r') as f:
 #torch.cuda.empty_cache()
 pipe = diffusers.ZImagePipeline.from_pretrained("dimitribarbot/Z-Image-Turbo-BF16", torch_dtype=torch.bfloat16)
 
-device = "cuda" if torch.cuda.is_available() else "cpu"
-
 #pipe.vae = AutoencoderTiny.from_pretrained("madebyollin/taef1", torch_dtype=torch.float16).to("cuda")
 pipe.vae = AutoencoderKL.from_pretrained("AlekseyCalvin/AnimeVAE_by_Anzhc_for_Flux_ZiT", torch_dtype=torch.bfloat16, device_map="cuda")
+device = "cuda" if torch.cuda.is_available() else "cpu"
 
 
 #pipe.vae = AutoencoderKL.from_pretrained("REPA-E/e2e-flux-vae", torch_dtype=torch.bfloat16).to("cuda")
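
The only change in this commit is that the device string is resolved after the VAE swap instead of before the commented-out AutoencoderTiny line. For context, the setup section of app.py reads roughly as follows after the change. This is a minimal sketch, not the full file: the imports are assumed from the top of app.py, and the final pipe.to(device) call is an assumption about how the device variable is used further down, as it is not shown in this diff.

import torch
import diffusers
from diffusers import AutoencoderKL

# Load the Z-Image Turbo pipeline in bfloat16.
pipe = diffusers.ZImagePipeline.from_pretrained(
    "dimitribarbot/Z-Image-Turbo-BF16", torch_dtype=torch.bfloat16
)

# Replace the pipeline's VAE with the AnimeVAE AutoencoderKL, placed directly on the GPU.
pipe.vae = AutoencoderKL.from_pretrained(
    "AlekseyCalvin/AnimeVAE_by_Anzhc_for_Flux_ZiT",
    torch_dtype=torch.bfloat16,
    device_map="cuda",
)

# This commit moves the device resolution here, after the VAE swap.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Assumption: the device string is presumably applied later in app.py, e.g.:
# pipe.to(device)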