Masrkai committed on
Commit fe4aa3a · verified · 1 Parent(s): 487d3dd

Update model.py

Files changed (1)
  1. model.py +16 -19
model.py CHANGED
@@ -1,24 +1,21 @@
+# model.py
 import torch
 from diffusers import ShapEPipeline
 from diffusers.utils import export_to_gif
 
-# Define checkpoint ID and load pipeline on CPU
-ckpt_id = "openai/shap-e"
-pipe = ShapEPipeline.from_pretrained(ckpt_id).to("cpu")
+# Load pipeline once to avoid reloading with each request
+def load_pipeline():
+    ckpt_id = "openai/shap-e"
+    pipe = ShapEPipeline.from_pretrained(ckpt_id, torch_dtype=torch.float32, trust_remote_code=True).to("cpu")
+    return pipe
 
-# Define generation parameters
-guidance_scale = 10.0  # Lowered for efficiency on CPU
-num_inference_steps = 32  # Reduced steps for CPU performance
-prompt = "a shark"
-
-# Generate images from the prompt with reduced settings
-images = pipe(
-    prompt=prompt,
-    guidance_scale=guidance_scale,
-    num_inference_steps=num_inference_steps,
-    size=256,  # Image size for the model
-).images
-
-# Export images to GIF format
-gif_path = export_to_gif(images, "shark_3d.gif")
-print(f"GIF saved at {gif_path}")
+# Generate images and export to GIF
+def generate_3d_gif(pipe, prompt, guidance_scale=10.0, num_inference_steps=32, size=256):
+    images = pipe(
+        prompt=prompt,
+        guidance_scale=guidance_scale,
+        num_inference_steps=num_inference_steps,
+        frame_size=size,  # frame size of the rendered 3D views
+    ).images
+    gif_path = export_to_gif(images, "generated_3d.gif")
+    return gif_path