Add Dockerfile. Modify script to build from the Dockerfile.
parent 869b4b8bc5
commit 51f2836df8
5  Dockerfile  Normal file
@@ -0,0 +1,5 @@
+FROM python:3.11.3-slim-bullseye
+COPY requirements.txt /
+RUN apt update \
+    && apt install -y wget git \
+    && pip install -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cu117 --pre xformers
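The RUN step above resolves the requirements against PyTorch's CUDA 11.7 wheel index and pulls a prerelease xformers build. As a hypothetical sanity check (not part of this commit), something like the following could be run inside the built image to confirm the cu117 wheels were picked up:

# Hypothetical sanity check, not part of the commit: confirm that the CUDA 11.7
# wheels from the --extra-index-url were installed into the image.
import torch
import xformers

print(torch.__version__)          # a cu117 build is expected, e.g. "2.0.1+cu117"
print(torch.version.cuda)         # expected to report "11.7"
print(xformers.__version__)       # prerelease build pulled in via --pre
print(torch.cuda.is_available())  # True only when a GPU is attached at runtime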
requirements.txt
@@ -1,5 +1,9 @@
-modal-client
-# The below packages are not necessary because them are installed into the image on Modal.
-# However, you prevent some lint errors when you edit the code by the editor like VSCode or Vim.
-diffusers
-torch
+accelerate
+scipy
+diffusers[torch]
+safetensors
+torch==2.0.1+cu117
+torchvision
+torchmetrics
+omegaconf
+transformers
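With this change requirements.txt describes what gets installed into the image itself: the torch==2.0.1+cu117 pin matches the extra index used in the Dockerfile, and modal-client is no longer listed. A hypothetical import smoke test, not shipped with the commit, could confirm every entry resolves inside the image:

# Hypothetical smoke test, not part of the commit: check that each package listed
# above can be imported inside the built image.
for module in ("accelerate", "scipy", "diffusers", "safetensors", "torch",
               "torchvision", "torchmetrics", "omegaconf", "transformers"):
    __import__(module)
print("all requirements import cleanly")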
41  sd_cli.py
@@ -4,7 +4,7 @@ import os
 import time
 from datetime import date
 from pathlib import Path
-from modal import Image, Secret, Stub, method
+from modal import Image, Secret, Stub, method, Mount
 
 stub = Stub("stable-diffusion-cli")
 
@@ -17,7 +17,6 @@ def download_models():
     diffusers.StableDiffusionPipeline.from_pretrained().
     """
     import diffusers
-    import torch
 
     hugging_face_token = os.environ["HUGGINGFACE_TOKEN"]
     model_repo_id = os.environ["MODEL_REPO_ID"]
@@ -34,30 +33,17 @@ def download_models():
     pipe = diffusers.StableDiffusionPipeline.from_pretrained(
         model_repo_id,
         use_auth_token=hugging_face_token,
-        torch_dtype=torch.float16,
         cache_dir=cache_path,
     )
     pipe.save_pretrained(cache_path, safe_serialization=True)
 
 
-stub_image = (
-    Image.debian_slim(python_version="3.10")
-    .pip_install(
-        "accelerate",
-        "diffusers[torch]>=0.15.1",
-        "ftfy",
-        "torch",
-        "torchvision",
-        "transformers~=4.25.1",
-        "triton",
-        "safetensors",
-        "torch>=2.0",
-    )
-    .pip_install("xformers", pre=True)
-    .run_function(
-        download_models,
-        secrets=[Secret.from_dotenv(__file__)],
-    )
+stub_image = Image.from_dockerfile(
+    path="./Dockerfile",
+    context_mount=Mount.from_local_file("./requirements.txt"),
+).run_function(
+    download_models,
+    secrets=[Secret.from_dotenv(__file__)],
 )
 stub.image = stub_image
 
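Assembled from the added lines above, the new image definition reads roughly as follows. This is a sketch that assumes the modal client targeted by this commit exposes Image.from_dockerfile, Mount.from_local_file, and run_function as used in the diff; download_models stands in for the function defined earlier in sd_cli.py.

# Sketch of the resulting image setup, assembled from the diff above (assumes the
# modal client in use supports these calls as shown).
from modal import Image, Mount, Secret, Stub

stub = Stub("stable-diffusion-cli")


def download_models() -> None:
    """Placeholder for the real download_models defined in sd_cli.py."""


stub_image = Image.from_dockerfile(
    path="./Dockerfile",
    # requirements.txt is mounted into the Docker build context so that the
    # Dockerfile's COPY step can find it.
    context_mount=Mount.from_local_file("./requirements.txt"),
).run_function(
    # Bake the downloaded model weights into the image at build time, using the
    # Hugging Face token provided through the dotenv-backed secret.
    download_models,
    secrets=[Secret.from_dotenv(__file__)],
)
stub.image = stub_image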
@@ -84,21 +70,14 @@ class StableDiffusion:
         scheduler = diffusers.EulerAncestralDiscreteScheduler.from_pretrained(
             cache_path,
             subfolder="scheduler",
-            solver_order=2,
-            prediction_type="epsilon",
-            thresholding=False,
-            algorithm_type="dpmsolver++",
-            solver_type="midpoint",
-            denoise_final=True,  # important if steps are <= 10
-            low_cpu_mem_usage=True,
-            device_map="auto",
         )
 
         self.pipe = diffusers.StableDiffusionPipeline.from_pretrained(
             cache_path,
             scheduler=scheduler,
-            low_cpu_mem_usage=True,
-            device_map="auto",
+            custom_pipeline="lpw_stable_diffusion",
+            max_embeddings_multiples=2,
+            safety_checker=None,
         ).to("cuda")
         self.pipe.enable_xformers_memory_efficient_attention()
 
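The pipeline is now loaded through the lpw_stable_diffusion community pipeline with the safety checker disabled. A hypothetical call site, not part of this commit, might look like the sketch below; the parameters shown come from the standard StableDiffusionPipeline interface, while long-prompt weighting is assumed to be handled by the community pipeline configured above.

# Hypothetical usage sketch, not part of the commit: invoke the pipeline that the
# setup code above assigns to self.pipe.
import torch


def generate(pipe, prompt: str, steps: int = 30, seed: int = 0):
    generator = torch.Generator("cuda").manual_seed(seed)  # reproducible sampling
    with torch.inference_mode():
        result = pipe(
            prompt,
            num_inference_steps=steps,
            guidance_scale=7.5,
            generator=generator,
        )
    return result.images  # list of PIL.Image outputs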