Skip to content

Commit 9990445

Browse files
authored
Bump to 0.10.0.dev0 + deprecations (huggingface#1490)
1 parent eeeb28a commit 9990445

16 files changed

+20
-155
lines changed

examples/unconditional_image_generation/train_unconditional.py

+1-6
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@
1414
from diffusers import DDPMPipeline, DDPMScheduler, UNet2DModel, __version__
1515
from diffusers.optimization import get_scheduler
1616
from diffusers.training_utils import EMAModel
17-
from diffusers.utils import deprecate
1817
from huggingface_hub import HfFolder, Repository, whoami
1918
from packaging import version
2019
from torchvision.transforms import (
@@ -417,11 +416,7 @@ def transforms(examples):
417416
scheduler=noise_scheduler,
418417
)
419418

420-
deprecate("todo: remove this check", "0.10.0", "when the most used version is >= 0.8.0")
421-
if diffusers_version < version.parse("0.8.0"):
422-
generator = torch.manual_seed(0)
423-
else:
424-
generator = torch.Generator(device=pipeline.device).manual_seed(0)
419+
generator = torch.Generator(device=pipeline.device).manual_seed(0)
425420
# run pipeline in inference (sample random noise and denoise)
426421
images = pipeline(
427422
generator=generator,

setup.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -214,7 +214,7 @@ def run(self):
214214

215215
setup(
216216
name="diffusers",
217-
version="0.9.0", # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
217+
version="0.10.0.dev0", # expected format is one of x.y.z.dev0, or x.y.z.rc1 or x.y.z (no to dashes, yes to dots)
218218
description="Diffusers",
219219
long_description=open("README.md", "r", encoding="utf-8").read(),
220220
long_description_content_type="text/markdown",

src/diffusers/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
)
1010

1111

12-
__version__ = "0.9.0"
12+
__version__ = "0.10.0.dev0"
1313

1414
from .configuration_utils import ConfigMixin
1515
from .onnx_utils import OnnxRuntimeModel

src/diffusers/hub_utils.py

+2-118
Original file line numberDiff line numberDiff line change
@@ -15,16 +15,15 @@
1515

1616

1717
import os
18-
import shutil
1918
import sys
2019
from pathlib import Path
2120
from typing import Dict, Optional, Union
2221
from uuid import uuid4
2322

24-
from huggingface_hub import HfFolder, Repository, whoami
23+
from huggingface_hub import HfFolder, whoami
2524

2625
from . import __version__
27-
from .utils import ENV_VARS_TRUE_VALUES, deprecate, logging
26+
from .utils import ENV_VARS_TRUE_VALUES, logging
2827
from .utils.import_utils import (
2928
_flax_version,
3029
_jax_version,
@@ -83,121 +82,6 @@ def get_full_repo_name(model_id: str, organization: Optional[str] = None, token:
8382
return f"{organization}/{model_id}"
8483

8584

86-
def init_git_repo(args, at_init: bool = False):
87-
"""
88-
Args:
89-
Initializes a git repo in `args.hub_model_id`.
90-
at_init (`bool`, *optional*, defaults to `False`):
91-
Whether this function is called before any training or not. If `self.args.overwrite_output_dir` is `True`
92-
and `at_init` is `True`, the path to the repo (which is `self.args.output_dir`) might be wiped out.
93-
"""
94-
deprecation_message = (
95-
"Please use `huggingface_hub.Repository`. "
96-
"See `examples/unconditional_image_generation/train_unconditional.py` for an example."
97-
)
98-
deprecate("init_git_repo()", "0.10.0", deprecation_message)
99-
100-
if hasattr(args, "local_rank") and args.local_rank not in [-1, 0]:
101-
return
102-
hub_token = args.hub_token if hasattr(args, "hub_token") else None
103-
use_auth_token = True if hub_token is None else hub_token
104-
if not hasattr(args, "hub_model_id") or args.hub_model_id is None:
105-
repo_name = Path(args.output_dir).absolute().name
106-
else:
107-
repo_name = args.hub_model_id
108-
if "/" not in repo_name:
109-
repo_name = get_full_repo_name(repo_name, token=hub_token)
110-
111-
try:
112-
repo = Repository(
113-
args.output_dir,
114-
clone_from=repo_name,
115-
use_auth_token=use_auth_token,
116-
private=args.hub_private_repo,
117-
)
118-
except EnvironmentError:
119-
if args.overwrite_output_dir and at_init:
120-
# Try again after wiping output_dir
121-
shutil.rmtree(args.output_dir)
122-
repo = Repository(
123-
args.output_dir,
124-
clone_from=repo_name,
125-
use_auth_token=use_auth_token,
126-
)
127-
else:
128-
raise
129-
130-
repo.git_pull()
131-
132-
# By default, ignore the checkpoint folders
133-
if not os.path.exists(os.path.join(args.output_dir, ".gitignore")):
134-
with open(os.path.join(args.output_dir, ".gitignore"), "w", encoding="utf-8") as writer:
135-
writer.writelines(["checkpoint-*/"])
136-
137-
return repo
138-
139-
140-
def push_to_hub(
141-
args,
142-
pipeline,
143-
repo: Repository,
144-
commit_message: Optional[str] = "End of training",
145-
blocking: bool = True,
146-
**kwargs,
147-
) -> str:
148-
"""
149-
Parameters:
150-
Upload *self.model* and *self.tokenizer* to the 🤗 model hub on the repo *self.args.hub_model_id*.
151-
commit_message (`str`, *optional*, defaults to `"End of training"`):
152-
Message to commit while pushing.
153-
blocking (`bool`, *optional*, defaults to `True`):
154-
Whether the function should return only when the `git push` has finished.
155-
kwargs:
156-
Additional keyword arguments passed along to [`create_model_card`].
157-
Returns:
158-
The url of the commit of your model in the given repository if `blocking=False`, a tuple with the url of the
159-
commit and an object to track the progress of the commit if `blocking=True`
160-
"""
161-
deprecation_message = (
162-
"Please use `huggingface_hub.Repository` and `Repository.push_to_hub()`. "
163-
"See `examples/unconditional_image_generation/train_unconditional.py` for an example."
164-
)
165-
deprecate("push_to_hub()", "0.10.0", deprecation_message)
166-
167-
if not hasattr(args, "hub_model_id") or args.hub_model_id is None:
168-
model_name = Path(args.output_dir).name
169-
else:
170-
model_name = args.hub_model_id.split("/")[-1]
171-
172-
output_dir = args.output_dir
173-
os.makedirs(output_dir, exist_ok=True)
174-
logger.info(f"Saving pipeline checkpoint to {output_dir}")
175-
pipeline.save_pretrained(output_dir)
176-
177-
# Only push from one node.
178-
if hasattr(args, "local_rank") and args.local_rank not in [-1, 0]:
179-
return
180-
181-
# Cancel any async push in progress if blocking=True. The commits will all be pushed together.
182-
if (
183-
blocking
184-
and len(repo.command_queue) > 0
185-
and repo.command_queue[-1] is not None
186-
and not repo.command_queue[-1].is_done
187-
):
188-
repo.command_queue[-1]._process.kill()
189-
190-
git_head_commit_url = repo.push_to_hub(commit_message=commit_message, blocking=blocking, auto_lfs_prune=True)
191-
# push separately the model card to be independent from the rest of the model
192-
create_model_card(args, model_name=model_name)
193-
try:
194-
repo.push_to_hub(commit_message="update model card README.md", blocking=blocking, auto_lfs_prune=True)
195-
except EnvironmentError as exc:
196-
logger.error(f"Error pushing update to the model card. Please read logs and retry.\n${exc}")
197-
198-
return git_head_commit_url
199-
200-
20185
def create_model_card(args, model_name):
20286
if not is_modelcards_available:
20387
raise ValueError(

src/diffusers/modeling_utils.py

-14
Original file line numberDiff line numberDiff line change
@@ -666,20 +666,6 @@ def num_parameters(self, only_trainable: bool = False, exclude_embeddings: bool
666666
return sum(p.numel() for p in self.parameters() if p.requires_grad or not only_trainable)
667667

668668

669-
def unwrap_model(model: torch.nn.Module) -> torch.nn.Module:
670-
"""
671-
Recursively unwraps a model from potential containers (as used in distributed training).
672-
673-
Args:
674-
model (`torch.nn.Module`): The model to unwrap.
675-
"""
676-
# since there could be multiple levels of wrapping, unwrap recursively
677-
if hasattr(model, "module"):
678-
return unwrap_model(model.module)
679-
else:
680-
return model
681-
682-
683669
def _get_model_file(
684670
pretrained_model_name_or_path,
685671
*,

src/diffusers/pipelines/ddpm/pipeline_ddpm.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ def __call__(
7373
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
7474
" DDPMScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
7575
)
76-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
76+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
7777

7878
if predict_epsilon is not None:
7979
new_config = dict(self.scheduler.config)

src/diffusers/schedulers/scheduling_ddim.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -134,7 +134,7 @@ def __init__(
134134
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
135135
" DDIMScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
136136
)
137-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
137+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
138138
if predict_epsilon is not None:
139139
self.register_to_config(prediction_type="epsilon" if predict_epsilon else "sample")
140140

src/diffusers/schedulers/scheduling_ddim_flax.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ def __init__(
138138
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
139139
" FlaxDDIMScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
140140
)
141-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
141+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
142142
if predict_epsilon is not None:
143143
self.register_to_config(prediction_type="epsilon" if predict_epsilon else "sample")
144144

src/diffusers/schedulers/scheduling_ddpm.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,7 @@ def __init__(
125125
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
126126
" DDPMScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
127127
)
128-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
128+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
129129
if predict_epsilon is not None:
130130
self.register_to_config(prediction_type="epsilon" if predict_epsilon else "sample")
131131

@@ -255,7 +255,7 @@ def step(
255255
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
256256
" DDPMScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
257257
)
258-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
258+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
259259
if predict_epsilon is not None:
260260
new_config = dict(self.config)
261261
new_config["prediction_type"] = "epsilon" if predict_epsilon else "sample"

src/diffusers/schedulers/scheduling_ddpm_flax.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -132,7 +132,7 @@ def __init__(
132132
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
133133
" FlaxDDPMScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
134134
)
135-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
135+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
136136
if predict_epsilon is not None:
137137
self.register_to_config(prediction_type="epsilon" if predict_epsilon else "sample")
138138

@@ -239,7 +239,7 @@ def step(
239239
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
240240
" FlaxDDPMScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
241241
)
242-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
242+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
243243
if predict_epsilon is not None:
244244
new_config = dict(self.config)
245245
new_config["prediction_type"] = "epsilon" if predict_epsilon else "sample"

src/diffusers/schedulers/scheduling_dpmsolver_multistep.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ def __init__(
142142
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
143143
" DPMSolverMultistepScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
144144
)
145-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
145+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
146146
if predict_epsilon is not None:
147147
self.register_to_config(prediction_type="epsilon" if predict_epsilon else "sample")
148148

src/diffusers/schedulers/scheduling_dpmsolver_multistep_flax.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -177,7 +177,7 @@ def __init__(
177177
"Please make sure to instantiate your scheduler with `prediction_type` instead. E.g. `scheduler ="
178178
" FlaxDPMSolverMultistepScheduler.from_pretrained(<model_id>, prediction_type='epsilon')`."
179179
)
180-
predict_epsilon = deprecate("predict_epsilon", "0.10.0", message, take_from=kwargs)
180+
predict_epsilon = deprecate("predict_epsilon", "0.11.0", message, take_from=kwargs)
181181
if predict_epsilon is not None:
182182
self.register_to_config(prediction_type="epsilon" if predict_epsilon else "sample")
183183

tests/pipelines/ddpm/test_ddpm.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ def test_inference(self):
6969
assert np.abs(image_from_tuple_slice.flatten() - expected_slice).max() < 1e-2
7070

7171
def test_inference_deprecated_predict_epsilon(self):
72-
deprecate("remove this test", "0.10.0", "remove")
72+
deprecate("remove this test", "0.11.0", "remove")
7373
unet = self.dummy_uncond_unet
7474
scheduler = DDPMScheduler(predict_epsilon=False)
7575

tests/test_config.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -203,7 +203,7 @@ def test_overwrite_config_on_load(self):
203203
ddpm_2 = DDPMScheduler.from_pretrained("google/ddpm-celebahq-256", beta_start=88)
204204

205205
with CaptureLogger(logger) as cap_logger:
206-
deprecate("remove this case", "0.10.0", "remove")
206+
deprecate("remove this case", "0.11.0", "remove")
207207
ddpm_3 = DDPMScheduler.from_pretrained(
208208
"hf-internal-testing/tiny-stable-diffusion-torch",
209209
subfolder="scheduler",

tests/test_scheduler.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -639,12 +639,12 @@ def test_prediction_type(self):
639639
self.check_over_configs(prediction_type=prediction_type)
640640

641641
def test_deprecated_predict_epsilon(self):
642-
deprecate("remove this test", "0.10.0", "remove")
642+
deprecate("remove this test", "0.11.0", "remove")
643643
for predict_epsilon in [True, False]:
644644
self.check_over_configs(predict_epsilon=predict_epsilon)
645645

646646
def test_deprecated_epsilon(self):
647-
deprecate("remove this test", "0.10.0", "remove")
647+
deprecate("remove this test", "0.11.0", "remove")
648648
scheduler_class = self.scheduler_classes[0]
649649
scheduler_config = self.get_scheduler_config()
650650

tests/test_scheduler_flax.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -626,12 +626,12 @@ def test_prediction_type(self):
626626
self.check_over_configs(prediction_type=prediction_type)
627627

628628
def test_deprecated_predict_epsilon(self):
629-
deprecate("remove this test", "0.10.0", "remove")
629+
deprecate("remove this test", "0.11.0", "remove")
630630
for predict_epsilon in [True, False]:
631631
self.check_over_configs(predict_epsilon=predict_epsilon)
632632

633633
def test_deprecated_predict_epsilon_to_prediction_type(self):
634-
deprecate("remove this test", "0.10.0", "remove")
634+
deprecate("remove this test", "0.11.0", "remove")
635635
for scheduler_class in self.scheduler_classes:
636636
scheduler_config = self.get_scheduler_config(predict_epsilon=True)
637637
scheduler = scheduler_class.from_config(scheduler_config)

0 commit comments

Comments (0)