 import huggingface_hub
 
 from .. import __version__ as version
-from ..utils import is_torch_available, is_transformers_available
+from ..utils import is_accelerate_available, is_torch_available, is_transformers_available, is_xformers_available
 from . import BaseDiffusersCLICommand
 
 
@@ -44,18 +44,32 @@ def run(self):
             pt_cuda_available = torch.cuda.is_available()
 
         transformers_version = "not installed"
-        if is_transformers_available:
+        if is_transformers_available():
             import transformers
 
             transformers_version = transformers.__version__
 
+        accelerate_version = "not installed"
+        if is_accelerate_available():
+            import accelerate
+
+            accelerate_version = accelerate.__version__
+
+        xformers_version = "not installed"
+        if is_xformers_available():
+            import xformers
+
+            xformers_version = xformers.__version__
+
         info = {
             "`diffusers` version": version,
             "Platform": platform.platform(),
             "Python version": platform.python_version(),
             "PyTorch version (GPU?)": f"{pt_version} ({pt_cuda_available})",
             "Huggingface_hub version": hub_version,
             "Transformers version": transformers_version,
+            "Accelerate version": accelerate_version,
+            "xFormers version": xformers_version,
             "Using GPU in script?": "<fill in>",
             "Using distributed or parallel set-up in script?": "<fill in>",
         }
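
For context, the added checks follow the same probe-then-import pattern as the existing transformers check: a helper from `..utils` tests whether the package can be imported before its `__version__` is read, so a missing optional dependency is reported as "not installed" instead of raising. A minimal standalone sketch of that pattern (the `_package_version` helper below is illustrative, not part of diffusers):

import importlib.util


def _package_version(name: str) -> str:
    # Report the installed version of `name`, or "not installed" if the package cannot be found.
    if importlib.util.find_spec(name) is None:
        return "not installed"
    module = importlib.import_module(name)
    return getattr(module, "__version__", "unknown")


print("Accelerate version:", _package_version("accelerate"))
print("xFormers version:", _package_version("xformers"))

With this change, `diffusers-cli env` should also list the Accelerate and xFormers versions (or "not installed") in its environment report.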