From b633cc945608d118ca368e3f9b459f73391902f3 Mon Sep 17 00:00:00 2001
From: JackUrb
Date: Fri, 13 Jun 2025 17:31:23 -0400
Subject: [PATCH 1/6] Moving to lazy root imports to make config loading snappy

---
 litgpt/__init__.py | 24 +++++++++++++++++++-----
 litgpt/config.py   |  2 +-
 2 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/litgpt/__init__.py b/litgpt/__init__.py
index 6c8dc3d820..b770430858 100644
--- a/litgpt/__init__.py
+++ b/litgpt/__init__.py
@@ -3,11 +3,25 @@
 import logging
 import re
 
-from litgpt.api import LLM
-from litgpt.config import Config
-from litgpt.model import GPT  # needs to be imported before config
-from litgpt.prompts import PromptStyle
-from litgpt.tokenizer import Tokenizer
+
+def __getattr__(name):
+    if name == "LLM":
+        from litgpt.api import LLM
+        return LLM
+    elif name == "Config":
+        from litgpt.config import Config
+        return Config
+    elif name == "GPT":
+        from litgpt.model import GPT
+        return GPT
+    elif name == "PromptStyle":
+        from litgpt.prompts import PromptStyle
+        return PromptStyle
+    elif name == "Tokenizer":
+        from litgpt.tokenizer import Tokenizer
+        return Tokenizer
+    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
 
 # Suppress excessive warnings, see https://github.com/pytorch/pytorch/issues/111632
 pattern = re.compile(".*Profiler function .* will be ignored")
diff --git a/litgpt/config.py b/litgpt/config.py
index cba52e3374..c946e2e951 100644
--- a/litgpt/config.py
+++ b/litgpt/config.py
@@ -5,7 +5,6 @@
 from pathlib import Path
 from typing import Any, List, Literal, Optional, Type, Union
 
-import torch
 import yaml
 from typing_extensions import Self
 
@@ -184,6 +183,7 @@ def norm_class(self) -> Type:
         # `self.norm_class_name` cannot be the type to keep the config serializable
         from functools import partial
+        import torch
 
         if self.norm_class_name == "RMSNorm":
             from litgpt.model import RMSNorm

From 43e7052d6eaff5f9f86a680fd72adc0b3cdb72a2 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 13 Jun 2025 21:39:20 +0000
Subject: [PATCH 2/6] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 litgpt/__init__.py | 5 +++++
 litgpt/config.py   | 1 +
 2 files changed, 6 insertions(+)

diff --git a/litgpt/__init__.py b/litgpt/__init__.py
index b770430858..c89a7f20d4 100644
--- a/litgpt/__init__.py
+++ b/litgpt/__init__.py
@@ -7,18 +7,23 @@
 def __getattr__(name):
     if name == "LLM":
         from litgpt.api import LLM
+
         return LLM
     elif name == "Config":
         from litgpt.config import Config
+
         return Config
     elif name == "GPT":
         from litgpt.model import GPT
+
         return GPT
     elif name == "PromptStyle":
         from litgpt.prompts import PromptStyle
+
         return PromptStyle
     elif name == "Tokenizer":
         from litgpt.tokenizer import Tokenizer
+
         return Tokenizer
     raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
diff --git a/litgpt/config.py b/litgpt/config.py
index c946e2e951..83db8da04b 100644
--- a/litgpt/config.py
+++ b/litgpt/config.py
@@ -183,6 +183,7 @@ def norm_class(self) -> Type:
         # `self.norm_class_name` cannot be the type to keep the config serializable
         from functools import partial
+
         import torch
 
         if self.norm_class_name == "RMSNorm":

From 22e1df03cbf356a2f3d982fe7cf25928187b22bd Mon Sep 17 00:00:00 2001
From: JackUrb
Date: Mon, 16 Jun 2025 11:51:36 -0400
Subject: [PATCH 3/6] Handle reverse-compatibility with old import side-effects, cover TYPE_CHECKING

---
 litgpt/__init__.py | 33 +++++++++++++++++++++++++++++++++
 1 file changed, 33 insertions(+)

diff --git a/litgpt/__init__.py b/litgpt/__init__.py
index c89a7f20d4..eb00d775fd 100644
--- a/litgpt/__init__.py
+++ b/litgpt/__init__.py
@@ -2,6 +2,15 @@
 
 import logging
 import re
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from litgpt.api import LLM
+    from litgpt.config import Config
+    from litgpt.model import GPT
+    from litgpt.prompts import PromptStyle
+    from litgpt.tokenizer import Tokenizer
 
 
 def __getattr__(name):
@@ -25,6 +34,30 @@ def __getattr__(name):
         from litgpt.tokenizer import Tokenizer
 
         return Tokenizer
+
+    # Handle the modules that used to be available immediately after the top-level import
+    elif name == "api":
+        import litgpt.api as api
+
+        return api
+    elif name == "config":
+        import litgpt.config as config
+
+        return config
+    elif name == "model":
+        import litgpt.model as model
+
+        return model
+    elif name == "prompts":
+        import litgpt.prompts as prompts
+
+        return prompts
+    elif name == "tokenizer":
+        import litgpt.tokenizer as tokenizer
+
+        return tokenizer
+
+    # If the attribute is not found, raise an AttributeError
     raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

From 52baf0bc665ca8e54949015bb9149602db2ae9aa Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 16 Jun 2025 15:52:16 +0000
Subject: [PATCH 4/6] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 litgpt/__init__.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/litgpt/__init__.py b/litgpt/__init__.py
index eb00d775fd..21ab352f3b 100644
--- a/litgpt/__init__.py
+++ b/litgpt/__init__.py
@@ -4,7 +4,6 @@
 import re
 from typing import TYPE_CHECKING
 
-
 if TYPE_CHECKING:
     from litgpt.api import LLM
     from litgpt.config import Config
     from litgpt.model import GPT
     from litgpt.prompts import PromptStyle
     from litgpt.tokenizer import Tokenizer
@@ -56,7 +55,7 @@ def __getattr__(name):
         import litgpt.tokenizer as tokenizer
 
         return tokenizer
-
+
     # If the attribute is not found, raise an AttributeError
     raise AttributeError(f"module {__name__!r} has no attribute {name!r}")

From 90c4193b8b5e66e399acade3b49cdc98380618d6 Mon Sep 17 00:00:00 2001
From: JackUrb
Date: Tue, 17 Jun 2025 08:14:09 -0400
Subject: [PATCH 5/6] update model for easier maintenance, handle dir

---
 litgpt/__init__.py | 76 +++++++++++++++++++++-------------------------
 1 file changed, 34 insertions(+), 42 deletions(-)

diff --git a/litgpt/__init__.py b/litgpt/__init__.py
index 21ab352f3b..020818a6eb 100644
--- a/litgpt/__init__.py
+++ b/litgpt/__init__.py
@@ -1,5 +1,6 @@
 # Copyright Lightning AI. Licensed under the Apache License 2.0, see LICENSE file.
 
+import importlib
 import logging
 import re
 from typing import TYPE_CHECKING
@@ -12,53 +13,44 @@
     from litgpt.tokenizer import Tokenizer
 
 
-def __getattr__(name):
-    if name == "LLM":
-        from litgpt.api import LLM
-
-        return LLM
-    elif name == "Config":
-        from litgpt.config import Config
-
-        return Config
-    elif name == "GPT":
-        from litgpt.model import GPT
-
-        return GPT
-    elif name == "PromptStyle":
-        from litgpt.prompts import PromptStyle
-
-        return PromptStyle
-    elif name == "Tokenizer":
-        from litgpt.tokenizer import Tokenizer
-
-        return Tokenizer
+_LAZY_IMPORTS = {
+    "LLM": "litgpt.api",
+    "Config": "litgpt.config",
+    "GPT": "litgpt.model",
+    "PromptStyle": "litgpt.prompts",
+    "Tokenizer": "litgpt.tokenizer",
+    "api": "litgpt.api",
+    "chat": "litgpt.chat",
+    "config": "litgpt.config",
+    "generate": "litgpt.generate",
+    "lora": "litgpt.lora",
+    "model": "litgpt.model",
+    "prompts": "litgpt.prompts",
+    "scripts": "litgpt.scripts",
+    "tokenizer": "litgpt.tokenizer",
+    "utils": "litgpt.utils",
+}
 
-    # Handle the modules that used to be available immediately after the top-level import
-    elif name == "api":
-        import litgpt.api as api
-
-        return api
-    elif name == "config":
-        import litgpt.config as config
-
-        return config
-    elif name == "model":
-        import litgpt.model as model
-
-        return model
-    elif name == "prompts":
-        import litgpt.prompts as prompts
-
-        return prompts
-    elif name == "tokenizer":
-        import litgpt.tokenizer as tokenizer
-
-        return tokenizer
+def __getattr__(name):
+    """
+    Allow for lazy imports of all litgpt submodules,
+    as well as some selected top-level attributes.
+    """
+    if name in _LAZY_IMPORTS:
+        module_name = _LAZY_IMPORTS[name]
+        module = importlib.import_module(module_name)
+        if not module_name.endswith(name):
+            return getattr(module, name)
+        return module
 
     # If the attribute is not found, raise an AttributeError
     raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
 
+def __dir__():
+    """
+    Return a list of all attributes in the litgpt module.
+    """
+    return sorted(list(_LAZY_IMPORTS.keys()) + list(globals().keys()))
 
 # Suppress excessive warnings, see https://github.com/pytorch/pytorch/issues/111632
 pattern = re.compile(".*Profiler function .* will be ignored")

From 9f61ee65e85d41376d52ada89b910f83221f25bc Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 17 Jun 2025 12:34:52 +0000
Subject: [PATCH 6/6] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 litgpt/__init__.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/litgpt/__init__.py b/litgpt/__init__.py
index 020818a6eb..af29843106 100644
--- a/litgpt/__init__.py
+++ b/litgpt/__init__.py
@@ -31,6 +31,7 @@
     "utils": "litgpt.utils",
 }
 
+
 def __getattr__(name):
     """
     Allow for lazy imports of all litgpt submodules,
@@ -46,12 +47,14 @@ def __getattr__(name):
     # If the attribute is not found, raise an AttributeError
     raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
 
+
 def __dir__():
     """
     Return a list of all attributes in the litgpt module.
    """
     return sorted(list(_LAZY_IMPORTS.keys()) + list(globals().keys()))
 
+
 # Suppress excessive warnings, see https://github.com/pytorch/pytorch/issues/111632
 pattern = re.compile(".*Profiler function .* will be ignored")
 logging.getLogger("torch._dynamo.variables.torch").addFilter(lambda record: not pattern.search(record.getMessage()))
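
--
A quick way to exercise the behaviour this series is after -- a hypothetical verification script, not part of the patches. It assumes the patched litgpt is importable and that nothing else in the interpreter has already imported torch:

    import sys

    import litgpt  # with these patches, the bare package import stays lightweight

    # Neither torch nor the litgpt submodules are pulled in by importing the package.
    print("torch" in sys.modules)           # expected: False
    print("litgpt.config" in sys.modules)   # expected: False

    # The new __dir__ still advertises the lazy names for dir() and tab completion.
    print("Config" in dir(litgpt))          # expected: True

    # First attribute access goes through the module-level __getattr__ (PEP 562),
    # which imports litgpt.config on demand and returns the Config class from it.
    cfg_cls = litgpt.Config
    print("litgpt.config" in sys.modules)               # expected: True
    print(cfg_cls is sys.modules["litgpt.config"].Config)  # expected: True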