Skip to content

Commit 1a94594

Browse files
tchaton authored and lexierule committed
[App] Rename to new convention (#15621)
* update * update (cherry picked from commit 7ec15ae)
1 parent 860e139 commit 1a94594

File tree

6 files changed

+14
-14
lines changed

6 files changed

+14
-14
lines changed

examples/app_multi_node/train_pl.py renamed to examples/app_multi_node/train_lt.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
import lightning as L
2-
from lightning.app.components import PyTorchLightningMultiNode
2+
from lightning.app.components import LightningTrainerMultiNode
33
from lightning.pytorch.demos.boring_classes import BoringModel
44

55

6-
class PyTorchLightningDistributed(L.LightningWork):
6+
class LightningTrainerDistributed(L.LightningWork):
77
@staticmethod
88
def run():
99
model = BoringModel()
@@ -16,8 +16,8 @@ def run():
1616

1717
# Run over 2 nodes of 4 x V100
1818
app = L.LightningApp(
19-
PyTorchLightningMultiNode(
20-
PyTorchLightningDistributed,
19+
LightningTrainerMultiNode(
20+
LightningTrainerDistributed,
2121
num_nodes=2,
2222
cloud_compute=L.CloudCompute("gpu-fast-multi"), # 4 x V100
2323
)

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ module = [
6262
"lightning_app.components.multi_node.lite",
6363
"lightning_app.components.multi_node.base",
6464
"lightning_app.components.multi_node.pytorch_spawn",
65-
"lightning_app.components.multi_node.pl",
65+
"lightning_app.components.multi_node.trainer",
6666
"lightning_app.api.http_methods",
6767
"lightning_app.api.request_types",
6868
"lightning_app.cli.commands.app_commands",

src/lightning_app/components/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
from lightning_app.components.database.client import DatabaseClient
22
from lightning_app.components.database.server import Database
33
from lightning_app.components.multi_node import (
4+
LightningTrainerMultiNode,
45
LiteMultiNode,
56
MultiNode,
6-
PyTorchLightningMultiNode,
77
PyTorchSpawnMultiNode,
88
)
99
from lightning_app.components.python.popen import PopenPythonScript
@@ -29,5 +29,5 @@
2929
"LightningTrainingComponent",
3030
"PyTorchLightningScriptRunner",
3131
"PyTorchSpawnMultiNode",
32-
"PyTorchLightningMultiNode",
32+
"LightningTrainerMultiNode",
3333
]
Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from lightning_app.components.multi_node.base import MultiNode
22
from lightning_app.components.multi_node.lite import LiteMultiNode
3-
from lightning_app.components.multi_node.pl import PyTorchLightningMultiNode
43
from lightning_app.components.multi_node.pytorch_spawn import PyTorchSpawnMultiNode
4+
from lightning_app.components.multi_node.trainer import LightningTrainerMultiNode
55

6-
__all__ = ["LiteMultiNode", "MultiNode", "PyTorchSpawnMultiNode", "PyTorchLightningMultiNode"]
6+
__all__ = ["LiteMultiNode", "MultiNode", "PyTorchSpawnMultiNode", "LightningTrainerMultiNode"]

src/lightning_app/components/multi_node/pl.py renamed to src/lightning_app/components/multi_node/trainer.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,14 +13,14 @@
1313

1414

1515
@runtime_checkable
16-
class _PyTorchLightningWorkProtocol(Protocol):
16+
class _LightningTrainerWorkProtocol(Protocol):
1717
@staticmethod
1818
def run() -> None:
1919
...
2020

2121

2222
@dataclass
23-
class _PyTorchLightningRunExecutor(_PyTorchSpawnRunExecutor):
23+
class _LightningTrainerRunExecutor(_PyTorchSpawnRunExecutor):
2424
@staticmethod
2525
def run(
2626
local_rank: int,
@@ -71,7 +71,7 @@ def pre_fn(trainer, *args, **kwargs):
7171
tracer._restore()
7272

7373

74-
class PyTorchLightningMultiNode(MultiNode):
74+
class LightningTrainerMultiNode(MultiNode):
7575
def __init__(
7676
self,
7777
work_cls: Type["LightningWork"],
@@ -80,7 +80,7 @@ def __init__(
8080
*work_args: Any,
8181
**work_kwargs: Any,
8282
) -> None:
83-
assert issubclass(work_cls, _PyTorchLightningWorkProtocol)
83+
assert issubclass(work_cls, _LightningTrainerWorkProtocol)
8484
if not is_static_method(work_cls, "run"):
8585
raise TypeError(
8686
f"The provided {work_cls} run method needs to be static for now."
@@ -89,7 +89,7 @@ def __init__(
8989

9090
# Note: Private way to modify the work run executor
9191
# Probably exposed to the users in the future if needed.
92-
work_cls._run_executor_cls = _PyTorchLightningRunExecutor
92+
work_cls._run_executor_cls = _LightningTrainerRunExecutor
9393

9494
super().__init__(
9595
work_cls,

0 commit comments

Comments (0)