train.py
# train.py
import warnings

# silence pytorch lightning bolts UserWarning about missing gym package (as of v0.3.0);
# filter before the Lightning imports so import-time warnings are caught too
warnings.simplefilter(action="ignore", category=UserWarning)

from pathlib import Path
from typing import List, Optional

import hydra
from omegaconf import DictConfig
from pytorch_lightning import (
    Callback,
    LightningDataModule,
    LightningModule,
    Trainer,
    seed_everything,
)
from pytorch_lightning.loggers import LightningLoggerBase

from deadtrees.utils import utils
from deadtrees.utils.env import get_env

log = utils.get_logger(__name__)


def train(config: DictConfig) -> Optional[float]:
    """Contains the training pipeline.

    Instantiates all PyTorch Lightning objects from config.

    Args:
        config (DictConfig): Configuration composed by Hydra.

    Returns:
        Optional[float]: Metric score for hyperparameter optimization.
    """
    # Set seed for random number generators in pytorch, numpy and python.random
    if config.get("seed"):
        seed_everything(config.seed, workers=True)

    # Init Lightning datamodule; pick the layout based on what is on disk
    ddir = Path(get_env("TRAIN_DATASET_PATH"))
    subfolders = ["train", "val", "test"]
    if all((ddir / d).is_dir() for d in subfolders):
        # dataset/train, dataset/val, dataset/test layout
        log.info(
            f"Instantiating datamodule <{config.datamodule._target_}> "
            "with train, val, test folder layout"
        )
        datamodule: LightningDataModule = hydra.utils.instantiate(
            config.datamodule,
            data_dir=[str(ddir / d) for d in subfolders],
            pattern=config.datamodule.pattern,
            pattern_extra=config.datamodule.get("pattern_extra", None),
            batch_size_extra=config.datamodule.get("batch_size_extra", None),
        )
    else:
        log.info(
            f"Instantiating datamodule <{config.datamodule._target_}> "
            "with single folder layout"
        )
        datamodule = hydra.utils.instantiate(
            config.datamodule,
            data_dir=get_env("TRAIN_DATASET_PATH"),
            pattern=config.datamodule.pattern,
            pattern_extra=config.datamodule.get("pattern_extra", None),
            batch_size_extra=config.datamodule.get("batch_size_extra", None),
        )
    datamodule.setup(
        in_channels=config.model.network.in_channels,
        classes=len(config.model.network.classes),
    )

    # Init Lightning model
    log.info(f"Instantiating model <{config.model._target_}>")
    model: LightningModule = hydra.utils.instantiate(config.model)

    # Init Lightning callbacks
    callbacks: List[Callback] = []
    if "callbacks" in config:
        for _, cb_conf in config.callbacks.items():
            if "_target_" in cb_conf:
                log.info(f"Instantiating callback <{cb_conf._target_}>")
                callbacks.append(hydra.utils.instantiate(cb_conf))

    # Init Lightning loggers
    logger: List[LightningLoggerBase] = []
    if "logger" in config:
        for _, lg_conf in config.logger.items():
            if "_target_" in lg_conf:
                log.info(f"Instantiating logger <{lg_conf._target_}>")
                logger.append(hydra.utils.instantiate(lg_conf))

    # Init Lightning trainer
    log.info(f"Instantiating trainer <{config.trainer._target_}>")
    trainer: Trainer = hydra.utils.instantiate(
        config.trainer, callbacks=callbacks, logger=logger, _convert_="partial"
    )

    # Send some parameters from config to all lightning loggers
    log.info("Logging hyperparameters!")
    utils.log_hyperparameters(
        config=config,
        model=model,
        datamodule=datamodule,
        trainer=trainer,
        callbacks=callbacks,
        logger=logger,
    )

    # Train the model
    log.info("Starting training!")
    trainer.fit(model=model, datamodule=datamodule)

    # Get metric score for hyperparameter optimization
    score = trainer.callback_metrics.get(config.get("optimized_metric"))

    # Evaluate model on test set after training (using best checkpoint)
    if config.get("test_after_training") and not config.trainer.get("fast_dev_run"):
        log.info("Starting testing!")
        trainer.test(model=model, datamodule=datamodule, ckpt_path="best")

    # Make sure everything closed properly
    log.info("Finalizing!")
    utils.finish(
        config=config,
        model=model,
        datamodule=datamodule,
        trainer=trainer,
        callbacks=callbacks,
        logger=logger,
    )

    # Print path to best checkpoint
    if not config.trainer.get("fast_dev_run"):
        log.info(f"Best model ckpt at {trainer.checkpoint_callback.best_model_path}")

    # Return metric score for hyperparameter optimization
    return score
if __name__ == "__main__":
    # train() requires a Hydra-composed config, so calling train() bare would
    # fail; wrap it in a hydra.main entry point for direct execution. The
    # "configs/config.yaml" location is an assumption; adjust to the repo layout.
    main = hydra.main(config_path="configs/", config_name="config")(train)
    main()
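For quick experiments it can be handy to drive train() without the command line. The sketch below uses Hydra's Compose API to build the config in-process; the configs/config.yaml entry point, the train module import path, and the dataset path are assumptions for illustration, not details confirmed by this file.

# compose_and_train.py (hypothetical helper, not part of the repo)
import os

from hydra import compose, initialize

from train import train  # assumes train.py is importable from the working dir

# TRAIN_DATASET_PATH must be set before train() calls get_env(); path is made up
os.environ.setdefault("TRAIN_DATASET_PATH", "/data/deadtrees")

with initialize(config_path="configs"):
    cfg = compose(
        config_name="config",
        overrides=["trainer.fast_dev_run=True"],  # cheap smoke-test run
    )
    score = train(cfg)
    print(f"optimized metric: {score}")

With fast_dev_run=True, Lightning pushes a single batch through fit and skips testing and checkpointing, which makes this a convenient sanity check before launching a full run.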