#821 Feature/sg 735 deci yolo qs

Merged
Ghost merged 1 commit into Deci-AI:feature/SG-736_deci_yolo_rf100 from deci-ai:feature/SG-735_deci_yolo_qs
import os
from pathlib import Path
from typing import List, Optional, Union, Dict, Any

import hydra
import pkg_resources
from hydra import initialize_config_dir, compose
from hydra.core.global_hydra import GlobalHydra
from omegaconf import OmegaConf, open_dict, DictConfig

from super_gradients.common.environment.path_utils import normalize_path
from super_gradients.common.environment.checkpoints_dir_utils import get_checkpoints_dir_path

class RecipeNotFoundError(Exception):
    def __init__(self, config_name: str, config_dir: str, recipes_dir_path, config_type: str = "", postfix_err_msg: Optional[str] = None):
        config_dir = os.path.abspath(config_dir)
        message = f"Recipe '{os.path.join(config_dir, config_type, config_name.replace('.yaml', ''))}.yaml' was not found.\n"

        if recipes_dir_path is None:
            message += "Note: If you are NOT loading a built-in SuperGradients recipe, please set recipes_dir_path=<path-to-your-recipe-directory>.\n"

        if postfix_err_msg:
            message += postfix_err_msg

        self.config_name = config_name
        self.config_dir = config_dir
        self.recipes_dir_path = recipes_dir_path
        self.message = message
        super().__init__(self.message)

def load_recipe(config_name: str, recipes_dir_path: Optional[str] = None, overrides: Optional[list] = None) -> DictConfig:
    """Load a single file from the recipe directory.

    :param config_name:         Name of the yaml to load (e.g. "cifar10_resnet")
    :param recipes_dir_path:    Optional. Main directory where all recipes are stored (e.g. ../super_gradients/recipes).
                                This directory should include the config file named after config_name.
    :param overrides:           List of hydra overrides for the config file.
    """
    GlobalHydra.instance().clear()

    config_dir = recipes_dir_path or pkg_resources.resource_filename("super_gradients.recipes", "")

    with initialize_config_dir(config_dir=normalize_path(config_dir), version_base="1.2"):
        try:
            cfg = compose(config_name=normalize_path(config_name), overrides=overrides if overrides else [])
        except hydra.errors.MissingConfigException:
            raise RecipeNotFoundError(config_name=config_name, config_dir=config_dir, recipes_dir_path=recipes_dir_path)
    return cfg
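
# Usage sketch (illustrative, not part of this module): load a built-in recipe
# by name and apply a hydra-style override. "cifar10_resnet" is the example
# name from the docstring above; the override key assumes that recipe defines
# training_hyperparams.max_epochs.
#
#   cfg = load_recipe("cifar10_resnet", overrides=["training_hyperparams.max_epochs=1"])
#   print(cfg.training_hyperparams.max_epochs)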

def load_experiment_cfg(experiment_name: str, ckpt_root_dir: Optional[str] = None) -> DictConfig:
    """Load the hydra config associated with a specific experiment.

    Background: every time an experiment is launched from a recipe, all the hydra config params are stored in a hidden ".hydra" folder.
    This hidden folder is used here to recreate the exact config that was used to launch the experiment (including hydra overrides).
    The motivation is to be able to resume or evaluate an experiment with the exact same config as when the experiment was initially
    started, regardless of any changes introduced to the recipe since then, and while reusing the same overrides.

    :param experiment_name: Name of the experiment to resume
    :param ckpt_root_dir:   Directory including the checkpoints
    :return:                The config that was used for that experiment
    """
    if not experiment_name:
        raise ValueError(f"experiment_name should be a non-empty string but got: {experiment_name}")

    checkpoints_dir_path = Path(get_checkpoints_dir_path(experiment_name, ckpt_root_dir))
    if not checkpoints_dir_path.exists():
        raise FileNotFoundError(f"Impossible to find checkpoint dir ({checkpoints_dir_path})")

    resume_dir = Path(checkpoints_dir_path) / ".hydra"
    if not resume_dir.exists():
        raise FileNotFoundError(f"The checkpoint directory {checkpoints_dir_path} does not include .hydra artifacts to resume the experiment.")

    # Load the overrides that were used in the previous run
    overrides_cfg = list(OmegaConf.load(resume_dir / "overrides.yaml"))

    cfg = load_recipe(config_name="config.yaml", recipes_dir_path=normalize_path(str(resume_dir)), overrides=overrides_cfg)
    return cfg
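
# Usage sketch (illustrative): recover the exact config of a past run so it can
# be resumed or re-evaluated. "my_experiment" is a placeholder name.
#
#   cfg = load_experiment_cfg(experiment_name="my_experiment")
#   print(cfg.training_hyperparams.max_epochs)  # same value the original run used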

def add_params_to_cfg(cfg: DictConfig, params: List[str]):
    """Add parameters to an existing config.

    :param cfg:     OmegaConf config
    :param params:  List of parameters to add, in dotlist format (e.g. ["training_hyperparams.resume=True"])
    """
    new_cfg = OmegaConf.from_dotlist(params)
    override_cfg(cfg, new_cfg)
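
# Usage sketch (illustrative): each dotlist entry becomes a nested key, so the
# line below sets cfg.training_hyperparams.resume to True.
#
#   cfg = load_recipe("cifar10_resnet")
#   add_params_to_cfg(cfg, params=["training_hyperparams.resume=True"])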

def load_recipe_from_subconfig(config_name: str, config_type: str, recipes_dir_path: Optional[str] = None, overrides: Optional[list] = None) -> DictConfig:
    """Load a single file (e.g. "resnet18_cifar_arch_params") stored in a subconfig folder (e.g. "arch_params") of the recipe directory.

    :param config_name:         Name of the yaml to load (e.g. "resnet18_cifar_arch_params")
    :param config_type:         Type of the subconfig (e.g. "arch_params")
    :param recipes_dir_path:    Optional. Main directory where all recipes are stored (e.g. ../super_gradients/recipes).
                                This directory should include a folder corresponding to the subconfig,
                                which itself should include the config file named after config_name.
    :param overrides:           List of hydra overrides for the config file.
    """
    try:
        cfg = load_recipe(config_name=os.path.join(config_type, config_name), recipes_dir_path=recipes_dir_path, overrides=overrides)
    except RecipeNotFoundError as e:
        postfix_err_msg = (
            f"Note: If your recipe is saved at '{os.path.join(e.config_dir, config_name.replace('.yaml', ''))}.yaml', you can load it with load_recipe(...).\n"
        )
        raise RecipeNotFoundError(
            config_name=config_name,
            config_dir=e.config_dir,
            config_type=config_type,
            recipes_dir_path=recipes_dir_path,
            postfix_err_msg=postfix_err_msg,
        )

    # Because of the way we load the subconfig, cfg starts with a single key corresponding to the config type
    # (arch_params, ...), and we don't want that extra level of nesting.
    cfg = cfg[config_type]
    return cfg
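
# Usage sketch (illustrative): the typed helpers below are thin wrappers around
# this function; calling it directly is equivalent:
#
#   arch_params = load_recipe_from_subconfig("resnet18_cifar_arch_params", config_type="arch_params")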

def load_arch_params(config_name: str, recipes_dir_path: Optional[str] = None, overrides: Optional[list] = None) -> DictConfig:
    """Load a single arch_params file.

    :param config_name:         Name of the yaml to load (e.g. "resnet18_cifar_arch_params")
    :param recipes_dir_path:    Optional. Main directory where all recipes are stored (e.g. ../super_gradients/recipes).
                                This directory should include an "arch_params" folder,
                                which itself should include the config file named after config_name.
    :param overrides:           List of hydra overrides for the config file.
    """
    return load_recipe_from_subconfig(config_name=config_name, recipes_dir_path=recipes_dir_path, overrides=overrides, config_type="arch_params")

def load_training_hyperparams(config_name: str, recipes_dir_path: Optional[str] = None, overrides: Optional[list] = None) -> DictConfig:
    """Load a single training_hyperparams file.

    :param config_name:         Name of the yaml to load (e.g. "cifar10_resnet_train_params")
    :param recipes_dir_path:    Optional. Main directory where all recipes are stored (e.g. ../super_gradients/recipes).
                                This directory should include a "training_hyperparams" folder,
                                which itself should include the config file named after config_name.
    :param overrides:           List of hydra overrides for the config file.
    """
    return load_recipe_from_subconfig(config_name=config_name, recipes_dir_path=recipes_dir_path, overrides=overrides, config_type="training_hyperparams")

def load_dataset_params(config_name: str, recipes_dir_path: Optional[str] = None, overrides: Optional[list] = None) -> DictConfig:
    """Load a single dataset_params file.

    :param config_name:         Name of the yaml to load (e.g. "cifar10_dataset_params")
    :param recipes_dir_path:    Optional. Main directory where all recipes are stored (e.g. ../super_gradients/recipes).
                                This directory should include a "dataset_params" folder,
                                which itself should include the config file named after config_name.
    :param overrides:           List of hydra overrides for the config file.
    """
    return load_recipe_from_subconfig(config_name=config_name, recipes_dir_path=recipes_dir_path, overrides=overrides, config_type="dataset_params")
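
# Usage sketch (illustrative): each typed helper reads from the matching
# subfolder of the recipe directory, using the example names from the
# docstrings above.
#
#   arch_params = load_arch_params("resnet18_cifar_arch_params")
#   training_hyperparams = load_training_hyperparams("cifar10_resnet_train_params")
#   dataset_params = load_dataset_params("cifar10_dataset_params")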

def override_cfg(cfg: DictConfig, overrides: Union[DictConfig, Dict[str, Any]]) -> None:
    """Override a config in place with a dictionary-like object of overrides.

    :param cfg:         OmegaConf config
    :param overrides:   Dictionary-like object that will be used to override cfg
    """
    with open_dict(cfg):  # Required to be able to add new fields to the existing config
        cfg.merge_with(overrides)
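
A minimal end-to-end sketch of how these helpers compose (the recipe name and override keys are taken from the docstrings above; whether a given SuperGradients release ships them is an assumption, so treat this as a sketch rather than a definitive workflow):

    from omegaconf import OmegaConf

    cfg = load_recipe("cifar10_resnet")                                   # load a built-in recipe
    add_params_to_cfg(cfg, params=["training_hyperparams.resume=True"])   # dotlist-style override
    override_cfg(cfg, {"experiment_name": "cifar10_resnet_example"})      # dict merge; open_dict allows the new key
    print(OmegaConf.to_yaml(cfg))                                         # inspect the final config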