Register
Login
Resources
Docs Blog Datasets Glossary Case Studies Tutorials & Webinars
Product
Data Engine LLMs Platform Enterprise
Pricing Explore
Connect to our Discord channel

#204 Feature/sg 131 check env compliance

Merged
GitHub User merged 1 commit into Deci-AI:master from deci-ai:feature/SG-131_check_env_compliance
@@ -39,6 +39,10 @@ jobs:
       - deci-common/checkout_and_skip_build:
           check_version_file: true
       - deci-common/get_persisted_version_info
+      - run:
+          name: add requirements.txt to source code
+          command: |
+            cp requirements.txt src/super_gradients/requirements.txt
       - run:
           name: install python dependencies
           command: |
@@ -114,6 +118,10 @@ jobs:
       - deci-common/checkout_and_skip_build:
           check_version_file: true
 
+      - run:
+          name: add requirements.txt to source code
+          command: |
+            cp requirements.txt src/super_gradients/requirements.txt
       - run:
           name: edit package version
           command: |
Discard
@@ -28,4 +28,5 @@ pycocotools==2.0.4
 protobuf~=3.19.0
 deci-lab-client==2.38.0
 treelib==1.6.1
-termcolor==1.1.0
+termcolor==1.1.0
+packaging>=20.4
Discard
@@ -43,6 +43,7 @@ setup(
         'super_gradients.recipes': ['*.yaml', '**/*.yaml'],
         'super_gradients.common': ['auto_logging/auto_logging_conf.json'],
         'super_gradients.examples': ['*.ipynb', '**/*.ipynb'],
+        'super_gradients': ['requirements.txt'],
     },
     long_description=readme(),
     long_description_content_type="text/markdown"
Discard
@@ -3,7 +3,12 @@ from super_gradients.training import ARCHITECTURES, losses, utils, datasets_util
 from super_gradients.common import init_trainer, is_distributed
 from super_gradients.examples.train_from_recipe_example import train_from_recipe
 from super_gradients.examples.train_from_kd_recipe_example import train_from_kd_recipe
+from super_gradients.sanity_check import env_sanity_check
 
 __all__ = ['ARCHITECTURES', 'losses', 'utils', 'datasets_utils', 'DataAugmentation',
            'TestDatasetInterface', 'SgModel', 'KDModel', 'SegmentationTestDatasetInterface', 'DetectionTestDatasetInterface',
-           'ClassificationTestDatasetInterface', 'init_trainer', 'is_distributed', 'train_from_recipe', 'train_from_kd_recipe']
+           'ClassificationTestDatasetInterface', 'init_trainer', 'is_distributed', 'train_from_recipe', 'train_from_kd_recipe',
+           'env_sanity_check']
+
+
+env_sanity_check()
Discard
@@ -2,15 +2,17 @@ import logging
 import logging.config
 
 from super_gradients.common.auto_logging import AutoLoggerConfig
+from super_gradients.common.environment.environment_config import DEFAULT_LOGGING_LEVEL
 
 # Do not remove, it's necessary for the get_logger functionality.
 from cmreslogging.handlers import CMRESHandler
 
 
-def get_logger(logger_name: str, training_log_path=None, logs_dir_path=None) -> logging.Logger:
+def get_logger(logger_name: str, training_log_path=None, logs_dir_path=None, log_level=DEFAULT_LOGGING_LEVEL) -> logging.Logger:
     config_dict = AutoLoggerConfig.generate_config_for_module_name(module_name=logger_name,
                                                                    training_log_path=training_log_path,
-                                                                   logs_dir_path=logs_dir_path)
+                                                                   logs_dir_path=logs_dir_path,
+                                                                   log_level=log_level)
     logging.config.dictConfig(config_dict)
     logger: logging.Logger = logging.getLogger(logger_name)
     return logger
Discard
1
2
3
  1. from super_gradients.sanity_check.env_sanity_check import env_sanity_check
  2. __all__ = ['env_sanity_check']
Discard
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
  1. import logging
  2. import os
  3. import sys
  4. from pip._internal.operations.freeze import freeze
  5. from typing import List, Dict, Union, Tuple
  6. from pathlib import Path
  7. from packaging.version import Version
  8. from super_gradients.common.abstractions.abstract_logger import get_logger
  9. LIB_CHECK_IMPOSSIBLE_MSG = 'Library check is not supported when super_gradients installed through "git+https://github.com/..." command'
  10. logger = get_logger(__name__, log_level=logging.DEBUG)
  11. def get_requirements_path() -> Union[None, Path]:
  12. """Get the path of requirement.txt from the root if exist.
  13. There is a difference when installed from artifact or locally.
  14. - In the first case, requirements.txt is copied to the package during the CI.
  15. - In the second case, requirements.txt in the root of the project.
  16. Note: This is because when installed from artifact only the source code is accessible, so requirements.txt has to be
  17. copied to the package root (./src/super_gradients). This is automatically done with the CI to make sure that
  18. in the github we only have 1 source of truth for requirements.txt. The consequence being that when the code
  19. is copied/cloned from github, the requirements.txt was not copied to the super_gradients package root, so we
  20. need to go to the project root (.) to find it.
  21. """
  22. file_path = Path(__file__) # super-gradients/src/super_gradients/sanity_check/env_sanity_check.py
  23. package_root = file_path.parent.parent # moving to super-gradients/src/super_gradients
  24. project_root = package_root.parent.parent # moving to super-gradients
  25. # If installed from artifact, requirements.txt is in package_root, if installed locally it is in project_root
  26. if (package_root / "requirements.txt").exists():
  27. return package_root / "requirements.txt"
  28. elif (project_root / "requirements.txt").exists():
  29. return project_root / "requirements.txt"
  30. else:
  31. return None # Could happen when installed through github directly ("pip install git+https://github.com/...")
  32. def get_installed_libs_with_version() -> Dict[str, str]:
  33. """Get all the installed libraries, and outputs it as a dict: lib -> version"""
  34. installed_libs_with_version = {}
  35. for lib_with_version in freeze():
  36. if "==" in lib_with_version:
  37. lib, version = lib_with_version.split("==")
  38. installed_libs_with_version[lib.lower()] = version
  39. return installed_libs_with_version
  40. def verify_installed_libraries() -> List[str]:
  41. """Check that all installed libs respect the requirement.txt"""
  42. requirements_path = get_requirements_path()
  43. if requirements_path is None:
  44. return [LIB_CHECK_IMPOSSIBLE_MSG]
  45. with open(requirements_path, "r") as f:
  46. requirements = f.readlines()
  47. installed_libs_with_version = get_installed_libs_with_version()
  48. errors = []
  49. for requirement in requirements:
  50. if ">=" in requirement:
  51. constraint = ">="
  52. elif "~=" in requirement:
  53. constraint = "~="
  54. elif "==" in requirement:
  55. constraint = "=="
  56. else:
  57. continue
  58. lib, required_version_str = requirement.split(constraint)
  59. if lib.lower() not in installed_libs_with_version.keys():
  60. errors.append(f"{lib} required but not found")
  61. continue
  62. installed_version_str = installed_libs_with_version[lib.lower()]
  63. installed_version, required_version = Version(installed_version_str), Version(required_version_str)
  64. is_constraint_respected = {
  65. ">=": installed_version >= required_version,
  66. "~=": installed_version.major == required_version.major and installed_version.minor == required_version.minor and installed_version.micro >= required_version.micro,
  67. "==": installed_version == required_version
  68. }
  69. if not is_constraint_respected[constraint]:
  70. errors.append(
  71. f"{lib} is installed with version {installed_version} which does not satisfy {requirement} (based on {requirements_path})")
  72. return errors
  73. def verify_os() -> List[str]:
  74. """Verifying operating system name and platform"""
  75. if 'linux' not in sys.platform.lower():
  76. return ['Deci officially supports only Linux kernels. Some features may not work as expected.']
  77. return []
  78. def env_sanity_check():
  79. """Run the sanity check tests and log everything that does not meet requirements"""
  80. display_sanity_check = os.getenv("DISPLAY_SANITY_CHECK", "False") == "True"
  81. stdout_log_level = logging.INFO if display_sanity_check else logging.DEBUG
  82. logger.setLevel(logging.DEBUG) # We want to log everything regardless of DISPLAY_SANITY_CHECK
  83. requirement_checkers = {
  84. 'operating_system': verify_os,
  85. 'libraries': verify_installed_libraries,
  86. }
  87. logger.log(stdout_log_level, 'SuperGradients Sanity Check Started')
  88. logger.log(stdout_log_level, f'Checking the following components: {list(requirement_checkers.keys())}')
  89. logger.log(stdout_log_level, '_' * 20)
  90. lib_check_is_impossible = False
  91. sanity_check_errors = {}
  92. for test_name, test_function in requirement_checkers.items():
  93. logger.log(stdout_log_level, f"Verifying {test_name}...")
  94. errors = test_function()
  95. if errors == [LIB_CHECK_IMPOSSIBLE_MSG]:
  96. lib_check_is_impossible = True
  97. logger.log(stdout_log_level, LIB_CHECK_IMPOSSIBLE_MSG)
  98. elif len(errors) > 0:
  99. sanity_check_errors[test_name] = errors
  100. for error in errors:
  101. logger.log(stdout_log_level, f"Failed to verify {test_name}: {error}")
  102. else:
  103. logger.log(stdout_log_level, f'{test_name} OK')
  104. logger.log(stdout_log_level, '_' * 20)
  105. if sanity_check_errors:
  106. logger.log(stdout_log_level,
  107. f'The current environment does not meet Deci\'s needs, errors found in: {", ".join(list(sanity_check_errors.keys()))}')
  108. elif lib_check_is_impossible:
  109. logger.log(stdout_log_level, LIB_CHECK_IMPOSSIBLE_MSG)
  110. else:
  111. logger.log(stdout_log_level, 'Great, Looks like the current environment meet\'s Deci\'s requirements!')
  112. # The last message needs to be displayed independently of DISPLAY_SANITY_CHECK
  113. if display_sanity_check:
  114. logger.info(f'** This check can be hidden by setting the env variable DISPLAY_SANITY_CHECK=False prior to import. **')
  115. else:
  116. logger.info(f'** A sanity check is done when importing super_gradients for the first time. **\n'
  117. f'-> You can see the details by setting the env variable DISPLAY_SANITY_CHECK=True prior to import.')
  118. if __name__ == '__main__':
  119. env_sanity_check()
Discard