train_logging_test.py
import os
import shutil
import logging
import logging.handlers
import unittest

from super_gradients import Trainer, ClassificationTestDatasetInterface
from super_gradients.training.metrics import Accuracy, Top5
from super_gradients.training.models import ResNet18
from super_gradients.common.abstractions.abstract_logger import get_logger


class SgTrainerLoggingTest(unittest.TestCase):
    def test_train_logging(self):
        # Train a small ResNet18 for two epochs with "save_full_train_log" enabled,
        # then verify that the full train log was written and that the expected
        # handlers are attached to the root logger.
        trainer = Trainer("test_train_with_full_log", model_checkpoints_location='local')

        dataset_params = {"batch_size": 10}
        dataset = ClassificationTestDatasetInterface(dataset_params=dataset_params)
        trainer.connect_dataset_interface(dataset)

        net = ResNet18(num_classes=5, arch_params={})
        train_params = {"max_epochs": 2, "lr_updates": [1], "lr_decay_factor": 0.1, "lr_mode": "step",
                        "lr_warmup_epochs": 0, "initial_lr": 0.1, "loss": "cross_entropy", "optimizer": "SGD",
                        "criterion_params": {}, "optimizer_params": {"weight_decay": 1e-4, "momentum": 0.9},
                        "train_metrics_list": [Accuracy(), Top5()], "valid_metrics_list": [Accuracy(), Top5()],
                        "loss_logging_items_names": ["Loss"], "metric_to_watch": "Accuracy",
                        "greater_metric_to_watch_is_better": True,
                        "save_full_train_log": True}

        trainer.train(model=net, training_params=train_params)

        # The full train log is written alongside the trainer's regular log file.
        logfile_path = trainer.log_file.replace('.txt', 'full_train_log.log')
        assert os.path.exists(logfile_path) and os.path.getsize(logfile_path) > 0

        # The root logger should hold a rotating file handler pointing at the log
        # file as well as a console stream handler.
        root_logger_handlers = logging.root.handlers
        assert any(isinstance(handler, logging.handlers.RotatingFileHandler) and handler.baseFilename == logfile_path
                   for handler in root_logger_handlers)
        assert any(isinstance(handler, logging.StreamHandler) and handler.name == 'console'
                   for handler in root_logger_handlers)

    def test_logger_with_non_existing_deci_logs_dir(self):
        # get_logger() should still attach a console handler to the root logger
        # even when the requested logs directory does not exist yet.
        user_dir = os.path.expanduser(r"~")
        logs_dir_path = os.path.join(user_dir, 'non_existing_deci_logs_dir')
        if os.path.exists(logs_dir_path):
            shutil.rmtree(logs_dir_path)

        module_name = 'super_gradients.trainer.sg_trainer'
        _ = get_logger(module_name, training_log_path=None, logs_dir_path=logs_dir_path)
        root_logger_handlers = logging.root.handlers
        assert any(isinstance(handler, logging.StreamHandler) and handler.name == 'console'
                   for handler in root_logger_handlers)


if __name__ == '__main__':
    unittest.main()
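Because the module ends with unittest.main(), it can be run directly; a minimal way to execute it locally (assuming the super-gradients package and its PyTorch dependencies are installed) is:

    python train_logging_test.py

The standard test runner can also pick it up from the file's directory with python -m unittest train_logging_test.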