pose_estimation_models_test.py

import os
import unittest

import torch
from tqdm import tqdm

from super_gradients.training import models
from super_gradients.training.dataloaders.dataloaders import get_data_loader
from super_gradients.training.datasets.pose_estimation_datasets import COCOKeypointsDataset
from super_gradients.training.metrics import PoseEstimationMetrics
from super_gradients.training.models.pose_estimation_models.dekr_hrnet import DEKRWrapper, DEKRHorisontalFlipWrapper
from super_gradients.training.utils import DEKRPoseEstimationDecodeCallback
from super_gradients.training.utils.pose_estimation import RescoringPoseEstimationDecodeCallback


class PoseEstimationModelsIntegrationTest(unittest.TestCase):
    def setUp(self):
        # Per-joint OKS sigmas and horizontal-flip index mapping for the 17 COCO keypoints
        self.oks_sigmas = [0.026, 0.025, 0.025, 0.035, 0.035, 0.079, 0.079, 0.072, 0.072, 0.062, 0.062, 0.107, 0.107, 0.087, 0.087, 0.089, 0.089]
        self.flip_indexes = [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15]
        # For easy testing on a local machine - set this environment variable to your own COCO dataset location
        self.data_dir = os.environ.get("SUPER_GRADIENTS_COCO_DATASET_DIR", "/data/coco")

    def test_dekr_model(self):
        # Pretrained DEKR W32 model evaluated on COCO keypoints without test-time augmentation
        val_loader = get_data_loader(
            "coco_pose_estimation_dekr_dataset_params",
            COCOKeypointsDataset,
            train=False,
            dataset_params=dict(data_dir=self.data_dir),
            dataloader_params=dict(num_workers=0),
        )

        model = models.get("dekr_w32_no_dc", pretrained_weights="coco_pose")
        model = DEKRWrapper(model, apply_sigmoid=True).cuda().eval()

        post_prediction_callback = DEKRPoseEstimationDecodeCallback(
            output_stride=4, max_num_people=30, apply_sigmoid=False, keypoint_threshold=0.05, nms_threshold=0.05, nms_num_threshold=8
        )
        post_prediction_callback.apply_sigmoid = False

        metric = PoseEstimationMetrics(
            post_prediction_callback=post_prediction_callback,
            max_objects_per_image=post_prediction_callback.max_num_people,
            num_joints=val_loader.dataset.num_joints,
            oks_sigmas=self.oks_sigmas,
        )

        for inputs, targets, extras in tqdm(val_loader):
            with torch.no_grad(), torch.cuda.amp.autocast(True):
                predictions = model(inputs.cuda(non_blocking=True))
                metric.update(predictions, targets, **extras)

        stats = metric.compute()
        self.assertAlmostEqual(stats["AP"], 0.6308, delta=0.05)

    def test_dekr_model_with_tta(self):
        # Same model evaluated with horizontal-flip test-time augmentation
        val_loader = get_data_loader(
            "coco_pose_estimation_dekr_dataset_params",
            COCOKeypointsDataset,
            train=False,
            dataset_params=dict(data_dir=self.data_dir),
            dataloader_params=dict(num_workers=0),
        )

        model = models.get("dekr_w32_no_dc", pretrained_weights="coco_pose")
        model = DEKRHorisontalFlipWrapper(model, self.flip_indexes, apply_sigmoid=True).cuda().eval()

        post_prediction_callback = DEKRPoseEstimationDecodeCallback(
            output_stride=4, max_num_people=30, apply_sigmoid=False, keypoint_threshold=0.05, nms_threshold=0.05, nms_num_threshold=8
        )

        metric = PoseEstimationMetrics(
            post_prediction_callback=post_prediction_callback,
            max_objects_per_image=post_prediction_callback.max_num_people,
            num_joints=val_loader.dataset.num_joints,
            oks_sigmas=self.oks_sigmas,
        )

        for inputs, targets, extras in tqdm(val_loader):
            with torch.no_grad(), torch.cuda.amp.autocast(True):
                predictions = model(inputs.cuda(non_blocking=True))
                metric.update(predictions, targets, **extras)

        stats = metric.compute()
        self.assertAlmostEqual(stats["AP"], 0.6490, delta=0.05)

    def test_dekr_model_with_rescoring(self):
        # Flip-TTA model followed by a learned pose-rescoring model
        val_loader = get_data_loader(
            "coco_pose_estimation_dekr_dataset_params",
            COCOKeypointsDataset,
            train=False,
            dataset_params=dict(data_dir=self.data_dir),
            dataloader_params=dict(batch_size=1, num_workers=0),
        )

        model = models.get("dekr_w32_no_dc", pretrained_weights="coco_pose")
        model = DEKRHorisontalFlipWrapper(model, self.flip_indexes, apply_sigmoid=True).cuda().eval()

        rescoring = models.get("pose_rescoring_coco", pretrained_weights="coco_pose").cuda().eval()

        post_prediction_callback = DEKRPoseEstimationDecodeCallback(
            output_stride=4, max_num_people=30, apply_sigmoid=False, keypoint_threshold=0.05, nms_threshold=0.05, nms_num_threshold=8
        )

        metric = PoseEstimationMetrics(
            post_prediction_callback=RescoringPoseEstimationDecodeCallback(apply_sigmoid=True),
            max_objects_per_image=post_prediction_callback.max_num_people,
            num_joints=val_loader.dataset.num_joints,
            oks_sigmas=self.oks_sigmas,
        )

        for inputs, targets, extras in tqdm(val_loader):
            with torch.no_grad(), torch.cuda.amp.autocast(True):
                raw_predictions = model(inputs.cuda(non_blocking=True))
                [predictions] = post_prediction_callback(raw_predictions)
                all_poses, new_scores = rescoring(torch.tensor(predictions.poses).cuda())
                metric.update(preds=(all_poses, new_scores), target=targets, **extras)

        stats = metric.compute()
        self.assertAlmostEqual(stats["AP"], 0.6734, delta=0.05)


if __name__ == "__main__":
    unittest.main()
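The tests read the COCO dataset location from the SUPER_GRADIENTS_COCO_DATASET_DIR environment variable set in setUp. A minimal sketch of running them against a local dataset copy, assuming the file above is saved as pose_estimation_models_test.py on the Python path and a CUDA-capable GPU is available (the dataset path and helper script name below are placeholders, not part of the original file):

# run_pose_estimation_tests.py - hypothetical helper, not part of the test file above
import os
import unittest

# Point the tests at a local COCO copy instead of the default /data/coco (placeholder path).
os.environ["SUPER_GRADIENTS_COCO_DATASET_DIR"] = "/path/to/coco"

# Load the test module by name and run its test cases; a GPU is required since the models are moved to .cuda().
suite = unittest.defaultTestLoader.loadTestsFromName("pose_estimation_models_test")
unittest.TextTestRunner(verbosity=2).run(suite)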