from super_gradients.modules.anti_alias import AntiAliasDownsample
from super_gradients.modules.pixel_shuffle import PixelShuffle
from super_gradients.modules.pose_estimation_modules import LightweightDEKRHead
from super_gradients.modules.conv_bn_act_block import ConvBNAct, Conv
from super_gradients.modules.conv_bn_relu_block import ConvBNReLU
from super_gradients.modules.repvgg_block import RepVGGBlock
from super_gradients.modules.qarepvgg_block import QARepVGGBlock
from super_gradients.modules.se_blocks import SEBlock, EffectiveSEBlock
from super_gradients.modules.skip_connections import (
    Residual,
    SkipConnection,
    CrossModelSkipConnection,
    BackboneInternalSkipConnection,
    HeadInternalSkipConnection,
)
from super_gradients.common.abstractions.abstract_logger import get_logger
from super_gradients.common.registry.registry import ALL_DETECTION_MODULES
from super_gradients.modules.base_modules import BaseDetectionModule
from super_gradients.modules.detection_modules import (
    PANNeck,
    NHeads,
    MultiOutputBackbone,
    NStageBackbone,
    MobileNetV1Backbone,
    MobileNetV2Backbone,
    SSDNeck,
    SSDInvertedResidualNeck,
    SSDBottleneckNeck,
    SSDHead,
)
from super_gradients.module_interfaces import SupportsReplaceNumClasses

__all__ = [
    "BaseDetectionModule",
    "ALL_DETECTION_MODULES",
    "PixelShuffle",
    "AntiAliasDownsample",
    "Conv",
    "ConvBNAct",
    "ConvBNReLU",
    "RepVGGBlock",
    "QARepVGGBlock",
    "SEBlock",
    "EffectiveSEBlock",
    "Residual",
    "SkipConnection",
    "CrossModelSkipConnection",
    "BackboneInternalSkipConnection",
    "HeadInternalSkipConnection",
    "LightweightDEKRHead",
    "PANNeck",
    "NHeads",
    "MultiOutputBackbone",
    "NStageBackbone",
    "MobileNetV1Backbone",
    "MobileNetV2Backbone",
    "SSDNeck",
    "SSDInvertedResidualNeck",
    "SSDBottleneckNeck",
    "SSDHead",
    "SupportsReplaceNumClasses",
]

logger = get_logger(__name__)
# The quantized module variants depend on the optional pytorch_quantization
# package; if it is missing, they are skipped and __all__ is left unchanged.
try:
    # flake8 respects only the first occurrence of __all__ defined in the module's root
    from .quantization import QuantBottleneck  # noqa: F401
    from .quantization import QuantResidual  # noqa: F401
    from .quantization import QuantSkipConnection  # noqa: F401
    from .quantization import QuantCrossModelSkipConnection  # noqa: F401
    from .quantization import QuantBackboneInternalSkipConnection  # noqa: F401
    from .quantization import QuantHeadInternalSkipConnection  # noqa: F401
    from .quantization import QuantSTDCBlock  # noqa: F401
    from .quantization import QuantAttentionRefinementModule  # noqa: F401
    from .quantization import QuantFeatureFusionModule  # noqa: F401
    from .quantization import QuantContextPath  # noqa: F401

    quant_extensions = [
        "QuantBottleneck",
        "QuantResidual",
        "QuantSkipConnection",
        "QuantCrossModelSkipConnection",
        "QuantBackboneInternalSkipConnection",
        "QuantHeadInternalSkipConnection",
        "QuantSTDCBlock",
        "QuantAttentionRefinementModule",
        "QuantFeatureFusionModule",
        "QuantContextPath",
    ]
except (ImportError, NameError, ModuleNotFoundError) as import_err:
    logger.debug(f"Failed to import pytorch_quantization: {import_err}")
    quant_extensions = None

if quant_extensions is not None:
    __all__.extend(quant_extensions)
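Because this __init__ re-exports every name listed in __all__, downstream code can import the building blocks directly from super_gradients.modules. A minimal usage sketch follows, assuming super_gradients and torch are installed; the ConvBNReLU constructor is assumed to mirror nn.Conv2d's arguments (check conv_bn_relu_block for the exact signature):

    import torch
    from super_gradients.modules import ConvBNReLU

    # Conv -> BatchNorm -> ReLU fused into one module.
    # Argument names follow nn.Conv2d and are an assumption, not confirmed by this file.
    block = ConvBNReLU(in_channels=3, out_channels=16, kernel_size=3, stride=1, padding=1)
    out = block(torch.randn(1, 3, 64, 64))  # expected output shape: (1, 16, 64, 64)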