New SG version to reflect bugfixes in DG (#1657)
* Update version

* Postpone removal of deprecated classes from 3.5.0 to 4.0.0

* Change removed_from version to 3.6.0

* Revert

* Revert
BloodAxe authored Nov 23, 2023
1 parent 26d62e0 commit 975a640
Showing 17 changed files with 34 additions and 35 deletions.
2 changes: 1 addition & 1 deletion notebooks/PTQ_and_QAT_for_classification.ipynb
@@ -61,7 +61,7 @@
},
"outputs": [],
"source": [
"!pip -qq install super-gradients==3.4.1 pytorch-quantization==2.1.2 --extra-index-url https://pypi.ngc.nvidia.com"
"!pip -qq install super-gradients==3.5.0 pytorch-quantization==2.1.2 --extra-index-url https://pypi.ngc.nvidia.com"
]
},
{
2 changes: 1 addition & 1 deletion notebooks/dataloader_adapter.ipynb
@@ -51,7 +51,7 @@
{
"cell_type": "code",
"source": [
"!pip install -q super-gradients==3.4.1"
"!pip install -q super-gradients==3.5.0"
],
"metadata": {
"id": "0puCRQGZSP8r",
2 changes: 1 addition & 1 deletion notebooks/detection_how_to_connect_custom_dataset.ipynb
@@ -90,7 +90,7 @@
},
"outputs": [],
"source": [
"! pip install -q super_gradients==3.4.1"
"! pip install -q super_gradients==3.5.0"
]
},
{
2 changes: 1 addition & 1 deletion
@@ -34,7 +34,7 @@
},
"outputs": [],
"source": [
"!pip install -qq super_gradients==3.4.1"
"!pip install -qq super_gradients==3.5.0"
]
},
{
2 changes: 1 addition & 1 deletion notebooks/quickstart_segmentation.ipynb
@@ -55,7 +55,7 @@
},
"outputs": [],
"source": [
"! pip install -qq super-gradients==3.4.1"
"! pip install -qq super-gradients==3.5.0"
]
},
{
2 changes: 1 addition & 1 deletion notebooks/segmentation_connect_custom_dataset.ipynb
@@ -141,7 +141,7 @@
}
],
"source": [
"! pip install -qq super-gradients==3.4.1"
"! pip install -qq super-gradients==3.5.0"
]
},
{
2 changes: 1 addition & 1 deletion notebooks/transfer_learning_classification.ipynb
@@ -59,7 +59,7 @@
}
],
"source": [
"! pip install -qq super-gradients==3.4.1"
"! pip install -qq super-gradients==3.5.0"
]
},
{
2 changes: 1 addition & 1 deletion notebooks/transfer_learning_semantic_segmentation.ipynb
@@ -149,7 +149,7 @@
}
],
"source": [
"! pip install -qq super-gradients==3.4.1"
"! pip install -qq super-gradients==3.5.0"
]
},
{
2 changes: 1 addition & 1 deletion notebooks/what_are_recipes_and_how_to_use.ipynb
@@ -41,7 +41,7 @@
{
"cell_type": "code",
"source": [
"!pip install -q super-gradients==3.4.1"
"!pip install -q super-gradients==3.5.0"
],
"metadata": {
"id": "8uZM-4va5Rpu",
2 changes: 1 addition & 1 deletion notebooks/yolo_nas_pose_eval_with_pycocotools.ipynb
@@ -36,7 +36,7 @@
"execution_count": null,
"outputs": [],
"source": [
"!pip install -qq super_gradients==3.4.1"
"!pip install -qq super_gradients==3.5.0"
],
"metadata": {
"collapsed": false
2 changes: 1 addition & 1 deletion src/super_gradients/__init__.py
@@ -1,4 +1,4 @@
__version__ = "3.4.1"
__version__ = "3.5.0"

from super_gradients.common import init_trainer, is_distributed, object_names
from super_gradients.training import losses, utils, datasets_utils, DataAugmentation, Trainer, KDTrainer, QATTrainer
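Since the canonical version string lives in super_gradients.__version__, downstream code can gate on it. A minimal sketch, assuming the third-party packaging library is available (the guard and its message are illustrative, not part of this commit):

    import super_gradients
    from packaging import version

    # Deprecated aliases touched by this commit are now scheduled for removal
    # in 3.6.0, so code that still imports them should refuse to run past that.
    if version.parse(super_gradients.__version__) >= version.parse("3.6.0"):
        raise RuntimeError("Pin super-gradients<3.6.0: this code uses aliases removed in 3.6.0")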
2 changes: 1 addition & 1 deletion
@@ -49,7 +49,7 @@
"execution_count": null,
"outputs": [],
"source": [
"!pip install -qq super_gradients==3.4.1"
"!pip install -qq super_gradients==3.5.0"
],
"metadata": {
"collapsed": false
2 changes: 1 addition & 1 deletion
@@ -58,7 +58,7 @@
"execution_count": 24,
"outputs": [],
"source": [
"!pip install -qq super-gradients==3.4.1"
"!pip install -qq super-gradients==3.5.0"
],
"metadata": {
"collapsed": false,
2 changes: 1 addition & 1 deletion
@@ -112,7 +112,7 @@ def forward(self, input, target, smooth_dist=None):
return loss, loss.unsqueeze(0).detach()


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=CrossEntropyLoss)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=CrossEntropyLoss)
@register_loss("LabelSmoothingCrossEntropyLoss")
class LabelSmoothingCrossEntropyLoss(CrossEntropyLoss):
...
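This and the remaining source files all make the same change: the removed_from field of super-gradients' @deprecated decorator is bumped from 3.5.0 to 3.6.0, postponing removal by one minor release. As a rough illustration of what such a decorator does, here is a hypothetical re-implementation (explanation only, not the library's actual code; only the parameter names deprecated_since, removed_from, target, and reason are taken from the diff):

    import functools
    import warnings

    def deprecated(deprecated_since: str, removed_from: str, target=None, reason: str = ""):
        def decorator(obj):
            # Build the warning text once; emit it on every use of the old name.
            message = f"{obj.__name__} is deprecated since {deprecated_since} and will be removed in {removed_from}."
            if target is not None:
                message += f" Use {target.__name__} instead."
            if reason:
                message += f" {reason}"

            @functools.wraps(obj)
            def wrapper(*args, **kwargs):
                warnings.warn(message, DeprecationWarning, stacklevel=2)
                return obj(*args, **kwargs)  # works for plain functions and for class instantiation alike

            return wrapper

        return decorator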
8 changes: 4 additions & 4 deletions src/super_gradients/training/models/__init__.py
@@ -135,7 +135,7 @@
from super_gradients.training.utils import make_divisible as _make_divisible_current_version, HpmStruct as CurrVersionHpmStruct


@deprecated(deprecated_since="3.1.0", removed_from="3.5.0", target=_make_divisible_current_version)
@deprecated(deprecated_since="3.1.0", removed_from="3.6.0", target=_make_divisible_current_version)
def make_divisible(x: int, divisor: int, ceil: bool = True) -> int:
"""
Returns x evenly divisible by divisor.
@@ -144,17 +144,17 @@ def make_divisible(x: int, divisor: int, ceil: bool = True) -> int:
return _make_divisible_current_version(x=x, divisor=divisor, ceil=ceil)


@deprecated(deprecated_since="3.1.0", removed_from="3.5.0", target=BasicResNetBlock, reason="This block was renamed to BasicResNetBlock for better clarity.")
@deprecated(deprecated_since="3.1.0", removed_from="3.6.0", target=BasicResNetBlock, reason="This block was renamed to BasicResNetBlock for better clarity.")
class BasicBlock(BasicResNetBlock):
...


@deprecated(deprecated_since="3.1.0", removed_from="3.5.0", target=NewBottleneck, reason="This block was renamed to BasicResNetBlock for better clarity.")
@deprecated(deprecated_since="3.1.0", removed_from="3.6.0", target=NewBottleneck, reason="This block was renamed to BasicResNetBlock for better clarity.")
class Bottleneck(NewBottleneck):
...


@deprecated(deprecated_since="3.1.0", removed_from="3.5.0", target=CurrVersionHpmStruct)
@deprecated(deprecated_since="3.1.0", removed_from="3.6.0", target=CurrVersionHpmStruct)
class HpmStruct(CurrVersionHpmStruct):
...

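With that pattern, a renamed block stays importable under its old name while steering callers to the new one. A hedged usage sketch for the make_divisible forwarder above (assuming the decorator reports through Python's standard warnings module; the library may log instead):

    import warnings

    from super_gradients.training.models import make_divisible  # deprecated forwarder

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        result = make_divisible(30, 8)  # delegates to the current implementation

    print(result)                            # 32: ceil(30 / 8) * 8 with the default ceil=True
    print([str(w.message) for w in caught])  # expect a notice naming 3.6.0 as the removal version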
21 changes: 10 additions & 11 deletions src/super_gradients/training/utils/callbacks/callbacks.py
@@ -315,17 +315,17 @@ def is_lr_scheduling_enabled(self, context):
return self.training_params.lr_warmup_epochs > 0 and self.training_params.lr_warmup_epochs >= context.epoch


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=LinearEpochLRWarmup)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=LinearEpochLRWarmup)
class EpochStepWarmupLRCallback(LinearEpochLRWarmup):
...


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=LinearEpochLRWarmup)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=LinearEpochLRWarmup)
class LinearLRWarmup(LinearEpochLRWarmup):
...


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=LinearEpochLRWarmup)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=LinearEpochLRWarmup)
class LinearStepWarmupLRCallback(LinearEpochLRWarmup):
...

@@ -407,7 +407,7 @@ def update_lr(self, optimizer, epoch, batch_idx=None):
param_group["lr"] = self.lr[param_group["name"]]


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=LinearBatchLRWarmup)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=LinearBatchLRWarmup)
class BatchStepLinearWarmupLRCallback(LinearBatchLRWarmup):
...

@@ -444,7 +444,7 @@ def is_lr_scheduling_enabled(self, context):
return self.training_params.lr_warmup_epochs <= context.epoch


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=StepLRScheduler)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=StepLRScheduler)
class StepLRCallback(StepLRScheduler):
...

@@ -471,7 +471,7 @@ def is_lr_scheduling_enabled(self, context):
return self.training_params.lr_warmup_epochs <= context.epoch < post_warmup_epochs


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=ExponentialLRScheduler)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=ExponentialLRScheduler)
class ExponentialLRCallback(ExponentialLRScheduler):
...

@@ -500,7 +500,7 @@ def is_lr_scheduling_enabled(self, context):
return self.training_params.lr_warmup_epochs <= context.epoch < post_warmup_epochs


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=PolyLRScheduler)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=PolyLRScheduler)
class PolyLRCallback(PolyLRScheduler):
...

@@ -543,7 +543,7 @@ def compute_learning_rate(cls, step: Union[float, np.ndarray], total_steps: floa
return lr * (1 - final_lr_ratio) + (initial_lr * final_lr_ratio)


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=CosineLRScheduler)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=CosineLRScheduler)
class CosineLRCallback(CosineLRScheduler):
...

@@ -554,7 +554,7 @@ class FunctionLRScheduler(LRCallbackBase):
Hard coded rate scheduling for user defined lr scheduling function.
"""

@deprecated(deprecated_since="3.2.0", removed_from="3.5.0", reason="This callback is deprecated and will be removed in future versions.")
@deprecated(deprecated_since="3.2.0", removed_from="3.6.0", reason="This callback is deprecated and will be removed in future versions.")
def __init__(self, max_epochs, lr_schedule_function, **kwargs):
super().__init__(Phase.TRAIN_BATCH_STEP, **kwargs)
assert callable(lr_schedule_function), "self.lr_function must be callable"
@@ -579,7 +579,7 @@ def perform_scheduling(self, context):
self.update_lr(context.optimizer, context.epoch, context.batch_idx)


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=FunctionLRScheduler)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=FunctionLRScheduler)
class FunctionLRCallback(FunctionLRScheduler):
...

@@ -1232,7 +1232,6 @@ def _on_batch_end(self, context: PhaseContext) -> None:
self.metric.reset()

else:
-
# FOR LOSS VALUES, GET THE RIGHT COMPONENT, DERIVE IT ON THE FIRST PASS
loss_tuple = context.loss_log_items
if self._first_call:
12 changes: 6 additions & 6 deletions src/super_gradients/training/utils/distributed_training_utils.py
@@ -154,7 +154,7 @@ def compute_precise_bn_stats(model: nn.Module, loader: torch.utils.data.DataLoad
bn.momentum = momentums[i]


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=_get_local_rank)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=_get_local_rank)
def get_local_rank():
"""
Returns the local rank if running in DDP, and 0 otherwise
@@ -163,12 +163,12 @@ def get_local_rank():
return _get_local_rank()


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=_is_ddp_subprocess)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=_is_ddp_subprocess)
def is_ddp_subprocess():
return _is_ddp_subprocess()


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=_get_world_size)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=_get_world_size)
def get_world_size() -> int:
"""
Returns the world size if running in DDP, and 1 otherwise
@@ -177,17 +177,17 @@ def get_world_size() -> int:
return _get_world_size()


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=_get_device_ids)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=_get_device_ids)
def get_device_ids() -> List[int]:
return _get_device_ids()


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=_count_used_devices)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=_count_used_devices)
def count_used_devices() -> int:
return _count_used_devices()


@deprecated(deprecated_since="3.2.1", removed_from="3.5.0", target=_require_ddp_setup)
@deprecated(deprecated_since="3.2.1", removed_from="3.6.0", target=_require_ddp_setup)
def require_ddp_setup() -> bool:
return _require_ddp_setup()

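Because removal is now slated for 3.6.0, a project migrating off these DDP forwarders can escalate the warnings to errors so CI flags every remaining call site before the break actually happens. A small sketch, again assuming the deprecation is reported via Python's warnings module:

    import warnings

    # Turn deprecation warnings into exceptions so old aliases fail loudly in tests.
    warnings.simplefilter("error", DeprecationWarning)

    from super_gradients.training.utils.distributed_training_utils import get_world_size

    get_world_size()  # would raise here if the deprecated forwarder calls warnings.warn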
