[legacy] clean up legacy code (#4743)
* [legacy] remove outdated codes of pipeline (#4692)
* [legacy] remove cli of benchmark and update optim (#4690)
* [legacy] remove cli of benchmark and update optim
* [doc] fix cli doc test
* [legacy] fix engine clip grad norm
* [legacy] remove outdated colo tensor (#4694)
* [legacy] remove outdated colo tensor
* [test] fix test import
* [legacy] move outdated zero to legacy (#4696)
* [legacy] clean up utils (#4700)
* [legacy] clean up utils
* [example] update examples
* [legacy] clean up amp
* [legacy] fix amp module
* [legacy] clean up gpc (#4742)
* [legacy] clean up context
* [legacy] clean core, constants and global vars
* [legacy] refactor initialize
* [example] fix examples ci
* [example] fix examples ci
* [legacy] fix tests
* [example] fix gpt example
* [example] fix examples ci
* [devops] fix ci installation
* [example] fix examples ci
Showing 342 changed files with 2,917 additions and 4,180 deletions.
@@ -1,54 +0,0 @@
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import torch.nn as nn
from torch.nn.modules.loss import _Loss
from torch.optim import Optimizer

from colossalai.context import Config

from .amp_type import AMP_TYPE
from .apex_amp import convert_to_apex_amp
from .naive_amp import convert_to_naive_amp
from .torch_amp import convert_to_torch_amp

__all__ = ['convert_to_amp', 'convert_to_naive_amp', 'convert_to_apex_amp', 'convert_to_torch_amp', 'AMP_TYPE']


def convert_to_amp(model: nn.Module, optimizer: Optimizer, criterion: _Loss, mode: AMP_TYPE, amp_config: Config = None):
    """A helper function to wrap training components with AMP modules.

    Args:
        model (:class:`torch.nn.Module`): your model object.
        optimizer (:class:`torch.optim.Optimizer`): your optimizer object.
        criterion (:class:`torch.nn.modules.loss._Loss`): your loss function object.
        mode (:class:`colossalai.amp.AMP_TYPE`): amp mode.
        amp_config (Union[:class:`colossalai.context.Config`, dict]): configuration for different amp modes.

    Returns:
        A tuple (model, optimizer, criterion).

    Note:
        ``amp_config`` may vary depending on the mode you choose. You should check the corresponding amp mode
        for more details about ``amp_config``.
        For ``apex_amp``, please check
        `apex_amp config <https://nvidia.github.io/apex/amp.html?highlight=apex%20amp>`_.
        For ``naive_amp``, please check
        `naive_amp config <https://github.com/hpcaitech/ColossalAI/blob/main/colossalai/amp/naive_amp/_fp16_optimizer.py#L42>`_.
        For ``torch_amp``, please check
        `torch_amp config <https://github.com/pytorch/pytorch/blob/master/torch/cuda/amp/grad_scaler.py#L97>`_.
    """
    assert isinstance(mode, AMP_TYPE), \
        f'expected the argument mode to be AMP_TYPE, but got {type(mode)}'

    if amp_config is None:
        amp_config = Config()

    if mode == AMP_TYPE.TORCH:
        model, optimizer, criterion = convert_to_torch_amp(model, optimizer, criterion, amp_config)
    elif mode == AMP_TYPE.APEX:
        model, optimizer = convert_to_apex_amp(model, optimizer, amp_config)
    elif mode == AMP_TYPE.NAIVE:
        model, optimizer = convert_to_naive_amp(model, optimizer, amp_config)

    return model, optimizer, criterion
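For context, below is a minimal, hypothetical usage sketch of the deleted `convert_to_amp` helper. It is illustrative only: it assumes the pre-cleanup `colossalai.amp` import path that this commit removes, and it uses a placeholder model, optimizer, and loss.

# Hypothetical sketch of the legacy API removed by this commit; not the new code path.
import torch.nn as nn
from torch.optim import SGD

from colossalai.amp import AMP_TYPE, convert_to_amp  # pre-cleanup import path

# Placeholder training components for illustration only.
model = nn.Linear(16, 4)
optimizer = SGD(model.parameters(), lr=1e-3)
criterion = nn.CrossEntropyLoss()

# Wrap the components with PyTorch AMP; amp_config (omitted here) would be
# forwarded to torch.cuda.amp.GradScaler when mode is AMP_TYPE.TORCH.
model, optimizer, criterion = convert_to_amp(model, optimizer, criterion, mode=AMP_TYPE.TORCH)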
@@ -1,60 +0,0 @@
import inspect

import torch.nn as nn
from torch.optim import Optimizer

from colossalai.utils import is_no_pp_or_last_stage

from ._fp16_optimizer import FP16Optimizer
from .grad_scaler import ConstantGradScaler, DynamicGradScaler
from .naive_amp import NaiveAMPModel, NaiveAMPOptimizer


def convert_to_naive_amp(model: nn.Module, optimizer: Optimizer, amp_config):
    """A helper function to wrap training components with naive AMP modules. In this mode,
    we forcibly cast the model weights and inputs to FP16, and cast the model outputs to FP32 to calculate loss,
    which is equivalent to Apex O3.

    Args:
        model (:class:`torch.nn.Module`): your model object
        optimizer (:class:`torch.optim.Optimizer`): your optimizer object
        amp_config (:class:`colossalai.context.Config` or dict): configuration for naive mode amp.

    Returns:
        Tuple: A tuple (model, optimizer)

    The ``amp_config`` should contain the parameters below::

        verbose (bool, optional): if set to `True`, will print debug info (Default: False).
        clip_grad_norm (float, optional): clip gradients with this global L2 norm (Default: 0).
            Note that clipping is ignored if clip_grad_norm == 0.
        dynamic_grad_scale (bool): whether to use dynamic grad scaler.
    """
    if isinstance(model, nn.ModuleList):
        # interleaved pipeline
        module_list = []
        for chunk, m in enumerate(model):
            output_to_fp32 = is_no_pp_or_last_stage() and chunk == len(model) - 1
            module_list.append(NaiveAMPModel(m, output_to_fp32=output_to_fp32))
        model = nn.ModuleList(module_list)
    else:
        output_to_fp32 = is_no_pp_or_last_stage()
        model = NaiveAMPModel(model, output_to_fp32=output_to_fp32)

    use_dynamic_grad_scaler = amp_config.pop('dynamic_grad_scale', True)
    if use_dynamic_grad_scaler:
        scaler_class = DynamicGradScaler
    else:
        scaler_class = ConstantGradScaler

    sig = inspect.signature(scaler_class.__init__)
    kwargs = dict()
    for param in sig.parameters.values():
        if param.name in amp_config:
            kwargs[param.name] = amp_config.pop(param.name)
    grad_scaler = scaler_class(**kwargs)
    optimizer = NaiveAMPOptimizer(optimizer, grad_scaler, **amp_config)
    return model, optimizer


__all__ = ['convert_to_naive_amp', 'NaiveAMPOptimizer', 'FP16Optimizer']
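Similarly, here is a hedged sketch of how the deleted `convert_to_naive_amp` helper was typically invoked. The `amp_config` keys mirror the docstring above; the example assumes the global parallel context has already been initialized (e.g. via `colossalai.launch`), since `is_no_pp_or_last_stage` queries it, and the model and optimizer are placeholders.

# Hypothetical usage of the removed helper; assumes the distributed context is set up.
import torch.nn as nn
from torch.optim import SGD

from colossalai.amp.naive_amp import convert_to_naive_amp  # pre-cleanup import path

model = nn.Linear(16, 4)
optimizer = SGD(model.parameters(), lr=1e-3)

# Keys follow the docstring above: dynamic_grad_scale selects the grad scaler class,
# clip_grad_norm is forwarded to NaiveAMPOptimizer.
amp_config = dict(dynamic_grad_scale=True, clip_grad_norm=1.0)
model, optimizer = convert_to_naive_amp(model, optimizer, amp_config)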
Several additional deleted files are not shown here (their contents could not be loaded).