Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make clearer about zero_init requirements #29879

Merged
merged 8 commits into from
Apr 3, 2024
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/transformers/integrations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
"deepspeed_optim_sched",
"is_deepspeed_available",
"is_deepspeed_zero3_enabled",
"is_deepspeed_zero3_enabled_accelerate",
"set_hf_deepspeed_config",
"unset_hf_deepspeed_config",
],
Expand Down Expand Up @@ -108,6 +109,7 @@
deepspeed_optim_sched,
is_deepspeed_available,
is_deepspeed_zero3_enabled,
is_deepspeed_zero3_enabled_accelerate,
set_hf_deepspeed_config,
unset_hf_deepspeed_config,
)
Expand Down
14 changes: 14 additions & 0 deletions src/transformers/integrations/deepspeed.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
import copy
import importlib.metadata as importlib_metadata
import importlib.util
import os
import weakref
from functools import partialmethod

Expand Down Expand Up @@ -289,6 +290,19 @@ def is_deepspeed_zero3_enabled():
return False


def is_deepspeed_zero3_enabled_accelerate():
    """
    Check whether the user has enabled DeepSpeed ZeRO-3 via `accelerate launch`.

    This is useful for verifying that the environment is configured for ZeRO-3
    before the DeepSpeed config reference has been made (e.g. before
    `TrainingArguments` has been created).

    Returns:
        `bool`: `True` if a DeepSpeed ZeRO stage is configured in the environment
        *and* ZeRO-3 init is enabled, `False` otherwise.
    """
    # `accelerate launch` exports these variables when a DeepSpeed config is active.
    # This only gets triggered passively if the user launches code with a configured
    # `accelerate launch` without making `TrainingArguments`.
    accelerate_zero_stage = int(os.environ.get("ACCELERATE_DEEPSPEED_ZERO_STAGE", -1))
    # Accelerate writes boolean env values as the strings "true"/"false" (it may
    # also be "1"/"0" from manual exports), so parse it as a boolean rather than
    # comparing against "0" — otherwise the literal "false" would count as enabled.
    accelerate_zero3_init = os.environ.get("ACCELERATE_DEEPSPEED_ZERO3_INIT", "false").strip().lower()
    return accelerate_zero_stage != -1 and accelerate_zero3_init in ("1", "true")


def deepspeed_config():
if _hf_deepspeed_config_weak_ref is not None and _hf_deepspeed_config_weak_ref() is not None:
return _hf_deepspeed_config_weak_ref().config
Expand Down
19 changes: 18 additions & 1 deletion src/transformers/modeling_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,12 @@
from .configuration_utils import PretrainedConfig
from .dynamic_module_utils import custom_object_save
from .generation import GenerationConfig, GenerationMixin
from .integrations import PeftAdapterMixin, deepspeed_config, is_deepspeed_zero3_enabled
from .integrations import (
PeftAdapterMixin,
deepspeed_config,
is_deepspeed_zero3_enabled,
is_deepspeed_zero3_enabled_accelerate,
)
from .pytorch_utils import ( # noqa: F401
Conv1D,
apply_chunking_to_forward,
Expand Down Expand Up @@ -1313,6 +1318,12 @@ def _from_config(cls, config, **kwargs):
)

if is_deepspeed_zero3_enabled():
if not is_deepspeed_zero3_enabled_accelerate():
raise ValueError(
"Detected that you want to use `zero-3` Init, but the environment "
"has not been setup yet. Please create `TrainingArguments` before "
"initializing the model."
)
import deepspeed

logger.info("Detected DeepSpeed ZeRO-3: activating zero.init() for this model")
Expand Down Expand Up @@ -3387,6 +3398,12 @@ def from_pretrained(
init_contexts = [no_init_weights(_enable=_fast_init)]

if is_deepspeed_zero3_enabled() and not is_quantized:
if not is_deepspeed_zero3_enabled_accelerate():
raise ValueError(
"Detected that you want to use `zero-3` Init, but the environment "
"has not been setup yet. Please create `TrainingArguments` before "
"initializing the model."
)
import deepspeed

logger.info("Detected DeepSpeed ZeRO-3: activating zero.init() for this model")
Expand Down
5 changes: 5 additions & 0 deletions src/transformers/training_args.py
Original file line number Diff line number Diff line change
Expand Up @@ -503,6 +503,11 @@ class TrainingArguments:
evolve in the future. The value is either the location of DeepSpeed json config file (e.g.,
`ds_config.json`) or an already loaded json file as a `dict`"

<Tip warning={true}>
If you enable any form of ZeRO-init, make sure your model is not instantiated until
*after* the `TrainingArguments` have been created, otherwise ZeRO-init will not be applied.
</Tip>

accelerator_config (`str`, `dict`, or `AcceleratorConfig`, *optional*):
Config to be used with the internal `Accelerator` implementation. The value is either a location of
accelerator json config file (e.g., `accelerator_config.json`), an already loaded json file as `dict`,
Expand Down
Loading