Reorganize file utils (huggingface#16264)
* Split file_utils in several submodules

* Fixes

* Add back more objects

* More fixes

* Who exactly decided to import that from there?

* Apply second round of code review suggestions

* Revert wrong move

* Fix imports

* Adapt all imports

* Adapt all imports everywhere

* Revert this import, will fix in a separate commit
sgugger authored Mar 23, 2022
1 parent 7135603 commit 4975002
Showing 583 changed files with 4,666 additions and 4,510 deletions.
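
The commit replaces the monolithic `file_utils` module with focused submodules under `transformers.utils`, and the bulk of the diff below mechanically migrates imports from `transformers.file_utils` to `transformers.utils`. One common way to make such a split non-breaking is to keep the old module as a thin re-export shim; the following is a minimal sketch of that pattern (illustrative only, not necessarily the exact mechanism used in this commit):

```python
# file_utils.py -- backward-compatibility shim (illustrative sketch; the
# exact mechanism used in this commit may differ).
# Re-export helpers from their new home so that existing code doing
# `from transformers.file_utils import X` keeps working after the split.
from .utils import (  # noqa: F401
    cached_property,
    get_full_repo_name,
    is_offline_mode,
    is_tokenizers_available,
)
```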
4 changes: 2 additions & 2 deletions ISSUES.md
@@ -71,7 +71,7 @@ You are not required to read the following guidelines before opening an issue. H
File "/transformers/src/transformers/__init__.py", line 34, in <module>
from . import dependency_versions_check
File "/transformers/src/transformers/dependency_versions_check.py", line 34, in <module>
-from .file_utils import is_tokenizers_available
+from .utils import is_tokenizers_available
File "/transformers/src/transformers/file_utils.py", line 40, in <module>
from tqdm.auto import tqdm
ModuleNotFoundError: No module named 'tqdm.auto'
@@ -124,7 +124,7 @@ You are not required to read the following guidelines before opening an issue. H
File "/transformers/src/transformers/__init__.py", line 34, in <module>
from . import dependency_versions_check
File "/transformers/src/transformers/dependency_versions_check.py", line 34, in <module>
-from .file_utils import is_tokenizers_available
+from .utils import is_tokenizers_available
File "/transformers/src/transformers/file_utils.py", line 40, in <module>
from tqdm.auto import tqdm
ModuleNotFoundError: No module named 'tqdm.auto'
@@ -52,7 +52,7 @@
HfArgumentParser,
is_tensorboard_available,
)
-from transformers.file_utils import get_full_repo_name, is_offline_mode
+from transformers.utils import get_full_repo_name, is_offline_mode


logger = logging.getLogger(__name__)
2 changes: 1 addition & 1 deletion examples/flax/language-modeling/run_clm_flax.py
@@ -57,8 +57,8 @@
is_tensorboard_available,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
from transformers.testing_utils import CaptureLogger
+from transformers.utils import get_full_repo_name


logger = logging.getLogger(__name__)
2 changes: 1 addition & 1 deletion examples/flax/language-modeling/run_mlm_flax.py
@@ -58,7 +58,7 @@
is_tensorboard_available,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name


MODEL_CONFIG_CLASSES = list(FLAX_MODEL_FOR_MASKED_LM_MAPPING.keys())
2 changes: 1 addition & 1 deletion examples/flax/language-modeling/run_t5_mlm_flax.py
@@ -56,8 +56,8 @@
is_tensorboard_available,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
from transformers.models.t5.modeling_flax_t5 import shift_tokens_right
+from transformers.utils import get_full_repo_name


MODEL_CONFIG_CLASSES = list(FLAX_MODEL_FOR_MASKED_LM_MAPPING.keys())
3 changes: 1 addition & 2 deletions examples/flax/question-answering/run_qa.py
@@ -53,8 +53,7 @@
PreTrainedTokenizerFast,
is_tensorboard_available,
)
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name
from utils_qa import postprocess_qa_predictions


2 changes: 1 addition & 1 deletion examples/flax/summarization/run_summarization_flax.py
@@ -54,7 +54,7 @@
HfArgumentParser,
is_tensorboard_available,
)
-from transformers.file_utils import get_full_repo_name, is_offline_mode
+from transformers.utils import get_full_repo_name, is_offline_mode


logger = logging.getLogger(__name__)
3 changes: 1 addition & 2 deletions examples/flax/text-classification/run_flax_glue.py
@@ -48,8 +48,7 @@
TrainingArguments,
is_tensorboard_available,
)
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name


logger = logging.getLogger(__name__)
3 changes: 1 addition & 2 deletions examples/flax/token-classification/run_flax_ner.py
@@ -47,8 +47,7 @@
HfArgumentParser,
is_tensorboard_available,
)
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name
from transformers.utils.versions import require_version


2 changes: 1 addition & 1 deletion examples/flax/vision/run_image_classification.py
@@ -53,7 +53,7 @@
is_tensorboard_available,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name


logger = logging.getLogger(__name__)
2 changes: 1 addition & 1 deletion examples/legacy/seq2seq/old_test_tatoeba_conversion.py
@@ -16,9 +16,9 @@
import tempfile
import unittest

-from transformers.file_utils import cached_property
from transformers.models.marian.convert_marian_tatoeba_to_pytorch import DEFAULT_REPO, TatoebaConverter
from transformers.testing_utils import slow
+from transformers.utils import cached_property


@unittest.skipUnless(os.path.exists(DEFAULT_REPO), "Tatoeba directory does not exist.")
2 changes: 1 addition & 1 deletion examples/legacy/seq2seq/seq2seq_trainer.py
@@ -19,7 +19,6 @@
from torch.utils.data import DistributedSampler, RandomSampler

from transformers import PreTrainedModel, Trainer, logging
-from transformers.file_utils import is_torch_tpu_available
from transformers.integrations import is_fairscale_available
from transformers.models.fsmt.configuration_fsmt import FSMTConfig
from transformers.optimization import (
@@ -34,6 +33,7 @@
)
from transformers.trainer_pt_utils import get_tpu_sampler
from transformers.training_args import ParallelMode
+from transformers.utils import is_torch_tpu_available


if is_fairscale_available():
2 changes: 1 addition & 1 deletion examples/legacy/seq2seq/utils.py
@@ -34,8 +34,8 @@

from sentence_splitter import add_newline_to_end_of_each_sentence
from transformers import BartTokenizer, EvalPrediction, PreTrainedTokenizer, T5Tokenizer
-from transformers.file_utils import cached_property
from transformers.models.bart.modeling_bart import shift_tokens_right
+from transformers.utils import cached_property


try:
2 changes: 1 addition & 1 deletion examples/pytorch/language-modeling/run_clm_no_trainer.py
@@ -51,7 +51,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name
from transformers.utils.versions import require_version


2 changes: 1 addition & 1 deletion examples/pytorch/language-modeling/run_mlm_no_trainer.py
@@ -51,7 +51,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name
from transformers.utils.versions import require_version


3 changes: 1 addition & 2 deletions examples/pytorch/multiple-choice/run_swag.py
@@ -41,10 +41,9 @@
default_data_collator,
set_seed,
)
-from transformers.file_utils import PaddingStrategy
from transformers.tokenization_utils_base import PreTrainedTokenizerBase
from transformers.trainer_utils import get_last_checkpoint
-from transformers.utils import check_min_version
+from transformers.utils import PaddingStrategy, check_min_version


# Will error if the minimal version of Transformers is not installed. Remove at your own risks.
2 changes: 1 addition & 1 deletion examples/pytorch/multiple-choice/run_swag_no_trainer.py
@@ -50,7 +50,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import PaddingStrategy, get_full_repo_name
+from transformers.utils import PaddingStrategy, get_full_repo_name


logger = logging.getLogger(__name__)
@@ -47,8 +47,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name
from transformers.utils.versions import require_version
from utils_qa import postprocess_qa_predictions_with_beam_search

3 changes: 1 addition & 2 deletions examples/pytorch/question-answering/run_qa_no_trainer.py
@@ -49,8 +49,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, get_full_repo_name
from transformers.utils.versions import require_version
from utils_qa import postprocess_qa_predictions

@@ -42,8 +42,8 @@
is_wandb_available,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
from transformers.models.wav2vec2.modeling_wav2vec2 import _compute_mask_indices, _sample_negative_indices
+from transformers.utils import get_full_repo_name


logger = logging.getLogger(__name__)
3 changes: 1 addition & 2 deletions examples/pytorch/summarization/run_summarization.py
@@ -45,9 +45,8 @@
Seq2SeqTrainingArguments,
set_seed,
)
-from transformers.file_utils import is_offline_mode
from transformers.trainer_utils import get_last_checkpoint
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, is_offline_mode
from transformers.utils.versions import require_version


@@ -49,7 +49,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name, is_offline_mode
+from transformers.utils import get_full_repo_name, is_offline_mode
from transformers.utils.versions import require_version


2 changes: 1 addition & 1 deletion examples/pytorch/test_pytorch_examples.py
@@ -25,8 +25,8 @@
import torch

from transformers import ViTMAEForPreTraining, Wav2Vec2ForPreTraining
-from transformers.file_utils import is_apex_available
from transformers.testing_utils import CaptureLogger, TestCasePlus, get_gpu_count, slow, torch_device
+from transformers.utils import is_apex_available


SRC_DIRS = [
@@ -40,7 +40,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name
from transformers.utils.versions import require_version


@@ -48,7 +48,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name
from transformers.utils.versions import require_version


2 changes: 1 addition & 1 deletion examples/pytorch/translation/run_translation_no_trainer.py
@@ -50,7 +50,7 @@
get_scheduler,
set_seed,
)
-from transformers.file_utils import get_full_repo_name
+from transformers.utils import get_full_repo_name
from transformers.utils.versions import require_version


3 changes: 1 addition & 2 deletions examples/tensorflow/multiple-choice/run_swag.py
@@ -43,9 +43,8 @@
create_optimizer,
set_seed,
)
-from transformers.file_utils import PaddingStrategy
from transformers.tokenization_utils_base import PreTrainedTokenizerBase
-from transformers.utils import check_min_version
+from transformers.utils import PaddingStrategy, check_min_version


# Will error if the minimal version of Transformers is not installed. Remove at your own risks.
3 changes: 1 addition & 2 deletions examples/tensorflow/question-answering/run_qa.py
@@ -41,8 +41,7 @@
TFTrainingArguments,
set_seed,
)
-from transformers.file_utils import CONFIG_NAME, TF2_WEIGHTS_NAME
-from transformers.utils import check_min_version
+from transformers.utils import CONFIG_NAME, TF2_WEIGHTS_NAME, check_min_version
from utils_qa import postprocess_qa_predictions


3 changes: 1 addition & 2 deletions examples/tensorflow/summarization/run_summarization.py
@@ -43,9 +43,8 @@
create_optimizer,
set_seed,
)
-from transformers.file_utils import is_offline_mode
from transformers.trainer_utils import get_last_checkpoint
-from transformers.utils import check_min_version
+from transformers.utils import check_min_version, is_offline_mode
from transformers.utils.versions import require_version


@@ -37,7 +37,7 @@
TFTrainingArguments,
set_seed,
)
-from transformers.file_utils import CONFIG_NAME, TF2_WEIGHTS_NAME
+from transformers.utils import CONFIG_NAME, TF2_WEIGHTS_NAME


os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1" # Reduce the amount of console output from TF
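
With the reorganization applied, the helpers touched throughout this diff all resolve from the consolidated `transformers.utils` path. A quick sanity-check sketch (the minimum-version string is illustrative, and a transformers install that includes this reorganization is assumed):

```python
# Sketch: exercise the consolidated import path introduced by this commit.
from transformers.utils import check_min_version, is_offline_mode

check_min_version("4.18.0.dev0")  # illustrative minimum-version string
print("offline mode:", is_offline_mode())
```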