You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
修改本地模型地址为上图所示,但是bash run_sft_Yi_6b.sh 之后
Traceback (most recent call last):
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 415, in
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer) File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
main()
tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 252, in main
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
tokenizer = AutoTokenizer.from_pretrained(tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
tokenizer = AutoTokenizer.from_pretrained(tokenizer = AutoTokenizer.from_pretrained(
tokenizer = AutoTokenizer.from_pretrained( File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
tokenizer = load_hf_tokenizer(args.model_name_or_path, fast_tokenizer=False)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 58, in load_hf_tokenizer
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs) return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs) File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
Traceback (most recent call last):
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 415, in
main()
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 252, in main
tokenizer = load_hf_tokenizer(args.model_name_or_path, fast_tokenizer=False)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 58, in load_hf_tokenizer
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
return cls._from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
return cls._from_pretrained(return cls._from_pretrained(return cls._from_pretrained(
return cls._from_pretrained( File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
return cls._from_pretrained(return cls._from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
return cls._from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False)) self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False)) File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
tokenizer = cls(*init_inputs, **init_kwargs)tokenizer.Load(self.vocab_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
tokenizer.Load(self.vocab_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
tokenizer.Load(self.vocab_file)
tokenizer.Load(self.vocab_file) File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
tokenizer.Load(self.vocab_file)tokenizer.Load(self.vocab_file)tokenizer.Load(self.vocab_file) File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
tokenizer.Load(self.vocab_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
return self.LoadFromFile(model_file) return self.LoadFromFile(model_file)
return self.LoadFromFile(model_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
TypeError: not a stringreturn _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
reacted with thumbs up emoji reacted with thumbs down emoji reacted with laugh emoji reacted with hooray emoji reacted with confused emoji reacted with heart emoji reacted with rocket emoji reacted with eyes emoji
-
修改本地模型地址为上图所示,但是bash run_sft_Yi_6b.sh 之后
Traceback (most recent call last):
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 415, in
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer) File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
main()
tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 252, in main
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
tokenizer = AutoTokenizer.from_pretrained(tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
tokenizer = AutoTokenizer.from_pretrained(tokenizer = AutoTokenizer.from_pretrained(
tokenizer = AutoTokenizer.from_pretrained( File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
tokenizer = load_hf_tokenizer(args.model_name_or_path, fast_tokenizer=False)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 58, in load_hf_tokenizer
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs) return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs) File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
Traceback (most recent call last):
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 415, in
main()
File "/home/ma-user/work/Yi-main/finetune/sft/main.py", line 252, in main
tokenizer = load_hf_tokenizer(args.model_name_or_path, fast_tokenizer=False)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 58, in load_hf_tokenizer
tokenizer = get_tokenizer(model_name_or_path, fast_tokenizer=fast_tokenizer)
File "/home/ma-user/work/Yi-main/finetune/utils/utils.py", line 42, in get_tokenizer
tokenizer = AutoTokenizer.from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 837, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2086, in from_pretrained
return cls._from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
return cls._from_pretrained(return cls._from_pretrained(return cls._from_pretrained(
return cls._from_pretrained( File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
return cls._from_pretrained(return cls._from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
return cls._from_pretrained(
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2325, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))
self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
tokenizer = cls(*init_inputs, **init_kwargs)tokenizer.Load(self.vocab_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 182, in init
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
tokenizer.Load(self.vocab_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
tokenizer.Load(self.vocab_file)
tokenizer.Load(self.vocab_file) File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
tokenizer.Load(self.vocab_file)tokenizer.Load(self.vocab_file)tokenizer.Load(self.vocab_file) File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
self.sp_model = self.get_spm_processor(kwargs.pop("from_slow", False))
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/transformers/models/llama/tokenization_llama.py", line 209, in get_spm_processor
tokenizer.Load(self.vocab_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 961, in Load
return self.LoadFromFile(model_file) return self.LoadFromFile(model_file)
return self.LoadFromFile(model_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)return self.LoadFromFile(model_file)
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
File "/home/ma-user/anaconda3/envs/zeroone_LLM_env/lib/python3.10/site-packages/sentencepiece/init.py", line 316, in LoadFromFile
return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
TypeError: not a stringreturn _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
TypeError return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg) : not a stringreturn _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)TypeError
return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
:
not a stringTypeErrorTypeError
TypeError: TypeErrornot a string: : : TypeError
not a stringnot a stringnot a string:
not a string
[2024-04-08 09:44:31,368] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655513
[2024-04-08 09:44:31,462] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655514
[2024-04-08 09:44:31,756] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655515
[2024-04-08 09:44:31,758] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655516
[2024-04-08 09:44:31,758] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655517
[2024-04-08 09:44:31,759] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655518
[2024-04-08 09:44:31,811] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655519
[2024-04-08 09:44:31,812] [INFO] [launch.py:315:sigkill_handler] Killing subprocess 655520
Beta Was this translation helpful? Give feedback.
All reactions