You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Traceback (most recent call last):
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/tasks/openicl_infer.py", line 162, in <module>
inferencer.run()
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/tasks/openicl_infer.py", line 73, in run
self.model = build_model_from_cfg(model_cfg)
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/utils/build.py", line 25, in build_model_from_cfg
return MODELS.build(model_cfg)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/mmengine/registry/registry.py", line 570, in build
return self.build_func(cfg, *args, **kwargs, registry=self)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/mmengine/registry/build_functions.py", line 121, in build_from_cfg
obj = obj_cls(**args) # type: ignore
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/models/huggingface_above_v4_33.py", line 166, in __init__
self._load_tokenizer(tokenizer_path or path, tokenizer_kwargs, pad_token_id)
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/models/huggingface_above_v4_33.py", line 183, in _load_tokenizer
self.tokenizer = AutoTokenizer.from_pretrained(path, **tokenizer_kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 880, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2110, in from_pretrained
return cls._from_pretrained(
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2336, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/models/qwen2/tokenization_qwen2_fast.py", line 120, in __init__
super().__init__(
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/tokenization_utils_fast.py", line 114, in __init__
fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file)
Exception: data did not match any variant of untagged enum ModelWrapper at line 757443 column 3
E0120 10:43:50.609000 139627271737856 torch/distributed/elastic/multiprocessing/api.py:833] failed (exitcode: 1) local_rank: 0 (pid: 8743) of binary: /home/haitaiwork/llm/anaconda3/bin/python
Traceback (most recent call last):
File "/home/haitaiwork/llm/anaconda3/bin/torchrun", line 8, in <module>
sys.exit(main())
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/elastic/multiprocessing/errors/__init__.py", line 348, in wrapper
return f(*args, **kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/run.py", line 901, in main
run(args)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/run.py", line 892, in run
elastic_launch(
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/launcher/api.py", line 133, in __call__
return launch_agent(self._config, self._entrypoint, list(args))
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/launcher/api.py", line 264, in launch_agent
raise ChildFailedError(
torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
The text was updated successfully, but these errors were encountered:
想使用eval测评一下1B左右的模型,能到多少分,但出现以下错误。
好像是模型结构的问题,请楼主帮忙看看。
Traceback (most recent call last):
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/tasks/openicl_infer.py", line 162, in <module>
inferencer.run()
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/tasks/openicl_infer.py", line 73, in run
self.model = build_model_from_cfg(model_cfg)
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/utils/build.py", line 25, in build_model_from_cfg
return MODELS.build(model_cfg)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/mmengine/registry/registry.py", line 570, in build
return self.build_func(cfg, *args, **kwargs, registry=self)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/mmengine/registry/build_functions.py", line 121, in build_from_cfg
obj = obj_cls(**args) # type: ignore
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/models/huggingface_above_v4_33.py", line 166, in __init__
self._load_tokenizer(tokenizer_path or path, tokenizer_kwargs, pad_token_id)
File "/home/haitaiwork/pbg/eval/opencompass-main/opencompass/models/huggingface_above_v4_33.py", line 183, in _load_tokenizer
self.tokenizer = AutoTokenizer.from_pretrained(path, **tokenizer_kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 880, in from_pretrained
return tokenizer_class.from_pretrained(pretrained_model_name_or_path, *inputs, **kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2110, in from_pretrained
return cls._from_pretrained(
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/tokenization_utils_base.py", line 2336, in _from_pretrained
tokenizer = cls(*init_inputs, **init_kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/models/qwen2/tokenization_qwen2_fast.py", line 120, in __init__
super().__init__(
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/transformers/tokenization_utils_fast.py", line 114, in __init__
fast_tokenizer = TokenizerFast.from_file(fast_tokenizer_file)
Exception: data did not match any variant of untagged enum ModelWrapper at line 757443 column 3
E0120 10:43:50.609000 139627271737856 torch/distributed/elastic/multiprocessing/api.py:833] failed (exitcode: 1) local_rank: 0 (pid: 8743) of binary: /home/haitaiwork/llm/anaconda3/bin/python
Traceback (most recent call last):
File "/home/haitaiwork/llm/anaconda3/bin/torchrun", line 8, in <module>
sys.exit(main())
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/elastic/multiprocessing/errors/__init__.py", line 348, in wrapper
return f(*args, **kwargs)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/run.py", line 901, in main
run(args)
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/run.py", line 892, in run
elastic_launch(
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/launcher/api.py", line 133, in __call__
return launch_agent(self._config, self._entrypoint, list(args))
File "/home/haitaiwork/llm/anaconda3/lib/python3.10/site-packages/torch/distributed/launcher/api.py", line 264, in launch_agent
raise ChildFailedError(
torch.distributed.elastic.multiprocessing.errors.ChildFailedError:
The text was updated successfully, but these errors were encountered: