You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session. You switched accounts on another tab or window. Reload to refresh your session. Dismiss alert
When I run `python functioncall.py --query "I need the current stock price of Tesla (TSLA)"`, I get the following error:
(tool) root@rt-res-public24-77c5bb65b9-b7srr:/public/zzy/tool_project/Hermes-Function-Calling# python functioncall.py --query "I need the current stock price of Tesla (TSLA)"
dP
88
88d888b. .d8888b. dP dP .d8888b. 88d888b. .d8888b. .d8888b. .d8888b. .d8888b. 88d888b. .d8888b. 88d888b.
88' 88 88' 88 88 88 Y8ooooo. 88' 88 88ooood8 Y8ooooo. 88ooood8 88' 88 88' 88 88' "" 88' 88 88 88 88. .88 88. .88 88 88 88. ... 88 88. ... 88. .88 88 88. ... 88 88 dP dP 88888P' 88888P' 88888P' dP 88888P' 88888P' 88888P' 88888P8 dP `88888P' dP dP
2024-09-03:14:25:06,069 INFO [functioncall.py:25] None
Traceback (most recent call last):
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1603, in _get_module
return importlib.import_module("." + module_name, self.__name__)
File "/root/anaconda/envs/tool/lib/python3.10/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1050, in _gcd_import
File "<frozen importlib._bootstrap>", line 1027, in _find_and_load
File "<frozen importlib._bootstrap>", line 1006, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 688, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 883, in exec_module
File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/llama/modeling_llama.py", line 32, in <module>
from ...modeling_flash_attention_utils import _flash_attention_forward
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/modeling_flash_attention_utils.py", line 27, in <module>
from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input  # noqa
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn/__init__.py", line 3, in <module>
from flash_attn.flash_attn_interface import (
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn/flash_attn_interface.py", line 10, in <module>
import flash_attn_2_cuda as flash_attn_cuda
ImportError: /root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNK3c106SymIntltEl
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/public/zzy/tool_project/Hermes-Function-Calling/functioncall.py", line 178, in <module>
inference = ModelInference(model_path, args.chat_template, args.load_in_4bit)
File "/public/zzy/tool_project/Hermes-Function-Calling/functioncall.py", line 35, in __init__
self.model = AutoModelForCausalLM.from_pretrained(
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 563, in from_pretrained
model_class = _get_model_class(config, cls._model_mapping)
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 384, in _get_model_class
supported_models = model_mapping[type(config)]
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 735, in __getitem__
return self._load_attr_from_module(model_type, model_name)
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 749, in _load_attr_from_module
return getattribute_from_module(self._modules[module_name], attr)
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 693, in getattribute_from_module
if hasattr(module, attr):
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1593, in __getattr__
module = self._get_module(self._class_to_module[name])
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1605, in _get_module
raise RuntimeError(
RuntimeError: Failed to import transformers.models.llama.modeling_llama because of the following error (look up to see its traceback):
/root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNK3c106SymIntltEl
The text was updated successfully, but these errors were encountered:
When I run `python functioncall.py --query "I need the current stock price of Tesla (TSLA)"`, I get the following error:
(tool) root@rt-res-public24-77c5bb65b9-b7srr:/public/zzy/tool_project/Hermes-Function-Calling# python functioncall.py --query "I need the current stock price of Tesla (TSLA)"
dP
88
88d888b. .d8888b. dP dP .d8888b. 88d888b. .d8888b. .d8888b. .d8888b. .d8888b. 88d888b. .d8888b. 88d888b.
88' 88 88' 88 88 88 Y8ooooo. 88' 88 88ooood8 Y8ooooo. 88ooood8 88' 88 88' 88 88' "" 88' 88 88 88 88. .88 88. .88 88 88 88. ... 88 88. ... 88. .88 88 88. ... 88 88 dP dP 88888P' 88888P' 88888P' dP 88888P' 88888P' 88888P' 88888P8 dP `88888P' dP dP
2024-09-03:14:25:06,069 INFO [functioncall.py:25] None
Traceback (most recent call last):
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1603, in _get_module
return importlib.import_module("." + module_name, self.__name__)
File "/root/anaconda/envs/tool/lib/python3.10/importlib/__init__.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1050, in _gcd_import
File "<frozen importlib._bootstrap>", line 1027, in _find_and_load
File "<frozen importlib._bootstrap>", line 1006, in _find_and_load_unlocked
File "<frozen importlib._bootstrap>", line 688, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 883, in exec_module
File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/llama/modeling_llama.py", line 32, in <module>
from ...modeling_flash_attention_utils import _flash_attention_forward
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/modeling_flash_attention_utils.py", line 27, in <module>
from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input  # noqa
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn/__init__.py", line 3, in <module>
from flash_attn.flash_attn_interface import (
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn/flash_attn_interface.py", line 10, in <module>
import flash_attn_2_cuda as flash_attn_cuda
ImportError: /root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNK3c106SymIntltEl
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/public/zzy/tool_project/Hermes-Function-Calling/functioncall.py", line 178, in <module>
inference = ModelInference(model_path, args.chat_template, args.load_in_4bit)
File "/public/zzy/tool_project/Hermes-Function-Calling/functioncall.py", line 35, in __init__
self.model = AutoModelForCausalLM.from_pretrained(
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 563, in from_pretrained
model_class = _get_model_class(config, cls._model_mapping)
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 384, in _get_model_class
supported_models = model_mapping[type(config)]
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 735, in __getitem__
return self._load_attr_from_module(model_type, model_name)
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 749, in _load_attr_from_module
return getattribute_from_module(self._modules[module_name], attr)
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 693, in getattribute_from_module
if hasattr(module, attr):
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1593, in __getattr__
module = self._get_module(self._class_to_module[name])
File "/root/anaconda/envs/tool/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1605, in _get_module
raise RuntimeError(
RuntimeError: Failed to import transformers.models.llama.modeling_llama because of the following error (look up to see its traceback):
/root/anaconda/envs/tool/lib/python3.10/site-packages/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZNK3c106SymIntltEl
The text was updated successfully, but these errors were encountered: