Important data file "/usr/local/lib/python3.10/dist-packages/OpenHowNet/HowNet_dict_complete" lost, please run OpenHowNet.download().
0%| | 0/11915 [00:02<?, ?it/s]
Traceback (most recent call last):
File "/home/pentest/work/OpenBackdoor-main/demo_attack.py", line 63, in
main(config)
File "/home/pentest/work/OpenBackdoor-main/demo_attack.py", line 43, in main
backdoored_model = attacker.attack(victim, poison_dataset, config)
File "/home/pentest/work/OpenBackdoor-main/openbackdoor/attackers/lws_attacker.py", line 166, in attack
poison_datasets = wrap_dataset_lws({'train': data['train']}, self.poisoner.target_label, model.tokenizer, self.poisoner_config['poison_rate'])
File "/home/pentest/work/OpenBackdoor-main/openbackdoor/data/data_utils.py", line 26, in wrap_dataset_lws
return wrap_util(dataset, target_label, tokenizer, poison_rate)
File "/home/pentest/work/OpenBackdoor-main/openbackdoor/data/lws_utils.py", line 289, in wrap_util
datasets[key] = prepare_dataset_parallel(dataset[key], poison_rate, train=(key == 'train'))
File "/home/pentest/work/OpenBackdoor-main/openbackdoor/data/lws_utils.py", line 118, in prepare_dataset_par allel
dataset = prepare_dataset_for_self_learning_bert(dataset, poison_rate, train)
File "/home/pentest/work/OpenBackdoor-main/openbackdoor/data/lws_utils.py", line 62, in prepare_dataset_for_ self_learning_bert
cands = get_candidates_sememe(sentence, tokenizer, MAX_CANDIDATES)
File "/home/pentest/work/OpenBackdoor-main/openbackdoor/data/lws_utils.py", line 212, in get_candidates_seme me
tags = pos_tag_wordnet(words)
File "/home/pentest/work/OpenBackdoor-main/openbackdoor/data/lws_utils.py", line 199, in pos_tag_wordnet
pos_tagged_text = nltk.pos_tag(text)
File "/usr/local/lib/python3.10/dist-packages/nltk/tag/init.py", line 168, in pos_tag
tagger = _get_tagger(lang)
File "/usr/local/lib/python3.10/dist-packages/nltk/tag/init.py", line 110, in _get_tagger
tagger = PerceptronTagger()
File "/usr/local/lib/python3.10/dist-packages/nltk/tag/perceptron.py", line 183, in init
self.load_from_json(lang)
File "/usr/local/lib/python3.10/dist-packages/nltk/tag/perceptron.py", line 274, in load_from_json
with open(loc + TAGGER_JSONS[lang]["weights"]) as fin:
TypeError: unsupported operand type(s) for +: 'ZipFilePathPointer' and 'str'
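
The first warning at the top of the log comes from OpenHowNet itself and asks for the missing dictionary data to be fetched. A minimal sketch of that step, using exactly the call the warning names:

```python
# Fetch the HowNet dictionary ("HowNet_dict_complete") that OpenHowNet reports
# as missing; this is the call the warning message itself asks for.
import OpenHowNet

OpenHowNet.download()
```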
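The TypeError at the bottom is raised inside NLTK, not OpenBackdoor: in recent NLTK releases (3.9+), PerceptronTagger.load_from_json concatenates the result of nltk.data.find() with a file name, which fails when the tagger data is only present as a zip archive, because find() then returns a ZipFilePathPointer instead of a plain filesystem path. A possible workaround sketch, assuming NLTK 3.9+ and that the extracted English tagger data is simply absent from the NLTK data directory:

```python
# Workaround sketch for the "ZipFilePathPointer + str" TypeError (assumes NLTK 3.9+).
# Downloading the extracted English tagger data lets nltk.data.find() resolve to a
# normal filesystem path that supports string concatenation in load_from_json().
import nltk

nltk.download('averaged_perceptron_tagger_eng')

# Quick check that tagging works again; nltk.pos_tag is the call that failed in lws_utils.py.
print(nltk.pos_tag(["This", "is", "a", "test"]))
```

Alternatively, pinning nltk to a pre-3.9 release (which still loads the pickled averaged_perceptron_tagger model) has been a common workaround for this class of error; neither option has been verified here against OpenBackdoor's pinned dependencies.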