os : mac os 12.5.1
python version : 3.9.7
python packages :
torch 1.12.1
tensorflow-macos 2.8.0
tensorflow-metadata 1.7.0
tensorflow-metal 0.4.0
위와 같은 환경에서 아래와 같은 코드를 실행시켰을 때 오류 발생.
code
import torch
from transformers import GPT2LMHeadModel
from transformers import PreTrainedTokenizerFast

# Reproduction script: KoGPT2 greedy generation fails on the Apple "mps"
# backend (torch 1.12.1) with `RuntimeError: tensors must be 2-D` raised
# from torch.addmm inside GPT-2's Conv1D, while the same code runs fine
# on "cpu" (per the report accompanying this script).
#
# BUG (original): `device` was set to "mps" and then immediately
# overwritten with "cpu", leaving the first assignment as dead code.
# Select the device once via an explicit flag instead; default to the
# known-working CPU path, flip to True to reproduce the MPS failure.
USE_MPS = False
device = torch.device("mps" if USE_MPS else "cpu")

# Reuse one variable for the checkpoint name (the original repeated the
# string literal for the model while using a variable for the tokenizer).
model_name = "skt/kogpt2-base-v2"
tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
# non_blocking only affects async host->device copies; it is a no-op on CPU.
model = GPT2LMHeadModel.from_pretrained(model_name).to(device=device, non_blocking=True)

text = "테스트입니당"
inputs = tokenizer(text, return_tensors="pt").to(device=device)

gen_ids = model.generate(
    inputs["input_ids"],
    max_length=128,
    repetition_penalty=2.0,
    pad_token_id=tokenizer.pad_token_id,
    eos_token_id=tokenizer.eos_token_id,
    bos_token_id=tokenizer.bos_token_id,
    use_cache=True,
)
generated = tokenizer.decode(gen_ids[0])
print(generated)
에러 메세지
gen_ids = model.generate(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/autograd/grad_mode.py", line 27, in decorate_context
return func(*args, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/generation_utils.py", line 1294, in generate
return self.greedy_search(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/generation_utils.py", line 1689, in greedy_search
outputs = self(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 1058, in forward
transformer_outputs = self.transformer(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 901, in forward
outputs = block(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 401, in forward
attn_outputs = self.attn(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 323, in forward
query, key, value = self.c_attn(hidden_states).split(self.split_size, dim=2)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/pytorch_utils.py", line 109, in forward
x = torch.addmm(self.bias, x.view(-1, x.size(-1)), self.weight)
RuntimeError: tensors must be 2-D
위의 코드에서 device를 cpu로 설정시는 정상 동작이 됩니다.
위 문제 관련하여 문의 드립니다.
os : mac os 12.5.1
python version : 3.9.7
python packages :
torch 1.12.1
tensorflow-macos 2.8.0
tensorflow-metadata 1.7.0
tensorflow-metal 0.4.0
위와 같은 환경에서 아래와 같은 코드를 실행시켰을 때 오류 발생.
code
import torch
from transformers import GPT2LMHeadModel
from transformers import PreTrainedTokenizerFast

# Reproduction script (second pasted copy): KoGPT2 generation raises
# `RuntimeError: tensors must be 2-D` on the "mps" backend (torch 1.12.1)
# but works on "cpu", per the accompanying report.
#
# BUG (original): `device` was set to "mps" and then immediately
# overwritten with "cpu" (dead code); additionally, this copy's
# `model.generate()` call lost all of its arguments in the paste —
# restored here to match the first copy of the script.
USE_MPS = False  # flip to True to reproduce the MPS failure
device = torch.device("mps" if USE_MPS else "cpu")

# One variable for the checkpoint name, used for both tokenizer and model.
model_name = "skt/kogpt2-base-v2"
tokenizer = PreTrainedTokenizerFast.from_pretrained(model_name)
# non_blocking only affects async host->device copies; it is a no-op on CPU.
model = GPT2LMHeadModel.from_pretrained(model_name).to(device=device, non_blocking=True)

text = "테스트입니당"
inputs = tokenizer(text, return_tensors="pt").to(device=device)

gen_ids = model.generate(
    inputs["input_ids"],
    max_length=128,
    repetition_penalty=2.0,
    pad_token_id=tokenizer.pad_token_id,
    eos_token_id=tokenizer.eos_token_id,
    bos_token_id=tokenizer.bos_token_id,
    use_cache=True,
)
generated = tokenizer.decode(gen_ids[0])
print(generated)
에러 메세지
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/autograd/grad_mode.py", line 27, in decorate_context
return func(*args, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/generation_utils.py", line 1294, in generate
return self.greedy_search(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/generation_utils.py", line 1689, in greedy_search
outputs = self(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 1058, in forward
transformer_outputs = self.transformer(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 901, in forward
outputs = block(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 401, in forward
attn_outputs = self.attn(
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/models/gpt2/modeling_gpt2.py", line 323, in forward
query, key, value = self.c_attn(hidden_states).split(self.split_size, dim=2)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/Users/nuri/miniforge3/envs/env_nlp/lib/python3.9/site-packages/transformers/pytorch_utils.py", line 109, in forward
x = torch.addmm(self.bias, x.view(-1, x.size(-1)), self.weight)
RuntimeError: tensors must be 2-D
위의 코드에서 device를 cpu로 설정시는 정상 동작이 됩니다.
위 문제 관련하여 문의 드립니다.