Could you add support for running inference on a CPU device?

#85
by changwangss - opened
Traceback (most recent call last):
  File "test.py", line 5, in <module>
    response, history = model.chat(tokenizer, "你好", history=[])
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
    return func(*args, **kwargs)
  File "/dataset/huggingface/modules/transformers_modules/THUDM/chatglm-6b/1d240ba371910e9282298d4592532d7f0f3e9f3e/modeling_chatglm.py", line 1285, in chat
    outputs = self.generate(**inputs, **gen_kwargs)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
    return func(*args, **kwargs)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/transformers/generation/utils.py", line 1572, in generate
    return self.sample(
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/transformers/generation/utils.py", line 2619, in sample
    outputs = self(
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1511, in _call_impl
    return forward_call(*args, **kwargs)
  File "/dataset/huggingface/modules/transformers_modules/THUDM/chatglm-6b/1d240ba371910e9282298d4592532d7f0f3e9f3e/modeling_chatglm.py", line 1190, in forward
    transformer_outputs = self.transformer(
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1511, in _call_impl
    return forward_call(*args, **kwargs)
  File "/dataset/huggingface/modules/transformers_modules/THUDM/chatglm-6b/1d240ba371910e9282298d4592532d7f0f3e9f3e/modeling_chatglm.py", line 996, in forward
    layer_ret = layer(
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1511, in _call_impl
    return forward_call(*args, **kwargs)
  File "/dataset/huggingface/modules/transformers_modules/THUDM/chatglm-6b/1d240ba371910e9282298d4592532d7f0f3e9f3e/modeling_chatglm.py", line 624, in forward
    attention_input = self.input_layernorm(hidden_states)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1502, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1511, in _call_impl
    return forward_call(*args, **kwargs)
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/modules/normalization.py", line 190, in forward
    return F.layer_norm(
  File "/home/changwa1/anaconda3/envs/ipex_latest/lib/python3.8/site-packages/torch/nn/functional.py", line 2548, in layer_norm
    return torch.layer_norm(input, normalized_shape, weight, bias, eps, torch.backends.cudnn.enabled)
RuntimeError: mixed dtype (CPU): all inputs must share same datatype.

Sign up or log in to comment