Transformers
English
Inference Endpoints

Error in this line: model = PeftModel.from_pretrained(model, "bjoernp/alpaca-cerebras-6.7B", torch_dtype=torch.float16, device_map='auto')

#1
by smjain - opened

in <cell line: 1>:1 │
│ │
│ /usr/local/lib/python3.9/dist-packages/peft/peft_model.py:161 in from_pretrained │
│ │
│ 158 │ │ │ filename, map_location=torch.device("cuda" if torch.cuda.is_available() else │
│ 159 │ │ ) │
│ 160 │ │ # load the weights into the model │
│ ❱ 161 │ │ model = set_peft_model_state_dict(model, adapters_weights) │
│ 162 │ │ if getattr(model, "hf_device_map", None) is not None: │
│ 163 │ │ │ device_map = kwargs.get("device_map", "auto") │
│ 164 │ │ │ max_memory = kwargs.get("max_memory", None) │
│ │
│ /usr/local/lib/python3.9/dist-packages/peft/utils/save_and_load.py:74 in │
│ set_peft_model_state_dict │
│ │
│ 71 │ │ peft_model_state_dict (dict): The state dict of the Peft model. │
│ 72 │ """ │
│ 73 │ │
│ ❱ 74 │ model.load_state_dict(peft_model_state_dict, strict=False) │
│ 75 │ if model.peft_config.peft_type != PeftType.LORA: │
│ 76 │ │ model.prompt_encoder.embedding.load_state_dict( │
│ 77 │ │ │ {"weight": peft_model_state_dict["prompt_embeddings"]}, strict=True │
│ │
│ /usr/local/lib/python3.9/dist-packages/torch/nn/modules/module.py:1671 in load_state_dict │
│ │
│ 1668 │ │ │ │ │ │ ', '.join('"{}"'.format(k) for k in missing_keys))) │
│ 1669 │ │ │
│ 1670 │ │ if len(error_msgs) > 0: │
│ ❱ 1671 │ │ │ raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format( │
│ 1672 │ │ │ │ │ │ │ self.__class__.__name__, "\n\t".join(error_msgs))) │
│ 1673 │ │ return _IncompatibleKeys(missing_keys, unexpected_keys)

Sign up or log in to comment