OSError: akrititiwari/model does not appear to have a file named config.json. Checkout 'https://huggingface.co/akrititiwari/model/tree/main' for available files.

#1
by blackhole33 - opened

I fine-tuned a Llama 3 model for the Uzbek language, then wanted to load that model with HuggingFaceLLM, but it returns an error.
How do I load an adapter model with HuggingFaceLLM?

error:

/opt/conda/lib/python3.10/site-packages/pydantic/_internal/fields.py:160: UserWarning: Field "model_id" has conflict with protected namespace "model".

You may be able to resolve this warning by setting model_config['protected_namespaces'] = ().
warnings.warn(
/opt/conda/lib/python3.10/site-packages/huggingface_hub/file_download.py:1132: FutureWarning: resume_download is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use force_download=True.
warnings.warn(

HTTPError Traceback (most recent call last)
File /opt/conda/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py:304, in hf_raise_for_status(response, endpoint_name)
303 try:
--> 304 response.raise_for_status()
305 except HTTPError as e:

File /opt/conda/lib/python3.10/site-packages/requests/models.py:1024, in Response.raise_for_status(self)
1023 if http_error_msg:
-> 1024 raise HTTPError(http_error_msg, response=self)

HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/blackhole33/llama-3-8b-Instruct-bnb-4bit-V2/resolve/main/config.json

The above exception was the direct cause of the following exception:

EntryNotFoundError Traceback (most recent call last)
File /opt/conda/lib/python3.10/site-packages/transformers/utils/hub.py:399, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
397 try:
398 # Load from URL or cache if already cached
--> 399 resolved_file = hf_hub_download(
400 path_or_repo_id,
401 filename,
402 subfolder=None if len(subfolder) == 0 else subfolder,
403 repo_type=repo_type,
404 revision=revision,
405 cache_dir=cache_dir,
406 user_agent=user_agent,
407 force_download=force_download,
408 proxies=proxies,
409 resume_download=resume_download,
410 token=token,
411 local_files_only=local_files_only,
412 )
413 except GatedRepoError as e:

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/file_download.py:1221, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, legacy_cache_layout, resume_download, force_filename, local_dir_use_symlinks)
1220 else:
-> 1221 return _hf_hub_download_to_cache_dir(
1222 # Destination
1223 cache_dir=cache_dir,
1224 # File info
1225 repo_id=repo_id,
1226 filename=filename,
1227 repo_type=repo_type,
1228 revision=revision,
1229 # HTTP info
1230 headers=headers,
1231 proxies=proxies,
1232 etag_timeout=etag_timeout,
1233 endpoint=endpoint,
1234 # Additional options
1235 local_files_only=local_files_only,
1236 force_download=force_download,
1237 )

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/file_download.py:1282, in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, headers, proxies, etag_timeout, endpoint, local_files_only, force_download)
1280 # Try to get metadata (etag, commit_hash, url, size) from the server.
1281 # If we can't, a HEAD request error is returned.
-> 1282 (url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
1283 repo_id=repo_id,
1284 filename=filename,
1285 repo_type=repo_type,
1286 revision=revision,
1287 endpoint=endpoint,
1288 proxies=proxies,
1289 etag_timeout=etag_timeout,
1290 headers=headers,
1291 local_files_only=local_files_only,
1292 storage_folder=storage_folder,
1293 relative_filename=relative_filename,
1294 )
1296 # etag can be None for several reasons:
1297 # 1. we passed local_files_only.
1298 # 2. we don't have a connection
(...)
1304 # If the specified revision is a commit hash, look inside "snapshots".
1305 # If the specified revision is a branch or tag, look inside "refs".

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/file_download.py:1722, in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, local_files_only, relative_filename, storage_folder)
1721 try:
-> 1722 metadata = get_hf_file_metadata(url=url, proxies=proxies, timeout=etag_timeout, headers=headers)
1723 except EntryNotFoundError as http_error:

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
112 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/file_download.py:1645, in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers)
1644 # Retrieve metadata
-> 1645 r = _request_wrapper(
1646 method="HEAD",
1647 url=url,
1648 headers=headers,
1649 allow_redirects=False,
1650 follow_relative_redirects=True,
1651 proxies=proxies,
1652 timeout=timeout,
1653 )
1654 hf_raise_for_status(r)

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/file_download.py:372, in _request_wrapper(method, url, follow_relative_redirects, **params)
371 if follow_relative_redirects:
--> 372 response = _request_wrapper(
373 method=method,
374 url=url,
375 follow_relative_redirects=False,
376 **params,
377 )
379 # If redirection, we redirect only relative paths.
380 # This is useful in case of a renamed repository.

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/file_download.py:396, in _request_wrapper(method, url, follow_relative_redirects, **params)
395 response = get_session().request(method=method, url=url, **params)
--> 396 hf_raise_for_status(response)
397 return response

File /opt/conda/lib/python3.10/site-packages/huggingface_hub/utils/_errors.py:315, in hf_raise_for_status(response, endpoint_name)
314 message = f"{response.status_code} Client Error." + "\n\n" + f"Entry Not Found for url: {response.url}."
--> 315 raise EntryNotFoundError(message, response) from e
317 elif error_code == "GatedRepo":

EntryNotFoundError: 404 Client Error. (Request ID: Root=1-6666e326-4b8c39bd7a60696f2c7c6c44;69c530af-6691-442f-91e3-17b486ab2099)

Entry Not Found for url: https://huggingface.co/blackhole33/llama-3-8b-Instruct-bnb-4bit-V2/resolve/main/config.json.

The above exception was the direct cause of the following exception:

OSError Traceback (most recent call last)
Cell In[3], line 3
1 from llama_index.llms.huggingface import HuggingFaceLLM
2 import torch
----> 3 llm = HuggingFaceLLM(
4 context_window=4096,
5 max_new_tokens=256,
6 generate_kwargs={"temperature": 0.7, "do_sample": False},
7 tokenizer_name="blackhole33/llama-3-8b-Instruct-bnb-4bit-V2",
8 model_name="blackhole33/llama-3-8b-Instruct-bnb-4bit-V2",
9 device_map="auto",
10 stopping_ids=[50278, 50279, 50277, 1, 0],
11 tokenizer_kwargs={"max_length": 4096},
12 model_kwargs={"torch_dtype": torch.float16}
13 )

File /opt/conda/lib/python3.10/site-packages/llama_index/llms/huggingface/base.py:237, in HuggingFaceLLM.__init__(self, context_window, max_new_tokens, query_wrapper_prompt, tokenizer_name, model_name, model, tokenizer, device_map, stopping_ids, tokenizer_kwargs, tokenizer_outputs_to_remove, model_kwargs, generate_kwargs, is_chat_model, callback_manager, system_prompt, messages_to_prompt, completion_to_prompt, pydantic_program_mode, output_parser)
235 """Initialize params."""
236 model_kwargs = model_kwargs or {}
--> 237 self._model = model or AutoModelForCausalLM.from_pretrained(
238 model_name, device_map=device_map, **model_kwargs
239 )
241 # check context_window
242 config_dict = self._model.config.to_dict()

File /opt/conda/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:523, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
520 if kwargs.get("quantization_config", None) is not None:
521 _ = kwargs.pop("quantization_config")
--> 523 config, kwargs = AutoConfig.from_pretrained(
524 pretrained_model_name_or_path,
525 return_unused_kwargs=True,
526 trust_remote_code=trust_remote_code,
527 code_revision=code_revision,
528 _commit_hash=commit_hash,
529 **hub_kwargs,
530 **kwargs,
531 )
533 # if torch_dtype=auto was passed here, ensure to pass it on
534 if kwargs_orig.get("torch_dtype", None) == "auto":

File /opt/conda/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:934, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
931 trust_remote_code = kwargs.pop("trust_remote_code", None)
932 code_revision = kwargs.pop("code_revision", None)
--> 934 config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
935 has_remote_code = "auto_map" in config_dict and "AutoConfig" in config_dict["auto_map"]
936 has_local_code = "model_type" in config_dict and config_dict["model_type"] in CONFIG_MAPPING

File /opt/conda/lib/python3.10/site-packages/transformers/configuration_utils.py:632, in PretrainedConfig.get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
630 original_kwargs = copy.deepcopy(kwargs)
631 # Get config dict associated with the base config file
--> 632 config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
633 if "_commit_hash" in config_dict:
634 original_kwargs["_commit_hash"] = config_dict["_commit_hash"]

File /opt/conda/lib/python3.10/site-packages/transformers/configuration_utils.py:689, in PretrainedConfig._get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
685 configuration_file = kwargs.pop("_configuration_file", CONFIG_NAME) if gguf_file is None else gguf_file
687 try:
688 # Load from local folder or from cache or download from model Hub and cache
--> 689 resolved_config_file = cached_file(
690 pretrained_model_name_or_path,
691 configuration_file,
692 cache_dir=cache_dir,
693 force_download=force_download,
694 proxies=proxies,
695 resume_download=resume_download,
696 local_files_only=local_files_only,
697 token=token,
698 user_agent=user_agent,
699 revision=revision,
700 subfolder=subfolder,
701 _commit_hash=commit_hash,
702 )
703 commit_hash = extract_commit_hash(resolved_config_file, commit_hash)
704 except EnvironmentError:
705 # Raise any environment error raise by cached_file. It will have a helpful error message adapted to
706 # the original exception.

File /opt/conda/lib/python3.10/site-packages/transformers/utils/hub.py:453, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, subfolder, repo_type, user_agent, _raise_exceptions_for_gated_repo, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash, **deprecated_kwargs)
451 if revision is None:
452 revision = "main"
--> 453 raise EnvironmentError(
454 f"{path_or_repo_id} does not appear to have a file named {full_filename}. Checkout "
455 f"'https://huggingface.co/ class="ansi-bold" style="color:rgb(175,95,135)">{path_or_repo_id}/tree/{revision}' for available files."
456 ) from e
457 except HTTPError as err:
458 resolved_file = _get_cache_file_to_return(path_or_repo_id, full_filename, cache_dir, revision)

OSError: blackhole33/llama-3-8b-Instruct-bnb-4bit-V2 does not appear to have a file named config.json. Checkout 'https://huggingface.co/blackhole33/llama-3-8b-Instruct-bnb-4bit-V2/tree/main' for available files.
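This error usually means the repo holds only adapter (LoRA) weights: an adapter_config.json but no config.json, so AutoModelForCausalLM.from_pretrained cannot build the model from the repo name alone. One workaround is to assemble the model yourself and hand the objects to HuggingFaceLLM, which accepts model and tokenizer parameters (visible in the __init__ signature in the traceback above). Below is a minimal sketch, not a verified fix; it assumes the repo is a PEFT adapter and that its base model is unsloth/llama-3-8b-Instruct-bnb-4bit, which is a guess — check base_model_name_or_path in the repo's adapter_config.json.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel
from llama_index.llms.huggingface import HuggingFaceLLM

BASE_ID = "unsloth/llama-3-8b-Instruct-bnb-4bit"  # assumption: read the real id from adapter_config.json
ADAPTER_ID = "blackhole33/llama-3-8b-Instruct-bnb-4bit-V2"

# Load the base model first, then attach the adapter weights on top of it.
base_model = AutoModelForCausalLM.from_pretrained(
    BASE_ID, device_map="auto", torch_dtype=torch.float16
)
model = PeftModel.from_pretrained(base_model, ADAPTER_ID)
tokenizer = AutoTokenizer.from_pretrained(BASE_ID)

# Pass the already-instantiated objects so HuggingFaceLLM never calls
# AutoModelForCausalLM.from_pretrained on the adapter repo (which has no config.json).
llm = HuggingFaceLLM(
    context_window=4096,
    max_new_tokens=256,
    generate_kwargs={"temperature": 0.7, "do_sample": False},
    model=model,
    tokenizer=tokenizer,
)
```

Alternatively, merging the adapter into the base weights with PEFT's merge_and_unload() and pushing the merged model to the Hub produces a repo with its own config.json, which can then be loaded by name as in the original code.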
