Hi,
I tried the official notebook from the repo (https://colab.research.google.com/github/unslothai/notebooks/blob/main/nb/Llama3.2_(11B)-Vision.ipynb) and it works fine. But when I load the model from a local directory, I cannot save the merged model to disk: a FileNotFoundError is raised. Detailed output below:
---------------------------------------------------------------------------
HFValidationError Traceback (most recent call last)
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py:125, in HfFileSystem._repo_and_revision_exist(self, repo_type, repo_id, revision)
124 try:
--> 125 self._api.repo_info(
126 repo_id, revision=revision, repo_type=repo_type, timeout=constants.HF_HUB_ETAG_TIMEOUT
127 )
128 except (RepositoryNotFoundError, HFValidationError) as e:
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:106, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
105 if arg_name in ["repo_id", "from_id", "to_id"]:
--> 106 validate_repo_id(arg_value)
108 elif arg_name == "token" and arg_value is not None:
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:160, in validate_repo_id(repo_id)
159 if not REPO_ID_REGEX.match(repo_id):
--> 160 raise HFValidationError(
161 "Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are"
162 " forbidden, '-' and '.' cannot start or end the name, max length is 96:"
163 f" '{repo_id}'."
164 )
166 if "--" in repo_id or ".." in repo_id:
HFValidationError: Repo id must use alphanumeric chars or '-', '_', '.', '--' and '..' are forbidden, '-' and '.' cannot start or end the name, max length is 96: '/home'.
The above exception was the direct cause of the following exception:
FileNotFoundError Traceback (most recent call last)
Cell In[9], line 1
----> 1 if True: model.save_pretrained_merged("unsloth_finetune", tokenizer,)
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/unsloth/save.py:2185, in unsloth_generic_save_pretrained_merged(self, save_directory, tokenizer, save_method, push_to_hub, token, is_main_process, state_dict, save_function, max_shard_size, safe_serialization, variant, save_peft_format, tags, temporary_location, maximum_memory_usage)
2183 arguments["model"] = self
2184 del arguments["self"]
-> 2185 unsloth_generic_save(**arguments)
2186 for _ in range(3):
2187 gc.collect()
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/torch/utils/_contextlib.py:116, in context_decorator.<locals>.decorate_context(*args, **kwargs)
113 @functools.wraps(func)
114 def decorate_context(*args, **kwargs):
115 with ctx_factory():
--> 116 return func(*args, **kwargs)
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/unsloth/save.py:2132, in unsloth_generic_save(model, tokenizer, save_directory, save_method, push_to_hub, token, is_main_process, state_dict, save_function, max_shard_size, safe_serialization, variant, save_peft_format, use_temp_dir, commit_message, private, create_pr, revision, commit_description, tags, temporary_location, maximum_memory_usage)
2102 @torch.inference_mode
2103 def unsloth_generic_save(
2104 model,
(...)
2129 maximum_memory_usage : float = 0.9,
2130 ):
2131 if token is None and push_to_hub: token = get_token()
-> 2132 merge_and_overwrite_lora(
2133 get_model_name,
2134 model = model,
2135 tokenizer = tokenizer,
2136 save_directory = save_directory,
2137 push_to_hub = push_to_hub,
2138 private = private,
2139 token = token,
2140 output_dtype = None,
2141 low_disk_space_usage = False,
2142 use_temp_file = False,
2143 )
2144 return
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/torch/utils/_contextlib.py:116, in context_decorator.<locals>.decorate_context(*args, **kwargs)
113 @functools.wraps(func)
114 def decorate_context(*args, **kwargs):
115 with ctx_factory():
--> 116 return func(*args, **kwargs)
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/unsloth_zoo/saving_utils.py:509, in merge_and_overwrite_lora(get_model_name, model, tokenizer, save_directory, push_to_hub, private, token, output_dtype, low_disk_space_usage, use_temp_file)
506 model_name = model.config._name_or_path
508 # Find repository's max shard size and total size of everything
--> 509 file_list = HfFileSystem(token = token).ls(model_name, detail = True)
510 safetensors_list = []
511 max_size_in_bytes = 0
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py:368, in HfFileSystem.ls(self, path, detail, refresh, revision, **kwargs)
339 def ls(
340 self, path: str, detail: bool = True, refresh: bool = False, revision: Optional[str] = None, **kwargs
341 ) -> List[Union[str, Dict[str, Any]]]:
342 """
343 List the contents of a directory.
344
(...)
366 dictionaries (if detail=True).
367 """
--> 368 resolved_path = self.resolve_path(path, revision=revision)
369 path = resolved_path.unresolve()
370 kwargs = {"expand_info": detail, **kwargs}
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py:216, in HfFileSystem.resolve_path(self, path, revision)
214 repo_and_revision_exist, _ = self._repo_and_revision_exist(repo_type, repo_id, revision)
215 if not repo_and_revision_exist:
--> 216 _raise_file_not_found(path, err)
217 else:
218 _raise_file_not_found(path, err)
File ~/anaconda3/envs/llm/lib/python3.10/site-packages/huggingface_hub/hf_file_system.py:1136, in _raise_file_not_found(path, err)
1134 elif isinstance(err, HFValidationError):
1135 msg = f"{path} (invalid repository id)"
-> 1136 raise FileNotFoundError(msg) from err
FileNotFoundError: /home/Desktop/work/ai_model/Llama-3.2-11B-Vision-Instruct-bnb-4bit (invalid repository id)
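If I read the traceback correctly, merge_and_overwrite_lora passes model.config._name_or_path (which in my case is the local directory) to HfFileSystem.ls(), and that API only accepts Hub repo ids. A minimal sketch of the failing call in isolation (the path is just my local example):

from huggingface_hub import HfFileSystem

# _name_or_path holds the local directory the model was loaded from, not a Hub repo id
local_path = "/home/Desktop/work/ai_model/Llama-3.2-11B-Vision-Instruct-bnb-4bit"
HfFileSystem().ls(local_path, detail=True)  # raises FileNotFoundError: ... (invalid repository id)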
To reproduce
Change the model path to a local directory, train following the notebook, and save:
from unsloth import FastVisionModel

model, tokenizer = FastVisionModel.from_pretrained(
    "/home/Desktop/work/ai_model/Llama-3.2-11B-Vision-Instruct-bnb-4bit",
    load_in_4bit = True,                     # Use 4bit to reduce memory use. False for 16bit LoRA.
    use_gradient_checkpointing = "unsloth",  # True or "unsloth" for long context
)
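A possible workaround I am considering (untested, and the repo id below is only my guess at the matching Hub repository) is to point _name_or_path back at a Hub repo before saving, so the shard lookup resolves against the Hub instead of my local path:

# Untested workaround sketch: restore a Hub repo id so HfFileSystem.ls() can resolve it.
# "unsloth/Llama-3.2-11B-Vision-Instruct-bnb-4bit" is my assumption of the matching Hub repo.
model.config._name_or_path = "unsloth/Llama-3.2-11B-Vision-Instruct-bnb-4bit"
model.save_pretrained_merged("unsloth_finetune", tokenizer)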
Thanks for the help!