Skip to content

Commit

Permalink
Add a default timeout for filelock (huggingface#2391)
Browse files Browse the repository at this point in the history
File lock operations did not have an associated timeout, which could
present problems when trying to debug an issue since nothing would be
logged if we could not acquire the lock and would wait forever.
  • Loading branch information
edevil authored Jul 15, 2024
1 parent c8a3e3a commit de160a4
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 3 deletions.
3 changes: 3 additions & 0 deletions src/huggingface_hub/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,9 @@ def _as_int(value: Optional[str]) -> Optional[int]:
SAFETENSORS_INDEX_FILE = "model.safetensors.index.json"
SAFETENSORS_MAX_HEADER_LENGTH = 25_000_000

# Timeout (in seconds) for acquiring a file lock; each timed-out attempt is logged
FILELOCK_LOG_EVERY_SECONDS = 10

# Git-related constants

DEFAULT_REVISION = "main"
Expand Down
17 changes: 14 additions & 3 deletions src/huggingface_hub/utils/_fixes.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,13 @@
from typing import Callable, Generator, Optional, Union

import yaml
from filelock import BaseFileLock, FileLock
from filelock import BaseFileLock, FileLock, Timeout

from .. import constants
from . import logging


logger = logging.get_logger(__name__)

# Wrap `yaml.dump` to set `allow_unicode=True` by default.
#
Expand Down Expand Up @@ -80,8 +85,14 @@ def _set_write_permission_and_retry(func, path, excinfo):
@contextlib.contextmanager
def WeakFileLock(lock_file: Union[str, Path]) -> Generator[BaseFileLock, None, None]:
"""A filelock that won't raise an exception if release fails."""
lock = FileLock(lock_file)
lock.acquire()
lock = FileLock(lock_file, timeout=constants.FILELOCK_LOG_EVERY_SECONDS)
while True:
try:
lock.acquire()
except Timeout:
logger.info("still waiting to acquire lock on %s", lock_file)
else:
break

yield lock

Expand Down

0 comments on commit de160a4

Please sign in to comment.