Commit e68115f: fetch_on_fail

cmcmarrow committed Sep 1, 2023
1 parent d45fcac commit e68115f
Showing 4 changed files with 82 additions and 28 deletions.
21 changes: 15 additions & 6 deletions salt/utils/gitfs.py
@@ -2931,15 +2931,18 @@ def write_remote_map(self):
else:
log.info("Wrote new %s remote map to %s", self.role, remote_map)

def do_checkout(self, repo):
def do_checkout(self, repo, fetch_on_fail=True):
"""
Common code for git_pillar/winrepo to handle locking and checking out
of a repo.
fetch_on_fail
If checkout fails, perform a fetch, then try to checkout again.
"""
time_start = time.time()
while time.time() - time_start <= 5:
try:
return repo.checkout()
return repo.checkout(fetch_on_fail=fetch_on_fail)
except GitLockError as exc:
if exc.errno == errno.EEXIST:
time.sleep(0.1)
@@ -3334,14 +3337,17 @@ class GitPillar(GitBase):

role = "git_pillar"

def checkout(self):
def checkout(self, fetch_on_fail=True):
"""
Checkout the targeted branches/tags from the git_pillar remotes
fetch_on_fail
If checkout fails, perform a fetch, then try to checkout again.
"""
self.pillar_dirs = OrderedDict()
self.pillar_linked_dirs = []
for repo in self.remotes:
cachedir = self.do_checkout(repo)
cachedir = self.do_checkout(repo, fetch_on_fail=fetch_on_fail)
if cachedir is not None:
# Figure out which environment this remote should be assigned
if repo.branch == "__env__" and hasattr(repo, "all_saltenvs"):
@@ -3493,19 +3499,22 @@ def link_mountpoint(self, repo):
class WinRepo(GitBase):
"""
Functionality specific to the winrepo runner
fetch_on_fail
If checkout fails, perform a fetch, then try to checkout again.
"""

role = "winrepo"
# Need to define this in case we try to reference it before checking
# out the repos.
winrepo_dirs = {}

def checkout(self):
def checkout(self, fetch_on_fail=True):
"""
Checkout the targeted branches/tags from the winrepo remotes
"""
self.winrepo_dirs = {}
for repo in self.remotes:
cachedir = self.do_checkout(repo)
cachedir = self.do_checkout(repo, fetch_on_fail=fetch_on_fail)
if cachedir is not None:
self.winrepo_dirs[repo.id] = cachedir
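
The hunks above only show do_checkout, GitPillar.checkout, and WinRepo.checkout passing the new flag through to the per-remote repo.checkout(). As a rough illustration of the pattern the flag enables, here is a minimal, self-contained sketch; the class name, the ref bookkeeping, and the method bodies below are invented for illustration and are not Salt's GitProvider implementation:

# Minimal sketch only; not the actual salt.utils.gitfs provider code.
class ProviderSketch:
    def __init__(self):
        self.local_refs = set()       # refs already present in the local cachedir
        self.remote_refs = {"main"}   # refs that exist on the remote

    def fetch(self):
        # In Salt this talks to the remote via GitPython/pygit2; here we just
        # copy the remote ref list into the local cache.
        self.local_refs |= self.remote_refs

    def checkout(self, ref="main", fetch_on_fail=True):
        if ref in self.local_refs:
            return f"cachedir-for-{ref}"
        if fetch_on_fail:
            # Checkout failed because the ref is not cached yet: fetch once,
            # then retry the checkout.
            self.fetch()
            return self.checkout(ref, fetch_on_fail=False)
        return None


p = ProviderSketch()
assert p.checkout(fetch_on_fail=False) is None   # no fetch allowed, so it fails
assert p.checkout() == "cachedir-for-main"       # fetches, then the retry succeeds

In the diff itself the flag simply reaches repo.checkout(fetch_on_fail=fetch_on_fail), letting callers opt out of the extra fetch when they know the cache is already populated.
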
16 changes: 15 additions & 1 deletion tests/pytests/functional/utils/test_cache.py
@@ -1,8 +1,11 @@
import os

import pytest

import salt.utils.cache
import salt.utils.files
import salt.utils.path
import salt.version

_DUMMY_FILES = (
"data.txt",
@@ -41,15 +44,26 @@ def _dummy_files_exists(tmp_path):
return ret


def test_verify_cache_version_bad_path():
with pytest.raises(ValueError):
# cache version should fail if given a bad file path
salt.utils.cache.verify_cache_version("\0/bad/path")


def test_verify_cache_version(tmp_path):
tmp_path = str(tmp_path)
# cache version should make dir if it does not exist
tmp_path = str(salt.utils.path.join(str(tmp_path), "work", "salt"))
cache_version = salt.utils.path.join(tmp_path, "cache_version")

# check that cache clears when no cache_version is present
_make_dummy_files(tmp_path)
assert salt.utils.cache.verify_cache_version(tmp_path) is False
assert _dummy_files_exists(tmp_path) is False

# check that cache_version has correct salt version
with salt.utils.files.fopen(cache_version, "r") as file:
assert "\n".join(file.readlines()) == salt.version.__version__

# check that the cache does not get cleared when the check is called multiple times
_make_dummy_files(tmp_path)
for _ in range(3):
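The assertions above pin down the contract of salt.utils.cache.verify_cache_version without showing its body. The following is a sketch of that contract inferred from the test, not the code that ships in salt/utils/cache.py; verify_cache_version_sketch is a stand-in name:

# Inferred from the test above; not the actual salt.utils.cache implementation.
import os
import shutil

import salt.version


def verify_cache_version_sketch(cache_path):
    # A path with an embedded null byte makes os.makedirs raise ValueError,
    # matching test_verify_cache_version_bad_path.
    os.makedirs(cache_path, exist_ok=True)  # create the cache dir if missing
    version_file = os.path.join(cache_path, "cache_version")
    current = salt.version.__version__
    if os.path.isfile(version_file):
        with open(version_file) as fh:
            if fh.read().strip() == current:
                return True  # version matches, leave the cached files alone
    # Version file missing or stale: clear the cache and record the new version.
    for name in os.listdir(cache_path):
        if name == "cache_version":
            continue
        path = os.path.join(cache_path, name)
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.remove(path)
    with open(version_file, "w") as fh:
        fh.write(current)
    return False
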
39 changes: 18 additions & 21 deletions tests/pytests/functional/utils/test_gitfs.py
@@ -47,24 +47,30 @@ def gitfs_opts(salt_factories, tmp_path):
@pytest.fixture
def gitpython_gitfs_opts(gitfs_opts):
gitfs_opts["verified_gitfs_provider"] = "gitpython"
GitFS.instance_map.clear()
GitFS.instance_map.clear() # wipe instance_map object map for clean run
return gitfs_opts


@pytest.fixture
def pygit2_gitfs_opts(gitfs_opts):
gitfs_opts["verified_gitfs_provider"] = "pygit2"
GitFS.instance_map.clear()
GitFS.instance_map.clear() # wipe instance_map object map for clean run
return gitfs_opts


def _test_gitfs_simple(gitfs_opts):
g = GitFS(
gitfs_opts,
["https://github.com/saltstack/salt-test-pillar-gitfs.git"],
def _get_gitfs(opts, *remotes):
return GitFS(
opts,
remotes,
per_remote_overrides=PER_REMOTE_OVERRIDES,
per_remote_only=PER_REMOTE_ONLY,
)


def _test_gitfs_simple(gitfs_opts):
g = _get_gitfs(
gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git"
)
g.fetch_remotes()
assert len(g.remotes) == 1
assert set(g.file_list({"saltenv": "main"})) == {".gitignore", "README.md"}
@@ -81,11 +87,8 @@ def test_pygit2_gitfs_simple(pygit2_gitfs_opts):


def _test_gitfs_simple_base(gitfs_opts):
g = GitFS(
gitfs_opts,
["https://github.com/saltstack/salt-test-pillar-gitfs.git"],
per_remote_overrides=PER_REMOTE_OVERRIDES,
per_remote_only=PER_REMOTE_ONLY,
g = _get_gitfs(
gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git"
)
g.fetch_remotes()
assert len(g.remotes) == 1
@@ -109,11 +112,8 @@ def test_pygit2_gitfs_simple_base(pygit2_gitfs_opts):

@skipif_no_gitpython
def test_gitpython_gitfs_provider(gitpython_gitfs_opts):
g = GitFS(
gitpython_gitfs_opts,
["https://github.com/saltstack/salt-test-pillar-gitfs.git"],
per_remote_overrides=PER_REMOTE_OVERRIDES,
per_remote_only=PER_REMOTE_ONLY,
g = _get_gitfs(
gitpython_gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git"
)
assert len(g.remotes) == 1
assert g.provider == "gitpython"
@@ -122,11 +122,8 @@ def test_gitpython_gitfs_provider(gitpython_gitfs_opts):

@skipif_no_pygit2
def test_pygit2_gitfs_provider(pygit2_gitfs_opts):
g = GitFS(
pygit2_gitfs_opts,
["https://github.com/saltstack/salt-test-pillar-gitfs.git"],
per_remote_overrides=PER_REMOTE_OVERRIDES,
per_remote_only=PER_REMOTE_ONLY,
g = _get_gitfs(
pygit2_gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git"
)
assert len(g.remotes) == 1
assert g.provider == "pygit2"
34 changes: 34 additions & 0 deletions tests/pytests/functional/utils/test_pillar.py
@@ -95,6 +95,7 @@ def _test_env(opts):
assert len(p.remotes) == 1
p.checkout()
repo = p.remotes[0]
# test that two different pillarenvs can exist at the same time
files = set(os.listdir(repo.get_cachedir()))
for f in (".gitignore", "README.md", "file.sls", "top.sls"):
assert f in files
@@ -115,6 +116,24 @@ def _test_env(opts):
for f in (".gitignore", "README.md", "file.sls", "top.sls"):
assert f in files

# double check cache paths
assert (
repo.get_cache_hash() == repo2.get_cache_hash()
) # __env__ repos share same hash
assert repo.get_cache_basename() != repo2.get_cache_basename()
assert repo.get_linkdir() != repo2.get_linkdir()
assert repo.get_salt_working_dir() != repo2.get_salt_working_dir()
assert repo.get_cache_basename() == "master"
assert repo2.get_cache_basename() == "main"

assert repo.get_cache_basename() in repo.get_cachedir()
assert (
os.path.join(repo.get_cache_basehash(), repo.get_cache_basename())
== repo.get_cache_full_basename()
)
assert repo.get_linkdir() not in repo.get_cachedir()
assert repo.get_salt_working_dir() not in repo.get_cachedir()


@skipif_no_gitpython
def test_gitpython_env(gitpython_pillar_opts):
@@ -124,3 +143,18 @@ def test_gitpython_env(gitpython_pillar_opts):
@skipif_no_pygit2
def test_pygit2_env(pygit2_pillar_opts):
_test_env(pygit2_pillar_opts)


def _test_checkout_fetch_on_fail(opts):
p = _get_pillar(opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git")
p.checkout(fetch_on_fail=False) # TODO write me


@skipif_no_gitpython
def test_gitpython_checkout_fetch_on_fail(gitpython_pillar_opts):
_test_checkout_fetch_on_fail(gitpython_pillar_opts)


@skipif_no_pygit2
def test_pygit2_checkout_fetch_on_fail(pygit2_pillar_opts):
_test_checkout_fetch_on_fail(pygit2_pillar_opts)
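
The body of _test_checkout_fetch_on_fail is still a TODO in this commit. One way it might eventually be exercised is sketched below; it is built only from helpers that already appear in this file (_get_pillar, fetch_remotes, get_cachedir) and is not the test Salt ultimately shipped:

# Sketch only; the actual test body is still "TODO write me" in this commit.
def _test_checkout_fetch_on_fail_sketch(opts):
    p = _get_pillar(opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git")
    p.fetch_remotes()                # warm the cache up front
    p.checkout(fetch_on_fail=False)  # should now succeed without another fetch
    repo = p.remotes[0]
    files = set(os.listdir(repo.get_cachedir()))
    for f in (".gitignore", "README.md", "file.sls", "top.sls"):
        assert f in files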
