D267.1768242085.diff (D267: Support caching)
diff --git a/lilybuild/lilybuild/ci_steps.py b/lilybuild/lilybuild/ci_steps.py
--- a/lilybuild/lilybuild/ci_steps.py
+++ b/lilybuild/lilybuild/ci_steps.py
@@ -5,7 +5,14 @@
 from twisted.internet import defer
 from .ci_syntax import ci_file
 from .ci_syntax import rules as ci_rules
-from .helpers import rsync_rules_from_artifacts, get_job_script, normalize_image, normalize_services, ci_vars_to_env_file
+from .helpers import (
+    rsync_rules_from_artifacts,
+    get_job_script,
+    normalize_image,
+    normalize_services,
+    ci_vars_to_env_file,
+    generate_metadata_from_job,
+)
 from .phorge import SendCoverageToPhorge
 import re
 import sys
@@ -190,6 +197,7 @@
     def job_to_steps(self, job, job_index, variables):
         script_name = self.script_dir + '/run.sh'
         env_filename = self.script_dir + '/env'
+        metadata_filename = self.script_dir + '/metadata.json'

         source_step = self.lbc.create_source_step()

@@ -209,6 +217,18 @@
             doStepIf=on_success,
         )

+        metadata_step = steps.StringDownload(
+            generate_metadata_from_job(
+                self.getProperty('lilybuild_repo_id'),
+                job,
+                variables,
+            ),
+            name='Set up metadata file',
+            workerdest=metadata_filename,
+            workdir=self.work_root_dir,
+            doStepIf=on_success,
+        )
+
         chmod_step = steps.ShellCommand(
             name='Make script executable',
             command=['chmod', '+x', script_name],
@@ -284,7 +304,7 @@
             alwaysRun=True,
         )

-        steps_to_run = [source_step, script_step, chmod_step, env_step] + artifact_steps + [run_step, clean_script_step]
+        steps_to_run = [source_step, script_step, chmod_step, env_step, metadata_step] + artifact_steps + [run_step, clean_script_step]
         if 'paths' in job.artifacts:
             steps_to_run += self.get_upload_artifacts_jobs(
                 'files',
diff --git a/lilybuild/lilybuild/helpers.py b/lilybuild/lilybuild/helpers.py
--- a/lilybuild/lilybuild/helpers.py
+++ b/lilybuild/lilybuild/helpers.py
@@ -103,3 +103,37 @@
         res.append(normalized_service)
     return json.dumps(res)

+
+VAR_REGEX = re.compile(r'\$([A-Za-z0-9_]+|\{[A-Za-z0-9_]+\})')
+def expand_in_vars(value, vs):
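+    # Expand $NAME and ${NAME} references in value using the mapping vs;
+    # unknown variables expand to the empty string, and expansion does
+    # not recurse into substituted values.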
+    def replacement(match):
+        varname = match.group(1)
+        if varname.startswith('{'):
+            varname = varname[1:-1]
+        return vs.get(varname, '')
+    return VAR_REGEX.sub(replacement, value)
+
+def generate_metadata_from_job(repo_id, job, vs):
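+    # Build the metadata.json payload consumed by the podman helper:
+    # normalize the job's `cache` section (a single mapping or a list of
+    # mappings) and expand CI variables in each cache key.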
+    res = {
+        'repo_id': repo_id,
+        'caches': [],
+        'cache_last_invalidated_sec': 0,
+        'protected': False,
+    }
+    caches = job.struct_raw.get('cache') or []
+    if not isinstance(caches, list):
+        caches = [caches]
+    for cache_def in caches:
+        cache_key = cache_def.get('key')
+        if isinstance(cache_key, str):
+            cache_key = expand_in_vars(cache_key, vs)
+
+        paths = cache_def.get('paths') or []
+        res['caches'].append({
+            'key': cache_key,
+            'paths': paths,
+            'when': cache_def.get('when') or 'on_success',
+            'policy': cache_def.get('policy') or 'pull-push',
+        })
+
+    return json.dumps(res)
diff --git a/lilybuild/lilybuild/podman_helper.py b/lilybuild/lilybuild/podman_helper.py
--- a/lilybuild/lilybuild/podman_helper.py
+++ b/lilybuild/lilybuild/podman_helper.py
@@ -9,12 +9,16 @@
 import string
 import time
 import hashlib
+import re
+import tempfile
+import lilybuild.safetar

 col_info = '\x1b[1;34m[INFO]'
 col_success = '\x1b[1;32m[SUCC]'
 col_warn = '\x1b[1;33m<WARN>'
 col_error = '\x1b[1;31m!ERROR!'
 col_reset = '\x1b[0m'
+any_spaces_re = re.compile(r'\s')

 def pinfo(*args, **kwargs):
     print(col_info, *args, col_reset, **kwargs)
@@ -48,10 +52,11 @@
 class PodmanHelper:
     cache_storage_root_dir = '/cache'
-    cache_max_bytes = 10 * 1024 * 1024
+    cache_max_bytes = 10 * 1024 * 1024 * 1024  # 10 GiB
     work_vol_mount_dir = '/build'
     script_vol_mount_dir = '/script'
+    cache_tmp_dir = '/tmp/cache/'
     script_name = script_vol_mount_dir + '/run.sh'
     env_file_basename = 'env'
     metadata_file_basename = 'metadata.json'
     cur_cache_basename = 'cur'
@@ -61,6 +66,7 @@
     key_file_sub = '/secrets/lilybuild-volume-helper-key'
     ssh_port = '2222'
     ssh_command = f'ssh -p {ssh_port} -i {key_file_sub} -oStrictHostKeyChecking=no -oUserKnownHostsFile=/dev/null'
+    ssh_command_list = ['ssh', '-p', ssh_port, '-i', key_file_sub, '-oStrictHostKeyChecking=no', '-oUserKnownHostsFile=/dev/null']
     worker_container_name = os.environ.get('HOSTNAME', '')
     ssh_max_wait = 10
     ssh_wait_interval_sec = 1
@@ -153,10 +159,111 @@
         psuccess('Service is up.')
         return (container_name, alias)

+    def get_valid_caches(self, md):
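+        # Return the paths of stored cache archives that exist, allow
+        # pulling, and are newer than cache_last_invalidated_sec.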
+        using_caches = []
+
+        for c in md['caches']:
+            if c['policy'] not in ('pull-push', 'pull'):
+                continue
+            storage_dir = self.get_cache_storage_dir(md['repo_id'], c, protected=md['protected'])
+            pinfo('Cache storage dir is', storage_dir)
+            if os.path.exists(storage_dir):
+                pinfo('Cache exists')
+                cur_cache_name = os.path.join(storage_dir, self.cur_cache_basename)
+                try:
+                    stat_res = os.stat(cur_cache_name)
+                    if stat_res.st_mtime > md['cache_last_invalidated_sec']:
+                        pinfo('Cache not expired')
+                        using_caches.append(cur_cache_name)
+                    else:
+                        pinfo('Cache expired')
+                except OSError:
+                    pinfo('Cache does not exist')
+
+        return using_caches
+
+    def import_caches(self, md, vol_mount_dir):
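+        # Upload each valid cache archive to the helper container with
+        # rsync, then extract it into vol_mount_dir over ssh.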
+        # Ensure we never accidentally operate on the filesystem root.
+        # vol_mount_dir should always be a constant, but validate it anyway.
+        # Reject whitespace because everything passed after ssh is
+        # processed through a shell on the remote side.
+        if not (vol_mount_dir and
+                isinstance(vol_mount_dir, str) and
+                not any_spaces_re.search(vol_mount_dir)):
+            perror('vol_mount_dir cannot be empty and cannot contain spaces')
+            raise RuntimeError('vol_mount_dir cannot be empty and cannot contain spaces')
+        valid_caches = self.get_valid_caches(md)
+        cache_file = os.path.join(self.cache_tmp_dir, self.cur_cache_basename)
+        for c in valid_caches:
+            try:
+                pinfo('Uploading cache...')
+                self.verbose_run([
+                    'rsync', '-a', '--delete',
+                    '--rsh', self.ssh_command,
+                    '--',
+                    c,
+                    f'helper@{self.helper_container_alias}:{self.cache_tmp_dir}',
+                ], check=True)
+                pinfo('Extracting cache...')
+                self.verbose_run(self.ssh_command_list + [
+                    f'helper@{self.helper_container_alias}',
+                    'tar', '-xf', cache_file, '-C', vol_mount_dir,
+                ], check=True)
+            except subprocess.CalledProcessError as e:
+                pwarn('Error when importing cache:', e)
+                # The cache is corrupt and should not be trusted;
+                # clear anything that was partially extracted.
+                self.verbose_run(self.ssh_command_list + [
+                    f'helper@{self.helper_container_alias}',
+                    'rm', '-rf', '--', f'{vol_mount_dir}/*', f'{vol_mount_dir}/.*',
+                ])
+            except Exception:
+                pwarn('Other error occurred', sys.exception())
+            finally:
+                pinfo('Removing uploaded cache archive...')
+                self.verbose_run(self.ssh_command_list + [
+                    f'helper@{self.helper_container_alias}',
+                    'rm', '-f', '--', cache_file,
+                ])
+
+    def save_caches(self, md, /, succeeded):
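+        # Archive each pushable cache's paths from the result directory
+        # into per-key storage, writing to a temp file first and then
+        # atomically replacing the current archive.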
+        for c in md['caches']:
+            if c['policy'] not in ('pull-push', 'push'):
+                pinfo('Cache saving skipped because of policy')
+                continue
+            if (c['when'] == 'on_success' and not succeeded) or (c['when'] == 'on_failure' and succeeded):
+                pinfo('Cache saving skipped because of a mismatch in success status', c)
+                continue
+            storage_dir = self.get_cache_storage_dir(md['repo_id'], c, protected=md['protected'])
+            cache_file = os.path.join(storage_dir, self.cur_cache_basename)
+            replaced = False
+            fn = None
+            try:
+                os.makedirs(storage_dir, exist_ok=True)
+                fd, fn = tempfile.mkstemp(dir=storage_dir)
+                os.close(fd)
+                lilybuild.safetar.create(
+                    fn,
+                    self.result_dir,
+                    c['paths'],
+                    self.cache_max_bytes,
+                    items_to_exclude=None,
+                    compression='gz',
+                )
+                os.replace(fn, cache_file)
+                replaced = True
+            except Exception:
+                pwarn('Unable to create cache', sys.exception())
+            finally:
+                # Either the temp file was renamed into place, or it must
+                # be removed; fn is None if mkstemp never ran.
+                if fn is not None and not replaced:
+                    try:
+                        os.remove(fn)
+                    except OSError:
+                        pass
+
     def import_volume(self, local_dir, vol_mount_dir):
         # I'll just use the shell instead of pipe2+fork+exec+wait, much easier
         self.verbose_run([
-            'rsync', '-a', '--delete',
+            'rsync', '-a',
             '--rsh', self.ssh_command,
             f'{local_dir}/',
             f'helper@{self.helper_container_alias}:{vol_mount_dir}',
@@ -310,6 +417,25 @@
         pinfo('Cleaned.')
         return retcode

+    def hash_cache_key(self, cache_key):
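+        # Hash the key so that arbitrary user-supplied cache keys map to
+        # safe directory names.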
+        if not isinstance(cache_key, str):
+            cache_key = ''
+        m = hashlib.sha256()
+        m.update(cache_key.encode())
+        return m.hexdigest()
+
+    def get_cache_storage_dir(self, repo_id, cache_def, /, protected):
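+        # Layout: <root>/repos/<repo_id>/<protected|unprotected>/cache-keys/<sha256 of key>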
+        cache_key = cache_def.get('key', '')
+        hashed_key = self.hash_cache_key(cache_key)
+        return os.path.join(
+            self.cache_storage_root_dir,
+            'repos',
+            str(repo_id),
+            'protected' if protected else 'unprotected',
+            'cache-keys',
+            hashed_key,
+        )
+
     def main(self, argv):
         image = json.loads(argv[1])
         self.work_dir = argv[2]
@@ -350,6 +476,11 @@
         pinfo('Waiting for job-defined services...')
         self.ensure_service_containers_up()

+        if self.metadata['caches']:
+            pinfo('Importing caches...')
+            self.import_caches(self.metadata, self.work_vol_mount_dir)
+            psuccess('Imported.')
+
         pinfo('Importing volumes...')
         self.import_volume(self.work_dir, self.work_vol_mount_dir)
         self.import_volume(self.script_dir, self.script_vol_mount_dir)
@@ -357,10 +488,10 @@
         pinfo('Running container...')
         retcode = self.run_in_container(image, work_vol, script_vol)
-
+        succeeded = retcode == 0
         pinfo(f'Returned {retcode}.')

-        if retcode != 0:
+        if not succeeded:
             perror('Job failed.')
         else:
             psuccess('Job succeeded.')

@@ -370,6 +501,11 @@
         self.export_volume(self.result_dir, self.work_vol_mount_dir)
         psuccess('Collected.')

+        if self.metadata['caches']:
+            pinfo('Saving caches...')
+            self.save_caches(self.metadata, succeeded=succeeded)
+            psuccess('Saved.')
+
         return retcode

     def cleanup_all(self):
diff --git a/lilybuild/lilybuild/tests/ci_syntax/res/cache.yaml b/lilybuild/lilybuild/tests/ci_syntax/res/cache.yaml
new file mode 100644
--- /dev/null
+++ b/lilybuild/lilybuild/tests/ci_syntax/res/cache.yaml
@@ -0,0 +1,23 @@
+
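+# Test fixture: job `a` uses the list form of `cache` with explicit
+# `when`/`policy`; job `b` uses the single-mapping shorthand and the
+# defaults; job `c` exercises variable expansion in cache keys.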
+a:
+  cache:
+    - key: test
+      paths:
+        - abc
+      when: always
+      policy: pull
+
+b:
+  cache:
+    key: test
+    paths:
+      - abc
+
+c:
+  cache:
+    - key: $CI_JOB_NAME
+      paths:
+        - abc
+    - key: xx$CI_JOB_NAME
+      paths:
+        - def
diff --git a/lilybuild/lilybuild/tests/helpers_test.py b/lilybuild/lilybuild/tests/helpers_test.py
--- a/lilybuild/lilybuild/tests/helpers_test.py
+++ b/lilybuild/lilybuild/tests/helpers_test.py
@@ -12,6 +12,8 @@
     normalize_image,
     get_service_aliases_from_name,
     normalize_services,
+    expand_in_vars,
+    generate_metadata_from_job,
 )
 from lilybuild.tests.resources import get_res

@@ -187,5 +189,93 @@
             [{ 'name': 'mysql:latest', 'aliases': ['mysql'], 'entrypoint': ['a', 'b'], 'command': ['b c', 'c d'] }]
         )
 
+class ExpandInVarsTest(unittest.TestCase):
+    def test_simple(self):
+        self.assertEqual(
+            expand_in_vars('a', {}),
+            'a'
+        )
+        self.assertEqual(
+            expand_in_vars('$abc', {'a': '1', 'abc': '2'}),
+            '2'
+        )
+        self.assertEqual(
+            expand_in_vars('$abc_$def-$g', {'abc': '1', 'def': '2'}),
+            '2-'
+        )
+        self.assertEqual(
+            expand_in_vars('$abc$def-$g', {'abc': '$def', 'def': '2'}),
+            '$def2-'
+        )
+        self.assertEqual(
+            expand_in_vars('${abc}def-$g', {'abc': '$def', 'def': '2'}),
+            '$defdef-'
+        )
+        self.assertEqual(
+            expand_in_vars('${ab$defc}', {'abc': '$def', 'def': '2'}),
+            '${ab}'
+        )
+        self.assertEqual(
+            expand_in_vars('${ab${def}c}', {'abc': '$def', 'def': '2'}),
+            '${ab2c}'
+        )
+        self.assertEqual(
+            expand_in_vars('${ab${def}c', {'abc': '$def', 'def': '2'}),
+            '${ab2c'
+        )
+
+class GenerateMetadataFromJobTest(unittest.TestCase):
+    def test_simple(self):
+        r = CIFile(get_res('cache'))
+        self.assertEqual(
+            json.loads(generate_metadata_from_job(2, r.jobs['a'], {'CI_JOB_NAME': 'a'})),
+            {
+                'repo_id': 2,
+                'caches': [{
+                    'key': 'test',
+                    'paths': ['abc'],
+                    'when': 'always',
+                    'policy': 'pull',
+                }],
+                'cache_last_invalidated_sec': 0,
+                'protected': False,
+            }
+        )
+
+        self.assertEqual(
+            json.loads(generate_metadata_from_job(2, r.jobs['b'], {'CI_JOB_NAME': 'b'})),
+            {
+                'repo_id': 2,
+                'caches': [{
+                    'key': 'test',
+                    'paths': ['abc'],
+                    'when': 'on_success',
+                    'policy': 'pull-push',
+                }],
+                'cache_last_invalidated_sec': 0,
+                'protected': False,
+            }
+        )
+
+        self.assertEqual(
+            json.loads(generate_metadata_from_job(2, r.jobs['c'], {'CI_JOB_NAME': 'c'})),
+            {
+                'repo_id': 2,
+                'caches': [{
+                    'key': 'c',
+                    'paths': ['abc'],
+                    'when': 'on_success',
+                    'policy': 'pull-push',
+                }, {
+                    'key': 'xxc',
+                    'paths': ['def'],
+                    'when': 'on_success',
+                    'policy': 'pull-push',
+                }],
+                'cache_last_invalidated_sec': 0,
+                'protected': False,
+            }
+        )
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/lilybuild/lilybuild/tests/podman_helper_test_worker.py b/lilybuild/lilybuild/tests/podman_helper_test_worker.py
--- a/lilybuild/lilybuild/tests/podman_helper_test_worker.py
+++ b/lilybuild/lilybuild/tests/podman_helper_test_worker.py
@@ -9,6 +9,11 @@
from dataclasses import dataclass
from contextlib import contextmanager
from lilybuild.podman_helper import PodmanHelper, image_to_podman_args
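+
+# Use the stdlib tarfile when it supports extraction filters
+# (tarfile.FilterError exists); otherwise fall back to backports.tarfile.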
+try:
+    import tarfile
+    tarfile.FilterError
+except AttributeError:
+    import backports.tarfile as tarfile

 @dataclass
 class MockedCompletedProcess:
@@ -20,7 +25,210 @@
     ph.verbose_run = mock or Mock()
     return ph

+def make_cache_file(cache_file_name):
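+    # Build a minimal gzipped tar archive (containing a/b) to stand in
+    # for a stored cache in the tests below.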
+    os.makedirs(os.path.dirname(cache_file_name), exist_ok=True)
+    with tempfile.TemporaryDirectory() as dir_name:
+        os.makedirs(os.path.join(dir_name, 'a'))
+        with open(os.path.join(dir_name, 'a', 'b'), 'w') as f:
+            print('bbb', file=f)
+        with tarfile.open(cache_file_name, 'w:gz') as f:
+            f.add(os.path.join(dir_name, 'a'), 'a')
+    return cache_file_name
+
 class PodmanHelperTest(unittest.TestCase):
+    def test_get_cache_storage_dir(self):
+        ph = PodmanHelper(cache_storage_root_dir='/foo/cache')
+        self.assertTrue(
+            ph.get_cache_storage_dir(1, {
+                'key': 'foo',
+            }, protected=False)
+            .startswith('/foo/cache/repos/1/unprotected/cache-keys/')
+        )
+        self.assertTrue(
+            ph.get_cache_storage_dir(1, {
+                'key': 'bar',
+            }, protected=True)
+            .startswith('/foo/cache/repos/1/protected/cache-keys/')
+        )
+
+    def test_get_valid_caches(self):
+        md = {
+            'repo_id': 1,
+            'caches': [{
+                'key': 'bar',
+                'paths': ['a'],
+                'when': 'on_success',
+                'policy': 'pull-push',
+            }],
+            'cache_last_invalidated_sec': 0,
+            'protected': False,
+        }
+        # No cache directory
+        with tempfile.TemporaryDirectory() as dir_name:
+            ph = PodmanHelper(cache_storage_root_dir=dir_name)
+            self.assertEqual(ph.get_valid_caches(md), [])
+
+        # With cache directory, no cache file
+        with tempfile.TemporaryDirectory() as dir_name:
+            ph = PodmanHelper(cache_storage_root_dir=dir_name)
+            d1 = ph.get_cache_storage_dir(1, md['caches'][0], protected=False)
+            os.makedirs(d1)
+            self.assertEqual(ph.get_valid_caches(md), [])
+
+        # With cache directory, with cache file
+        with tempfile.TemporaryDirectory() as dir_name:
+            ph = PodmanHelper(cache_storage_root_dir=dir_name)
+            d1 = ph.get_cache_storage_dir(1, md['caches'][0], protected=False)
+            os.makedirs(d1)
+            cache_file = os.path.join(d1, 'cur')
+            with open(cache_file, 'w') as f:
+                print('', file=f)
+            self.assertEqual(ph.get_valid_caches(md), [cache_file])
+
+        # With cache directory, with expired cache file
+        with tempfile.TemporaryDirectory() as dir_name:
+            ph = PodmanHelper(cache_storage_root_dir=dir_name)
+            d1 = ph.get_cache_storage_dir(1, md['caches'][0], protected=False)
+            os.makedirs(d1)
+            cache_file = os.path.join(d1, 'cur')
+            with open(cache_file, 'w') as f:
+                print('', file=f)
+            md2 = md.copy()
+            md2['cache_last_invalidated_sec'] = time.time() + 1
+            self.assertEqual(ph.get_valid_caches(md2), [])
+
+        # With cache directory and cache file, but policy does not contain pull
+        with tempfile.TemporaryDirectory() as dir_name:
+            ph = PodmanHelper(cache_storage_root_dir=dir_name)
+            d1 = ph.get_cache_storage_dir(1, md['caches'][0], protected=False)
+            os.makedirs(d1)
+            cache_file = os.path.join(d1, 'cur')
+            with open(cache_file, 'w') as f:
+                print('', file=f)
+            md2 = md.copy()
+            md2['caches'] = [md['caches'][0].copy()]
+            md2['caches'][0]['policy'] = 'push'
+            self.assertEqual(ph.get_valid_caches(md2), [])
+
+    def test_import_caches(self):
+        md = {
+            'repo_id': 1,
+            'caches': [{
+                'key': 'bar',
+                'paths': ['a'],
+                'when': 'on_success',
+                'policy': 'pull-push',
+            }, {
+                'key': 'mew',
+                'paths': ['b'],
+                'when': 'on_success',
+                'policy': 'pull-push',
+            }],
+            'cache_last_invalidated_sec': 0,
+            'protected': False,
+        }
+        with tempfile.TemporaryDirectory() as dir_name:
+            ph = mocked(
+                PodmanHelper(cache_storage_root_dir=dir_name)
+            )
+            ph.helper_container_alias = 'helper-xxx'
+            with self.assertRaises(RuntimeError):
+                ph.import_caches(md, '')
+
+        with tempfile.TemporaryDirectory() as dir_name:
+            ph = mocked(
+                PodmanHelper(cache_storage_root_dir=dir_name)
+            )
+            ph.helper_container_alias = 'helper-xxx'
+            d = ph.get_cache_storage_dir(md['repo_id'], md['caches'][0], protected=md['protected'])
+            cache_name = os.path.join(d, ph.cur_cache_basename)
+            make_cache_file(cache_name)
+            ph.import_caches(md, ph.work_vol_mount_dir)
+            # rsync -> tar -> rm archive
+            self.assertEqual(ph.verbose_run.call_count, 3)
+            self.assertIn('rsync', ph.verbose_run.call_args_list[0].args[0])
+            self.assertIn('tar', ph.verbose_run.call_args_list[1].args[0])
+            self.assertIn('rm', ph.verbose_run.call_args_list[2].args[0])
+
+        with tempfile.TemporaryDirectory() as dir_name:
+            def handle(run_args, **kwargs):
+                if 'tar' in run_args:
+                    raise subprocess.CalledProcessError(returncode=1, cmd=run_args)
+                return MockedCompletedProcess()
+            ph = mocked(
+                PodmanHelper(cache_storage_root_dir=dir_name),
+                Mock(side_effect=handle),
+            )
+            ph.helper_container_alias = 'helper-xxx'
+            d = ph.get_cache_storage_dir(md['repo_id'], md['caches'][0], protected=md['protected'])
+            cache_name = os.path.join(d, ph.cur_cache_basename)
+            make_cache_file(cache_name)
+            ph.import_caches(md, ph.work_vol_mount_dir)
+            # rsync -> tar -> clean up extracted dir -> rm archive
+            self.assertEqual(ph.verbose_run.call_count, 4)
+            self.assertIn('rsync', ph.verbose_run.call_args_list[0].args[0])
+            self.assertIn('tar', ph.verbose_run.call_args_list[1].args[0])
+            self.assertIn('rm', ph.verbose_run.call_args_list[2].args[0])
+            self.assertIn('rm', ph.verbose_run.call_args_list[3].args[0])
+
+    def test_save_caches(self):
+        md = {
+            'repo_id': 1,
+            'caches': [{
+                'key': 'bar',
+                'paths': ['a'],
+                'when': 'on_success',
+                'policy': 'pull-push',
+            }],
+            'cache_last_invalidated_sec': 0,
+            'protected': False,
+        }
+        with tempfile.TemporaryDirectory() as dir_name:
+            cache_root = os.path.join(dir_name, 'cache')
+            result_dir = os.path.join(dir_name, 'result')
+            os.makedirs(os.path.join(result_dir, 'a'))
+            with open(os.path.join(result_dir, 'a', 'b'), 'w') as f:
+                print('mewmew', file=f)
+            ph = PodmanHelper(cache_storage_root_dir=cache_root, result_dir=result_dir)
+            ph.save_caches(md, succeeded=True)
+            cache_dir = ph.get_cache_storage_dir(md['repo_id'], md['caches'][0], protected=md['protected'])
+            cache_file = os.path.join(cache_dir, ph.cur_cache_basename)
+            self.assertTrue(os.path.exists(cache_file))
+            with tarfile.open(cache_file) as f:
+                f.getmember('a/b')
+
+        # did not succeed
+        with tempfile.TemporaryDirectory() as dir_name:
+            cache_root = os.path.join(dir_name, 'cache')
+            result_dir = os.path.join(dir_name, 'result')
+            ph = PodmanHelper(cache_storage_root_dir=cache_root, result_dir=result_dir)
+            ph.save_caches(md, succeeded=False)
+            cache_dir = ph.get_cache_storage_dir(md['repo_id'], md['caches'][0], protected=md['protected'])
+            cache_file = os.path.join(cache_dir, ph.cur_cache_basename)
+            self.assertFalse(os.path.exists(cache_file))
+
+        md2 = {
+            'repo_id': 1,
+            'caches': [{
+                'key': 'bar',
+                'paths': ['a'],
+                'when': 'on_success',
+                'policy': 'pull',
+            }],
+            'cache_last_invalidated_sec': 0,
+            'protected': False,
+        }
+
+        # policy does not contain push
+        with tempfile.TemporaryDirectory() as dir_name:
+            cache_root = os.path.join(dir_name, 'cache')
+            result_dir = os.path.join(dir_name, 'result')
+            ph = PodmanHelper(cache_storage_root_dir=cache_root, result_dir=result_dir)
+            ph.save_caches(md2, succeeded=True)
+            cache_dir = ph.get_cache_storage_dir(md2['repo_id'], md2['caches'][0], protected=md2['protected'])
+            cache_file = os.path.join(cache_dir, ph.cur_cache_basename)
+            self.assertFalse(os.path.exists(cache_file))
+
     def test_create_and_clean_volume(self):
         ph = mocked(
             PodmanHelper(),