D213.1750680491.diff

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -20,7 +20,7 @@
stage: unit-test
image: docker.io/buildbot/buildbot-master:v4.2.1
script:
- - /buildbot_venv/bin/pip3 install jsonschema
+ - /buildbot_venv/bin/pip3 install jsonschema backports.tarfile
- . /buildbot_venv/bin/activate
- ./lilybuild/run-tests.sh
diff --git a/Containerfile.master b/Containerfile.master
--- a/Containerfile.master
+++ b/Containerfile.master
@@ -1,4 +1,4 @@
FROM docker.io/buildbot/buildbot-master:v4.2.1
-RUN /buildbot_venv/bin/pip3 install jsonschema
+RUN /buildbot_venv/bin/pip3 install jsonschema backports.tarfile
diff --git a/lilybuild/lilybuild/ci_steps.py b/lilybuild/lilybuild/ci_steps.py
--- a/lilybuild/lilybuild/ci_steps.py
+++ b/lilybuild/lilybuild/ci_steps.py
@@ -6,6 +6,7 @@
from .ci_syntax import ci_file
from .helpers import rsync_rules_from_artifacts, ci_vars_to_cmds
import re
+import sys
def on_success(step):
return step.build.results == util.SUCCESS
@@ -18,6 +19,9 @@
'''
+def fill_list(*args):
+ return list(args)
+
class RunCIJobStep(steps.BuildStep):
# 200 MiB
artifact_max_size = 200 * 1024 * 1024
@@ -26,6 +30,7 @@
master_job_artifact_dir_pattern = '%(kw:st)s/repos/%(prop:lilybuild_repo_id)s/builds/%(prop:lilybuild_root_build_id)s/jobs/%(kw:job)s/artifacts'
artifact_file_name = 'artifacts.tar'
master_job_artifact_file_name_pattern = master_job_artifact_dir_pattern + '/' + artifact_file_name
+ master_pages_dir_pattern = '%(kw:st)s/repos/%(prop:lilybuild_repo_id)s/pages'
def __init__(
self,
@@ -233,6 +238,23 @@
)
steps_to_run.append(clean_stage_dir_again_step)
+ if job.is_pages() and 'paths' in job.artifacts:
+ deploy_pages_step = steps.MasterShellCommand(
+ command=util.Transform(fill_list,
+ sys.executable,
+ '-m', 'lilybuild.pages',
+ util.Interpolate(
+ self.master_pages_dir_pattern,
+ st=self.storage_dir,
+ ),
+ masterdest,
+ ),
+ name='Deploy pages',
+ logEnviron=False,
+ doStepIf=on_success,
+ )
+ steps_to_run.append(deploy_pages_step)
+
return steps_to_run
class TriggerMultipleJobsStep(steps.Trigger):
diff --git a/lilybuild/lilybuild/ci_syntax/ci_file.py b/lilybuild/lilybuild/ci_syntax/ci_file.py
--- a/lilybuild/lilybuild/ci_syntax/ci_file.py
+++ b/lilybuild/lilybuild/ci_syntax/ci_file.py
@@ -80,6 +80,9 @@
prop['defaults_raw']
)
+ def is_pages(self):
+ return self.name == 'pages' or self.struct_raw.get('pages', False)
+
OLD_TOPLEVEL_DEFAULTS = ['image', 'services', 'cache', 'before_script', 'after_script']
DEFAULT_STAGES = ['.pre', 'build', 'test', 'deploy', '.post']
DEFAULT_JOB_STAGE = 'test'
diff --git a/lilybuild/lilybuild/pages.py b/lilybuild/lilybuild/pages.py
new file mode 100644
--- /dev/null
+++ b/lilybuild/lilybuild/pages.py
@@ -0,0 +1,84 @@
+
+import os
+import subprocess
+import shutil
+import sys
+try:
+ import tarfile
+ tarfile.FilterError
+except AttributeError:
+ import backports.tarfile as tarfile
+
+'''
+This module implements blue-green deployment for pages.
+'''
+
+FIRST = '0'
+SECOND = '1'
+CURRENT = 'cur'
+
+def init_deployment(dir_name):
+ '''
+ Init the deployment at `dir_name` and return which directory we should
+ write into for the next deployment.
+ '''
+ os.makedirs(os.path.join(dir_name, FIRST), exist_ok=True)
+ os.makedirs(os.path.join(dir_name, SECOND), exist_ok=True)
+ link_file = os.path.join(dir_name, CURRENT)
+ try:
+ current = os.readlink(link_file)
+ except FileNotFoundError as e:
+ os.symlink(SECOND, link_file, target_is_directory=True)
+ current = SECOND
+ return FIRST if current == SECOND else SECOND
+
+def switch_symlink(dir_name, target):
+ subprocess.run([
+ 'ln',
+ '-sfvn',
+ '--',
+ target,
+ os.path.join(dir_name, CURRENT),
+ ], check=True)
+
+def empty_directory(dir_name):
+ shutil.rmtree(dir_name)
+ os.makedirs(dir_name, exist_ok=True)
+
+class TarPagesFilter:
+ public_dir = 'public'
+ def __init__(
+ self,
+ limit_bytes=100*1024*1024 # 100 MiB
+ ):
+ self.total_bytes_extracted = 0
+ self.limit_bytes = limit_bytes
+
+ def __call__(self, member, path):
+ filtered_member = tarfile.data_filter(member, path)
+ if not filtered_member:
+ return None
+ if self.total_bytes_extracted + filtered_member.size > self.limit_bytes:
+ self.total_bytes_extracted = self.limit_bytes + 1
+ raise RuntimeError('Limit exceeded')
+ name = os.path.normpath(filtered_member.name)
+ if not (name == self.public_dir or name.startswith(self.public_dir + '/')):
+ return None
+
+ self.total_bytes_extracted += filtered_member.size
+ return filtered_member
+
+def extract_archive(dir_name, archive_file):
+ with tarfile.open(archive_file) as tf:
+ tf.errorlevel = 1
+ tf.extractall(dir_name, filter=TarPagesFilter())
+
+def deploy_pages(dir_name, archive_file):
+ target = init_deployment(dir_name)
+ full_target = os.path.join(dir_name, target)
+ empty_directory(full_target)
+ extract_archive(full_target, archive_file)
+ switch_symlink(dir_name, target)
+
+if __name__ == '__main__':
+ deploy_pages(sys.argv[1], sys.argv[2])
diff --git a/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py b/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py
--- a/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py
+++ b/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py
@@ -51,6 +51,12 @@
self.assertEqual(r.stages, ['test'])
self.assertEqual(list(r.jobs), ['pages'])
self.assertEqual(r.jobs['pages'].script, ['make docs'])
+ self.assertTrue(r.jobs['pages'].is_pages())
+
+ def test_pages_attr(self):
+ r = CIFile(get_res('pages_attr'))
+ self.assertFalse(r.jobs['not-pages'].is_pages())
+ self.assertTrue(r.jobs['is-pages'].is_pages())
def test_dotjobs(self):
r = CIFile(get_res('dotjobs'))
diff --git a/lilybuild/lilybuild/tests/ci_syntax/res/pages_attr.yaml b/lilybuild/lilybuild/tests/ci_syntax/res/pages_attr.yaml
new file mode 100644
--- /dev/null
+++ b/lilybuild/lilybuild/tests/ci_syntax/res/pages_attr.yaml
@@ -0,0 +1,12 @@
+
+stages:
+ - test
+
+not-pages:
+ image: node
+ script: make docs
+
+is-pages:
+ image: node
+ script: make docs
+ pages: true
diff --git a/lilybuild/lilybuild/tests/pages_test.py b/lilybuild/lilybuild/tests/pages_test.py
new file mode 100644
--- /dev/null
+++ b/lilybuild/lilybuild/tests/pages_test.py
@@ -0,0 +1,128 @@
+
+import unittest
+import tempfile
+import os
+import stat
+import subprocess
+from lilybuild.pages import (
+ FIRST, SECOND, CURRENT, init_deployment,
+ switch_symlink, extract_archive, deploy_pages,
+)
+
+try:
+ import tarfile
+ tarfile.FilterError
+except AttributeError:
+ import backports.tarfile as tarfile
+
+
+def is_dir(filename):
+ return stat.S_ISDIR(os.lstat(filename).st_mode)
+
+def make_artifact_archive(root_dir):
+ os.makedirs(os.path.join(root_dir, 'public'))
+ with open(os.path.join(root_dir, 'public', 'a'), 'w') as f:
+ print('test', file=f)
+ os.makedirs(os.path.join(root_dir, 'other'))
+ with open(os.path.join(root_dir, 'other', 'a'), 'w') as f:
+ print('should not be there', file=f)
+ archive = os.path.join(root_dir, 'artifacts.tar')
+ with tarfile.open(archive, 'w') as f:
+ f.add(os.path.join(root_dir, 'public'), 'public')
+ f.add(os.path.join(root_dir, 'other'), 'other')
+ return archive
+
+def make_artifact_archive2(root_dir):
+ os.makedirs(os.path.join(root_dir, 'public'))
+ with open(os.path.join(root_dir, 'public', 'b'), 'w') as f:
+ print('test b', file=f)
+ archive = os.path.join(root_dir, 'artifacts.tar')
+ with tarfile.open(archive, 'w') as f:
+ f.add(os.path.join(root_dir, 'public'), 'public')
+ return archive
+
+def make_bad_artifact_archive(root_dir):
+ os.makedirs(os.path.join(root_dir, 'public'))
+ with open(os.path.join(root_dir, 'public', 'a'), 'w') as f:
+ print('test', file=f)
+ os.makedirs(os.path.join(root_dir, 'other'))
+ with open(os.path.join(root_dir, 'other', 'a'), 'w') as f:
+ print('should not be there', file=f)
+ archive = os.path.join(root_dir, 'artifacts.tar')
+ with tarfile.open(archive, 'w') as f:
+ f.add(os.path.join(root_dir, 'public'), 'public')
+ f.add(os.path.join(root_dir, 'other'), '../../../other')
+ return archive
+
+class PagesTest(unittest.TestCase):
+ def test_init_deployment(self):
+ with tempfile.TemporaryDirectory() as dir_name:
+ res = init_deployment(dir_name)
+ self.assertEqual(res, FIRST)
+ self.assertTrue(is_dir(os.path.join(dir_name, FIRST)))
+ self.assertTrue(is_dir(os.path.join(dir_name, SECOND)))
+ self.assertEqual(os.readlink(os.path.join(dir_name, CURRENT)), SECOND)
+
+ res = init_deployment(dir_name)
+ self.assertEqual(res, FIRST)
+ self.assertEqual(os.readlink(os.path.join(dir_name, CURRENT)), SECOND)
+
+ def test_switch_symlink(self):
+ with tempfile.TemporaryDirectory() as dir_name:
+ res = init_deployment(dir_name)
+ switch_symlink(dir_name, res)
+ self.assertEqual(os.readlink(os.path.join(dir_name, CURRENT)), FIRST)
+
+ def test_extract_archive(self):
+ with tempfile.TemporaryDirectory() as root_dir:
+ archive_file = make_artifact_archive(root_dir)
+ dir_name = os.path.join(root_dir, 'deployment')
+ extract_archive(dir_name, archive_file)
+ self.assertTrue(is_dir(os.path.join(dir_name, 'public')))
+ self.assertTrue(os.path.exists(os.path.join(dir_name, 'public', 'a')))
+ self.assertFalse(os.path.exists(os.path.join(dir_name, 'other')))
+
+ def test_extract_bad_archive(self):
+ with tempfile.TemporaryDirectory() as root_dir:
+ archive_file = make_bad_artifact_archive(root_dir)
+ dir_name = os.path.join(root_dir, 'deployment')
+ with self.assertRaises(tarfile.FilterError):
+ extract_archive(dir_name, archive_file)
+
+ def test_deploy_pages(self):
+ with tempfile.TemporaryDirectory() as root_dir:
+ archive_file = make_artifact_archive(os.path.join(root_dir, 'a1'))
+ bad_archive = make_bad_artifact_archive(os.path.join(root_dir, 'bad'))
+ archive_file2 = make_artifact_archive2(os.path.join(root_dir, 'a2'))
+ dir_name = os.path.join(root_dir, 'deployment')
+ deploy_pages(dir_name, archive_file)
+ self.assertEqual(os.readlink(os.path.join(dir_name, CURRENT)), FIRST)
+ self.assertTrue(os.path.exists(os.path.join(dir_name, FIRST, 'public', 'a')))
+ with open(os.path.join(dir_name, FIRST, 'public', 'a')) as f:
+ self.assertEqual(f.read(), 'test\n')
+
+ # deploy the same thing again, it should go to SECOND
+ deploy_pages(dir_name, archive_file)
+ self.assertEqual(os.readlink(os.path.join(dir_name, CURRENT)), SECOND)
+ self.assertTrue(os.path.exists(os.path.join(dir_name, SECOND, 'public', 'a')))
+ with open(os.path.join(dir_name, SECOND, 'public', 'a')) as f:
+ self.assertEqual(f.read(), 'test\n')
+
+ # try to deploy the bad archive, it should not update the link
+ with self.assertRaises(tarfile.FilterError):
+ deploy_pages(dir_name, bad_archive)
+ self.assertEqual(os.readlink(os.path.join(dir_name, CURRENT)), SECOND)
+
+ # deploy something else, verify it cleans up all old files
+ deploy_pages(dir_name, archive_file2)
+ self.assertEqual(os.readlink(os.path.join(dir_name, CURRENT)), FIRST)
+ self.assertTrue(os.path.exists(os.path.join(dir_name, FIRST, 'public', 'b')))
+ self.assertFalse(os.path.exists(os.path.join(dir_name, FIRST, 'public', 'a')))
+ self.assertTrue(os.path.exists(os.path.join(dir_name, SECOND, 'public', 'a')))
+ self.assertFalse(os.path.exists(os.path.join(dir_name, SECOND, 'public', 'b')))
+ with open(os.path.join(dir_name, FIRST, 'public', 'b')) as f:
+ self.assertEqual(f.read(), 'test b\n')
+
+
+if __name__ == '__main__':
+ unittest.main()
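
For reference, a minimal sketch of how the new deployer would be driven outside of Buildbot. The repository id and file paths below are illustrative only; in ci_steps.py the pages directory is interpolated from master_pages_dir_pattern and the archive path is the job's masterdest, and the step runs the module form (python -m lilybuild.pages <pages_dir> <archive>) via the MasterShellCommand above.

    from lilybuild.pages import deploy_pages

    # Illustrative paths, not the real storage layout: the pages directory
    # normally comes from master_pages_dir_pattern and the tarball is the
    # job's collected artifacts.tar.
    deploy_pages('/srv/lilybuild/repos/42/pages', '/tmp/artifacts.tar')

After the call, the pages directory holds the '0' and '1' trees plus a 'cur' symlink pointing at whichever tree received the latest successful extraction; only archive members under public/ (up to the 100 MiB limit in TarPagesFilter) are extracted, and the symlink is switched only after extraction succeeds, so a web server can serve cur/public/ without seeing a half-written deployment.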
