D242.1760328125.diff
diff --git a/Containerfile.worker b/Containerfile.worker
--- a/Containerfile.worker
+++ b/Containerfile.worker
@@ -21,5 +21,6 @@
USER buildbot
WORKDIR /buildbot
+ENV PATH="/buildbot_venv/bin:/tools/arcanist/bin:$PATH"
CMD ["/usr/bin/dumb-init", "/buildbot_venv/bin/twistd", "--pidfile=", "-ny", "buildbot.tac"]
diff --git a/lilybuild/lilybuild/ci_steps.py b/lilybuild/lilybuild/ci_steps.py
--- a/lilybuild/lilybuild/ci_steps.py
+++ b/lilybuild/lilybuild/ci_steps.py
@@ -5,15 +5,20 @@
from twisted.internet import defer
from .ci_syntax import ci_file
from .helpers import rsync_rules_from_artifacts, get_job_script, normalize_image
+from .phorge import SendCoverageToPhorge
import re
import sys
import json
SAFETAR_EXEC = '/lilybuild/lilybuild/safetar.py'
+COVERAGE_EXEC = '/lilybuild/lilybuild/coverage.py'
def on_success(step):
return step.build.results == util.SUCCESS
+def on_always(_step):
+ return True
+
def fill_list(*args):
return list(args)
@@ -24,7 +29,10 @@
master_job_artifact_dir_pattern = '%(kw:st)s/repos/%(prop:lilybuild_repo_id)s/builds/%(prop:lilybuild_root_build_id)s/jobs/%(kw:job)s/artifacts'
artifact_file_name = 'artifacts.tar'
master_job_artifact_file_name_pattern = master_job_artifact_dir_pattern + '/' + artifact_file_name
+ reports_file_name = 'reports.tar'
+ master_reports_file_name_pattern = master_job_artifact_dir_pattern + '/' + reports_file_name
master_pages_dir_pattern = '%(kw:st)s/repos/%(prop:lilybuild_repo_id)s/pages'
+ phorge_coverage_file_name = 'coverage-phorge.json'
def __init__(
self,
@@ -86,6 +94,38 @@
self.addCompleteLog('exception', f'{e}')
return res
+ def get_upload_artifacts_jobs(self, short_name, artifact_name, base_dir, paths, exclude, master_pattern, job_index, doStepIf=on_success):
+ archive_artifact_step = steps.ShellCommand(
+ name=f'Archive artifacts: {short_name}',
+ command=[
+ SAFETAR_EXEC,
+ ],
+ initialStdin=json.dumps({
+ 'op': 'create',
+ 'archive_file': artifact_name,
+ 'base_dir': base_dir,
+ 'content': paths,
+ 'items_to_exclude': exclude,
+ }),
+ workdir=self.work_root_dir,
+ doStepIf=doStepIf,
+ )
+ masterdest = util.Interpolate(
+ master_pattern,
+ st=self.storage_dir,
+ job=job_index,
+ doStepIf=doStepIf,
+ )
+ upload_artifact_step = steps.FileUpload(
+ workersrc=artifact_name,
+ maxsize=self.artifact_max_size,
+ name=f'Upload artifacts: {short_name}',
+ masterdest=masterdest,
+ workdir=self.work_root_dir,
+ doStepIf=doStepIf,
+ )
+ return [archive_artifact_step, upload_artifact_step]
+
@defer.inlineCallbacks
def job_to_steps(self, job, job_index):
script_name = self.script_dir + '/run.sh'
@@ -164,36 +204,44 @@
steps_to_run = [script_step, chmod_step] + artifact_steps + [run_step, clean_script_step]
if 'paths' in job.artifacts:
- archive_artifact_step = steps.ShellCommand(
- name='Archive artifacts',
+ steps_to_run += self.get_upload_artifacts_jobs(
+ 'files',
+ self.artifact_file_name,
+ self.result_relative,
+ job.artifacts.get('paths', []),
+ job.artifacts.get('exclude', []),
+ self.master_job_artifact_file_name_pattern,
+ job_index
+ )
+ if job.has_supported_coverage_report():
+ steps_to_run += [steps.ShellCommand(
+ name='Process reports',
command=[
- SAFETAR_EXEC,
+ COVERAGE_EXEC,
],
initialStdin=json.dumps({
- 'op': 'create',
- 'archive_file': self.artifact_file_name,
- 'base_dir': self.result_relative,
- 'content': job.artifacts.get('paths', []),
- 'items_to_exclude': job.artifacts.get('exclude', []),
+ 'source_dir': self.src_relative,
+ 'result_dir': self.result_relative,
+ 'untrusted_coverage_file': job.artifacts['reports']['coverage_report']['path'],
+ 'output_dir': self.artifact_stage_relative,
}),
workdir=self.work_root_dir,
- doStepIf=on_success,
- )
- masterdest = util.Interpolate(
- self.master_job_artifact_file_name_pattern,
- st=self.storage_dir,
- job=job_index,
- doStepIf=on_success,
- )
- upload_artifact_step = steps.FileUpload(
- workersrc=self.artifact_file_name,
- maxsize=self.artifact_max_size,
- name='Upload artifacts',
- masterdest=masterdest,
+ doStepIf=on_always,
+ )] + self.get_upload_artifacts_jobs(
+ 'reports',
+ self.reports_file_name,
+ self.artifact_stage_relative,
+ ['*'],
+ [],
+ self.master_reports_file_name_pattern,
+ job_index,
+ doStepIf=on_always
+ ) + [SendCoverageToPhorge(
+ self.lbc,
+ self.artifact_stage_relative + '/' + self.phorge_coverage_file_name,
workdir=self.work_root_dir,
- doStepIf=on_success,
- )
- steps_to_run += [archive_artifact_step, upload_artifact_step]
+ )]
+
clean_stage_dir_again_step = steps.ShellCommand(
name='Clean stage, result and artifact',
command=[
@@ -201,6 +249,8 @@
'-rf',
self.result_relative,
self.artifact_file_name,
+ self.artifact_stage_relative,
+ self.reports_file_name,
],
workdir=self.work_root_dir,
alwaysRun=True,
diff --git a/lilybuild/lilybuild/ci_syntax/ci_file.py b/lilybuild/lilybuild/ci_syntax/ci_file.py
--- a/lilybuild/lilybuild/ci_syntax/ci_file.py
+++ b/lilybuild/lilybuild/ci_syntax/ci_file.py
@@ -130,6 +130,14 @@
def is_pages(self):
return self.name == 'pages' or self.struct_raw.get('pages', False)
+ def has_supported_coverage_report(self):
+ return (
+ 'reports' in self.artifacts
+ and self.artifacts['reports'].get('coverage_report')
+ and self.artifacts['reports']['coverage_report'].get('coverage_format') == 'cobertura'
+ and self.artifacts['reports']['coverage_report'].get('path')
+ )
+
OLD_TOPLEVEL_DEFAULTS = ['image', 'services', 'cache', 'before_script', 'after_script']
DEFAULT_STAGES = ['.pre', 'build', 'test', 'deploy', '.post']
DEFAULT_JOB_STAGE = 'test'
diff --git a/lilybuild/lilybuild/config.py b/lilybuild/lilybuild/config.py
--- a/lilybuild/lilybuild/config.py
+++ b/lilybuild/lilybuild/config.py
@@ -70,6 +70,9 @@
self.repos[repo_id]['phorge_token'],
)
+ def get_phorge_base_url_for_repo(self, repo_id):
+ return self.repos[repo_id]['phorge_base_url']
+
def add_lilybuild_builder(self):
factory = util.BuildFactory()
factory.addStep(self.create_source_step())
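The new get_phorge_base_url_for_repo accessor assumes each self.repos entry carries a phorge_base_url key next to the phorge_token the code already reads. A minimal sketch of such an entry, with placeholder values (only the two keys referenced above are grounded in this diff):

# Hypothetical shape of a self.repos entry; 'phorge_token' and 'phorge_base_url'
# are the only keys read by the code above, and both values are placeholders.
repos = {
    'R1': {
        'phorge_token': 'api-xxxxxxxxxxxxxxxxxxxxxxxxxxxx',
        'phorge_base_url': 'https://phorge.example',
    },
}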
diff --git a/lilybuild/lilybuild/phorge.py b/lilybuild/lilybuild/phorge.py
--- a/lilybuild/lilybuild/phorge.py
+++ b/lilybuild/lilybuild/phorge.py
@@ -11,6 +11,8 @@
from twisted.internet import defer
from twisted.python import log
+SEND_COVERAGE_EXEC = '/lilybuild/lilybuild/send_coverage.py'
+
class PhorgeMixin(CompositeStepMixin, buildstep.ShellMixin):
'''
Should be inherited by a BuildStep.
@@ -56,21 +58,28 @@
return secret_dir
@defer.inlineCallbacks
- def make_arc_command(self, args, **kwargs):
+ def make_command(self, **kwargs):
cmd = yield self.makeRemoteShellCommand(
- command=[self.arc_command] + args,
env={'HOME': self.get_secret_dir()},
**kwargs
)
return cmd
@defer.inlineCallbacks
- def run_arc_with_secret(self, args, **kwargs):
+ def make_arc_command(self, args, **kwargs):
+ cmd = yield self.make_command(
+ command=[self.arc_command] + args,
+ **kwargs
+ )
+ return cmd
+
+ @defer.inlineCallbacks
+ def run_command_with_secret(self, **kwargs):
try:
res = yield self.download_arc_secret()
if res != util.SUCCESS:
return res
- cmd = yield self.make_arc_command(args, **kwargs)
+ cmd = yield self.make_command(**kwargs)
yield self.runCommand(cmd)
return cmd.results()
except Exception as e:
@@ -79,6 +88,14 @@
yield self.remove_arc_secret()
+ @defer.inlineCallbacks
+ def run_arc_with_secret(self, args, **kwargs):
+ res = yield self.run_command_with_secret(
+ command=[self.arc_command] + args,
+ **kwargs
+ )
+ return res
+
@defer.inlineCallbacks
def download_arc_secret(self):
arcrc = self.lbc.get_arcrc_for_repo(self.getProperty('lilybuild_repo_id'))
@@ -292,3 +309,38 @@
'harbormaster.sendmessage',
], initialStdin=encoded_data)
return res
+
+class SendCoverageToPhorge(PhorgeMixin, steps.BuildStep):
+ def __init__(self, lbc, coverage_file, **kwargs):
+ self.lbc = lbc
+ self.coverage_file = coverage_file
+ self.setup_phorge_mixin(kwargs)
+ super().__init__(
+ name='Send coverage to Phorge',
+ doStepIf=lambda step: step.get_is_phorge(),
+ alwaysRun=True,
+ **kwargs
+ )
+
+ @defer.inlineCallbacks
+ def run(self):
+ phorge_url = yield self.build.render(self.lbc.get_phorge_base_url_for_repo(self.getProperty('lilybuild_repo_id')))
+ build_url = yield self.build.getUrl()
+ filename = yield self.build.render(self.coverage_file)
+ receiver = self.getProperty(self.build_target_prop_name)
+ job_name = self.getProperty('lilybuild_job_prop').get('name')
+ is_success = self.build.results == util.SUCCESS
+
+ data = {
+ 'filename': filename,
+ 'build_url': build_url,
+ 'receiver': receiver,
+ 'is_success': is_success,
+ 'phorge_url': phorge_url,
+ 'job_name': job_name,
+ }
+ encoded_data = json.dumps(data)
+ res = yield self.run_command_with_secret(command=[
+ SEND_COVERAGE_EXEC,
+ ], initialStdin=encoded_data)
+ return res
diff --git a/lilybuild/lilybuild/send_coverage.py b/lilybuild/lilybuild/send_coverage.py
new file mode 100755
--- /dev/null
+++ b/lilybuild/lilybuild/send_coverage.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+
+import sys
+import json
+import subprocess
+
+def main(args):
+ filename = args['filename']
+ build_url = args['build_url']
+ is_success = args['is_success']
+ receiver = args['receiver']
+ phorge_url = args['phorge_url']
+ job_name = args['job_name']
+
+ with open(filename) as f:
+ coverage = json.loads(f.read())
+
+ data = {
+ 'receiver': receiver,
+ 'type': 'work',
+ 'unit': [{
+ 'name': f'Coverage ({job_name})',
+ 'result': 'pass' if is_success else 'fail',
+ 'details': build_url,
+ 'format': 'remarkup',
+ 'coverage': coverage,
+ }],
+ }
+
+ subprocess.run([
+ 'arc',
+ '--config',
+ 'phabricator.uri=' + phorge_url,
+ 'call-conduit',
+ '--',
+ 'harbormaster.sendmessage',
+ ], check=True, input=json.dumps(data), encoding='utf-8')
+
+if __name__ == '__main__':
+ if len(sys.argv) > 1:
+ main(json.loads(sys.argv[1]))
+ else:
+ main(json.loads(sys.stdin.read()))
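send_coverage.py accepts the same JSON either as its first argument or on stdin, so it can be smoke-tested outside Buildbot. A minimal sketch, assuming 'arc' is on PATH and HOME holds an .arcrc with a valid token; every value below is a placeholder, not part of this change:

# Hypothetical manual invocation of send_coverage.py; all values are made up.
import json
import subprocess

payload = {
    'filename': 'coverage-phorge.json',                       # written by coverage.py
    'build_url': 'https://buildbot.example/#/builders/1/builds/2',
    'receiver': 'PHID-HMBT-aaaaaaaaaaaaaaaaaaaa',             # Harbormaster build target PHID
    'is_success': True,
    'phorge_url': 'https://phorge.example',
    'job_name': 'test',
}
subprocess.run(
    ['./send_coverage.py'],
    input=json.dumps(payload),
    encoding='utf-8',
    check=True,
)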
diff --git a/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py b/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py
--- a/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py
+++ b/lilybuild/lilybuild/tests/ci_syntax/ci_file_test.py
@@ -111,6 +111,13 @@
self.assertFalse(r.jobs['no_archive'].has_artifacts_archive())
self.assertFalse(r.jobs['no_artifacts'].has_artifacts_archive())
+ def test_artifacts_reports(self):
+ r = CIFile(get_res('artifacts_reports'))
+ self.assertFalse(r.jobs['has_archive'].has_supported_coverage_report())
+ self.assertFalse(r.jobs['empty_reports'].has_supported_coverage_report())
+ self.assertTrue(r.jobs['yes_reports'].has_supported_coverage_report())
+ self.assertFalse(r.jobs['no_supported_reports'].has_supported_coverage_report())
+
def test_extends(self):
r = CIFile(get_res('extends'))
self.assertEqual(r.jobs['build-a'].struct_raw, {
diff --git a/lilybuild/lilybuild/tests/ci_syntax/res/artifacts_reports.yaml b/lilybuild/lilybuild/tests/ci_syntax/res/artifacts_reports.yaml
new file mode 100644
--- /dev/null
+++ b/lilybuild/lilybuild/tests/ci_syntax/res/artifacts_reports.yaml
@@ -0,0 +1,30 @@
+
+has_archive:
+ script: echo
+ artifacts:
+ paths:
+ - a
+
+empty_reports:
+ script: echo
+ artifacts:
+ paths:
+ - a
+ reports:
+ coverage_report:
+
+yes_reports:
+ script: echo
+ artifacts:
+ reports:
+ coverage_report:
+ coverage_format: 'cobertura'
+ path: 'x.xml'
+
+no_supported_reports:
+ script: echo
+ artifacts:
+ reports:
+ coverage_report:
+ coverage_format: 'jacoco'
+ path: 'x.xml'
diff --git a/lilybuild/podman-helper b/lilybuild/podman-helper
--- a/lilybuild/podman-helper
+++ b/lilybuild/podman-helper
@@ -184,9 +184,10 @@
else:
psuccess('Job succeeded.')
- pinfo('Collecting build changes...')
- export_volume(alias, result_dir, work_vol_mount_dir)
- psuccess('Collected.')
+ # We should collect the result regardless of whether it succeeded
+ pinfo('Collecting build changes...')
+ export_volume(alias, result_dir, work_vol_mount_dir)
+ psuccess('Collected.')
return retcode
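coverage.py itself is not part of this diff. For SendCoverageToPhorge to do anything useful, the coverage-phorge.json it stages presumably already uses Harbormaster's coverage map format, i.e. a JSON object keyed by repository-relative path whose value is a per-line string of 'C' (covered), 'U' (uncovered) and 'N' (not executable); send_coverage.py forwards it unchanged inside the unit message. A hypothetical file, with made-up paths and coverage strings:

# Hypothetical content of coverage-phorge.json; each character maps to one
# source line: 'C' covered, 'U' uncovered, 'N' not executable.
{
    "lilybuild/lilybuild/helpers.py": "NNCCCUUCNN",
    "lilybuild/lilybuild/ci_steps.py": "NNCCCCCCUU"
}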
