taskotron: prevent duplicate jobs in buildmaster config

There was one remaining case where duplicate jobs could crash a build:
when the tasks were not run simultaneously at all. I added another guard
at the final location of the artifacts, which should hopefully solve it.
Kamil Páral 2019-04-15 15:27:36 +02:00
parent 09ae58534c
commit 111fc1805c
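
The idea of the new guard, as described in the message, is roughly: refuse to continue when the per-UUID artifacts directory already exists at its final, dated location, because that means another job with the same UUID has already finished. A minimal sketch of that idea, with hypothetical paths and a hypothetical helper name rather than the actual buildmaster.cfg code:

import os
import sys

def check_not_duplicate(public_artifacts_dir, today, uuid):
    # hypothetical helper: the real config performs this check with a
    # Buildbot MasterShellCommand step (see the diff below)
    final_dir = os.path.join(public_artifacts_dir, today, uuid)
    if os.path.isdir(final_dir):
        print('Multiple jobs with same UUID detected, aborting execution!')
        print('See https://pagure.io/taskotron/issue/273')
        sys.exit(1)

check_not_duplicate('/srv/taskotron/artifacts', '20190415', 'some-task-uuid')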


@@ -241,6 +241,15 @@ factory.addStep(steps.DirectoryUpload(
    descriptionDone='Copy artifacts',
))
{% if deployment_type in ['local'] %}
# copy taskotron log to master
factory.addStep(steps.FileUpload(
    workersrc='/var/log/taskotron/taskotron.log',
    masterdest=util.Interpolate('{{ public_artifacts_dir }}/%(prop:uuid)s/taskotron.log'),
    mode=0644,
))
{% endif %}
# save stdio from runtask step
# FIXME: worked with buildbot 0.8, later buildbots changed URLs
# factory.addStep(steps.MasterShellCommand(
@@ -263,15 +272,6 @@ factory.addStep(steps.MasterShellCommand(
    descriptionDone='Compress artifacts',
))
{% if deployment_type in ['local'] %}
# copy taskotron log to master
factory.addStep(steps.FileUpload(
    workersrc='/var/log/taskotron/taskotron.log',
    masterdest=util.Interpolate('{{ public_artifacts_dir }}/%(prop:uuid)s/taskotron.log'),
    mode=0644,
))
{% endif %}
# render current time when needed
import datetime
from buildbot.process.properties import renderer
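
The context lines of the next hunk show the today() helper that the new check relies on via %(kw:today)s. A minimal sketch of that pattern, assuming today() is decorated with @renderer (the decorator itself would sit just outside the lines shown here) and using a placeholder artifacts path:

import datetime

from buildbot.plugins import util
from buildbot.process.properties import renderer

@renderer
def today(props):
    # rendered lazily at build time, so each build gets its own date
    return datetime.datetime.now().strftime("%Y%m%d")

# %(kw:today)s is only substituted if the renderable is passed to
# Interpolate as a keyword argument:
dated_path = util.Interpolate(
    '/srv/taskotron/artifacts/%(kw:today)s/%(prop:uuid)s', today=today)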
@@ -280,6 +280,19 @@ from buildbot.process.properties import renderer
def today(props):
    return datetime.datetime.now().strftime("%Y%m%d")
# prevent duplicated buildbot jobs
# (see https://pagure.io/taskotron/issue/273 )
factory.addStep(steps.MasterShellCommand(
    command=util.Interpolate(
        'test ! -d {{ public_artifacts_dir }}/%(kw:today)s/%(prop:uuid)s/ || '
        '( echo Multiple jobs with same UUID detected, aborting execution!; '
        ' echo See https://pagure.io/taskotron/issue/273 ; '
        ' exit 1 )',
        today=today),
    descriptionDone='Check duplicate jobs',
    haltOnFailure=True,
))
# move the artifacts to the correct dir on the master
factory.addStep(steps.MasterShellCommand(
    command=util.Interpolate(
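
The check is added immediately before the step that moves the artifacts to their final dated directory, and haltOnFailure=True is what makes it a real guard: if the test command exits non-zero, the build stops and the move step never runs. A rough, self-contained sketch of that behaviour with placeholder commands, not the actual config:

from buildbot.plugins import steps, util

factory = util.BuildFactory()

# guard: fails (exit 1) when the final directory already exists
factory.addStep(steps.MasterShellCommand(
    command='test ! -d /srv/taskotron/artifacts/20190415/some-task-uuid',
    descriptionDone='Check duplicate jobs',
    haltOnFailure=True,  # stop the whole build here on failure
))

# only reached when the guard passed
factory.addStep(steps.MasterShellCommand(
    command='echo would move artifacts here',
    descriptionDone='Move artifacts',
))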