ansible/roles/bodhi2/backend/files/new-updates-sync
Patrick Uiterwijk 50cbc6e1f9 Sync out f27m u-t
Signed-off-by: Patrick Uiterwijk <puiterwijk@redhat.com>
2017-10-31 17:55:57 +00:00

#!/bin/python2
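# Sync Bodhi update composes from the Koji compose volume out to the public
# mirror trees (and, for Atomic Host, the OSTree repo). Takes release
# shortnames (keys of RELEASES below) as arguments; with no arguments, all
# configured releases are checked and synced.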
import copy
import fedmsg
import functools
import logging
import subprocess
import os
import sys
logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger('updates-sync')
SOURCE = '/mnt/koji/compose/updates/'
FEDORADEST = '/pub/fedora/linux/updates/'
FEDORAALTDEST = '/pub/fedora-secondary/updates/'
# Modular updates tree referenced by the 'f27m' entry below; the exact path is
# an assumption, following the layout of the other destination constants.
FEDORAMODDEST = '/pub/fedora/linux/modular/updates/'
EPELDEST = '/pub/epel/'
ATOMICSOURCE = '/mnt/koji/compose/updates/atomic/'
ATOMICDEST = '/mnt/koji/atomic/'
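
# RELEASES maps a release shortname to its sync configuration:
#   'topic':   fedmsg topic fragment ('fedora' or 'epel')
#   'version': release version string included in the fedmsg message
#   'modules': filelist modules passed to update-fullfiletimelist
#   'repos':   per-repo settings: the compose symlink to resolve ('from'),
#              arch groups and their rsync destinations ('to'), and an
#              optional OSTree ref/destination to pull ('ostree')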
RELEASES = {
    'f27': {'topic': 'fedora',
            'version': '27',
            'modules': ['fedora', 'fedora-secondary'],
            'repos': {'updates-testing': {
                'from': 'f27-updates-testing',
                'to': [{'arches': ['x86_64', 'armhfp', 'source'],
                        'dest': os.path.join(FEDORADEST, 'testing', '27')},
                       {'arches': ['aarch64', 'i386', 'ppc64', 'ppc64le', 's390x'],
                        'dest': os.path.join(FEDORAALTDEST, 'testing', '27')}
                       ]}}
            },
    'f25': {'topic': 'fedora',
            'version': '25',
            'modules': ['fedora'],
            'repos': {'updates': {
                'from': 'f25-updates',
                'to': [{'arches': ['x86_64', 'armhfp', 'i386', 'source'],
                        'dest': os.path.join(FEDORADEST, '25')},
                       ]},
                'updates-testing': {
                    'from': 'f25-updates-testing',
                    'to': [{'arches': ['x86_64', 'armhfp', 'i386', 'source'],
                            'dest': os.path.join(FEDORADEST, 'testing', '25')},
                           ]}}
            },
    'f26': {'topic': 'fedora',
            'version': '26',
            'modules': ['fedora', 'fedora-secondary'],
            'repos': {'updates': {
                'from': 'f26-updates',
                'ostree': {'ref': 'fedora/26/x86_64/updates/atomic-host',
                           'dest': os.path.join(ATOMICDEST, '26')},
                'to': [{'arches': ['x86_64', 'armhfp', 'source'],
                        'dest': os.path.join(FEDORADEST, '26')},
                       {'arches': ['aarch64', 'i386', 'ppc64', 'ppc64le'],
                        'dest': os.path.join(FEDORAALTDEST, '26')}
                       ]},
                'updates-testing': {
                    'from': 'f26-updates-testing',
                    'ostree': {'ref': 'fedora/26/x86_64/testing/atomic-host',
                               'dest': os.path.join(ATOMICDEST, '26')},
                    'to': [{'arches': ['x86_64', 'armhfp', 'source'],
                            'dest': os.path.join(FEDORADEST, 'testing', '26')},
                           {'arches': ['aarch64', 'i386', 'ppc64', 'ppc64le'],
                            'dest': os.path.join(FEDORAALTDEST, 'testing', '26')}
                           ]}}
            },
    'epel7': {'topic': 'epel',
              'version': '7',
              'modules': ['epel'],
              'repos': {'epel-testing': {
                  'from': 'epel7-testing',
                  'to': [{'arches': ['x86_64', 'aarch64', 'ppc64', 'ppc64le', 'source'],
                          'dest': os.path.join(EPELDEST, 'testing', '7')}
                         ]},
                  'epel': {
                      'from': 'epel7',
                      'to': [{'arches': ['x86_64', 'aarch64', 'ppc64', 'ppc64le', 'source'],
                              'dest': os.path.join(EPELDEST, '7')}
                             ]}}
              },
    'epel6': {'topic': 'epel',
              'version': '6',
              'modules': ['epel'],
              'repos': {'epel': {
                  'from': 'dist-6E-epel',
                  'to': [{'arches': ['x86_64', 'ppc64', 'i386', 'source'],
                          'dest': os.path.join(EPELDEST, '6')}
                         ]},
                  'epel-testing': {
                      'from': 'dist-6E-epel-testing',
                      'to': [{'arches': ['x86_64', 'ppc64', 'i386', 'source'],
                              'dest': os.path.join(EPELDEST, 'testing', '6')}
                             ]},
                  }
              },
    'f27m': {'topic': 'fedora',
             'version': '27-modular',
             'modules': ['fedora'],
             'repos': {'updates-testing': {
                 'from': 'f27-modular-updates-testing',
                 'to': [{'arches': ['aarch64', 'armhfp', 'i386', 'ppc64', 'ppc64le',
                                    's390x', 'source'],
                         'dest': os.path.join(FEDORAMODDEST, 'testing', '27', 'Server')},
                        ]}}
             },
    }

# Beneath this is code, no config needed here
FEDMSG_INITED = False

def run_command(cmd):
    logger.info('Running %s', cmd)
    return subprocess.check_output(cmd,
                                   stderr=subprocess.STDOUT,
                                   shell=False)

def get_ostree_ref(repo, ref):
    reffile = os.path.join(repo, 'refs', 'heads', ref)
    if not os.path.exists(reffile):
        return '----'
    with open(reffile, 'r') as f:
        return f.read().split()[0]

def sync_ostree(dst, ref):
    src_commit = get_ostree_ref(ATOMICSOURCE, ref)
    dst_commit = get_ostree_ref(dst, ref)
    if src_commit == dst_commit:
        logger.info('OSTree at %s, ref %s in sync', dst, ref)
    else:
        logger.info('Syncing OSTree to %s, ref %s: %s -> %s',
                    dst, ref, src_commit, dst_commit)
        cmd = ['ostree', 'pull-local', '--verbose', '--repo',
               dst, ATOMICSOURCE, ref]
        run_command(cmd)
        cmd = ['ostree', 'summary', '--verbose', '--repo', dst, '--update']
        run_command(cmd)

def update_fullfilelist(modules):
    if not modules:
        logger.info('No filelists to update')
        return
    cmd = ['/usr/local/bin/update-fullfiletimelist', '-l',
           '/tmp/update-fullfiletimelist.lock', '-t', '/pub']
    cmd.extend(modules)
    run_command(cmd)
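
# Thin rsync wrapper: builds the command line and parses the --stats output
# into the number of bytes transferred and the number of entries deleted.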
def rsync(from_path, to_path, excludes=[], link_dest=None, delete=False):
    cmd = ['rsync', '-rlptDvHh', '--stats', '--no-human-readable']
    for excl in excludes:
        cmd += ['--exclude', excl]
    if link_dest:
        cmd += ['--link-dest', link_dest]
    if delete:
        cmd += ['--delete', '--delete-delay']
    cmd += [from_path, to_path]
    stdout = run_command(cmd)
    results = {'num_bytes': 0,
               'num_deleted': 0}
    for line in stdout.split('\n'):
        if 'Literal data' in line:
            results['num_bytes'] = int(line.split()[2])
        elif 'deleting ' in line:
            results['num_deleted'] += 1
    return results

def collect_stats(stats):
    to_collect = ['num_bytes', 'num_deleted']
    totals = {}
    for stat in to_collect:
        totals[stat] = functools.reduce(lambda x, y: x + y,
                                        [r[stat] for r in stats])
    return totals

def to_human(num_bytes):
    ranges = ['B', 'KB', 'MB', 'GB', 'TB']
    cur_range = 0
    # Stop at the last defined unit so the index never runs past 'TB'
    while num_bytes >= 1024 and cur_range + 1 < len(ranges):
        num_bytes = num_bytes / 1024
        cur_range += 1
    return '%s %s' % (num_bytes, ranges[cur_range])

def sync_single_repo_arch(release, repo, arch, dest_path):
    source_path = os.path.join(SOURCE,
                               RELEASES[release]['repos'][repo]['from'],
                               'compose', 'Everything', arch)
    maindir = 'tree' if arch == 'source' else 'os'
    results = []
    results.append(rsync(os.path.join(source_path, maindir, 'Packages'),
                         os.path.join(dest_path)))
    if arch != 'source':
        results.append(rsync(os.path.join(source_path, 'debug', 'tree', 'Packages'),
                             os.path.join(dest_path, 'debug')))
        results.append(rsync(os.path.join(source_path, 'debug', 'tree', 'repodata'),
                             os.path.join(dest_path, 'debug', 'repodata'),
                             delete=True))
        results.append(rsync(os.path.join(source_path, 'debug', 'tree', 'Packages'),
                             os.path.join(dest_path, 'debug'),
                             delete=True))
    results.append(rsync(os.path.join(source_path, maindir, 'repodata'),
                         os.path.join(dest_path),
                         delete=True))
    results.append(rsync(os.path.join(source_path, maindir, 'Packages'),
                         os.path.join(dest_path),
                         delete=True))
    return collect_stats(results)
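
# Sync every arch group of a single repo to its destination, then publish a
# bodhi updates.<topic>.sync fedmsg message with the aggregated statistics.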
def sync_single_repo(release, repo):
    global FEDMSG_INITED
    results = []
    for archdef in RELEASES[release]['repos'][repo]['to']:
        for arch in archdef['arches']:
            destarch = 'SRPMS' if arch == 'source' else arch
            dest_path = os.path.join(archdef['dest'], destarch)
            results.append(sync_single_repo_arch(release, repo, arch, dest_path))
    stats = collect_stats(results)
    fedmsg_msg = {'repo': repo,
                  'release': RELEASES[release]['version'],
                  'bytes': to_human(stats['num_bytes']),
                  'raw_bytes': str(stats['num_bytes']),
                  'deleted': str(stats['num_deleted'])}
    if not FEDMSG_INITED:
        fedmsg.init(active=True, name='relay_inbound', cert_prefix='ftpsync')
        FEDMSG_INITED = True
    fedmsg.publish(topic='updates.%s.sync' % RELEASES[release]['topic'],
                   modname='bodhi',
                   msg=fedmsg_msg)

def determine_last_link(release, repo):
    source_path = os.path.join(SOURCE,
                               RELEASES[release]['repos'][repo]['from'])
    target = os.readlink(source_path)
    logger.info('Release %s, repo %s, target %s', release, repo, target)
    RELEASES[release]['repos'][repo]['from'] = target
    return target

def sync_single_release(release):
    needssync = False
    for repo in RELEASES[release]['repos']:
        target = determine_last_link(release, repo)
        curstatefile = os.path.join(
            RELEASES[release]['repos'][repo]['to'][0]['dest'], 'state')
        curstate = None
        if os.path.exists(curstatefile):
            with open(curstatefile, 'r') as f:
                curstate = f.read().split()[0]
        if curstate and curstate == target:
            logger.info('This repo has already been synced')
        else:
            sync_single_repo(release, repo)
            with open(curstatefile, 'w') as f:
                f.write(target)
            needssync = True
        if 'ostree' in RELEASES[release]['repos'][repo]:
            ostree = RELEASES[release]['repos'][repo]['ostree']
            sync_ostree(ostree['dest'], ostree['ref'])
    return needssync

def main():
    to_update = []
    for release in sys.argv[1:] or RELEASES:
        if sync_single_release(release):
            to_update.extend(RELEASES[release]['modules'])
    to_update = list(set(to_update))
    logger.info('Filelists to update: %s', to_update)
    update_fullfilelist(to_update)

if __name__ == '__main__':
    main()