Restructure the code

- Add an --env CLI argument to override the environment used
- Drop the ProductCache object
- Introduce the DistgitBugzillaSync object, which allows dropping the
  global variables and relying on attributes instead.
- Drop the pkgdb compatibility layer that was once introduced, pkgdb is
  no longer a thing and it's time to move on

This slightly changes the order in which projects are updated: they
used to be iterated by product first (Fedora, Fedora EPEL...) and then
by package; now it is the opposite, by package first and then by product.

Signed-off-by: Pierre-Yves Chibon <pingou@pingoured.fr>
This commit is contained in:
Pierre-Yves Chibon 2019-11-20 17:15:36 +01:00
parent fd4c3b0714
commit 94d17b2c05
2 changed files with 371 additions and 380 deletions

View file

@ -34,7 +34,7 @@ import datetime
from email.message import EmailMessage
import itertools
import json
import multiprocessing.pool
from operator import itemgetter
import os
import re
import smtplib
@ -77,10 +77,6 @@ def retry_session():
return session
VERBOSE = False
DRYRUN = False
def resilient_partial(fn, *initial, **kwargs):
""" A decorator that partially applies arguments.
@ -114,55 +110,9 @@ def segment(iterable, chunk, fill=None):
return itertools.zip_longest(*args, fillvalue=fill)
class ProductCache(dict):
    """Lazily-populated cache mapping a product name (e.g. "Fedora") to the
    bugzilla component details of the packages of that product.

    NOTE(review): reads the module-level globals ``env`` and
    ``projects_dict`` at lookup time — both must be set before the first
    cache miss.
    """

    def __init__(self, bz, acls):
        # bz: connected bugzilla server proxy used to fetch component info
        # acls: mapping keyed by product name; only products present here
        #       can be cached (see __getitem__)
        self.bz = bz
        self.acls = acls

    # Ask bugzilla for a section of the pkglist.
    # Save the information from the section that we want.
    def __getitem__(self, key):
        # Return the cached entry when present; on a miss, fetch the
        # product's components from bugzilla and cache them.
        try:
            return super(ProductCache, self).__getitem__(key)
        except KeyError:
            # We can only cache products we have pagure information for
            if key not in self.acls:
                raise
        if env['bugzilla']['compat_api'] == 'getcomponentsdetails':
            # Old API -- in python-bugzilla. But with current server, this
            # gives ProxyError
            products = self.bz.getcomponentsdetails(key)
        elif env['bugzilla']['compat_api'] == 'component.get':
            # Way that's undocumented in the partner-bugzilla api but works
            # currently
            pkglist = list(projects_dict[key])
            products = {}
            # Query in fixed-size segments so a single XML-RPC call does not
            # grow unbounded with the package list.
            for pkg_segment in segment(pkglist, env['bugzilla']['req_segment']):
                # Format that bugzilla will understand. Strip None's that
                # segment() pads out the final data segment() with
                query = [
                    dict(product=env['products'][key], component=p)
                    for p in pkg_segment if p is not None
                ]
                raw_data = self.bz._proxy.Component.get(dict(names=query))
                for package in raw_data['components']:
                    # Reformat data to be the same as what's returned from
                    # getcomponentsdetails
                    product = dict(
                        initialowner=package['default_assignee'],
                        description=package['description'],
                        initialqacontact=package['default_qa_contact'],
                        initialcclist=package['default_cc'])
                    products[package['name'].lower()] = product
        # NOTE(review): if compat_api matches neither branch, ``products``
        # is unbound here and this raises NameError — confirm the config
        # only ever holds one of the two values.
        self[key] = products
        return super(ProductCache, self).__getitem__(key)
class BugzillaProxy:
def __init__(self, bzServer, username, password, acls):
def __init__(self, bzServer, username, password, config):
self.bzXmlRpcServer = bzServer
self.username = username
self.password = password
@ -171,13 +121,15 @@ class BugzillaProxy:
url=self.bzXmlRpcServer,
user=self.username,
password=self.password)
self.productCache = ProductCache(self.server, acls)
self.productCache = {}
# Connect to the fedora account system
self.fas = AccountSystem(
base_url=env['fas']['url'],
username=env['fas']['username'],
password=env['fas']['password'])
base_url=config['fas']['url'],
username=config['fas']['username'],
password=config['fas']['password'])
self.config = config
try:
self.userCache = self.fas.people_by_key(
@ -188,6 +140,53 @@ class BugzillaProxy:
# It's ok, we build the cache as-needed later in the script.
self.userCache = {}
def build_product_cache(self, pagure_project):
    """ Cache the bugzilla info about each package in each product.

    :param pagure_project: list of project dicts (as built from the
        pagure JSON dumps); each dict must provide ``name`` and the list
        of ``products`` it maps to.

    Populates ``self.productCache`` as a mapping of
    ``{collection name: {package name (lowercase): component info}}``.
    """
    if self.config['bugzilla']['compat_api'] == 'getcomponentsdetails':
        # Old API -- in python-bugzilla. But with current server, this
        # gives ProxyError
        for collection, product in self.config["products"].items():
            self.productCache[collection] = self.server.getcomponentsdetails(product)
    elif self.config['bugzilla']['compat_api'] == 'component.get':
        # Way that's undocumented in the partner-bugzilla api but works
        # currently
        for collection, product in self.config["products"].items():
            # Build a fresh dict per collection: a single shared dict
            # would leak components from one product's cache entry into
            # the next one's.
            products = {}
            # restrict the list of info returned to only the packages of
            # interest
            pkglist = [
                project["name"]
                for project in pagure_project
                if product in project["products"]
            ]
            # Query in fixed-size segments so a single XML-RPC call does
            # not grow unbounded with the package list.
            for pkg_segment in segment(pkglist, self.config['bugzilla']['req_segment']):
                # Format that bugzilla will understand. Strip None's that
                # segment() pads out the final data segment() with
                query = [
                    dict(
                        product=self.config['products'][collection],
                        component=p
                    )
                    for p in pkg_segment
                    if p is not None
                ]
                raw_data = self.server._proxy.Component.get(dict(names=query))
                for package in raw_data['components']:
                    # Reformat data to be the same as what's returned from
                    # getcomponentsdetails.  Use a name distinct from the
                    # ``product`` loop variable to avoid shadowing it.
                    component_info = dict(
                        initialowner=package['default_assignee'],
                        description=package['description'],
                        initialqacontact=package['default_qa_contact'],
                        initialcclist=package['default_cc']
                    )
                    products[package['name'].lower()] = component_info
            self.productCache[collection] = products
def _get_bugzilla_email(self, username):
'''Return the bugzilla email address for a user.
@ -283,10 +282,10 @@ class BugzillaProxy:
if data:
# Changes occurred. Submit a request to change via xmlrpc
data['product'] = env['products'][collection]
data['product'] = self.config['products'][collection]
data['component'] = package
if VERBOSE:
if self.config["verbose"]:
print('[EDITCOMP] %s/%s' % (data["product"], data["component"]))
for key in ["initialowner", "description", "initialqacontact", "initialcclist"]:
if data.get(key):
@ -296,7 +295,7 @@ class BugzillaProxy:
# FIXME: initialowner has been made mandatory for some
# reason. Asking dkl why.
data['initialowner'] = owner
if not DRYRUN:
if not self.config["dryrun"]:
try:
self.server.editcomponent(data)
except xmlrpc.client.Fault as e:
@ -314,7 +313,7 @@ class BugzillaProxy:
qacontact = 'extras-qa@fedoraproject.org'
data = {
'product': env['products'][collection],
'product': self.config['products'][collection],
'component': package,
'description': description or 'NA',
'initialowner': owner,
@ -323,12 +322,12 @@ class BugzillaProxy:
if initialCCList:
data['initialcclist'] = initialCCList
if VERBOSE:
if self.config["verbose"]:
print('[ADDCOMP] %s/%s' % (data["product"], data["component"]))
for key in ["initialowner", "description", "initialqacontact", "initialcclist"]:
if data.get(key):
print(f" {key} set to {data.get(key)}")
if not DRYRUN:
if not self.config["dryrun"]:
try:
self.server.addcomponent(data)
except xmlrpc.client.Fault as e:
@ -359,71 +358,13 @@ def send_email(fromAddress, toAddress, subject, message, ccAddress=None):
smtp.quit()
def notify_users(errors):
    ''' Browse the list of errors and when we can retrieve the email
    address, use it to notify the user about the issue.

    :param errors: list of error strings collected during the sync; only
        errors of the form "The name <email> is not a valid username"
        trigger a notification.

    Side effects: reads and rewrites the JSON file at env['data_cache']
    (mapping email -> {'last_update': timestamp}) and sends one email per
    offending address, at most once per hour.
    '''
    data = {}
    if os.path.exists(env['data_cache']):
        try:
            with open(env['data_cache']) as stream:
                data = json.load(stream)
        except Exception as err:
            # Best-effort: an unreadable cache only means everyone gets
            # re-notified, so log and continue with an empty cache.
            print('Could not read the json file at %s: \nError: %s' % (
                env['data_cache'], err))

    new_data = {}
    seen = []
    for error in errors:
        notify_user = False
        if 'The name ' in error and ' is not a valid username' in error:
            # Extract the email address embedded in the error message.
            user_email = error.split(' is not a valid username')[0].split(
                'The name ')[1].strip()
            now = datetime.datetime.utcnow()

            # See if we already know about this user
            if user_email in data and data[user_email]['last_update']:
                last_update = datetime.datetime.fromtimestamp(
                    int(data[user_email]['last_update']))
                # Only notify users once per hour
                if (now - last_update).seconds >= 3600:
                    notify_user = True
                else:
                    # Still within the cool-down: carry the old timestamp
                    # over so the next run keeps honoring it.
                    new_data[user_email] = data[user_email]
            elif not data or user_email not in data:
                notify_user = True

            # Ensure we notify the user only once, no matter how many errors we
            # got concerning them.
            if user_email not in seen:
                seen.append(user_email)
            else:
                notify_user = False

            if notify_user:
                send_email(
                    env['email_from'],
                    [user_email],
                    subject='Please fix your bugzilla.redhat.com account',
                    message=env['tmpl_user_email'],
                    ccAddress=env['notify_emails'],
                )
                new_data[user_email] = {
                    'last_update': time.mktime(now.timetuple())
                }

    # Persist only the entries touched this run; stale addresses drop out.
    with open(env['data_cache'], 'w') as stream:
        json.dump(new_data, stream)
@cache.cache_on_arguments()
def _get_override_yaml(project, session):
pagure_override_url = '{0}/{1}/raw/master/f/{2}/{3}'.format(
env['pagure_url'].rstrip('/'), env['bugzilla']['override_repo'], project['namespace'],
project['name'])
if VERBOSE:
if config["verbose"]:
print('Querying {0}'.format(pagure_override_url))
override_rv = session.get(pagure_override_url, timeout=30)
if override_rv.status_code == 200:
@ -443,7 +384,7 @@ def _get_pdc_branches(session, repo):
global_component=repo['name'],
type=env['pdc_types'][repo['namespace']]
)
if VERBOSE:
if config["verbose"]:
print('Querying {0} {1}'.format(branches_url, params))
rv = session.get(branches_url, params=params, timeout=60)
@ -475,272 +416,322 @@ def _is_retired(product, project):
return True
def _to_legacy_schema(product_and_project_and_summary, session=None):
"""
This function translates the JSON of a Pagure project to what PkgDB used to
output in the Bugzilla API. This function is used for mapping.
:param project_and_product: a tuple containing the dictionary of the JSON
of a Pagure project and a string of the product (e.g. "Fedora",
"Fedora EPEL")
:param session: a requests session object or None
:return: a dictionary of the content that the PkgDB Bugzilla API would
return
"""
product, project, rpm_summary = product_and_project_and_summary
class DistgitBugzillaSync:
if session is None:
session = retry_session()
owner = project['poc']
watchers = project['watchers']
summary = None
if project["namespace"] == "rpms":
summary = rpm_summary.get(project["name"])
# Check if the project is retired in PDC, and if so set assignee to orphan.
if _is_retired(product, project):
owner = 'orphan'
# Check if the Bugzilla ticket assignee has been manually overridden
override_yaml = _get_override_yaml(project, session)
if override_yaml.get(product) \
and isinstance(override_yaml[product], str):
owner = override_yaml[product]
return {
'cclist': {
# Groups is empty because you can't have groups watch projects.
# This is done only at the user level.
'groups': [],
'people': watchers,
},
'owner': owner,
# No package has this set in PkgDB's API, so it can be safely turned
# off and set to the defaults later on in the code
'qacontact': None,
'summary': summary,
# These two values are not part of original PkgDB RV, but they are
# useful
'product': product,
'project': project['name']
}
def main():
"""The entrypoint to the script."""
global envname, env, VERBOSE, DRYRUN, projects_dict
times = {
"start": time.time(),
}
parser = argparse.ArgumentParser(
description='Script syncing information between Pagure and bugzilla'
)
parser.add_argument(
'--dry-run', dest='dryrun', action='store_true', default=False,
help='Do not actually make the changes')
parser.add_argument(
'--verbose', dest='verbose', action='store_true', default=False,
help='Print actions verbosely')
parser.add_argument(
'--debug', dest='debug', action='store_true', default=False,
help='Combination of --verbose and --dry-run')
args = parser.parse_args()
load_configuration()
envname = config['environment']
env = config['environments'][envname]
if args.debug:
VERBOSE = True
DRYRUN = True
if args.verbose:
VERBOSE = True
if args.dryrun:
DRYRUN = True
# Non-fatal errors to alert people about
errors = []
projects_dict = {
'Fedora': {},
'Fedora Container': {},
'Fedora Modules': {},
'Fedora EPEL': {},
}
session = retry_session()
pool = multiprocessing.pool.ThreadPool(8)
# Get the initial ownership and CC data from pagure
# This part is easy.
poc_url = env['distgit_url'] + '/extras/pagure_poc.json'
if VERBOSE:
print("Querying %r for points of contact." % poc_url)
pagure_namespace_to_poc = session.get(poc_url, timeout=120).json()
cc_url = env['distgit_url'] + '/extras/pagure_bz.json'
if VERBOSE:
print("Querying %r for initial cc list." % cc_url)
pagure_namespace_to_cc = session.get(cc_url, timeout=120).json()
if VERBOSE:
print("Building a cache of the rpm packages' summary")
rpm_summary = package_summary.get_package_summary()
# Combine and collapse those two into a single list:
pagure_projects = []
for namespace, entries in pagure_namespace_to_poc.items():
for name, poc in entries.items():
pagure_projects.append(dict(
namespace=namespace,
name=name,
poc=poc,
watchers=pagure_namespace_to_cc[namespace][name],
))
if env == 'staging':
# Filter out any modules, since we don't have rights to create new
# components in the "Fedora Modules" project yet
pagure_projects = [
p for p in pagure_projects if p['namespace'] != 'modules'
]
branches_url = env['pdc_url'].split('rest_api')[0] + 'extras/active_branches.json'
if VERBOSE:
print("Querying %r for EOL information." % branches_url)
pdc_branches = session.get(branches_url, timeout=120).json()
for proj in pagure_projects:
if proj['namespace'] not in env['pdc_types']:
proj['branches'] = []
if VERBOSE:
print('! Namespace {namespace} unknown to PDC, project '
'{namespace}/{name} ignored'.format(**proj))
continue
pdc_type = env['pdc_types'][proj['namespace']]
proj['branches'] = pdc_branches.get(pdc_type, {}).get(proj['name'], [])
if not proj['branches'] and VERBOSE:
print("! No PDC branch found for {namespace}/{name}".format(**proj))
# Determine what products each project maps to based on its branches.
# pagure_rpms_project_products will be in the format of
# [('python-requests': 'Fedora')...] which will be used my a mapping
# function below
for project in pagure_projects:
products = set()
for branch, active in project['branches']:
if re.match(r'^epel\d+$', branch):
products.add('Fedora EPEL')
else:
products.add(env['namespace_to_product'][project['namespace']])
project['products'] = list(products)
# Now, we must transform the data we collected into something that PkgDB
# would have returned
p_to_legacy_schema = resilient_partial(_to_legacy_schema, session=session)
items = [
(product, project, rpm_summary)
for project in pagure_projects
for product in project['products']
]
legacy_responses = pool.map(p_to_legacy_schema, items)
for response in legacy_responses:
if not response:
continue
projects_dict[response['product']][response['project']] = response
if VERBOSE:
times["data structure end"] = time.time()
delta = times["data structure end"] - times["start"]
print("Ran for %s seconds -- ie: %.2f minutes" % (delta, delta/60.0))
print("Going to update bugzilla now (unless --dry-run)")
# Initialize the connection to bugzilla
bugzilla = BugzillaProxy(env['bugzilla']['url'],
env['bugzilla']['user'],
env['bugzilla']['password'],
projects_dict)
if VERBOSE:
times["FAS cache building end"] = time.time()
delta = times["FAS cache building end"] - times["data structure end"]
print(f"Ran for {delta} seconds -- ie: {delta/60} minutes")
print("bugzilla connection set!")
for product, pkgs in projects_dict.items():
if product not in env['products']:
if VERBOSE:
print(f"Ignoring: {product}")
continue
for pkgname, pkginfo in sorted(projects_dict[product].items(),
key=lambda x: x[0]):
def notify_users(self, errors):
''' Browse the list of errors and when we can retrieve the email
address, use it to notify the user about the issue.
'''
data = {}
if os.path.exists(self.env['data_cache']):
try:
bugzilla.add_edit_component(
pkgname,
product,
pkginfo['owner'],
pkginfo['summary'],
pkginfo['qacontact'],
pkginfo['cclist']
)
except ValueError as e:
# A username didn't have a bugzilla address
errors.append(str(e.args))
except DataChangedError as e:
# A Package or Collection was returned via xmlrpc but wasn't
# present when we tried to change it
errors.append(str(e.args))
except xmlrpc.client.ProtocolError as e:
# Unrecoverable and likely means that nothing is going to
# succeed.
errors.append(str(e.args))
break
except xmlrpc.client.Error as e:
# An error occurred in the xmlrpc call. Shouldn't happen but
# we better see what it is
errors.append('%s -- %s' % (pkgname, e.args[-1]))
with open(self.env['data_cache']) as stream:
data = json.load(stream)
except Exception as err:
print('Could not read the json file at %s: \nError: %s' % (
env['data_cache'], err))
new_data = {}
seen = []
for error in errors:
notify_user = False
if 'The name ' in error and ' is not a valid username' in error:
user_email = error.split(' is not a valid username')[0].split(
'The name ')[1].strip()
now = datetime.datetime.utcnow()
# See if we already know about this user
if user_email in data and data[user_email]['last_update']:
last_update = datetime.datetime.fromtimestamp(
int(data[user_email]['last_update']))
# Only notify users once per hour
if (now - last_update).seconds >= 3600:
notify_user = True
else:
new_data[user_email] = data[user_email]
elif not data or user_email not in data:
notify_user = True
# Ensure we notify the user only once, no matter how many errors we
# got concerning them.
if user_email not in seen:
seen.append(user_email)
else:
notify_user = False
if notify_user:
send_email(
self.env['email_from'],
[user_email],
subject='Please fix your bugzilla.redhat.com account',
message=self.env['tmpl_user_email'],
ccAddress=self.env['notify_emails'],
)
new_data[user_email] = {
'last_update': time.mktime(now.timetuple())
}
# Send notification of errors
if errors:
if VERBOSE:
print('[DEBUG]', '\n'.join(errors))
else:
notify_users(errors)
send_email(
env['email_from'],
env['notify_emails'],
'Errors while syncing bugzilla with the PackageDB',
env['tmpl_admin_email'].format(errors='\n'.join(errors))
)
else:
with open(env['data_cache'], 'w') as stream:
json.dump({}, stream)
json.dump(new_data, stream)
if VERBOSE:
times["end"] = time.time()
def get_cli_arguments(self):
""" Set the CLI argument parser and return the argument parsed.
"""
parser = argparse.ArgumentParser(
description='Script syncing information between Pagure and bugzilla'
)
parser.add_argument(
'--dry-run', dest='dryrun', action='store_true', default=False,
help='Do not actually make any changes - Overrides the configuration')
parser.add_argument(
'--verbose', dest='verbose', action='store_true', default=False,
help='Print actions verbosely - Overrides the configuration')
parser.add_argument(
'--debug', dest='debug', action='store_true', default=False,
help='Combination of --verbose and --dry-run')
parser.add_argument(
'--env', dest='env',
help='Run the script for a specific environment, overrides the one '
'set in the configuration file')
print(" ----------")
print("Building the data structure")
delta = times["data structure end"] - times["start"]
print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")
self.args = parser.parse_args()
print("Building the FAS cache")
delta = times["FAS cache building end"] - times["data structure end"]
print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")
def get_pagure_project(self):
""" Builds a large list of all the projects on pagure.
Each item in that list is a dict containing:
- the namespace of the project
- the name of the project
- the point of contact of this project (ie: the default assignee
in bugzilla)
- the watchers of this project (ie: the initial CC list in bugzilla)
"""
print("Interacting with bugzilla")
delta = times["end"] - times["FAS cache building end"]
print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")
# Get the initial ownership and CC data from pagure
# This part is easy.
poc_url = self.env['distgit_url'] + '/extras/pagure_poc.json'
if self.env["verbose"]:
print("Querying %r for points of contact." % poc_url)
pagure_namespace_to_poc = self.session.get(poc_url, timeout=120).json()
print("Total")
delta = times["end"] - times["start"]
print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")
cc_url = self.env['distgit_url'] + '/extras/pagure_bz.json'
if self.env["verbose"]:
print("Querying %r for initial cc list." % cc_url)
pagure_namespace_to_cc = self.session.get(cc_url, timeout=120).json()
sys.exit(0)
# Combine and collapse those two into a single list:
self.pagure_projects = []
for namespace, entries in pagure_namespace_to_poc.items():
for name, poc in entries.items():
self.pagure_projects.append(dict(
namespace=namespace,
name=name,
poc=poc,
watchers=pagure_namespace_to_cc[namespace][name],
))
# if self.env["environment"] == 'staging':
# # Filter out any modules, since we don't have rights to create new
# # components in the "Fedora Modules" project yet
# self.pagure_projects = [
# p for p in self.pagure_projects if p['namespace'] != 'modules'
# ]
def add_branches_product_and_summary(self):
    """ For each project retrieved this method adds branches, products
    and summary information.

    The branches are retrieved from PDC
    The products are determined based on the branches.
    The summary is coming from the primary.xml file from the repodata
    of the rawhide repository in koji.

    Mutates each dict in ``self.pagure_projects`` in place, adding the
    keys ``summary``, ``branches`` and ``products``.
    """
    branches_url = "/".join([
        self.env['pdc_url'].split('rest_api')[0].rstrip("/"),
        'extras/active_branches.json',
    ])
    if self.env["verbose"]:
        print("Querying %r for EOL information." % branches_url)
    pdc_branches = self.session.get(branches_url, timeout=120).json()

    for idx, project in enumerate(self.pagure_projects):
        # Summary
        summary = None
        if project["namespace"] == "rpms":
            summary = self.rpm_summary.get(project["name"])
        project["summary"] = summary

        # Branches
        if project['namespace'] not in self.env['pdc_types']:
            # Namespace unknown to PDC: record empty branches/products so
            # downstream consumers can still read those keys.
            project['branches'] = []
            project['products'] = []
            if self.env["verbose"]:
                print(
                    f'! Namespace {project["namespace"]} not found in the pdc_type '
                    f'configuration key, project {project["namespace"]}/{project["name"]} '
                    'ignored'
                )
            continue
        pdc_type = self.env['pdc_types'][project['namespace']]
        project['branches'] = pdc_branches.get(pdc_type, {}).get(project['name'], [])
        if not project['branches']:
            if self.env["verbose"]:
                print(f"! No PDC branch found for {project['namespace']}/{project['name']}")

        # Products
        # NOTE(review): assumes each branches entry unpacks as a
        # (branch name, active flag) pair — confirm against the PDC dump;
        # ``active`` is currently unused.
        products = set()
        for branch, active in project.get('branches'):
            if re.match(r'^epel\d+$', branch):
                products.add('Fedora EPEL')
            else:
                products.add(self.env['namespace_to_product'][project['namespace']])
        project['products'] = list(products)

        # NOTE(review): ``project`` is the same dict object already stored
        # at this index, so this reassignment is a no-op kept for clarity.
        self.pagure_projects[idx] = project
def main(self):
    """The entrypoint to the script.

    Orchestrates the full sync: parse CLI arguments, load and select the
    environment configuration, fetch project data from pagure/PDC/koji,
    then create or edit the matching bugzilla components.

    :return: 0 on success, 1 on an invalid --env value (exit status).
    """
    # NOTE(review): ``env`` and ``projects_dict`` look vestigial from the
    # pre-refactor module-level code; only ``envname`` is assigned below.
    global envname, env, projects_dict
    # Wall-clock checkpoints used for the verbose timing report at the end.
    times = {
        "start": time.time(),
    }
    self.get_cli_arguments()
    load_configuration()
    self.config = config
    envname = self.config['environment']
    # --env overrides the environment from the configuration file, but
    # only when it names a known environment.
    if self.args.env:
        if self.args.env in self.config['environments']:
            envname = self.args.env
        else:
            print(f"Invalid environment specified: {self.args.env}")
            return 1
    self.env = self.config['environments'][envname]

    # CLI flags override the per-environment configuration.
    # NOTE(review): this writes into the loaded config dict in place.
    if self.args.debug:
        self.env["verbose"] = True
        self.env["dryrun"] = True
    if self.args.verbose:
        self.env["verbose"] = True
    if self.args.dryrun:
        self.env["dryrun"] = True

    # Non-fatal errors to alert people about
    errors = []

    self.session = retry_session()

    if self.env["verbose"]:
        print("Building a cache of the rpm packages' summary")
    self.rpm_summary = package_summary.get_package_summary()

    # Gather all pagure projects, then enrich them with branch, product
    # and summary information.
    self.get_pagure_project()
    self.add_branches_product_and_summary()

    if self.env["verbose"]:
        print(f"{len(self.pagure_projects)} projects to consider")

    if self.env["verbose"]:
        times["data structure end"] = time.time()
        delta = times["data structure end"] - times["start"]
        print("Ran for %s seconds -- ie: %.2f minutes" % (delta, delta/60.0))
        print("Building FAS' cache")

    # Initialize the connection to bugzilla
    bugzilla = BugzillaProxy(self.env['bugzilla']['url'],
                             self.env['bugzilla']['user'],
                             self.env['bugzilla']['password'],
                             self.env)
    if self.env["verbose"]:
        times["FAS cache building end"] = time.time()
        delta = times["FAS cache building end"] - times["data structure end"]
        print(f"Ran for {delta} seconds -- ie: {delta/60} minutes")
        if self.env["dryrun"]:
            print("Querying bugzilla but not doing anything")
        else:
            print("Updating bugzilla")

    # Pre-fetch the bugzilla component info for every product in one pass.
    bugzilla.build_product_cache(self.pagure_projects)

    # Iterate package first, then product (the reverse of the old
    # by-product ordering).
    for project in sorted(self.pagure_projects, key=itemgetter('name')):
        for product in project["products"]:
            if product not in self.env['products']:
                if self.env["verbose"]:
                    print(f"Ignoring: {product}/{project['name']}")
                continue

            try:
                bugzilla.add_edit_component(
                    package=project["name"],
                    collection=product,
                    owner=project['poc'],
                    description=project['summary'],
                    qacontact=None,
                    cclist=project['watchers']
                )
            except ValueError as e:
                # A username didn't have a bugzilla address
                errors.append(str(e.args))
            except DataChangedError as e:
                # A Package or Collection was returned via xmlrpc but wasn't
                # present when we tried to change it
                errors.append(str(e.args))
            except xmlrpc.client.ProtocolError as e:
                # Unrecoverable and likely means that nothing is going to
                # succeed.
                errors.append(str(e.args))
                break
            except xmlrpc.client.Error as e:
                # An error occurred in the xmlrpc call. Shouldn't happen but
                # we better see what it is
                errors.append('%s -- %s' % (project["name"], e.args[-1]))

    # Send notification of errors
    if errors:
        if self.env["verbose"] or self.env["dryrun"]:
            print('[DEBUG]', '\n'.join(errors))
        else:
            self.notify_users(errors)
            send_email(
                self.env['email_from'],
                self.env['notify_emails'],
                'Errors while syncing bugzilla with the PackageDB',
                self.env['tmpl_admin_email'].format(errors='\n'.join(errors))
            )
    else:
        # Clean run: reset the user-notification cache.
        with open(self.env['data_cache'], 'w') as stream:
            json.dump({}, stream)

    if self.env["verbose"]:
        times["end"] = time.time()
        print(" ----------")
        print("Building the data structure")
        delta = times["data structure end"] - times["start"]
        print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")
        print("Building the FAS cache")
        delta = times["FAS cache building end"] - times["data structure end"]
        print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")
        print("Interacting with bugzilla")
        delta = times["end"] - times["FAS cache building end"]
        print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")
        print("Total")
        delta = times["end"] - times["start"]
        print(f" Ran on {delta:.2f} seconds -- ie: {delta/60:.2f} minutes")

    return 0
if __name__ == '__main__':
main()
dbs = DistgitBugzillaSync()
sys.exit(dbs.main())

View file

@ -1,5 +1,5 @@
#!/usr/bin/python3 -tt
from distgit_bugzilla_sync.script import main
from distgit_bugzilla_sync.script import DistgitBugzillaSync
main()
DistgitBugzillaSync().main()