Merge branch 'master' of /git/ansible

Jan Kaluža 2018-04-09 13:55:14 +00:00
commit 0a0efd9e04
48 changed files with 315 additions and 176 deletions

@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 # This file is part of fedimg.
-# Copyright (C) 2014 Red Hat, Inc.
+# Copyright (C) 2014-2017 Red Hat, Inc.
 #
 # fedimg is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -17,11 +18,14 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 #
 # Authors:  David Gay <dgay@redhat.com>
-#
+#           Sayan Chowdhury <sayanchowdhury@fedoraproject.org>
+"""
+This is the `fedmsg consumer`_ that subscribes to the topic emitted after the
+completion of the nightly and production compose. The consumer on receiving the
+message uploads the image using the API of the cloud providers.
+"""
 import logging
-log = logging.getLogger("fedmsg")
 import multiprocessing.pool
 import fedmsg.consumers
@@ -29,46 +33,54 @@ import fedmsg.encoding
 import fedfind.release
 import fedimg.uploader
-from fedimg.util import get_rawxz_urls, safeget
+from fedimg.config import PROCESS_COUNT, STATUS_FILTER
+from fedimg.utils import get_rawxz_urls, get_value_from_dict
+
+LOG = logging.getLogger(__name__)

 class FedimgConsumer(fedmsg.consumers.FedmsgConsumer):
-    """ Listens for image Koji task completion and sends image files
-    produced by the child createImage tasks to the uploader. """
-
-    # It used to be that all *image* builds appeared as scratch builds on the
-    # task.state.change topic. However, with the switch to pungi4, some of
-    # them (and all of them in the future) appear as full builds under the
-    # build.state.change topic. That means we have to handle both cases like
-    # this, at least for now.
-    topic = [
-        'org.fedoraproject.prod.pungi.compose.status.change',
-    ]
-    config_key = 'fedimgconsumer'
+    """
+    A `fedmsg consumer`_ that listens to the pungi compose topics and kicks
+    off the process to upload the images to various cloud providers.
+
+    Attributes:
+        topic (str): The topics this consumer is subscribed to. Set to
+            ``org.fedoraproject.prod.pungi.compose.status.change``.
+        config_key (str): The key to set to ``True`` in the fedmsg config to
+            enable this consumer. The key is ``fedimgconsumer.prod.enabled``.
+    """
+    topic = ['org.fedoraproject.prod.pungi.compose.status.change']
+    config_key = "fedimgconsumer.prod.enabled"

     def __init__(self, *args, **kwargs):
+        LOG.info("FedimgConsumer initializing")
         super(FedimgConsumer, self).__init__(*args, **kwargs)

-        # threadpool for upload jobs
-        self.upload_pool = multiprocessing.pool.ThreadPool(processes=4)
-
-        log.info("Super happy fedimg ready and reporting for duty.")
+        # Threadpool for upload jobs
+        LOG.info("Creating thread pool of %s process", PROCESS_COUNT)
+        self.upload_pool = multiprocessing.pool.ThreadPool(
+            processes=PROCESS_COUNT
+        )
+        LOG.info("FedimgConsumer initialized")

     def consume(self, msg):
-        """ This is called when we receive a message matching our topics. """
-
-        log.info('Received %r %r' % (msg['topic'], msg['body']['msg_id']))
-
-        STATUS_F = ('FINISHED_INCOMPLETE', 'FINISHED',)
+        """
+        This is called when we receive a message matching our topics.
+
+        Args:
+            msg (dict): The raw message from fedmsg.
+        """
+        LOG.info('Received %r %r', msg['topic'], msg['body']['msg_id'])

         msg_info = msg['body']['msg']
-        if msg_info['status'] not in STATUS_F:
+        if msg_info['status'] not in STATUS_FILTER:
             return

         location = msg_info['location']
         compose_id = msg_info['compose_id']
-        cmetadata = fedfind.release.get_release_cid(compose_id).metadata
+        compose_metadata = fedfind.release.get_release(cid=compose_id).metadata

         # Till F27, both cloud-base and atomic images were available
         # under variant CloudImages. With F28 and onward releases,
@@ -76,24 +88,57 @@ class FedimgConsumer(fedmsg.consumers.FedmsgConsumer):
         # moved under atomic variant.
         prev_rel = ['26', '27']
         if msg_info['release_version'] in prev_rel:
-            images_meta = safeget(cmetadata, 'images', 'payload', 'images',
-                                  'CloudImages', 'x86_64')
+            images_meta = get_value_from_dict(
+                compose_metadata, 'images', 'payload', 'images', 'CloudImages',
+                'x86_64')
         else:
-            images_meta = safeget(cmetadata, 'images', 'payload', 'images',
-                                  'Cloud', 'x86_64')
-            images_meta.extend(safeget(cmetadata, 'images', 'payload',
-                                       'images', 'AtomicHost', 'x86_64'))
+            images_meta = get_value_from_dict(
+                compose_metadata, 'images', 'payload', 'images',
+                'Cloud', 'x86_64')
+            images_meta.extend(get_value_from_dict(
+                compose_metadata, 'images', 'payload',
+                'images', 'AtomicHost', 'x86_64'))

         if images_meta is None:
+            LOG.debug('No compatible image found to process')
             return

-        self.upload_urls = get_rawxz_urls(location, images_meta)
-        compose_meta = {
-            'compose_id': compose_id,
-        }
-
-        if len(self.upload_urls) > 0:
-            log.info("Processing compose id: %s" % compose_id)
-            fedimg.uploader.upload(self.upload_pool,
-                                   self.upload_urls,
-                                   compose_meta)
+        upload_urls = get_rawxz_urls(location, images_meta)
+        if len(upload_urls) > 0:
+            LOG.info("Start processing compose id: %s", compose_id)
+            fedimg.uploader.upload(
+                pool=self.upload_pool,
+                urls=upload_urls,
+                compose_id=compose_id
+            )
+
+
+class FedimgStagingConsumer(FedimgConsumer):
+    """
+    A `fedmsg consumer`_ that listens to the staging pungi compose topics and
+    kicks off the process to upload the images to various cloud providers.
+
+    Attributes:
+        topic (str): The topics this consumer is subscribed to. Set to
+            ``org.fedoraproject.stg.pungi.compose.status.change``.
+        config_key (str): The key to set to ``True`` in the fedmsg config to
+            enable this consumer. The key is ``fedimgconsumer.stg.enabled``.
+    """
+    topic = ['org.fedoraproject.stg.pungi.compose.status.change']
+    config_key = "fedimgconsumer.stg.enabled"
+
+
+class FedimgDevConsumer(FedimgConsumer):
+    """
+    A `fedmsg consumer`_ that listens to the dev pungi compose topics and
+    kicks off the process to upload the images to various cloud providers.
+
+    Attributes:
+        topic (str): The topics this consumer is subscribed to. Set to
+            ``org.fedoraproject.dev.pungi.compose.status.change``.
+        config_key (str): The key to set to ``True`` in the fedmsg config to
+            enable this consumer. The key is ``fedimgconsumer.dev.enabled``.
+    """
+    topic = ['org.fedoraproject.dev.pungi.compose.status.change']
+    config_key = "fedimgconsumer.dev.enabled"
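The None check above relies on the behaviour of get_value_from_dict, the fedimg.utils helper that replaces the old safeget: it walks a nested dictionary one key at a time and returns None as soon as a key is missing. A minimal sketch of such a helper, based only on the call shape visible in this diff (the actual implementation lives in fedimg.utils and may differ):

def get_value_from_dict(data, *keys):
    # Follow the given keys through nested dicts; bail out with None
    # as soon as a level is missing or is not a mapping.
    for key in keys:
        try:
            data = data[key]
        except (KeyError, TypeError):
            return None
    return data

# Usage mirroring the call in consume() above:
# images_meta = get_value_from_dict(compose_metadata, 'images', 'payload',
#                                   'images', 'Cloud', 'x86_64')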

@@ -24,6 +24,7 @@ el6-test.fedorainfracloud.org
 el7-test.fedorainfracloud.org
 f26-test.fedorainfracloud.org
 f27-test.fedorainfracloud.org
+f28-test.fedorainfracloud.org
 faitout.fedorainfracloud.org
 fas2-dev.fedorainfracloud.org
 fas3-dev.fedorainfracloud.org

@@ -60,6 +60,7 @@ fedmsg_certs:
   - bodhi.update.request.revoke
   - bodhi.update.request.stable
   - bodhi.update.request.testing
+  - bodhi.update.request.batched
   - bodhi.update.request.unpush
 # Things that only the mash does - not the web UI

@@ -3,6 +3,8 @@ lvm_size: 20000
 mem_size: 6144
 num_cpus: 2
+
+testing: False
 # for systems that do not match the above - specify the same parameter in
 # the host_vars/$hostname file

@@ -40,11 +40,11 @@ osbs_conf_service_accounts:
   - koji
 osbs_conf_readwrite_users:
-  - system:serviceaccount:{{ osbs_namespace }}:default
-  - system:serviceaccount:{{ osbs_namespace }}:builder
-osbs_worker_clusters:
+  - "system:serviceaccount:{{ osbs_namespace }}:default"
+  - "system:serviceaccount:{{ osbs_namespace }}:builder"
+osbs_conf_worker_clusters:
   x86_64:
   - name: x86_64-stg
     max_concurrent_builds: 2

@@ -23,18 +23,18 @@ csi_relationship: |
   fedmsg-hub daemon that loads the pdc-updater consumer plugin. However, the
   pdc-updater plugin is configured to do different things in each place.

-  On pdc-updater01, the compose handler is enabled which listens for new pungi
+  On pdc-backend01, the compose handler is enabled which listens for new pungi
   composes, and stores them in PDC. Fedora QE uses this data. The consumer
   has only a single thread enabled to avoid OOMing itself with more than one
   compose at a time.

-  On pdc-updater02, the modularity handlers are enabled which listen for MBS
-  activity, and store that in PDC. pdc-updater02 also hosts the retirement
+  On pdc-backend02, the modularity handlers are enabled which listen for MBS
+  activity, and store that in PDC. pdc-backend02 also hosts the retirement
   handler which listens to dist-git for new dead.package files, and propagates
   the retirement to PDC (by prematurely EOLing the branch). Multiple threads are
   enabled so that it can work more efficiently on these smaller tasks.

-  On pdc-updater03, the dep chain handlers are enabled which listen for koji
+  On pdc-backend03, the dep chain handlers are enabled which listen for koji
   messages and store dep chain information in PDC, like what rpms depend on what
   other rpms at build time, and what containers depend on what rpms, etc..
   Multiple threads are enabled so that it can work more efficiently on these

@@ -23,11 +23,11 @@ csi_relationship: |
   a fedmsg-hub daemon that loads the pdc-updater consumer plugin. However, the
   pdc-updater plugin is configured to do different things in each place.

-  On pdc-updater01, the compose handler is enabled which listens for new pungi
+  On pdc-backend01, the compose handler is enabled which listens for new pungi
   composes, and stores them in PDC. Fedora QE uses this data. The consumer
   has only a single thread enabled to avoid OOMing itself with more than one
   compose at a time.

-  On pdc-updater02, the dep chain and modularity handlers are enabled which
+  On pdc-backend02, the dep chain and modularity handlers are enabled which
   listen for koji and MBS activity, and store that in PDC. Multiple threads
   are enabled so that it can work more efficiently on these smaller tasks.

@@ -76,3 +76,4 @@ fedmsg_certs:
     group: apache
     can_send:
     - taskotron.result.new
+    - resultsdb.result.new

@@ -73,3 +73,4 @@ fedmsg_certs:
     group: apache
     can_send:
     - taskotron.result.new
+    - resultsdb.result.new

@@ -23,6 +23,7 @@ fedmsg_certs:
   - pungi.compose.createiso.targets
   - pungi.compose.createiso.imagefail
   - pungi.compose.createiso.imagedone
+  - pungi.compose.ostree
   - compose.branched.complete
   - compose.branched.mash.complete
   - compose.branched.mash.start

@@ -0,0 +1,17 @@
+---
+image: Fedora-Cloud-Base-28_Beta-1.3.x86_64
+instance_type: ms1.medium
+keypair: fedora-admin-20130801
+security_group: ssh-anywhere-maintainertest,web-80-anywhere-maintainertest,default,web-443-anywhere-maintainertest,all-icmp-maintainertest
+zone: nova
+tcp_ports: [22]
+
+inventory_tenant: maintainertest
+inventory_instance_name: f28-test
+hostbase: f28-test
+public_ip: 209.132.184.118
+description: f28-test instance
+
+cloud_networks:
+  # maintainertest-net
+  - net-id: "a512d096-3fa2-49cc-b989-d43ca687f91d"

@@ -3,8 +3,8 @@ nm: 255.255.255.0
 gw: 10.5.126.254
 dns: 10.5.126.21
-ks_url: http://10.5.126.23/repo/rhel/ks/kvm-fedora-25
-ks_repo: http://10.5.126.23/pub/fedora/linux/releases/25/Server/x86_64/os/
+ks_url: http://10.5.126.23/repo/rhel/ks/kvm-fedora-27
+ks_repo: http://10.5.126.23/pub/fedora/linux/releases/27/Server/x86_64/os/
 volgroup: /dev/vg_virthost03
 eth0_ip: 10.5.126.230

@@ -3,8 +3,8 @@ nm: 255.255.255.0
 gw: 10.5.126.254
 dns: 10.5.126.21
-ks_url: http://10.5.126.23/repo/rhel/ks/kvm-fedora-25
-ks_repo: http://10.5.126.23/pub/fedora/linux/releases/25/Server/x86_64/os/
+ks_url: http://10.5.126.23/repo/rhel/ks/kvm-fedora-27
+ks_repo: http://10.5.126.23/pub/fedora/linux/releases/27/Server/x86_64/os/
 volgroup: /dev/vg_virthost01
 eth0_ip: 10.5.126.238

@@ -2,8 +2,8 @@
 nm: 255.255.255.0
 gw: 10.5.128.254
 dns: 10.5.126.21
-ks_url: http://10.5.126.23/repo/rhel/ks/kvm-fedora-26
-ks_repo: http://10.5.126.23/pub/fedora/linux/releases/26/Server/x86_64/os/
+ks_url: http://10.5.126.23/repo/rhel/ks/kvm-fedora-27
+ks_repo: http://10.5.126.23/pub/fedora/linux/releases/27/Server/x86_64/os/
 volgroup: /dev/vg_guests
 eth0_ip: 10.5.128.161
 vmhost: virthost20.phx2.fedoraproject.org

@@ -6,8 +6,8 @@ gw: 10.5.125.254
 dns: 10.5.126.21
 ks_url: http://10.5.126.23/repo/rhel/ks/kvm-rhel-7
 ks_repo: http://10.5.126.23/repo/rhel/RHEL7-x86_64/
-volgroup: /dev/xenGuests
-vmhost: bvirthost11.phx2.fedoraproject.org
+volgroup: /dev/vg_guests
+vmhost: bvirthost04.phx2.fedoraproject.org
 datacenter: phx2
 mem_size: 16384

@@ -1120,7 +1120,7 @@ newcloud-control
 newcloud-compute

 [newcloud-control]
-control01.cloud.fedoraproject.org
+#control01.cloud.fedoraproject.org

 [newcloud-compute]
@@ -1216,6 +1216,7 @@ testdays.fedorainfracloud.org
 [maintainer-test]
 f26-test.fedorainfracloud.org
 f27-test.fedorainfracloud.org
+f28-test.fedorainfracloud.org
 rawhide-test.fedorainfracloud.org
 ppc64-test.fedorainfracloud.org
 ppc64le-test.fedorainfracloud.org

@@ -45,7 +45,7 @@
   roles:
   - role: gluster/server
-    name: gluster
+    glusterservername: gluster
     username: "{{ ask_gluster_username }}"
     password: "{{ ask_gluster_password }}"
     owner: root
@@ -53,7 +53,7 @@
     datadir: /srv/glusterfs/ask-stg
   - role: gluster/client
-    name: gluster
+    glusterservername: gluster
     servers:
     - ask01.stg.phx2.fedoraproject.org
     username: "{{ ask_gluster_username }}"
@@ -74,7 +74,7 @@
   roles:
   - role: gluster/server
-    name: gluster
+    glusterservername: gluster
     username: "{{ ask_gluster_username }}"
     password: "{{ ask_gluster_password }}"
     owner: root
@@ -82,7 +82,7 @@
     datadir: /srv/glusterfs/ask
   - role: gluster/client
-    name: gluster
+    glusterservername: gluster
     servers:
     - ask01.phx2.fedoraproject.org
     - ask02.phx2.fedoraproject.org

@@ -26,7 +26,9 @@
   - rsyncd
   - apache
   - httpd/mod_ssl
-  - { role: httpd/certificate, name: "{{wildcard_cert_name}}", SSLCertificateChainFile: "{{wildcard_int_file}}" }
+  - role: httpd/certificate
+    certname: "{{wildcard_cert_name}}"
+    SSLCertificateChainFile: "{{wildcard_int_file}}"
   - openvpn/client
   - batcave

@@ -67,7 +67,7 @@
   roles:
   - role: gluster/server
-    name: gluster
+    glusterservername: gluster
     username: "{{ registry_gluster_username_prod }}"
     password: "{{ registry_gluster_password_prod }}"
     owner: root
@@ -75,7 +75,7 @@
     datadir: /srv/glusterfs/registry
   - role: gluster/client
-    name: gluster
+    glusterservername: gluster
     servers:
     - docker-registry02.phx2.fedoraproject.org
     - docker-registry03.phx2.fedoraproject.org

@@ -63,7 +63,7 @@
   roles:
   - role: gluster/server
-    name: gluster
+    glusterservername: gluster
     username: "{{ nuancier_gluster_username }}"
     password: "{{ nuancier_gluster_password }}"
     owner: root
@@ -71,7 +71,7 @@
     datadir: /srv/glusterfs/nuancier-stg
   - role: gluster/client
-    name: gluster
+    glusterservername: gluster
     servers:
     - nuancier01.stg.phx2.fedoraproject.org
     - nuancier02.stg.phx2.fedoraproject.org
@@ -93,7 +93,7 @@
   roles:
   - role: gluster/server
-    name: gluster
+    glusterservername: gluster
     username: "{{ nuancier_gluster_username }}"
     password: "{{ nuancier_gluster_password }}"
     owner: root
@@ -101,7 +101,7 @@
     datadir: /srv/glusterfs/nuancier
   - role: gluster/client
-    name: gluster
+    glusterservername: gluster
     servers:
     - nuancier01.phx2.fedoraproject.org
     - nuancier02.phx2.fedoraproject.org

@@ -23,9 +23,9 @@
   - nagios_client
   - hosts
   - fas_client
+  - sudo
   - collectd/base
   - rsyncd
-  - sudo

   tasks:
   - import_tasks: "{{ tasks_path }}/2fa_client.yml"
@@ -305,7 +305,8 @@
       state: restarted

   tasks:
+  - name: Ensures /etc/dnsmasq.d/ dir exists
+    file: path="/etc/dnsmasq.d/" state=directory
   - name: install fedora dnsmasq specific config
     copy:
       src: "{{files}}/osbs/fedora-dnsmasq.conf.{{env}}"
@@ -324,12 +325,6 @@
   - /srv/web/infra/ansible/vars/{{ ansible_distribution }}.yml

   tasks:
-  - name: set policy for koji builder in openshift for osbs
-    shell: "oadm policy add-role-to-user -n default edit htpasswd_provider: {{ osbs_koji_stg_username }} && touch /etc/origin/koji-builder-policy-added"
-    args:
-      creates: "/etc/origin/koji-builder-policy-added"
-    when: env == "staging"
-
   - name: set policy for koji builder in openshift for osbs
     shell: "oadm policy add-role-to-user -n default edit htpasswd_provider: {{ osbs_koji_prod_username }} && touch /etc/origin/koji-builder-policy-added"
     args:
@@ -340,6 +335,7 @@
     shell: "oadm policy add-role-to-user -n default edit system:serviceaccount:default:builder && touch /etc/origin/atomic-reactor-policy-added"
     args:
       creates: "/etc/origin/atomic-reactor-policy-added"
+    when: env == "production"

 - name: Deploy OSBS on top of OpenShift
   hosts: osbs-masters-stg[0]:osbs-masters[0]
@@ -399,6 +395,16 @@
   tags:
   - osbs-worker-namespace
   user: root
+  vars_files:
+  - /srv/web/infra/ansible/vars/global.yml
+  - "/srv/private/ansible/vars.yml"
+  - /srv/web/infra/ansible/vars/{{ ansible_distribution }}.yml
+
+  vars:
+    osbs_kubeconfig_path: /etc/origin/master/admin.kubeconfig
+    osbs_environment:
+      KUBECONFIG: "{{ osbs_kubeconfig_path }}"
+
   roles:
   - role: osbs-namespace
     osbs_namespace: "{{ osbs_worker_namespace }}"
@@ -407,7 +413,6 @@
     osbs_authoritative_registry: "{{ source_registry }}"
     osbs_sources_command: "{{ osbs_conf_sources_command }}"
     osbs_vendor: "{{ osbs_conf_vendor }}"
-    osbs_readwrite_users: "{{ osbs_conf_readwrite_users }}"
     when: env == "staging"

 - name: setup koji secret in worker namespace

@@ -75,7 +75,7 @@
   - role: apache
   - role: httpd/certificate
-    name: wildcard-2017.fedorapeople.org
+    certname: wildcard-2017.fedorapeople.org
     SSLCertificateChainFile: wildcard-2017.fedorapeople.org.intermediate.cert
   - people

@@ -37,12 +37,13 @@
   - role: httpd/mod_ssl
   - role: httpd/certificate
-    name: "{{wildcard_cert_name}}"
+    certname: "{{wildcard_cert_name}}"
     SSLCertificateChainFile: "{{wildcard_int_file}}"
   - role: httpd/website
-    name: secondary.fedoraproject.org
-    cert_name: "{{wildcard_cert_name}}"
+    vars:
+    - name: secondary.fedoraproject.org
+    - cert_name: "{{wildcard_cert_name}}"
      server_aliases:
      - archive.fedoraproject.org
      - archives.fedoraproject.org

@@ -26,13 +26,10 @@
   - role: httpd/mod_ssl
   - role: httpd/certificate
-    name: "{{wildcard_cert_name}}"
+    certname: "{{wildcard_cert_name}}"
     SSLCertificateChainFile: "{{wildcard_int_file}}"
-  - role: httpd/website
-    name: torrent.fedoraproject.org
-    cert_name: "{{wildcard_cert_name}}"
-    sslonly: true
+  - {role: httpd/website, vars: {name: torrent.fedoraproject.org, cert_name: "{{wildcard_cert_name}}", sslonly: true}}

   tasks:
   - import_tasks: "{{ tasks_path }}/yumrepos.yml"

@@ -16,72 +16,72 @@
   - role: httpd/mod_ssl
   - role: httpd/certificate
-    name: wildcard-2017.fedoraproject.org
+    certname: wildcard-2017.fedoraproject.org
     SSLCertificateChainFile: wildcard-2017.fedoraproject.org.intermediate.cert
   - role: httpd/certificate
-    name: wildcard-2017.fedorahosted.org
+    certname: wildcard-2017.fedorahosted.org
     SSLCertificateChainFile: wildcard-2017.fedorahosted.org.intermediate.cert
   - role: httpd/certificate
-    name: wildcard-2017.id.fedoraproject.org
+    certname: wildcard-2017.id.fedoraproject.org
     SSLCertificateChainFile: wildcard-2017.id.fedoraproject.org.intermediate.cert
   - role: httpd/certificate
-    name: wildcard-2017.stg.fedoraproject.org
+    certname: wildcard-2017.stg.fedoraproject.org
     SSLCertificateChainFile: wildcard-2017.stg.fedoraproject.org.intermediate.cert
     when: env == "staging"
   - role: httpd/certificate
-    name: wildcard-2017.app.os.stg.fedoraproject.org
+    certname: wildcard-2017.app.os.stg.fedoraproject.org
     SSLCertificateChainFile: wildcard-2017.app.os.stg.fedoraproject.org.intermediate.cert
     when: env == "staging"
     tags:
     - app.os.fedoraproject.org
   - role: httpd/certificate
-    name: wildcard-2017.app.os.fedoraproject.org
+    certname: wildcard-2017.app.os.fedoraproject.org
     SSLCertificateChainFile: wildcard-2017.app.os.fedoraproject.org.intermediate.cert
     tags:
     - app.os.fedoraproject.org
   - role: httpd/certificate
-    name: fedoramagazine.org
+    certname: fedoramagazine.org
     SSLCertificateChainFile: fedoramagazine.org.intermediate.cert
   - role: httpd/certificate
-    name: fpaste.org
+    certname: fpaste.org
     SSLCertificateChainFile: fpaste.org.intermediate.cert
   - role: httpd/certificate
-    name: getfedora.org
+    certname: getfedora.org
     SSLCertificateChainFile: getfedora.org.intermediate.cert
   - role: httpd/certificate
-    name: flocktofedora.org
+    certname: flocktofedora.org
    SSLCertificateChainFile: flocktofedora.org.intermediate.cert
   - role: httpd/certificate
-    name: qa.stg.fedoraproject.org
+    certname: qa.stg.fedoraproject.org
     SSLCertificateChainFile: qa.stg.fedoraproject.org.intermediate.cert
     when: env == "staging"
   - role: httpd/certificate
-    name: qa.fedoraproject.org
+    certname: qa.fedoraproject.org
     SSLCertificateChainFile: qa.fedoraproject.org.intermediate.cert
   - role: httpd/certificate
-    name: secondary.koji.fedoraproject.org.letsencrypt
+    certname: secondary.koji.fedoraproject.org.letsencrypt
     SSLCertificateChainFile: secondary.koji.fedoraproject.org.letsencrypt.intermediate.crt
   - role: httpd/certificate
-    name: whatcanidoforfedora.org
+    certname: whatcanidoforfedora.org
     SSLCertificateChainFile: whatcanidoforfedora.org.intermediate.crt
     tags:
     - whatcanidoforfedora.org
   - role: httpd/certificate
-    name: fedoracommunity.org
+    certname: fedoracommunity.org
     SSLCertificateChainFile: fedoracommunity.org.intermediate.cert
     tags:
     - fedoracommunity.org

@@ -42,11 +42,11 @@
       key: fedmsg-greenwave.crt
       privatefile: fedmsg-certs/keys/greenwave-greenwave-web-greenwave.app.os.fedoraproject.org.crt
     when: env != "staging"
-  - { role: openshift/object, app: greenwave, file: imagestream.yml }
-  - { role: openshift/object, app: greenwave, template: buildconfig.yml }
-  - { role: openshift/start-build, app: greenwave, name: greenwave-docker-build }
-  - { role: openshift/object, app: greenwave, template: configmap.yml }
-  - { role: openshift/object, app: greenwave, file: service.yml }
-  - { role: openshift/object, app: greenwave, file: route.yml }
-  - { role: openshift/object, app: greenwave, file: deploymentconfig.yml }
-  - { role: openshift/rollout, app: greenwave, name: greenwave-web }
+  - { role: openshift/object, vars: {app: greenwave, file: imagestream.yml }}
+  - { role: openshift/object, vars: {app: greenwave, template: buildconfig.yml }}
+  - { role: openshift/start-build, vars: {app: greenwave, name: greenwave-docker-build }}
+  - { role: openshift/object, vars: {app: greenwave, template: configmap.yml }}
+  - { role: openshift/object, vars: {app: greenwave, file: service.yml }}
+  - { role: openshift/object, vars: {app: greenwave, file: route.yml }}
+  - { role: openshift/object, vars: {app: greenwave, file: deploymentconfig.yml }}
+  - { role: openshift/rollout, vars: {app: greenwave, name: greenwave-web }}

@@ -24,9 +24,9 @@
       secret_name: librariesio2fedmsg-fedmsg-crt
       key: fedmsg-librariesio2fedmsg.crt
       privatefile: fedmsg-certs/keys/librariesio2fedmsg-librariesio2fedmsg.app.os.fedoraproject.org.crt
-  - { role: openshift/object, app: librariesio2fedmsg, file: imagestream.yml }
-  - { role: openshift/object, app: librariesio2fedmsg, file: buildconfig.yml }
-  - { role: openshift/start-build, app: librariesio2fedmsg, name: sse2fedmsg-docker-build }
-  - { role: openshift/object, app: librariesio2fedmsg, template: configmap.yml }
-  - { role: openshift/object, app: librariesio2fedmsg, file: deploymentconfig.yml }
-  - { role: openshift/rollout, app: librariesio2fedmsg, name: librariesio2fedmsg }
+  - { role: openshift/object, vars: {app: librariesio2fedmsg, file: imagestream.yml }}
+  - { role: openshift/object, vars: {app: librariesio2fedmsg, file: buildconfig.yml }}
+  - { role: openshift/start-build, vars: {app: librariesio2fedmsg, name: sse2fedmsg-docker-build }}
+  - { role: openshift/object, vars: {app: librariesio2fedmsg, template: configmap.yml }}
+  - { role: openshift/object, vars: {app: librariesio2fedmsg, file: deploymentconfig.yml }}
+  - { role: openshift/rollout, vars: {app: librariesio2fedmsg, name: librariesio2fedmsg }}

@@ -14,12 +14,12 @@
       description: modernpaste
       appowners:
       - codeblock
-  - { role: openshift/object, app: modernpaste, file: imagestream.yml }
-  - { role: openshift/object, app: modernpaste, template: secret.yml }
-  - { role: openshift/object, app: modernpaste, file: buildconfig.yml }
-  - { role: openshift/start-build, app: modernpaste, name: modernpaste-docker-build }
-  - { role: openshift/object, app: modernpaste, template: configmap.yml }
-  - { role: openshift/object, app: modernpaste, file: service.yml }
-  - { role: openshift/object, app: modernpaste, file: route.yml }
-  - { role: openshift/object, app: modernpaste, file: deploymentconfig.yml }
-  - { role: openshift/rollout, app: modernpaste, name: modernpaste-web }
+  - { role: openshift/object, vars: {app: modernpaste, file: imagestream.yml }}
+  - { role: openshift/object, vars: {app: modernpaste, template: secret.yml }}
+  - { role: openshift/object, vars: {app: modernpaste, file: buildconfig.yml }}
+  - { role: openshift/start-build, vars: {app: modernpaste, name: modernpaste-docker-build }}
+  - { role: openshift/object, vars: {app: modernpaste, template: configmap.yml }}
+  - { role: openshift/object, vars: {app: modernpaste, file: service.yml }}
+  - { role: openshift/object, vars: {app: modernpaste, file: route.yml }}
+  - { role: openshift/object, vars: {app: modernpaste, file: deploymentconfig.yml }}
+  - { role: openshift/rollout, vars: {app: modernpaste, name: modernpaste-web }}

@@ -14,11 +14,11 @@
       description: release-monitoring
       appowners:
       - jcline
-  - { role: openshift/object, app: release-monitoring, file: imagestream.yml }
-  - { role: openshift/object, app: release-monitoring, file: buildconfig.yml }
-  - { role: openshift/start-build, app: release-monitoring, name: release-monitoring-web-build }
-  - { role: openshift/object, app: release-monitoring, template: configmap.yml }
-  - { role: openshift/object, app: release-monitoring, file: service.yml }
-  - { role: openshift/object, app: release-monitoring, file: route.yml }
-  - { role: openshift/object, app: release-monitoring, file: deploymentconfig.yml }
-  - { role: openshift/rollout, app: release-monitoring, name: release-monitoring-web }
+  - { role: openshift/object, vars: {app: release-monitoring, file: imagestream.yml }}
+  - { role: openshift/object, vars: {app: release-monitoring, file: buildconfig.yml }}
+  - { role: openshift/start-build, vars: {app: release-monitoring, name: release-monitoring-web-build }}
+  - { role: openshift/object, vars: {app: release-monitoring, template: configmap.yml }}
+  - { role: openshift/object, vars: {app: release-monitoring, file: service.yml }}
+  - { role: openshift/object, vars: {app: release-monitoring, file: route.yml }}
+  - { role: openshift/object, vars: {app: release-monitoring, file: deploymentconfig.yml }}
+  - { role: openshift/rollout, vars: {app: release-monitoring, name: release-monitoring-web }}

@@ -14,11 +14,11 @@
       description: transtats
       appowners:
       - suanand
-  - { role: openshift/object, app: transtats, template: secret.yml }
-  - { role: openshift/object, app: transtats, file: imagestream.yml }
-  - { role: openshift/object, app: transtats, file: buildconfig.yml }
-  - { role: openshift/start-build, app: transtats, name: transtats-build }
-  - { role: openshift/object, app: transtats, file: service.yml }
-  - { role: openshift/object, app: transtats, file: route.yml }
-  - { role: openshift/object, app: transtats, file: deploymentconfig.yml }
-  - { role: openshift/rollout, app: transtats, name: transtats-web }
+  - { role: openshift/object, vars: {app: transtats, template: secret.yml }}
+  - { role: openshift/object, vars: {app: transtats, file: imagestream.yml }}
+  - { role: openshift/object, vars: {app: transtats, file: buildconfig.yml }}
+  - { role: openshift/start-build, vars: {app: transtats, name: transtats-build }}
+  - { role: openshift/object, vars: {app: transtats, file: service.yml }}
+  - { role: openshift/object, vars: {app: transtats, file: route.yml }}
+  - { role: openshift/object, vars: {app: transtats, file: deploymentconfig.yml }}
+  - { role: openshift/rollout, vars: {app: transtats, name: transtats-web }}

@@ -49,11 +49,11 @@
       key: fedmsg-waiverdb.crt
       privatefile: fedmsg-certs/keys/waiverdb-waiverdb-web-waiverdb.app.os.fedoraproject.org.crt
     when: env != "staging"
-  - { role: openshift/object, app: waiverdb, file: imagestream.yml }
-  - { role: openshift/object, app: waiverdb, file: buildconfig.yml }
-  - { role: openshift/start-build, app: waiverdb, name: waiverdb-docker-build }
-  - { role: openshift/object, app: waiverdb, template: configmap.yml }
-  - { role: openshift/object, app: waiverdb, file: service.yml }
-  - { role: openshift/object, app: waiverdb, file: route.yml }
-  - { role: openshift/object, app: waiverdb, template: deploymentconfig.yml }
-  - { role: openshift/rollout, app: waiverdb, name: waiverdb-web }
+  - { role: openshift/object, vars: {app: waiverdb, file: imagestream.yml }}
+  - { role: openshift/object, vars: {app: waiverdb, file: buildconfig.yml }}
+  - { role: openshift/start-build, vars: {app: waiverdb, name: waiverdb-docker-build }}
+  - { role: openshift/object, vars: {app: waiverdb, template: configmap.yml }}
+  - { role: openshift/object, vars: {app: waiverdb, file: service.yml }}
+  - { role: openshift/object, vars: {app: waiverdb, file: route.yml }}
+  - { role: openshift/object, vars: {app: waiverdb, template: deploymentconfig.yml }}
+  - { role: openshift/rollout, vars: {app: waiverdb, name: waiverdb-web }}

@@ -267,6 +267,7 @@
     - pagure

 - name: Configure cron job for a hourly pagure_poc
+  when: env != 'staging'
   cron:
     name: pagure-poc
     user: root
@@ -278,6 +279,7 @@
     - pagure

 - name: Configure cron job for a hourly pagure_bz
+  when: env != 'staging'
   cron:
     name: pagure-poc
     user: root
@@ -289,6+291,7 @@
     - pagure

 - name: Configure cron job for a hourly pagure_owner_alias
+  when: env != 'staging'
   cron:
     name: pagure-poc
     user: root

@@ -17,7 +17,6 @@
   package: name={{ item }} state=present
   with_items:
   - euca2ools
-  when: env == "staging"
   tags:
   - fedimg
@@ -44,7 +43,6 @@
           owner=fedmsg group=fedmsg mode=0700
   notify:
   - restart fedmsg-hub
-  when: env == "staging"
   tags:
   - fedimg

@@ -29,6 +29,8 @@ config = {
 }
 {% else %}
 config = {
-    'fedimgconsumer': True,
+    'fedimgconsumer.dev.enabled': False,
+    'fedimgconsumer.prod.enabled': True,
+    'fedimgconsumer.stg.enabled': False,
 }
 {% endif %}
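The three keys in this template line up with the config_key attributes of FedimgConsumer, FedimgStagingConsumer, and FedimgDevConsumer introduced earlier in this commit; a consumer is only activated when its key is truthy in the hub configuration. Roughly, and only as an illustrative sketch of that gating (the real check lives in the fedmsg/moksha consumer machinery, not in this repository):

def consumer_enabled(consumer_class, hub_config):
    # A consumer class declares config_key; the hub config decides whether
    # that consumer is started. Missing keys default to disabled.
    return bool(hub_config.get(consumer_class.config_key, False))

# With the values above, only the production consumer
# ('fedimgconsumer.prod.enabled': True) would be started on this host.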

@@ -19,14 +19,14 @@
 - name: copy over the client config
   template:
     src: client.config
-    dest: /etc/glusterfs/glusterfs.{{name}}.vol
+    dest: /etc/glusterfs/glusterfs.{{glusterservername}}.vol
     mode: 0640
   #notify:
   #- remount? no idea...

 - name: mount it up
   mount:
-    src: /etc/glusterfs/glusterfs.{{name}}.vol
+    src: /etc/glusterfs/glusterfs.{{glusterservername}}.vol
     state: mounted
     fstype: glusterfs
     name: "{{mountdir}}"

@@ -1,4 +1,4 @@
-# Config for {{ name }}
+# Config for {{ glusterservername }}
 # Generated by ansible

 {% for server in servers %}

@@ -13,7 +13,7 @@
   - restart glusterd

 - name: make the datapath
-  file: dest={{ datadir }}/{{ name }} state=directory
+  file: dest={{ datadir }}/{{ glusterservername }} state=directory
   notify:
   - restart glusterd

@@ -1,9 +1,9 @@
-# Config for {{ name }}
+# Config for {{ glusterservername }}
 # Generated by ansible

 volume posix
   type storage/posix
-  option directory {{ datadir }}/{{ name }}
+  option directory {{ datadir }}/{{ glusterservername }}
 end-volume

 volume locks
@@ -22,8 +22,8 @@ volume server-tcp
   type protocol/server
   subvolumes iothreads
   option transport-type tcp
   option auth.login.iothreads.allow {{ username }}
   option auth.login.{{ username }}.password {{ password }}
   option transport.socket.listen-port 6996
   option transport.socket.nodelay on
 end-volume

@@ -18,7 +18,7 @@
     - httpd
     - httpd/certificate

-- name: Copy {{name}}.cert
+- name: Copy {{certname}}.cert
   copy: >
     src={{item}}
     dest=/etc/pki/tls/certs/{{item | basename}}
@@ -27,14 +27,14 @@
     mode=0644
   with_first_found:
     - "{{private}}/files/httpd/{{cert}}.cert"
-    - "{{private}}/files/httpd/{{name}}.cert"
+    - "{{private}}/files/httpd/{{certname}}.cert"
   notify:
   - reload proxyhttpd
   tags:
   - httpd
   - httpd/certificate

-- name: Copy {{name}}.key
+- name: Copy {{certname}}.key
   copy: >
     src={{item}}
     dest=/etc/pki/tls/private/{{item | basename}}
@@ -43,7 +43,7 @@
     mode=0600
   with_first_found:
     - "{{private}}/files/httpd/{{key}}.key"
-    - "{{private}}/files/httpd/{{name}}.key"
+    - "{{private}}/files/httpd/{{certname}}.key"
   notify:
   - reload proxyhttpd
   tags:

@@ -80,8 +80,11 @@
   tags:
   - modernpaste

-- name: set sebooleans so paste can talk to the db
-  seboolean: name=httpd_can_network_connect_db state=true persistent=true
+- name: set sebooleans so paste can talk to the db and sn2mp can talk to paste
+  seboolean: name={{item}} state=true persistent=true
+  with_items:
+  - httpd_can_network_connect_db
+  - httpd_can_network_connect
   tags:
   - config
   - selinux

@@ -6,7 +6,8 @@
         "client_id": "modernpaste",
         "client_secret": "{{stg_modernpaste_oidc_secret}}",
         "userinfo_uri": "https://id.stg.fedoraproject.org/openidc/UserInfo",
-        "token_introspection_uri": "https://id.stg.fedoraproject.org/openidc/TokenInfo"
+        "token_introspection_uri": "https://id.stg.fedoraproject.org/openidc/TokenInfo",
+        "issuer": ["https://id.stg.fedoraproject.org/openidc/"]
     }
 }
 {% else %}
@@ -17,7 +18,8 @@
         "client_id": "modernpaste",
         "client_secret": "{{prod_modernpaste_oidc_secret}}",
         "userinfo_uri": "https://id.fedoraproject.org/openidc/UserInfo",
-        "token_introspection_uri": "https://id.fedoraproject.org/openidc/TokenInfo"
+        "token_introspection_uri": "https://id.fedoraproject.org/openidc/TokenInfo",
+        "issuer": ["https://id.stg.fedoraproject.org/openidc/"]
     }
 }
 {% endif %}

@@ -52,7 +52,11 @@ REQUIRE_LOGIN_TO_PASTE = False

 # Authentication method
 # This selects between either local users or oidc (OpenID Connect)
+{% if env == 'staging' %}
 AUTH_METHOD = 'oidc'
+{% else %}
+AUTH_METHOD = 'local'
+{% endif %}

 # OpenID Connect client secrets file
 AUTH_OIDC_CLIENT_SECRETS = '/etc/modern-paste/client_secrets.json'

@@ -114,6 +114,13 @@ define service {
   use websitetemplate
 }

+define service {
+  hostgroup_name proxies
+  service_description http-modernpaste
+  check_command check_website_ssl!paste.fedoraproject.org!/archive!NEXT
+  max_check_attempts 8
+  use websitetemplate
+}

 ##
 ## Individual hosts

@@ -15,7 +15,7 @@ items:
               name="release-monitoring-web" \
               vendor="Fedora Infrastructure" \
               license="MIT"
-      RUN dnf install -y \
+      RUN dnf install -y --enable-repo=updates-testing \
           git \
           python3-blinker \
           python3-dateutil \
@@ -30,18 +30,17 @@ items:
           python3-pip \
          python3-psycopg2 \
          python3-setuptools \
+          python3-social-auth-app-flask-sqlalchemy \
          python3-straight-plugin \
          python3-sqlalchemy \
          python3-wtforms && \
          dnf autoremove -y && \
          dnf clean all -y
-      RUN pip-3 install social-auth-app-flask social-auth-app-flask-sqlalchemy
       RUN pip-3 install git+https://github.com/release-monitoring/anitya.git
       ENV USER=anitya
       EXPOSE 8080
       EXPOSE 9940
-      ENTRYPOINT python3 -c "from anitya.config import config; from anitya.lib import utilities; utilities.init('sqlite:////var/tmp/anitya-dev.sqlite', None, debug=True, create=True)" \
-          && python3-gunicorn --bind 0.0.0.0:8080 --access-logfile=- anitya.wsgi:application
+      ENTRYPOINT python3-gunicorn --bind 0.0.0.0:8080 --access-logfile=- anitya.wsgi:application
       type: Dockerfile
     strategy:
       type: Docker

@@ -22,6 +22,13 @@ items:
           maxUnavailable: 25%
           timeoutSeconds: 600
           updatePeriodSeconds: 1
+          pre:
+            failurePolicy: Abort
+            execNewPod:
+              containerName: release-monitoring-web
+              command: [ /bin/sh, -i, -c, "alembic -c /etc/anitya/alembic.ini upgrade head" ]
+              volumes:
+              - config-volume
         type: Rolling
       template:
         metadata:

@@ -20,7 +20,7 @@ data:
     permanent_session_lifetime = 3600

     {% if env == 'staging' %}
-    db_url = "sqlite:////var/tmp/anitya-dev.sqlite"
+    db_url = "postgresql://{{ anitya_stg_db_user }}:{{ anitya_stg_db_pass }}@{{ anitya_stg_db_host }}/{{ anitya_stg_db_name }}"
     {% else %}
     db_url = "postgresql://{{ anitya_db_user }}:{{ anitya_db_pass }}@{{ anitya_db_host }}/{{ anitya_db_name }}"
     {% endif %}
@@ -70,3 +70,38 @@ data:
     [anitya_log_config.root]
     level = "ERROR"
     handlers = ["console"]
+  alembic.ini: |-
+    [alembic]
+    script_location = anitya:db/migrations
+    sourceless = false
+    {% if env == 'staging' %}
+    sqlalchemy.url = "postgresql://{{ anitya_stg_db_user }}:{{ anitya_stg_db_pass }}@{{ anitya_stg_db_host }}/{{ anitya_stg_db_name }}"
+    {% else %}
+    sqlalchemy.url = "postgresql://{{ anitya_db_user }}:{{ anitya_db_pass }}@{{ anitya_db_host }}/{{ anitya_db_name }}"
+    {% endif %}
+    [loggers]
+    keys = root,sqlalchemy,alembic
+    [handlers]
+    keys = console
+    [formatters]
+    keys = generic
+    [logger_root]
+    level = WARN
+    handlers = console
+    qualname =
+    [logger_sqlalchemy]
+    level = WARN
+    handlers =
+    qualname = sqlalchemy.engine
+    [logger_alembic]
+    level = INFO
+    handlers =
+    qualname = alembic
+    [handler_console]
+    class = StreamHandler
+    args = (sys.stderr,)
+    level = NOTSET
+    formatter = generic
+    [formatter_generic]
+    format = %(levelname)-5.5s [%(name)s] %(message)s
+    datefmt = %H:%M:%S

@@ -643,3 +643,7 @@ SCRIPTWHITELIST=/usr/bin/groups
 SCRIPTWHITELIST=/usr/bin/GET
 SCRIPTWHITELIST=/sbin/ifup
 SCRIPTWHITELIST=/sbin/ifdown
+{% if inventory_hostname.startswith(('db','pagure','retrace','anitya','upstream')) %}
+# Set this size very large on postgres running servers.
+IPC_SEG_SIZE=100000000000
+{% endif %}

@@ -17,7 +17,7 @@
     login_tenant_name: "{{inventory_tenant}}"
     name: "{{inventory_instance_name}}"
     image_id: "{{ image|image_name_to_id('admin', ADMIN_PASS, inventory_tenant, os_auth_url) }}"
-    wait_for: 300
+    wait_for: 600
     flavor_id: "{{ instance_type|flavor_name_to_id('admin', ADMIN_PASS, inventory_tenant, os_auth_url) }}"
     security_groups: "{{security_group}}"
     key_name: "{{ keypair }}"