From 52217cef19971438b8016d5dd365cae60e37b7d1 Mon Sep 17 00:00:00 2001 From: Pavel Raiskup Date: Fri, 10 Jan 2025 20:25:49 +0100 Subject: [PATCH] copr-be: per-zone pools started in IBM Cloud Complements: https://github.com/fedora-copr/resalloc-ibm-cloud/pull/31 Relates: https://github.com/fedora-copr/copr/issues/3557 --- inventory/group_vars/copr_aws | 127 ++++++++++-- inventory/group_vars/copr_dev_aws | 117 +++++++++-- roles/copr/backend/tasks/main.yml | 2 + roles/copr/backend/tasks/resalloc.yml | 2 - .../templates/resalloc/ibm-cloud-list-vms.j2 | 25 --- .../templates/resalloc/ibm-cloud-vm.j2 | 79 -------- .../backend/templates/resalloc/pools.yaml | 188 +++++++++++------- .../templates/resalloc/pools.yaml.expand.sh | 51 +++++ .../templates/resalloc/reserve-ibm-cloud-ips | 45 +++++ .../backend/templates/resalloc/vm-delete.j2 | 4 +- 10 files changed, 431 insertions(+), 209 deletions(-) delete mode 100644 roles/copr/backend/templates/resalloc/ibm-cloud-list-vms.j2 delete mode 100644 roles/copr/backend/templates/resalloc/ibm-cloud-vm.j2 create mode 100755 roles/copr/backend/templates/resalloc/pools.yaml.expand.sh create mode 100755 roles/copr/backend/templates/resalloc/reserve-ibm-cloud-ips diff --git a/inventory/group_vars/copr_aws b/inventory/group_vars/copr_aws index 7021d49c6f..89fbde5016 100644 --- a/inventory/group_vars/copr_aws +++ b/inventory/group_vars/copr_aws @@ -39,16 +39,111 @@ builders: x86_64: [20, 4, 20] x86_hypervisor_04: x86_64: [20, 4, 20] - ibm_cloud_us_east: - # s390x is on-demand - s390x: [0, 0, 0] - ibm_cloud_tokyo: - # tokyo is evacuated - s390x: [0, 6, 6] - ibm_cloud_spaulo: - s390x: [9, 3, 3] - ibm_cloud_madrid: - s390x: [9, 3, 3] + + ibm_cloud_us_east_hp: + s390x: [2, 1, 0] + + ibm_cloud_br_sao_1: + s390x: [3, 1, 1] + ibm_cloud_br_sao_2: + s390x: [3, 1, 1] + ibm_cloud_br_sao_3: + s390x: [3, 1, 1] + + ibm_cloud_eu_es_1: + s390x: [3, 1, 1] + ibm_cloud_eu_es_2: + s390x: [3, 1, 1] + ibm_cloud_eu_es_3: + s390x: [3, 1, 1] + +cloud_instance_types: 
+ ibm_cloud: + s390x: mz2-2x16 + s390x_hp: bz2-16x64 + +ibm_cloud_regions: + eu_es: + name: eu-es + name_humans: Madrid, Spain + ssh_key: r050-cbd10ece-a28f-45c8-bc3a-15ecb9ff66d5 + vpc: r050-190b8bac-3bae-47cf-9be7-ac70fc54ccb9 + security_group: r050-cfc3db07-8cde-4807-9756-db91b242092a + images: "{{ copr_builder_images.ibm_cloud.eu_es }}" + br_sao: + name: br-sao + name_humans: Sao Paulo, Brazil + ssh_key: r042-4fb65a23-e7bd-440f-8d71-ff57b83a4152 + vpc: r042-c902f12d-556f-4334-95cb-d181ebb3d8ba + security_group: r042-22ef3881-fa20-4caa-9873-c8dc41c5dcb7 + images: "{{ copr_builder_images.ibm_cloud.br_sao }}" + +resalloc_pools: + + ibm_us_east: + images: "{{ copr_builder_images.ibm_cloud.us_east }}" + subnets: + - us-east-1:0757-df8209fd-e4e4-4659-bcca-b60c9eddfb35 + - us-east-2:0767-4cb7e11d-14db-48a8-9d1d-c59679112cd5 + - us-east-3:0777-bc697f15-4f13-4843-b338-d8950e3d23c0 + ssh_key: r014-fc1c1b90-5a7f-4cc6-a6d4-b273bd19be99 + vpc: r014-e90f14b0-a9c0-4c0b-bc81-207904b2d19e + security_group: r014-941706bd-062c-4d4c-9512-9a31b8f257d7 + + ibm_eu_es_1: + zone: eu-es-1 + subnet: 02w7-84eded83-3077-44c3-a84c-90c7afb7ff4f + region_config: "{{ ibm_cloud_regions.eu_es }}" + floating_ips: + - r050-957fb229-4001-426c-b03b-9ec73fbcfc01 + - r050-b395c059-8407-4b7a-bc31-332f1de0a8c7 + - r050-2e71b2bd-66a5-4c76-a158-cce17471dadd + + ibm_eu_es_2: + zone: eu-es-2 + subnet: 02x7-05de0643-b257-4500-9a14-648421b16738 + region_config: "{{ ibm_cloud_regions.eu_es }}" + floating_ips: + - r050-f0791af4-128d-48bb-8a3d-fad718003062 + - r050-350e120d-b91f-41aa-b600-66c63182fc66 + - r050-633ebce4-4e3b-4367-8c15-2c014ff39869 + + ibm_eu_es_3: + zone: eu-es-3 + subnet: 02y7-e32873a0-ad98-44d5-b49d-da49f731a914 + region_config: "{{ ibm_cloud_regions.eu_es }}" + floating_ips: + - r050-6390c1bf-a057-4c9c-b0ae-ac2b0ed591a7 + - r050-dca6a5f9-6fe0-4d1c-9217-3fd4357bf55d + - r050-5d7d0f7e-5fc0-4a45-8efd-09cf819ee00a + + ibm_br_sao_1: + zone: br-sao-1 + subnet: 
02t7-1c0b186a-ca5e-44ea-bf2c-ca2f3122a9ff + region_config: "{{ ibm_cloud_regions.br_sao }}" + floating_ips: + - r042-c409079e-f2a2-44ff-a993-b09f21148296 + - r042-4196a36b-5fb5-462f-8f98-46d1320fb1d1 + - r042-f2724485-478e-4059-bf57-cc485aade9f5 + + ibm_br_sao_2: + zone: br-sao-2 + subnet: 02u7-75fa6c55-b65a-4d38-a011-15559c7171ce + region_config: "{{ ibm_cloud_regions.br_sao }}" + floating_ips: + - r042-b66c2a71-71e0-4120-a05b-662b02ebb357 + - r042-9e75ba4e-8005-4f36-9e4f-462ab0c93f7c + - r042-54f648f6-a5f8-4535-8b16-ef76743e7937 + + ibm_br_sao_3: + zone: br-sao-3 + subnet: 02v7-439195e6-2df0-4720-9696-76d0a63c352d + region_config: "{{ ibm_cloud_regions.br_sao }}" + floating_ips: + - r042-03e04fbd-04f4-48ad-826e-48c0c41beeff + - r042-0ebc5298-857c-4832-9ece-016baf74cdd0 + - r042-082eba49-2952-43a3-be23-ac202ad847fc + copr_aws_region: us-east-1 # don't forget to update ip in ./copr-keygen, due to custom firewall rules @@ -70,13 +165,13 @@ copr_builder_images: ppc64le: copr-builder-ppc64le-20241119_171239 x86_64: copr-builder-x86_64-20241119_172236 ibm_cloud: - s390x: - # tokyo is not used right now - tokyo: r022-3bca519e-3cc7-432e-9280-8ff89738e3a5 + br_sao: # São Paulo + s390x: r042-5e105a96-4a84-4c07-950b-9fd12d58c8a2 + eu_es: # Madrid + s390x: r050-053cfdb8-52eb-486d-b8b3-bc9cba2fbe56 + us_east: # Washington # n-1: us_east: r014-1469fafc-d906-44fa-b780-e4599ed56fde - us_east: r014-9ba4feec-ce45-4401-acea-f123114685b5 - sao: r042-5e105a96-4a84-4c07-950b-9fd12d58c8a2 - madrid: r050-053cfdb8-52eb-486d-b8b3-bc9cba2fbe56 + s390x: r014-9ba4feec-ce45-4401-acea-f123114685b5 osuosl: # ppc64le: copr-builder-ppc64le-20240206_143009 ppc64le: copr-builder-ppc64le-20241119_171239 diff --git a/inventory/group_vars/copr_dev_aws b/inventory/group_vars/copr_dev_aws index 9034be4744..7873b5a2a7 100644 --- a/inventory/group_vars/copr_dev_aws +++ b/inventory/group_vars/copr_dev_aws @@ -18,8 +18,9 @@ aws_arch_subnets: - subnet-01d4e967ab5e78005 - subnet-05437ac82d63b6ef5 
backend_base_url: "https://download.copr-dev.fedorainfracloud.org" + builders: - # max|prealloc|max_concurrently_starting + # max|spawn_concurrently|prealloc aws: aarch64: [2, 0, 0] x86_64: [4, 0, 0] @@ -40,16 +41,98 @@ builders: x86_64: [2, 1, 1] x86_hypervisor_04: x86_64: [2, 1, 1] - ibm_cloud_us_east: - # s390x is on-demand - s390x: [0, 0, 0] - ibm_cloud_tokyo: - # tokyo is evacuated - s390x: [0, 0, 0] - ibm_cloud_spaulo: - s390x: [0, 0, 0] - ibm_cloud_madrid: - s390x: [1, 0, 0] + + ibm_cloud_us_east_hp: + s390x: [1, 1, 0] + + ibm_cloud_br_sao_1: + s390x: [1, 1, 0] + ibm_cloud_br_sao_2: + s390x: [1, 1, 0] + ibm_cloud_br_sao_3: + s390x: [1, 1, 0] + + ibm_cloud_eu_es_1: + s390x: [1, 1, 0] + ibm_cloud_eu_es_2: + s390x: [1, 1, 0] + ibm_cloud_eu_es_3: + s390x: [1, 1, 0] + +cloud_instance_types: + ibm_cloud: + s390x: mz2-2x16 + s390x_hp: bz2-16x64 + +ibm_cloud_regions: + eu_es: + name: eu-es + name_humans: Madrid, Spain + ssh_key: r050-cbd10ece-a28f-45c8-bc3a-15ecb9ff66d5 + vpc: r050-190b8bac-3bae-47cf-9be7-ac70fc54ccb9 + security_group: r050-cfc3db07-8cde-4807-9756-db91b242092a + images: "{{ copr_builder_images.ibm_cloud.eu_es }}" + br_sao: + name: br-sao + name_humans: Sao Paulo, Brazil + ssh_key: r042-4fb65a23-e7bd-440f-8d71-ff57b83a4152 + vpc: r042-c902f12d-556f-4334-95cb-d181ebb3d8ba + security_group: r042-22ef3881-fa20-4caa-9873-c8dc41c5dcb7 + images: "{{ copr_builder_images.ibm_cloud.br_sao }}" + +resalloc_pools: + ibm_us_east: + images: "{{ copr_builder_images.ibm_cloud.us_east }}" + subnets: + - us-east-1:0757-df8209fd-e4e4-4659-bcca-b60c9eddfb35 + - us-east-2:0767-4cb7e11d-14db-48a8-9d1d-c59679112cd5 + - us-east-3:0777-bc697f15-4f13-4843-b338-d8950e3d23c0 + ssh_key: r014-fc1c1b90-5a7f-4cc6-a6d4-b273bd19be99 + vpc: r014-e90f14b0-a9c0-4c0b-bc81-207904b2d19e + security_group: r014-941706bd-062c-4d4c-9512-9a31b8f257d7 + + ibm_eu_es_1: + zone: eu-es-1 + subnet: 02w7-84eded83-3077-44c3-a84c-90c7afb7ff4f + region_config: "{{ ibm_cloud_regions.eu_es }}" + 
floating_ips: + - r050-c7f0c426-6842-4a68-91ca-72faf38d1fca + + ibm_eu_es_2: + zone: eu-es-2 + subnet: 02x7-05de0643-b257-4500-9a14-648421b16738 + region_config: "{{ ibm_cloud_regions.eu_es }}" + floating_ips: + - r050-97186457-f797-4d90-82bb-7f363453edb9 + + ibm_eu_es_3: + zone: eu-es-3 + subnet: 02y7-e32873a0-ad98-44d5-b49d-da49f731a914 + region_config: "{{ ibm_cloud_regions.eu_es }}" + floating_ips: + - r050-5c361f73-2663-406f-96c9-5274552e3206 + + ibm_br_sao_1: + zone: br-sao-1 + subnet: 02t7-1c0b186a-ca5e-44ea-bf2c-ca2f3122a9ff + region_config: "{{ ibm_cloud_regions.br_sao }}" + floating_ips: + - r042-ac2bf299-90e4-4804-91a1-f0a4168abdd0 + + ibm_br_sao_2: + zone: br-sao-2 + subnet: 02u7-75fa6c55-b65a-4d38-a011-15559c7171ce + region_config: "{{ ibm_cloud_regions.br_sao }}" + floating_ips: + - r042-b58c7ed2-8552-4310-bfcf-a5dbef36cdd5 + + ibm_br_sao_3: + zone: br-sao-3 + subnet: 02v7-439195e6-2df0-4720-9696-76d0a63c352d + region_config: "{{ ibm_cloud_regions.br_sao }}" + floating_ips: + - r042-e97d42a1-dede-4b4b-b663-c6fcef8a7a2b + copr_aws_region: us-east-1 # don't forget to update ip in ./copr-keygen-stg, due to custom firewall rules @@ -73,13 +156,13 @@ copr_builder_images: ppc64le: copr-builder-ppc64le-20241119_171239 x86_64: copr-builder-x86_64-20241119_172236 ibm_cloud: - s390x: - # tokyo is not used right now - tokyo: r022-3bca519e-3cc7-432e-9280-8ff89738e3a5 + br_sao: # São Paulo + s390x: r042-5e105a96-4a84-4c07-950b-9fd12d58c8a2 + eu_es: # Madrid + s390x: r050-053cfdb8-52eb-486d-b8b3-bc9cba2fbe56 + us_east: # Washington # n-1: us_east: r014-1469fafc-d906-44fa-b780-e4599ed56fde - us_east: r014-9ba4feec-ce45-4401-acea-f123114685b5 - sao: r042-5e105a96-4a84-4c07-950b-9fd12d58c8a2 - madrid: r050-053cfdb8-52eb-486d-b8b3-bc9cba2fbe56 + s390x: r014-9ba4feec-ce45-4401-acea-f123114685b5 osuosl: # ppc64le: copr-builder-ppc64le-20240206_143009 ppc64le: copr-builder-ppc64le-20241119_171239 diff --git a/roles/copr/backend/tasks/main.yml 
b/roles/copr/backend/tasks/main.yml index 20f9fca97e..e636cc7db2 100644 --- a/roles/copr/backend/tasks/main.yml +++ b/roles/copr/backend/tasks/main.yml @@ -195,6 +195,8 @@ with_items: - x86_64 - aarch64 + - s390x + - ppc64le tags: - provision_config diff --git a/roles/copr/backend/tasks/resalloc.yml b/roles/copr/backend/tasks/resalloc.yml index 41d71c387f..ec77676038 100644 --- a/roles/copr/backend/tasks/resalloc.yml +++ b/roles/copr/backend/tasks/resalloc.yml @@ -89,8 +89,6 @@ with_items: - vm-delete - vm-release - - ibm-cloud-vm - - ibm-cloud-list-vms - ibm-cloud-list-deleting-vms - osuosl-vm - osuosl-list diff --git a/roles/copr/backend/templates/resalloc/ibm-cloud-list-vms.j2 b/roles/copr/backend/templates/resalloc/ibm-cloud-list-vms.j2 deleted file mode 100644 index 732c8b5ee6..0000000000 --- a/roles/copr/backend/templates/resalloc/ibm-cloud-list-vms.j2 +++ /dev/null @@ -1,25 +0,0 @@ -#! /bin/bash - -case $RESALLOC_POOL_ID in - *s390x*tokyo*) - zone=jp-tok - ;; - *s390x*us_east*) - zone=us-east - ;; - *s390x*sao*) - zone=br-sao - ;; - *s390x*madrid*) - zone=eu-es - ;; - *) - echo "Can't decide what location to assign from pool id: '$RESALLOC_POOL_ID'" - exit 1 - ;; -esac - -exec resalloc-ibm-cloud-list-vms \ - --token-file "{{ ibmcloud_token_file }}" \ - --zone "$zone" \ - "$@" diff --git a/roles/copr/backend/templates/resalloc/ibm-cloud-vm.j2 b/roles/copr/backend/templates/resalloc/ibm-cloud-vm.j2 deleted file mode 100644 index c3224dc1ac..0000000000 --- a/roles/copr/backend/templates/resalloc/ibm-cloud-vm.j2 +++ /dev/null @@ -1,79 +0,0 @@ -#! 
/bin/bash - -case $RESALLOC_NAME in - *s390x*tokyo*) - vpc_id=r022-8438169e-d881-4bda-b603-d31fdf0f8b3a - security_group_id=r022-bf49b90e-c00f-4c68-8707-2936b47b286b - ssh_key_id=r022-3918e368-8e00-4e23-9119-5e3ce1eb33bd - instance_type=cz2-2x4 - image_uuid="{{ copr_builder_images.ibm_cloud.s390x.tokyo }}" - subnets_ids="jp-tok-1:02e7-84755ffa-97bb-4067-b618-24c788c901cb jp-tok-2:02f7-98674f68-aae1-4ea1-a889-5a0b7a07f4b8 jp-tok-3:02g7-6d92562d-b868-411f-a962-99271d609ba6" - zone=jp-tok - ;; - *s390x*sao*) - vpc_id=r042-c902f12d-556f-4334-95cb-d181ebb3d8ba - security_group_id=r042-22ef3881-fa20-4caa-9873-c8dc41c5dcb7 - ssh_key_id=r042-4fb65a23-e7bd-440f-8d71-ff57b83a4152 - instance_type=mz2-2x16 - additional_volume_size=160 - image_uuid="{{ copr_builder_images.ibm_cloud.s390x.sao }}" - subnets_ids="br-sao-1:02t7-1c0b186a-ca5e-44ea-bf2c-ca2f3122a9ff br-sao-2:02u7-75fa6c55-b65a-4d38-a011-15559c7171ce br-sao-3:02v7-439195e6-2df0-4720-9696-76d0a63c352d" - zone=br-sao - ;; - *s390x*madrid*) - vpc_id=r050-190b8bac-3bae-47cf-9be7-ac70fc54ccb9 - security_group_id=r050-cfc3db07-8cde-4807-9756-db91b242092a - ssh_key_id=r050-cbd10ece-a28f-45c8-bc3a-15ecb9ff66d5 - instance_type=mz2-2x16 - additional_volume_size=160 - image_uuid="{{ copr_builder_images.ibm_cloud.s390x.madrid }}" - subnets_ids="eu-es-1:02w7-84eded83-3077-44c3-a84c-90c7afb7ff4f eu-es-2:02x7-05de0643-b257-4500-9a14-648421b16738 eu-es-3:02y7-e32873a0-ad98-44d5-b49d-da49f731a914" - zone=eu-es - ;; - *s390x*us_east*) - vpc_id=r014-e90f14b0-a9c0-4c0b-bc81-207904b2d19e - security_group_id=r014-941706bd-062c-4d4c-9512-9a31b8f257d7 - ssh_key_id=r014-fc1c1b90-5a7f-4cc6-a6d4-b273bd19be99 - case $RESALLOC_NAME in - *_hp_*) - instance_type=bz2-16x64 - additional_volume_size=320 - ;; - *) - instance_type=mz2-2x16 - additional_volume_size=160 - ;; - esac - image_uuid="{{ copr_builder_images.ibm_cloud.s390x.us_east }}" - subnets_ids="us-east-1:0757-df8209fd-e4e4-4659-bcca-b60c9eddfb35 
us-east-2:0767-4cb7e11d-14db-48a8-9d1d-c59679112cd5 us-east-3:0777-bc697f15-4f13-4843-b338-d8950e3d23c0" - zone=us-east - ;; - *) - echo "Can't decide what location to assign from: $RESALLOC_NAME" - exit 1 - ;; -esac - -params=() - -if [ "$1" == "create" ]; then - params+=( - --playbook "{{ provision_directory }}/libvirt-provision.yml" - --image-uuid "$image_uuid" - --vpc-id "$vpc_id" - --security-group-id "$security_group_id" - --ssh-key-id "$ssh_key_id" - --instance-type "$instance_type" - --subnets-ids $subnets_ids - --additional-volume-size "$additional_volume_size" - -- - ) -fi - -exec resalloc-ibm-cloud-vm \ - --token-file "{{ ibmcloud_token_file }}" \ - --zone "$zone" \ - --log-level debug \ - "$1" \ - "${params[@]}" \ - "${@:2}" diff --git a/roles/copr/backend/templates/resalloc/pools.yaml b/roles/copr/backend/templates/resalloc/pools.yaml index cf2751e568..7ef0e08600 100644 --- a/roles/copr/backend/templates/resalloc/pools.yaml +++ b/roles/copr/backend/templates/resalloc/pools.yaml @@ -1,13 +1,27 @@ --- - -# You do NOT need to restart resalloc after change -# daemon will fetch it automatically after an update. # -# Note: higher priority pools are used first -# default priority is 0 -# reserved instances in cloud has > 0 -# on-premise instance < 0 -# high performance instances <= 40 +# Notes for Resalloc configuration file: +# +# - You do NOT need to restart resalloc server after changing this file, it +# automatically reloads this configuration file. +# +{% if False %} +# - This is rather a complex Jinja template. 
Feel free to use the helper # script to expand locally before you commit any change to this file: # ./roles/copr/backend/templates/resalloc/pools.yaml.expand.sh {% endif %} # - higher priority pools are used first # default priority is 0 # reserved instances in cloud has > 0 # on-premise instance < 0 # high performance instances <= 40 # # - if you need to drop a pool, it requires you to do a few steps: # a) first evacuate the pool by setting `max: 0`, # b) wait till all machines are deallocated (the cmd_delete is still defined # for this pool!) # c) you may remove the pool configuration in this file, and finally # d) remove the pool in PostgreSQL database. {% macro aws(arch, max, max_starting, max_prealloc, spot=False, on_demand=none, priority=0, reserved=False) %} aws_{{ arch }}_{{ on_demand + '_' if on_demand is not none else '' }}{% if spot %}spot{% else %}normal{% endif %}{% if reserved %}reserved{% endif %}_{% if devel %}dev{% else %}prod{% endif %}: @@ -190,52 +204,18 @@ copr_p09_{{ hv }}_{% if devel %}dev{% else %}prod{% endif %}: {% endif %} {% endfor %} -{% if False %} -# Z Cloud instances Tokyo -copr_ibm_cloud_s390x_tokyo_{% if devel %}dev{% else %}prod{% endif %}: - max: {{ builders.ibm_cloud_tokyo.s390x[0] }} -{% if not devel %} - max_prealloc: {{ builders.ibm_cloud_tokyo.s390x[1] }} -{% endif %} - max_starting: {{ builders.ibm_cloud_tokyo.s390x[2] }} - tags: - - copr_builder - - name: arch_noarch - priority: -30 - - ibm_cloud - - tokyo -{% if devel %} - tags_on_demand: -{% endif %} - - arch_s390x - - arch_s390x_native - cmd_new: '/var/lib/resallocserver/resalloc_provision/ibm-cloud-vm create "$RESALLOC_NAME"' - cmd_delete: "/var/lib/resallocserver/resalloc_provision/vm-delete" - cmd_livecheck: "resalloc-check-vm-ip" - cmd_release: "/var/lib/resallocserver/resalloc_provision/vm-release" - cmd_list: "/var/lib/resallocserver/resalloc_provision/ibm-cloud-list-vms" - livecheck_period: 180 - reuse_opportunity_time: 90 - reuse_max_count: 8 -
reuse_max_time: 1800 - description: > - A pool of s390x instances in the IBM cloud. They are located in Tokyo. - Thank you IBM for sponsoring these builders. -{% endif %} - - # Z Cloud instances in Washington (hp == high performance) {% for t in ['', 'hp'] %} copr_ic_s390x_{{ t + '_' if t or '' }}us_east_{% if devel %}dev{% else %}prod{% endif %}: {% if not t %} - max: {{ builders.ibm_cloud_us_east.s390x[0] }} + max: 0 {% if not devel %} - max_prealloc: {{ builders.ibm_cloud_us_east.s390x[1] }} + max_prealloc: 0 {% endif %} - max_starting: {{ builders.ibm_cloud_us_east.s390x[2] }} + max_starting: 0 {% else %} - max: 2 - max_starting: 2 + max: {{ builders.ibm_cloud_us_east_hp.s390x[0] }} + max_starting: {{ builders.ibm_cloud_us_east_hp.s390x[1] }} {% endif %} tags: - copr_builder @@ -250,34 +230,51 @@ copr_ic_s390x_{{ t + '_' if t or '' }}us_east_{% if devel %}dev{% else %}prod{% - name: arch_s390x {% if t == 'hp' %} {% if devel %} - priority: -30 + priority: -300 {% else %} tags_on_demand: {% endif %} - on_demand_powerful {% endif %} - cmd_new: '/var/lib/resallocserver/resalloc_provision/ibm-cloud-vm create "$RESALLOC_NAME"' - cmd_delete: "/var/lib/resallocserver/resalloc_provision/vm-delete" +{% set pool_config = resalloc_pools.ibm_us_east %} + cmd_new: | + resalloc-ibm-cloud-vm \ + --token-file "{{ ibmcloud_token_file }}" \ + --region us-east \ + --log-level debug \ + create "$RESALLOC_NAME" \ + --playbook "{{ provision_directory }}/libvirt-provision.yml" \ + --image-uuid "{{ pool_config.images.s390x }}" \ + --vpc-id "{{ pool_config.vpc }}" \ + --security-group-id "{{ pool_config.security_group }}" \ + --ssh-key-id "{{ pool_config.ssh_key }}" \ + --instance-type "{{ cloud_instance_types.ibm_cloud.s390x_hp }}" \ + --subnets-ids "{{ '" "'.join(pool_config.subnets) }}" \ + --additional-volume-size "320" + cmd_delete: | + /var/lib/resallocserver/resalloc_provision/vm-delete + resalloc-ibm-cloud-vm --token-file "{{ ibmcloud_token_file }}" \ + --region us-east 
--log-level debug delete "$RESALLOC_NAME" cmd_livecheck: "resalloc-check-vm-ip" cmd_release: "/var/lib/resallocserver/resalloc_provision/vm-release" - cmd_list: "/var/lib/resallocserver/resalloc_provision/ibm-cloud-list-vms" + cmd_list: resalloc-ibm-cloud-list-vms --token-file "{{ ibmcloud_token_file }}" --region us-east + livecheck_period: 180 reuse_opportunity_time: 45 reuse_max_count: 8 reuse_max_time: 1800 description: > - A pool of s390x instances in the IBM cloud. - They are located in Washington, D.C. - Thank you IBM for sponsoring these builders. + A pool of high-performance s390x instances in the IBM Cloud, Washington, D.C. (us-east). + Thanks to IBM for sponsoring these builders. {% endfor %} copr_ic_s390x_br_sao_{% if devel %}dev{% else %}prod{% endif %}: - max: {{ builders.ibm_cloud_spaulo.s390x[0] }} + max: 0 {% if not devel %} - max_prealloc: {{ builders.ibm_cloud_spaulo.s390x[1] }} + max_prealloc: 0 {% endif %} - max_starting: {{ builders.ibm_cloud_spaulo.s390x[2] }} + max_starting: 0 tags: - copr_builder - name: arch_noarch @@ -289,22 +286,76 @@ copr_ic_s390x_br_sao_{% if devel %}dev{% else %}prod{% endif %}: {% endif %} - name: arch_s390x_native - name: arch_s390x - cmd_new: '/var/lib/resallocserver/resalloc_provision/ibm-cloud-vm create "$RESALLOC_NAME"' - cmd_delete: "/var/lib/resallocserver/resalloc_provision/vm-delete" + cmd_new: /bin/true no new machines + cmd_delete: | + /var/lib/resallocserver/resalloc_provision/vm-delete + resalloc-ibm-cloud-vm --token-file "{{ ibmcloud_token_file }}" \ + --region br-sao --log-level debug delete "$RESALLOC_NAME" cmd_livecheck: "resalloc-check-vm-ip" cmd_release: "/var/lib/resallocserver/resalloc_provision/vm-release" - cmd_list: "/var/lib/resallocserver/resalloc_provision/ibm-cloud-list-vms" + cmd_list: resalloc-ibm-cloud-list-vms --token-file "{{ ibmcloud_token_file }}" --region br-sao livecheck_period: 180 reuse_opportunity_time: 45 reuse_max_count: 8 reuse_max_time: 1800 -copr_ic_s390x_madrid_{% if
devel %}dev{% else %}prod{% endif %}: - max: {{ builders.ibm_cloud_madrid.s390x[0] }} +{% for zone in ['br_sao_1', 'br_sao_2', 'br_sao_3', 'eu_es_1', 'eu_es_2', 'eu_es_3'] %} +copr_ic_s390x_{{ zone }}_{% if devel %}dev{% else %}prod{% endif %}: + max: {{ builders['ibm_cloud_' + zone].s390x[0] }} + max_starting: {{ builders['ibm_cloud_' + zone].s390x[1] }} {% if not devel %} - max_prealloc: {{ builders.ibm_cloud_madrid.s390x[1] }} + max_prealloc: {{ builders['ibm_cloud_' + zone].s390x[2] }} {% endif %} - max_starting: {{ builders.ibm_cloud_madrid.s390x[2] }} + tags: + - copr_builder + - name: arch_noarch + priority: -30 + - ibm_cloud + - {{ zone }} +{% if devel %} + tags_on_demand: +{% endif %} + - name: arch_s390x_native + - name: arch_s390x + cmd_new: | +{% set pool_config = resalloc_pools["ibm_" + zone] %} + resalloc-ibm-cloud-vm \ + --token-file "{{ ibmcloud_token_file }}" \ + --region {{ pool_config.region_config.name }} \ + --log-level debug \ + create "$RESALLOC_NAME" \ + --playbook "{{ provision_directory }}/libvirt-provision.yml" \ + --image-uuid "{{ pool_config.region_config.images.s390x }}" \ + --vpc-id "{{ pool_config.region_config.vpc }}" \ + --security-group-id "{{ pool_config.region_config.security_group }}" \ + --ssh-key-id "{{ pool_config.region_config.ssh_key }}" \ + --instance-type "{{ cloud_instance_types.ibm_cloud.s390x }}" \ + --subnets-ids "{{ pool_config.zone }}:{{ pool_config.subnet }}" \ +{% for ip in pool_config.floating_ips %} + --floating-ip-uuid-in-subnet {{ pool_config.subnet }} {{ ip }} \ +{% endfor %} + --additional-volume-size "160" + cmd_delete: | + /var/lib/resallocserver/resalloc_provision/vm-delete + resalloc-ibm-cloud-vm --token-file "{{ ibmcloud_token_file }}" \ + --region {{ pool_config.region_config.name }} --log-level debug delete "$RESALLOC_NAME" + cmd_livecheck: "resalloc-check-vm-ip" + cmd_release: "/var/lib/resallocserver/resalloc_provision/vm-release" + cmd_list: resalloc-ibm-cloud-list-vms --token-file "{{ ibmcloud_token_file }}" --region {{
pool_config.region_config.name }} + livecheck_period: 180 + reuse_opportunity_time: 45 + reuse_max_count: 8 + reuse_max_time: 1800 + description: > + A pool of s390x instances in the IBM Cloud, {{ pool_config.region_config.name_humans }} ({{ pool_config.zone }}). + Thanks to IBM for sponsoring these builders. + +{% endfor %} + +copr_ic_s390x_madrid_{% if devel %}dev{% else %}prod{% endif %}: + max: 0 + max_prealloc: 0 + max_starting: 0 tags: - copr_builder - name: arch_noarch @@ -316,11 +367,14 @@ copr_ic_s390x_madrid_{% if devel %}dev{% else %}prod{% endif %}: {% endif %} - name: arch_s390x_native - name: arch_s390x - cmd_new: '/var/lib/resallocserver/resalloc_provision/ibm-cloud-vm create "$RESALLOC_NAME"' - cmd_delete: "/var/lib/resallocserver/resalloc_provision/vm-delete" + cmd_new: /bin/true no new machines + cmd_delete: | + /var/lib/resallocserver/resalloc_provision/vm-delete + resalloc-ibm-cloud-vm --token-file "{{ ibmcloud_token_file }}" \ + --region eu-es --log-level debug delete "$RESALLOC_NAME" cmd_livecheck: "resalloc-check-vm-ip" cmd_release: "/var/lib/resallocserver/resalloc_provision/vm-release" - cmd_list: "/var/lib/resallocserver/resalloc_provision/ibm-cloud-list-vms" + cmd_list: resalloc-ibm-cloud-list-vms --token-file "{{ ibmcloud_token_file }}" --region eu-es livecheck_period: 180 reuse_opportunity_time: 45 reuse_max_count: 8 diff --git a/roles/copr/backend/templates/resalloc/pools.yaml.expand.sh b/roles/copr/backend/templates/resalloc/pools.yaml.expand.sh new file mode 100755 index 0000000000..389716412f --- /dev/null +++ b/roles/copr/backend/templates/resalloc/pools.yaml.expand.sh @@ -0,0 +1,51 @@ +#! /bin/bash + +# Helper script to expand pools.yaml jinja. + +pfx=pools-yaml-expander +pbook=/tmp/playbook-$pfx.yaml +outdir=/tmp/pools_debugging + +sourcedir=$(dirname "$0") +gitroot=$(cd "$sourcedir" && git rev-parse --show-toplevel) +pools=$gitroot/roles/copr/backend/templates/resalloc/pools.yaml + +if !
test -d "$outdir"; then + mkdir -p "$outdir" + (cd "$outdir" && git init .) +fi + + +for i in devel production; do + if test $i = production; then + vars=$gitroot/inventory/group_vars/copr_aws + file=pools.prod.yaml + else + vars=$gitroot/inventory/group_vars/copr_dev_aws + file=pools.dev.yaml + fi + + cat > $pbook <