Fixes Docker image upload for master/rocky
apex/builders/common_builder.py
##############################################################################
# Copyright (c) 2017 Tim Rozet (trozet@redhat.com) and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################

# Common building utilities for undercloud and overcloud

import datetime
import git
import json
import logging
import os
import platform
import pprint
import re
import urllib.parse
import yaml

import apex.builders.overcloud_builder as oc_builder
from apex import build_utils
from apex.builders import exceptions as exc
from apex.common import constants as con
from apex.common import utils
from apex.virtual import utils as virt_utils

def project_to_path(project, patch=None):
    """
    Translates a project name to the absolute file path to use when patching
    :param project: name of project
    :param patch: the patch to be applied to the project
    :return: File path
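
    Examples (illustrative only; the outputs follow directly from the
    branches below)::

        project_to_path('openstack/puppet-nova')
        # -> '/etc/puppet/modules/nova'
        project_to_path('openstack/tripleo-heat-templates')
        # -> '/usr/share/openstack-tripleo-heat-templates'
        project_to_path('openstack/python-tripleoclient')
        # -> '/usr/lib/python2.7/site-packages/'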
    """
    if project.startswith('openstack/'):
        project = os.path.basename(project)
    if 'puppet' in project:
        return "/etc/puppet/modules/{}".format(project.replace('puppet-', ''))
    elif 'tripleo-heat-templates' in project:
        return "/usr/share/openstack-tripleo-heat-templates"
    elif ('tripleo-common' in project and
          build_utils.is_path_in_patch(patch, 'container-images/')):
        # tripleo-common has a python component and a yaml (container images)
        # component. Here we detect whether the patch changes the yaml
        # component and, if so, treat it as if it were not python. The caveat
        # is that a patch touching both python and yaml will not be handled
        # correctly.
        # FIXME(trozet): add ability to split tripleo-common patches that
        # modify both python and yaml
        return "/usr/share/openstack-tripleo-common-containers/"
    else:
        # assume python.  python patches will apply to a project name subdir.
        # For example, a python-tripleoclient patch will apply to the
        # tripleoclient directory, which is the directory extracted during
        # python install into the PYTHONPATH.  Therefore we only need to be
        # in the PYTHONPATH directory to apply a patch
        return "/usr/lib/python2.7/site-packages/"


def project_to_docker_image(project):
    """
    Translates an OpenStack project to OOO services that are containerized
    :param project: name of OpenStack project
    :return: List of OOO docker service names
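
    Example (illustrative; the actual list depends on which centos-binary-*
    images are published on Docker Hub at the time of the call)::

        project_to_docker_image('nova')
        # -> ['nova-api', 'nova-compute', 'nova-conductor', ...]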
    """
    # Fetch all docker containers in docker hub with tripleo and filter
    # based on project

    hub_output = utils.open_webpage(
        urllib.parse.urljoin(con.DOCKERHUB_OOO, '?page_size=1024'), timeout=10)
    try:
        results = json.loads(hub_output.decode())['results']
    except Exception as e:
        logging.error("Unable to parse docker hub output for "
                      "tripleoupstream repository")
        logging.debug("HTTP response from dockerhub:\n{}".format(hub_output))
        raise exc.ApexCommonBuilderException(
            "Failed to parse docker image info from Docker Hub: {}".format(e))
    logging.debug("Docker Hub tripleoupstream entities found: {}".format(
        results))
    docker_images = list()
    for result in results:
        if result['name'].startswith("centos-binary-{}".format(project)):
            # add as docker image shortname (just service name)
            docker_images.append(result['name'].replace('centos-binary-', ''))

    return docker_images


def is_patch_promoted(change, branch, docker_image=None):
    """
    Checks whether a merged patch is already included in either the promoted
    docker container or the promoted tripleo images
    :param change: gerrit change json output
    :param branch: branch to use when polling artifacts (does not include
    stable prefix)
    :param docker_image: docker container this applies to, if any (defaults
    to None)
    :return: True if the patch exists in a promoted artifact upstream
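
    Example change dict (illustrative; only the keys used by this function
    are shown, values are placeholders)::

        {'status': 'MERGED',
         'submitted': '2018-06-11 15:23:55.000000000'}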
    """
    assert isinstance(change, dict)
    assert 'status' in change

    # if not merged we already know this is not closed/abandoned, so we know
    # this is not promoted
    if change['status'] != 'MERGED':
        return False
    assert 'submitted' in change
    # drop microseconds since we only need second resolution
    stime = re.sub(r'\..*$', '', change['submitted'])
    submitted_date = datetime.datetime.strptime(stime, "%Y-%m-%d %H:%M:%S")
    # Patch applies to overcloud/undercloud
    if docker_image is None:
        oc_url = urllib.parse.urljoin(
            con.UPSTREAM_RDO.replace('master', branch), 'overcloud-full.tar')
        oc_mtime = utils.get_url_modified_date(oc_url)
        if oc_mtime > submitted_date:
            logging.debug("oc image was last modified at {}, which is"
                          " newer than merge date: {}".format(oc_mtime,
                                                              submitted_date))
            return True
    else:
        # must be a docker patch, check docker tag modified time
        docker_url = con.DOCKERHUB_OOO.replace('tripleomaster',
                                               "tripleo{}".format(branch))
        url_path = "{}/tags/{}".format(docker_image, con.DOCKER_TAG)
        docker_url = urllib.parse.urljoin(docker_url, url_path)
        logging.debug("docker url is: {}".format(docker_url))
        docker_output = utils.open_webpage(docker_url, 10)
        logging.debug('Docker web output: {}'.format(docker_output))
        hub_mtime = json.loads(docker_output.decode())['last_updated']
        # docker modified time is in this format '2018-06-11T15:23:55.135744Z'
        # and we drop the microseconds
        hub_mtime = re.sub(r'\..*$', '', hub_mtime)
        hub_dtime = datetime.datetime.strptime(hub_mtime, "%Y-%m-%dT%H:%M:%S")
        if hub_dtime > submitted_date:
            logging.debug("docker image: {} was last modified at {}, which is"
                          " newer than merge date: {}".format(docker_image,
                                                              hub_dtime,
                                                              submitted_date))
            return True
    return False


def add_upstream_patches(patches, image, tmp_dir,
                         default_branch=os.path.join('stable',
                                                     con.DEFAULT_OS_VERSION),
                         uc_ip=None, docker_tag=None):
    """
    Adds patches from upstream OpenStack gerrit to the Undercloud for
    deployment
    :param patches: list of patches
    :param image: undercloud image
    :param tmp_dir: directory used to store temporary patch files
    :param default_branch: default branch to fetch the commit from (if not
    specified in the patch)
    :param uc_ip: undercloud IP (required only for docker patches)
    :param docker_tag: Docker Tag (required only for docker patches)
    :return: Set of docker services patched (if applicable)
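
    Example patches argument (illustrative; 'project' and 'change-id' are
    required, 'branch' is optional and the values here are placeholders)::

        [{'change-id': 'I0000000000000000000000000000000000000000',
          'project': 'openstack/tripleo-common',
          'branch': 'stable/rocky'}]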
    """
    virt_ops = [{con.VIRT_INSTALL: 'patch'}]
    logging.debug("Evaluating upstream patches:\n{}".format(patches))
    docker_services = set()
    for patch in patches:
        assert isinstance(patch, dict)
        assert all(i in patch.keys() for i in ['project', 'change-id'])
        if 'branch' in patch.keys():
            branch = patch['branch']
        else:
            branch = default_branch
        patch_diff = build_utils.get_patch(patch['change-id'],
                                           patch['project'], branch)
        project_path = project_to_path(patch['project'], patch_diff)
        # If there is a docker tag and the project path is python, we know
        # this patch belongs in the docker container for a docker service.
        # Therefore we build the dockerfile and move the patch into the
        # containers directory.  We also assume this builder call is for the
        # overcloud, because we do not support undercloud containers
        if docker_tag and 'python' in project_path:
            # Projects map to multiple THT services, need to check which
            # are supported
            ooo_docker_services = project_to_docker_image(patch['project'])
            docker_img = ooo_docker_services[0]
        else:
            ooo_docker_services = []
            docker_img = None
        change = build_utils.get_change(con.OPENSTACK_GERRIT,
                                        patch['project'], branch,
                                        patch['change-id'])
        patch_promoted = is_patch_promoted(change,
                                           branch.replace('stable/', ''),
                                           docker_img)

        if patch_diff and not patch_promoted:
            patch_file = "{}.patch".format(patch['change-id'])
            # If we found services, then we treat the patch as if it applies
            # to docker only
            if ooo_docker_services:
                os_version = default_branch.replace('stable/', '')
                for service in ooo_docker_services:
                    docker_services = docker_services.union({service})
                    docker_cmds = [
                        "WORKDIR {}".format(project_path),
                        "ADD {} {}".format(patch_file, project_path),
                        "RUN patch -p1 < {}".format(patch_file)
                    ]
                    src_img_uri = "{}:8787/tripleo{}/centos-binary-{}:" \
                                  "{}".format(uc_ip, os_version, service,
                                              docker_tag)
                    oc_builder.build_dockerfile(service, tmp_dir, docker_cmds,
                                                src_img_uri)
                patch_file_path = os.path.join(tmp_dir, 'containers',
                                               patch_file)
            else:
                patch_file_path = os.path.join(tmp_dir, patch_file)
                virt_ops.extend([
                    {con.VIRT_UPLOAD: "{}:{}".format(patch_file_path,
                                                     project_path)},
                    {con.VIRT_RUN_CMD: "cd {} && patch -p1 < {}".format(
                        project_path, patch_file)}])
                logging.info("Adding patch {} to {}".format(patch_file,
                                                            image))
            with open(patch_file_path, 'w') as fh:
                fh.write(patch_diff)
        else:
            logging.info("Ignoring patch:\n{}".format(patch))
    if len(virt_ops) > 1:
        virt_utils.virt_customize(virt_ops, image)
    return docker_services


def add_repo(repo_url, repo_name, image, tmp_dir):
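    """
    Creates a yum repo file and uploads it into /etc/yum.repos.d/ inside the
    given image.

    Example of the generated repo file (illustrative; 'apex' and the URL are
    placeholder values)::

        [apex]
        name=apex
        baseurl=http://example.com/repo
        gpgcheck=0

    :param repo_url: base URL of the yum repository
    :param repo_name: name used for the repo file and its section header
    :param image: image to add the repo to
    :param tmp_dir: directory used to stage the temporary repo file
    """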
    assert repo_name is not None
    assert repo_url is not None
    repo_file = "{}.repo".format(repo_name)
    repo_file_path = os.path.join(tmp_dir, repo_file)
    content = [
        "[{}]".format(repo_name),
        "name={}".format(repo_name),
        "baseurl={}".format(repo_url),
        "gpgcheck=0"
    ]
    logging.debug("Creating repo file {}".format(repo_name))
    with open(repo_file_path, 'w') as fh:
        fh.writelines("{}\n".format(line) for line in content)
    logging.debug("Adding repo {} to {}".format(repo_file, image))
    virt_utils.virt_customize([
        {con.VIRT_UPLOAD: "{}:/etc/yum.repos.d/".format(repo_file_path)}],
        image
    )


def create_git_archive(repo_url, repo_name, tmp_dir,
                       branch='master', prefix=''):
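    """
    Clones a git repository and writes a tar archive of the requested branch
    :param repo_url: URL of the git repository to clone
    :param repo_name: name used for the clone directory and the archive file
    :param tmp_dir: directory to clone into and to write the archive to
    :param branch: branch to archive (defaults to 'master')
    :param prefix: path prefix prepended to files inside the archive
    :return: path to the generated tar archive
    """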
    repo = git.Repo.clone_from(repo_url, os.path.join(tmp_dir, repo_name))
    repo_git = repo.git
    if branch != str(repo.active_branch):
        repo_git.checkout("origin/{}".format(branch))
    archive_path = os.path.join(tmp_dir, "{}.tar".format(repo_name))
    with open(archive_path, 'wb') as fh:
        repo.archive(fh, prefix=prefix)
    logging.debug("Wrote archive file: {}".format(archive_path))
    return archive_path


def get_neutron_driver(ds_opts):
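    """
    Determines the neutron driver to use based on the deploy settings
    :param ds_opts: dictionary of deploy options
    :return: 'opendaylight', 'ovn', 'vpp' or None
    """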
    sdn = ds_opts.get('sdn_controller', None)
    if sdn in ('opendaylight', 'ovn'):
        return sdn

    if ds_opts.get('vpp', False):
        return 'vpp'

    return None


def prepare_container_images(prep_file, branch='master', neutron_driver=None):
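    """
    Rewrites a container image prepare file so that images are pulled from
    the docker.io namespace matching the given branch, optionally setting the
    neutron driver (and a ceph tag override on aarch64).

    Minimal expected prep file structure (illustrative; only the keys touched
    by this function are shown)::

        parameter_defaults:
          ContainerImagePrepare:
            - set:
                namespace: docker.io/tripleomaster

    :param prep_file: path to the container image prepare YAML file
    :param branch: upstream branch used to build the docker.io namespace
    :param neutron_driver: neutron driver to set in the prep file (optional)
    :raises ApexCommonBuilderException: if the prep file is missing or has an
    unexpected format
    """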
    if not os.path.isfile(prep_file):
        raise exc.ApexCommonBuilderException("Prep file does not exist: "
                                             "{}".format(prep_file))
    with open(prep_file) as fh:
        data = yaml.safe_load(fh)
    try:
        p_set = data['parameter_defaults']['ContainerImagePrepare'][0]['set']
        if neutron_driver:
            p_set['neutron_driver'] = neutron_driver
        p_set['namespace'] = "docker.io/tripleo{}".format(branch)
        if platform.machine() == 'aarch64':
            p_set['ceph_tag'] = 'master-fafda7d-luminous-centos-7-aarch64'

    except KeyError:
        logging.error("Invalid prep file format: {}".format(prep_file))
        raise exc.ApexCommonBuilderException("Invalid format for prep file")

    logging.debug("Writing new container prep file:\n{}".format(
        pprint.pformat(data)))
    with open(prep_file, 'w') as fh:
        yaml.safe_dump(data, fh, default_flow_style=False)