2 # Licensed under the Apache License, Version 2.0 (the "License"); you may
3 # not use this file except in compliance with the License. You may obtain
4 # a copy of the License at
6 # http://www.apache.org/licenses/LICENSE-2.0
8 # Unless required by applicable law or agreed to in writing, software
9 # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
10 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
11 # License for the specific language governing permissions and limitations
# Heat pseudo-parameters that every service template is required to accept;
# validate()/validate_service()/validate_docker_service() check each
# template's parameters section against this list.
required_params = ['EndpointMap', 'ServiceNetMap', 'DefaultPasswords',
                   'RoleName', 'RoleParameters', 'ServiceData']

# NOTE(bnemec): The duplication in this list is intentional.  For the
# transition to generated environments we have two copies of these files,
# so they need to be listed twice.  Once the deprecated version can be
# removed the duplicate entries can be as well.
envs_containing_endpoint_map = ['tls-endpoints-public-dns.yaml',
                                'tls-endpoints-public-ip.yaml',
                                'tls-everywhere-endpoints-dns.yaml',
                                'tls-endpoints-public-dns.yaml',
                                'tls-endpoints-public-ip.yaml',
                                'tls-everywhere-endpoints-dns.yaml']
# The canonical file defining the base EndpointMap default.
ENDPOINT_MAP_FILE = 'endpoint_map.yaml'
# Sections that must / may appear in a docker service's role_data output.
REQUIRED_DOCKER_SECTIONS = ['service_name', 'docker_config', 'puppet_config',
                            'config_settings', 'step_config']
OPTIONAL_DOCKER_SECTIONS = ['docker_puppet_tasks', 'upgrade_tasks',
                            'service_config_settings', 'host_prep_tasks',
                            'metadata_settings', 'kolla_config']
# Keys that must / may appear in a docker service's puppet_config section.
REQUIRED_DOCKER_PUPPET_CONFIG_SECTIONS = ['config_volume', 'step_config',
                                          'config_image']
OPTIONAL_DOCKER_PUPPET_CONFIG_SECTIONS = ['puppet_tags', 'volumes']
# Mapping of parameter names to a list of the fields we should _not_ enforce
# consistency across files on. This should only contain parameters whose
# definition we cannot change for backwards compatibility reasons. New
# parameters to the templates should not be added to this list.
PARAMETER_DEFINITION_EXCLUSIONS = {'ManagementNetCidr': ['default'],
                                   'ManagementAllocationPools': ['default'],
                                   'ExternalNetCidr': ['default'],
                                   'ExternalAllocationPools': ['default'],
                                   'StorageNetCidr': ['default'],
                                   'StorageAllocationPools': ['default'],
                                   'StorageMgmtNetCidr': ['default'],
                                   'StorageMgmtAllocationPools': ['default'],
                                   }

# Overrides for snake_case -> CamelCase conversion where the naive
# capitalize-each-segment rule in to_camel_case() would produce the
# wrong spelling (e.g. acronyms).
PREFERRED_CAMEL_CASE = {
    'ec2api': 'Ec2Api',
    'haproxy': 'HAProxy',
}
63 print('Usage %s <yaml file or directory>' % sys.argv[0])
def to_camel_case(string):
    """Return the CamelCase form of a snake_case name.

    Irregular spellings are looked up in PREFERRED_CAMEL_CASE first;
    otherwise each underscore-separated segment is capitalized, with an
    empty segment (from a doubled underscore) kept as '_'.
    """
    segments = string.split('_')
    fallback = ''.join(segment.capitalize() or '_' for segment in segments)
    return PREFERRED_CAMEL_CASE.get(string, fallback)
def get_base_endpoint_map(filename):
    """Return the default EndpointMap defined in the base endpoint_map.yaml.

    :param filename: path to the base endpoint_map.yaml template
    :returns: the EndpointMap parameter's default dict
    """
    # NOTE(review): a try/except wrapper appears to be elided from this
    # chunk -- the traceback print below looks like its handler and is
    # unreachable as written.  Also, yaml.load without an explicit Loader
    # is unsafe on untrusted input; consider yaml.safe_load.
    tpl = yaml.load(open(filename).read())
    return tpl['parameters']['EndpointMap']['default']
    print(traceback.format_exc())
def get_endpoint_map_from_env(filename):
    """Extract the EndpointMap parameter_defaults from an environment file.

    :param filename: path to a tls-* environment file
    """
    # NOTE(review): a try/except wrapper and the enclosing
    # "return {'file': filename, ...}" lines appear to be elided from this
    # chunk; only comments were added here.  yaml.load without an explicit
    # Loader is unsafe on untrusted input; consider yaml.safe_load.
    tpl = yaml.load(open(filename).read())
        'map': tpl['parameter_defaults']['EndpointMap']
    print(traceback.format_exc())
def validate_endpoint_map(base_map, env_map):
    """Report whether two endpoint maps define exactly the same endpoint keys.

    :param base_map: the EndpointMap default from endpoint_map.yaml
    :param env_map: an EndpointMap override from an environment file
    :returns: True when both maps contain an identical key set
    """
    return sorted(base_map) == sorted(env_map)
def validate_hci_compute_services_default(env_filename, env_tpl):
    """Check hyperconverged-ceph.yaml's ComputeServices against roles_data.

    ComputeServices minus the CephOSD service must equal the Compute
    role's ServicesDefault list in ../roles_data.yaml.

    :param env_filename: path of the environment file being validated
    :param env_tpl: the parsed environment file dict
    """
    # NOTE(review): the return statements of this function appear to be
    # elided from this chunk; only comments were added here.
    env_services_list = env_tpl['parameter_defaults']['ComputeServices']
    # Drop the HCI-specific addition before comparing with the base role.
    env_services_list.remove('OS::TripleO::Services::CephOSD')
    roles_filename = os.path.join(os.path.dirname(env_filename),
                                  '../roles_data.yaml')
    roles_tpl = yaml.load(open(roles_filename).read())
    for role in roles_tpl:
        if role['name'] == 'Compute':
            roles_services_list = role['ServicesDefault']
            # Order-insensitive comparison of the two service lists.
            if sorted(env_services_list) != sorted(roles_services_list):
                print('ERROR: ComputeServices in %s is different '
                      'from ServicesDefault in roles_data.yaml' % env_filename)
def validate_mysql_connection(settings):
    """Flag mysql connection URIs that do not set a client bind_address.

    Walks the config_settings tree; any *connection value targeting the
    internal mysql endpoint must carry read_default_file /
    read_default_group so the client sets bind_address (needed to survive
    VIP failover).

    :param settings: the role_data config_settings mapping
    :returns: a truthy error status when a bad URI was found
    """
    # NOTE(review): several structural lines (the error_status
    # initialisation, return statements and some loop headers) appear to
    # be elided from this chunk; only comments were added here.
    no_op = lambda *args: False

    def mysql_protocol(items):
        # The get_param list form referencing the internal mysql protocol.
        return items == ['EndpointMap', 'MysqlInternal', 'protocol']

    def client_bind_address(item):
        # True when the URI token carries the options that make the
        # client set bind_address.
        return 'read_default_file' in item and \
            'read_default_group' in item

    def validate_mysql_uri(key, items):
        # Only consider a connection if it targets mysql
        if key.endswith('connection') and \
                search(items, mysql_protocol, no_op):
            # Assume the "bind_address" option is one of
            # the token that made up the uri
            if not search(items, client_bind_address, no_op):

    def search(item, check_item, check_key):
        # Recursive walk over nested lists/dicts, applying check_item to
        # values and check_key to dict key/value pairs.
        elif isinstance(item, list):
                if search(i, check_item, check_key):
        elif isinstance(item, dict):
            for k in item.keys():
                if check_key(k, item[k]):
                elif search(item[k], check_item, check_key):

    search(settings, no_op, validate_mysql_uri)
    return error_status[0]
def validate_docker_service(filename, tpl):
    """Validate the role_data output of a docker service template.

    Checks required/optional role_data sections, the puppet_config keys,
    the Docker*ConfigImage heat parameter matching config_volume, the
    docker_config container definitions, and the standard required heat
    parameters.

    :param filename: template path, used in error messages
    :param tpl: parsed template dict
    """
    # NOTE(review): return/continue/else lines appear to be elided from
    # this chunk; only comments were added here.
    if 'outputs' in tpl and 'role_data' in tpl['outputs']:
        if 'value' not in tpl['outputs']['role_data']:
            print('ERROR: invalid role_data for filename: %s'
        role_data = tpl['outputs']['role_data']['value']

        # All mandatory role_data sections must be present.
        for section_name in REQUIRED_DOCKER_SECTIONS:
            if section_name not in role_data:
                print('ERROR: %s is required in role_data for %s.'
                      % (section_name, filename))

        # Any section that is neither required nor optional is an error.
        for section_name in role_data.keys():
            if section_name in REQUIRED_DOCKER_SECTIONS:
                if section_name in OPTIONAL_DOCKER_SECTIONS:
                    print('ERROR: %s is extra in role_data for %s.'
                          % (section_name, filename))

        if 'puppet_config' in role_data:
            puppet_config = role_data['puppet_config']
            # Keys inside puppet_config are likewise whitelisted.
            for key in puppet_config:
                if key in REQUIRED_DOCKER_PUPPET_CONFIG_SECTIONS:
                    if key in OPTIONAL_DOCKER_PUPPET_CONFIG_SECTIONS:
                        print('ERROR: %s should not be in puppet_config section.'
            for key in REQUIRED_DOCKER_PUPPET_CONFIG_SECTIONS:
                if key not in puppet_config:
                    print('ERROR: %s is required in puppet_config for %s.'

            # Each config_volume needs a matching Docker<Xxx>ConfigImage
            # heat parameter.
            config_volume = puppet_config.get('config_volume')
            expected_config_image_parameter = "Docker%sConfigImage" % to_camel_case(config_volume)
            if config_volume and not expected_config_image_parameter in tpl.get('parameters', []):
                print('ERROR: Missing %s heat parameter for %s config_volume.'
                      % (expected_config_image_parameter, config_volume))

        if 'docker_config' in role_data:
            docker_config = role_data['docker_config']
            for _, step in docker_config.items():
                for _, container in step.items():
                    if not isinstance(container, dict):
                        # NOTE(mandre) this skips everything that is not a dict
                        # so we may ignore some containers definitions if they
                        # are in a map_merge for example
                    command = container.get('command', '')
                    if isinstance(command, list):
                        command = ' '.join(map(str, command))
                    # bootstrap_host_exec only works when run as root.
                    if 'bootstrap_host_exec' in command \
                        and container.get('user') != 'root':
                        print('ERROR: bootstrap_host_exec needs to run as the root user.')

    # All templates must accept the standard required parameters.
    if 'parameters' in tpl:
        for param in required_params:
            if param not in tpl['parameters']:
                print('ERROR: parameter %s is required for %s.'
def validate_service(filename, tpl):
    """Validate the role_data output of a puppet service template.

    :param filename: template path, used in error messages
    :param tpl: parsed template dict
    """
    # NOTE(review): the return statements of this function appear to be
    # elided from this chunk; only comments were added here.
    if 'outputs' in tpl and 'role_data' in tpl['outputs']:
        if 'value' not in tpl['outputs']['role_data']:
            print('ERROR: invalid role_data for filename: %s'
        role_data = tpl['outputs']['role_data']['value']
        if 'service_name' not in role_data:
            print('ERROR: service_name is required in role_data for %s.'
        # service_name must match the filename, but with an underscore
        if (role_data['service_name'] !=
                os.path.basename(filename).split('.')[0].replace("-", "_")):
            print('ERROR: service_name should match file name for service: %s.'
        # if service connects to mysql, the uri should use option
        # bind_address to avoid issues with VIP failover
        if 'config_settings' in role_data and \
                validate_mysql_connection(role_data['config_settings']):
            print('ERROR: mysql connection uri should use option bind_address')
    # All templates must accept the standard required parameters.
    if 'parameters' in tpl:
        for param in required_params:
            if param not in tpl['parameters']:
                print('ERROR: parameter %s is required for %s.'
def validate(filename, param_map):
    """Validate a Heat template.

    :param filename: The path to the file to validate
    :param param_map: A dict which will be populated with the details of the
                      parameters in the template, keyed by parameter name,
                      e.g.::

                          {'SomeParam': [{'filename': './file1.yaml',
                                          'data': {'description': '', ...}},
                                         {'filename': './file2.yaml',
                                          'data': {'description': '', ...}}]}
    """
    # NOTE(review): a try/except around the load, the retval plumbing and
    # the str_p assignment appear to be elided from this chunk; only the
    # docstring and comments were touched here.
    print('Validating %s' % filename)
    tpl = yaml.load(open(filename).read())

    # The template alias version should be used instead of a date; this
    # validation is applied to all templates, not just those in the
    # services folder.
    if 'heat_template_version' in tpl and not str(tpl['heat_template_version']).isalpha():
        print('ERROR: heat_template_version needs to be the release alias not a date: %s'

    # qdr aliases rabbitmq service to provide alternative messaging backend
    if (filename.startswith('./puppet/services/') and
            filename not in ['./puppet/services/qdr.yaml']):
        retval = validate_service(filename, tpl)

    if filename.startswith('./docker/services/'):
        retval = validate_docker_service(filename, tpl)

    if filename.endswith('hyperconverged-ceph.yaml'):
        retval = validate_hci_compute_services_default(filename, tpl)

    print(traceback.format_exc())

    # yaml is OK, now walk the parameters and output a warning for unused ones
    if 'heat_template_version' in tpl:
        for p, data in tpl.get('parameters', {}).items():
            definition = {'data': data, 'filename': filename}
            param_map.setdefault(p, []).append(definition)
            # Required params are considered used by convention.
            if p in required_params:
            in_resources = str_p in str(tpl.get('resources', {}))
            in_outputs = str_p in str(tpl.get('outputs', {}))
            if not in_resources and not in_outputs:
                print('Warning: parameter %s in template %s '
                      'appears to be unused' % (p, filename))
# --- Script entry point: validate every yaml file given on the command line.
# NOTE(review): several lines (usage/exit handling, initialisation of
# failed_files/param_map/ex_dict/mismatch_count, the inner "for f in files"
# header and various if/else branch lines) appear to be elided from this
# chunk; only comments were added here.
if len(sys.argv) < 2:

path_args = sys.argv[1:]

base_endpoint_map = None
env_endpoint_maps = list()

for base_path in path_args:
    if os.path.isdir(base_path):
        for subdir, dirs, files in os.walk(base_path):
            # Only plain yaml templates are validated; jinja2 sources
            # (*.j2.yaml) are skipped.
            if f.endswith('.yaml') and not f.endswith('.j2.yaml'):
                file_path = os.path.join(subdir, f)
                failed = validate(file_path, param_map)
                failed_files.append(file_path)
                # Remember the base endpoint map and every environment
                # that overrides it, for the comparison below.
                if f == ENDPOINT_MAP_FILE:
                    base_endpoint_map = get_base_endpoint_map(file_path)
                if f in envs_containing_endpoint_map:
                    env_endpoint_map = get_endpoint_map_from_env(file_path)
                    env_endpoint_maps.append(env_endpoint_map)
    elif os.path.isfile(base_path) and base_path.endswith('.yaml'):
        failed = validate(base_path, param_map)
        failed_files.append(base_path)
        print('Unexpected argument %s' % base_path)

# Every environment override of EndpointMap must stay in sync with the
# base endpoint_map.yaml.
if base_endpoint_map and \
        len(env_endpoint_maps) == len(envs_containing_endpoint_map):
    for env_endpoint_map in env_endpoint_maps:
        matches = validate_endpoint_map(base_endpoint_map,
                                        env_endpoint_map['map'])
        print("ERROR: %s needs to be updated to match changes in base "
              "endpoint map" % env_endpoint_map['file'])
        failed_files.append(env_endpoint_map['file'])
        print("%s matches base endpoint map" % env_endpoint_map['file'])
    print("ERROR: Did not find expected number of environments containing the "
          "EndpointMap parameter. If you meant to add or remove one of these "
          "environments then you also need to update this tool.")
    if not base_endpoint_map:
        failed_files.append(ENDPOINT_MAP_FILE)
    if len(env_endpoint_maps) != len(envs_containing_endpoint_map):
        matched_files = set(os.path.basename(matched_env_file['file'])
                            for matched_env_file in env_endpoint_maps)
        failed_files.extend(set(envs_containing_endpoint_map) - matched_files)

# Validate that duplicate parameters defined in multiple files all have the
# same definition.
for p, defs in param_map.items():
    # Nothing to validate if the parameter is only defined once
    check_data = [d['data'] for d in defs]
    # Override excluded fields so they don't affect the result
    exclusions = PARAMETER_DEFINITION_EXCLUSIONS.get(p, [])
    for field in exclusions:
        ex_dict[field] = 'IGNORED'
    # If all items in the list are not == the first, then the check fails
    if check_data.count(check_data[0]) != len(check_data):
        # TODO(bnemec): Make this a hard failure once all the templates have
        # consistent parameter definitions.
        #failed_files.extend([d['filename'] for d in defs])
        print('Mismatched parameter definitions found for "%s"' % p)
        print('Definitions found:')
        print(' %s:\n %s' % (d['filename'], d['data']))
print('Mismatched parameter definitions: %d' % mismatch_count)

print('Validation failed on:')
for f in failed_files:
print('Validation successful!')