# SPDX-License-Identifier: Apache-2.0
##############################################################################
# Copyright (c) 2016 The Linux Foundation and others
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################

"""
Generate JSON listing of OPNFV Artifacts

This produces a slimmed-down version of the metadata provided by Google
Storage for each artifact. It also excludes a large number of
uninteresting files.
"""

from apiclient import discovery
from apiclient.errors import HttpError

import argparse
import json
import sys
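
# Top-level structure for the generated JSON listing.
api = {
    'projects': {},
    'releases': {},
}

# Directory names treated as releases. The entries below are example
# placeholders (an assumption); adjust them to the actual OPNFV releases.
releases = [
    'arno.2015.1.0',
    'brahmaputra.1.0',
]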

# List of file extensions to filter out
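# (the exact set is an assumption, based on the "js, images, css,
# buildinfo, etc" comment in the main loop below)
ignore_extensions = [
    '.buildinfo', '.js', '.css', '.png', '.gif', '.jpeg', '.jpg', '.svg',
]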

parser = argparse.ArgumentParser(
    description='OPNFV Artifacts JSON Generator')

# The flag name and dest are assumptions; the script reads args.key below.
parser.add_argument(
    '-k',
    dest='key',
    default='',
    help='API Key for Google Cloud Storage')

# The flag name is an assumption; store_const hands json.dumps an indent
# of 2 when pretty printing is requested, and None otherwise.
parser.add_argument(
    '-p',
    dest='pretty',
    default=None,
    action='store_const',
    const=2,
    help='pretty print the output')

# Parse and assign arguments
args = parser.parse_args()
key = args.key
pretty_print = args.pretty


def output(item, indent=2):
    print(json.dumps(item, sort_keys=True, indent=indent))


def has_gerrit_review(dir_list):
    """
    If a directory contains an integer, it is assumed to be a gerrit
    review number.
    """
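    # Body reconstructed from the docstring above: treat any purely
    # numeric path component as a gerrit review number.
    for directory in dir_list:
        if directory.isdigit():
            return directory
    return False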


def has_release(dir_list):
    """
    Checks if any directory contains a release name.
    """
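    # Body reconstructed from the docstring: return the matching release
    # name, or False when no path component is a known release.
    for directory in dir_list:
        if directory in releases:
            return directory
    return False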


def has_documentation(dir_list):
    """
    Checks for a directory specifically named 'docs'.
    """
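    # Body reconstructed from the docstring.
    return 'docs' in dir_list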


# Rename this or modify how gerrit reviews are handled
def has_logs(gerrit_review):
    """
    If a gerrit review exists, create a link to the review.
    """
    if gerrit_review:
        return "https://gerrit.opnfv.org/gerrit/#/c/%s" % gerrit_review
    return False


def has_ignorable_extension(filename):
    for extension in ignore_extensions:
        if filename.lower().endswith(extension):
            return True
    return False


def get_results(key):
    """
    Pull down all metadata from artifacts.opnfv.org
    and store it in projects as:
    { 'PROJECT': [file ...], }
    """
    storage = discovery.build('storage', 'v1', developerKey=key)
    # The 'fields' filter is reconstructed from the metadata keys read in
    # the loop below.
    files = storage.objects().list(
        bucket='artifacts.opnfv.org',
        fields='nextPageToken,'
               'items(name,mediaLink,updated,contentType,size)')

    while files is not None:
        sites = files.execute()

        for site in sites['items']:
            # Filter out unneeded files (js, images, css, buildinfo, etc)
            if has_ignorable_extension(site['name']):
                continue

            # Split /foo/bar/ into ['foo', 'bar'] and remove any extra
            # slashes (ex. /foo//bar/)
            site_split = list(filter(None, site['name'].split('/')))

            # Don't do anything if we aren't given files multiple
            # directories deep
            if len(site_split) < 2:
                continue

            project = site_split[0]
            name = '/'.join(site_split[1:])
            proxy = "http://build.opnfv.org/artifacts.opnfv.org/%s" % site['name']
            if name.endswith('.html'):
                href = "http://artifacts.opnfv.org/%s" % site['name']
                # 'view' is an assumed label for browsable HTML pages
                href_type = 'view'
            else:
                href = site['mediaLink']
                href_type = 'download'

            gerrit = has_gerrit_review(site_split)
            logs = False  # has_logs(gerrit)
            documentation = has_documentation(site_split)
            release = has_release(site_split)
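
            # 'category' is not defined in the shown code; as a minimal
            # assumption, classify the artifact by the flags computed above.
            if gerrit:
                category = 'gerrit'
            elif release:
                category = 'release'
            else:
                category = 'project'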

            # The key names for the reconstructed entries (release, name,
            # href, proxy_href, documentation) are assumptions.
            metadata = {
                'category': category,
                'gerritreview': gerrit,
                'release': release,
                'name': name,
                'size': site['size'],
                'time': site['updated'],
                'contentType': site['contentType'],
                'href': href,
                'href_type': href_type,
                'proxy_href': proxy,
                'documentation': documentation,
            }

            if project in releases:
                if project not in api['releases']:
                    api['releases'][project] = [metadata]
                else:
                    api['releases'][project].append(metadata)
            else:
                if project not in api['projects']:
                    api['projects'][project] = [metadata]
                else:
                    api['projects'][project].append(metadata)

        files = storage.objects().list_next(files, sites)
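
    # Hand the accumulated structure back; without this the caller below
    # would receive None.
    return api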


# Fail if there is an invalid response from Google Cloud Storage
try:
    js = get_results(key)
except HttpError as e:
    print(e, file=sys.stderr)
    sys.exit(1)

output(js, indent=pretty_print)