Diffstat (limited to 'bitbake/lib/toaster/orm/management/commands/lsupdates.py')
-rw-r--r--  bitbake/lib/toaster/orm/management/commands/lsupdates.py | 228
1 file changed, 97 insertions(+), 131 deletions(-)
diff --git a/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
index efc6b3a946..66114ff89e 100644
--- a/bitbake/lib/toaster/orm/management/commands/lsupdates.py
+++ b/bitbake/lib/toaster/orm/management/commands/lsupdates.py
@@ -29,7 +29,6 @@ from orm.models import ToasterSetting
 import os
 import sys
 
-import json
 import logging
 import threading
 import time
@@ -37,6 +36,18 @@ logger = logging.getLogger("toaster")
 
 DEFAULT_LAYERINDEX_SERVER = "http://layers.openembedded.org/layerindex/api/"
 
+# Add path to bitbake modules for layerindexlib
+# lib/toaster/orm/management/commands/lsupdates.py (abspath)
+# lib/toaster/orm/management/commands (dirname)
+# lib/toaster/orm/management (dirname)
+# lib/toaster/orm (dirname)
+# lib/toaster/ (dirname)
+# lib/ (dirname)
+path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
+sys.path.insert(0, path)
+
+import layerindexlib
+
 
 class Spinner(threading.Thread):
     """ A simple progress spinner to indicate download/parsing is happening"""
@@ -86,45 +97,6 @@ class Command(BaseCommand):
             self.apiurl = ToasterSetting.objects.get(name = 'CUSTOM_LAYERINDEX_SERVER').value
 
         assert self.apiurl is not None
-        try:
-            from urllib.request import urlopen, URLError
-            from urllib.parse import urlparse
-        except ImportError:
-            from urllib2 import urlopen, URLError
-            from urlparse import urlparse
-
-        proxy_settings = os.environ.get("http_proxy", None)
-
-        def _get_json_response(apiurl=None):
-            if None == apiurl:
-                apiurl=self.apiurl
-            http_progress = Spinner()
-            http_progress.start()
-
-            _parsedurl = urlparse(apiurl)
-            path = _parsedurl.path
-
-            # logger.debug("Fetching %s", apiurl)
-            try:
-                res = urlopen(apiurl)
-            except URLError as e:
-                raise Exception("Failed to read %s: %s" % (path, e.reason))
-
-            parsed = json.loads(res.read().decode('utf-8'))
-
-            http_progress.stop()
-            return parsed
-
-        # verify we can get the basic api
-        try:
-            apilinks = _get_json_response()
-        except Exception as e:
-            import traceback
-            if proxy_settings is not None:
-                logger.info("EE: Using proxy %s" % proxy_settings)
-            logger.warning("EE: could not connect to %s, skipping update:"
-                           "%s\n%s" % (self.apiurl, e, traceback.format_exc()))
-            return
 
         # update branches; only those that we already have names listed in the
         # Releases table
@@ -133,112 +105,118 @@ class Command(BaseCommand):
         if len(whitelist_branch_names) == 0:
             raise Exception("Failed to make list of branches to fetch")
 
-        logger.info("Fetching metadata releases for %s",
+        logger.info("Fetching metadata for %s",
                     " ".join(whitelist_branch_names))
 
-        branches_info = _get_json_response(apilinks['branches'] +
-                                           "?filter=name:%s"
-                                           % "OR".join(whitelist_branch_names))
+        # We require a non-empty bb.data, but we can fake it with a dictionary
+        layerindex = layerindexlib.LayerIndex({"DUMMY" : "VALUE"})
+
+        http_progress = Spinner()
+        http_progress.start()
+
+        if whitelist_branch_names:
+            url_branches = ";branch=%s" % ','.join(whitelist_branch_names)
+        else:
+            url_branches = ""
+        layerindex.load_layerindex("%s%s" % (self.apiurl, url_branches))
+
+        http_progress.stop()
+
+        # We know we're only processing one entry, so we reference it here
+        # (this is cheating...)
+        index = layerindex.indexes[0]
 
         # Map the layer index branches to toaster releases
         li_branch_id_to_toaster_release = {}
 
-        total = len(branches_info)
-        for i, branch in enumerate(branches_info):
-            li_branch_id_to_toaster_release[branch['id']] = \
-                Release.objects.get(name=branch['name'])
+        logger.info("Processing releases")
+
+        total = len(index.branches)
+        for i, id in enumerate(index.branches):
+            li_branch_id_to_toaster_release[id] = \
+                Release.objects.get(name=index.branches[id].name)
             self.mini_progress("Releases", i, total)
 
         # keep a track of the layerindex (li) id mappings so that
         # layer_versions can be created for these layers later on
         li_layer_id_to_toaster_layer_id = {}
 
-        logger.info("Fetching layers")
-
-        layers_info = _get_json_response(apilinks['layerItems'])
+        logger.info("Processing layers")
 
-        total = len(layers_info)
-        for i, li in enumerate(layers_info):
+        total = len(index.layerItems)
+        for i, id in enumerate(index.layerItems):
             try:
-                l, created = Layer.objects.get_or_create(name=li['name'])
-                l.up_date = li['updated']
-                l.summary = li['summary']
-                l.description = li['description']
+                l, created = Layer.objects.get_or_create(name=index.layerItems[id].name)
+                l.up_date = index.layerItems[id].updated
+                l.summary = index.layerItems[id].summary
+                l.description = index.layerItems[id].description
 
                 if created:
                     # predefined layers in the fixtures (for example poky.xml)
                     # always preempt the Layer Index for these values
-                    l.vcs_url = li['vcs_url']
-                    l.vcs_web_url = li['vcs_web_url']
-                    l.vcs_web_tree_base_url = li['vcs_web_tree_base_url']
-                    l.vcs_web_file_base_url = li['vcs_web_file_base_url']
+                    l.vcs_url = index.layerItems[id].vcs_url
+                    l.vcs_web_url = index.layerItems[id].vcs_web_url
+                    l.vcs_web_tree_base_url = index.layerItems[id].vcs_web_tree_base_url
+                    l.vcs_web_file_base_url = index.layerItems[id].vcs_web_file_base_url
                 l.save()
             except Layer.MultipleObjectsReturned:
                 logger.info("Skipped %s as we found multiple layers and "
                             "don't know which to update" %
-                            li['name'])
+                            index.layerItems[id].name)
 
-            li_layer_id_to_toaster_layer_id[li['id']] = l.pk
+            li_layer_id_to_toaster_layer_id[id] = l.pk
 
             self.mini_progress("layers", i, total)
 
         # update layer_versions
-        logger.info("Fetching layer versions")
-        layerbranches_info = _get_json_response(
-            apilinks['layerBranches'] + "?filter=branch__name:%s" %
-            "OR".join(whitelist_branch_names))
+        logger.info("Processing layer versions")
 
         # Map Layer index layer_branch object id to
         # layer_version toaster object id
         li_layer_branch_id_to_toaster_lv_id = {}
 
-        total = len(layerbranches_info)
-        for i, lbi in enumerate(layerbranches_info):
+        total = len(index.layerBranches)
+        for i, id in enumerate(index.layerBranches):
             # release as defined by toaster map to layerindex branch
-            release = li_branch_id_to_toaster_release[lbi['branch']]
+            release = li_branch_id_to_toaster_release[index.layerBranches[id].branch_id]
 
             try:
                 lv, created = Layer_Version.objects.get_or_create(
                     layer=Layer.objects.get(
-                        pk=li_layer_id_to_toaster_layer_id[lbi['layer']]),
+                        pk=li_layer_id_to_toaster_layer_id[index.layerBranches[id].layer_id]),
                     release=release
                 )
             except KeyError:
                 logger.warning(
                     "No such layerindex layer referenced by layerbranch %d" %
-                    lbi['layer'])
+                    index.layerBranches[id].layer_id)
                 continue
 
             if created:
-                lv.release = li_branch_id_to_toaster_release[lbi['branch']]
-                lv.up_date = lbi['updated']
-                lv.commit = lbi['actual_branch']
-                lv.dirpath = lbi['vcs_subdir']
+                lv.release = li_branch_id_to_toaster_release[index.layerBranches[id].branch_id]
+                lv.up_date = index.layerBranches[id].updated
+                lv.commit = index.layerBranches[id].actual_branch
+                lv.dirpath = index.layerBranches[id].vcs_subdir
                 lv.save()
 
-            li_layer_branch_id_to_toaster_lv_id[lbi['id']] =\
+            li_layer_branch_id_to_toaster_lv_id[index.layerBranches[id].id] =\
                 lv.pk
             self.mini_progress("layer versions", i, total)
 
-        logger.info("Fetching layer version dependencies")
-        # update layer dependencies
-        layerdependencies_info = _get_json_response(
-            apilinks['layerDependencies'] +
-            "?filter=layerbranch__branch__name:%s" %
-            "OR".join(whitelist_branch_names))
+        logger.info("Processing layer version dependencies")
 
         dependlist = {}
-        for ldi in layerdependencies_info:
+        for id in index.layerDependencies:
             try:
                 lv = Layer_Version.objects.get(
-                    pk=li_layer_branch_id_to_toaster_lv_id[ldi['layerbranch']])
+                    pk=li_layer_branch_id_to_toaster_lv_id[index.layerDependencies[id].layerbranch_id])
             except Layer_Version.DoesNotExist as e:
                 continue
 
             if lv not in dependlist:
                 dependlist[lv] = []
             try:
-                layer_id = li_layer_id_to_toaster_layer_id[ldi['dependency']]
+                layer_id = li_layer_id_to_toaster_layer_id[index.layerDependencies[id].dependency_id]
 
                 dependlist[lv].append(
                     Layer_Version.objects.get(layer__pk=layer_id,
@@ -247,7 +225,7 @@ class Command(BaseCommand):
             except Layer_Version.DoesNotExist:
                 logger.warning("Cannot find layer version (ls:%s),"
                                "up_id:%s lv:%s" %
-                               (self, ldi['dependency'], lv))
+                               (self, index.layerDependencies[id].dependency_id, lv))
 
         total = len(dependlist)
         for i, lv in enumerate(dependlist):
@@ -258,73 +236,61 @@ class Command(BaseCommand):
             self.mini_progress("Layer version dependencies", i, total)
 
         # update Distros
-        logger.info("Fetching distro information")
-        distros_info = _get_json_response(
-            apilinks['distros'] + "?filter=layerbranch__branch__name:%s" %
-            "OR".join(whitelist_branch_names))
+        logger.info("Processing distro information")
 
-        total = len(distros_info)
-        for i, di in enumerate(distros_info):
+        total = len(index.distros)
+        for i, id in enumerate(index.distros):
             distro, created = Distro.objects.get_or_create(
-                name=di['name'],
+                name=index.distros[id].name,
                 layer_version=Layer_Version.objects.get(
-                    pk=li_layer_branch_id_to_toaster_lv_id[di['layerbranch']]))
-            distro.up_date = di['updated']
-            distro.name = di['name']
-            distro.description = di['description']
+                    pk=li_layer_branch_id_to_toaster_lv_id[index.distros[id].layerbranch_id]))
+            distro.up_date = index.distros[id].updated
+            distro.name = index.distros[id].name
+            distro.description = index.distros[id].description
             distro.save()
             self.mini_progress("distros", i, total)
 
         # update machines
-        logger.info("Fetching machine information")
-        machines_info = _get_json_response(
-            apilinks['machines'] + "?filter=layerbranch__branch__name:%s" %
-            "OR".join(whitelist_branch_names))
+        logger.info("Processing machine information")
 
-        total = len(machines_info)
-        for i, mi in enumerate(machines_info):
+        total = len(index.machines)
+        for i, id in enumerate(index.machines):
             mo, created = Machine.objects.get_or_create(
-                name=mi['name'],
+                name=index.machines[id].name,
                 layer_version=Layer_Version.objects.get(
-                    pk=li_layer_branch_id_to_toaster_lv_id[mi['layerbranch']]))
-            mo.up_date = mi['updated']
-            mo.name = mi['name']
-            mo.description = mi['description']
+                    pk=li_layer_branch_id_to_toaster_lv_id[index.machines[id].layerbranch_id]))
+            mo.up_date = index.machines[id].updated
+            mo.name = index.machines[id].name
+            mo.description = index.machines[id].description
             mo.save()
             self.mini_progress("machines", i, total)
 
         # update recipes; paginate by layer version / layer branch
-        logger.info("Fetching recipe information")
-        recipes_info = _get_json_response(
-            apilinks['recipes'] + "?filter=layerbranch__branch__name:%s" %
-            "OR".join(whitelist_branch_names))
+        logger.info("Processing recipe information")
 
-        total = len(recipes_info)
-        for i, ri in enumerate(recipes_info):
+        total = len(index.recipes)
+        for i, id in enumerate(index.recipes):
             try:
-                lv_id = li_layer_branch_id_to_toaster_lv_id[ri['layerbranch']]
+                lv_id = li_layer_branch_id_to_toaster_lv_id[index.recipes[id].layerbranch_id]
                 lv = Layer_Version.objects.get(pk=lv_id)
 
                 ro, created = Recipe.objects.get_or_create(
                     layer_version=lv,
-                    name=ri['pn']
+                    name=index.recipes[id].pn
                 )
 
                 ro.layer_version = lv
-                ro.up_date = ri['updated']
-                ro.name = ri['pn']
-                ro.version = ri['pv']
-                ro.summary = ri['summary']
-                ro.description = ri['description']
-                ro.section = ri['section']
-                ro.license = ri['license']
-                ro.homepage = ri['homepage']
-                ro.bugtracker = ri['bugtracker']
-                ro.file_path = ri['filepath'] + "/" + ri['filename']
-                if 'inherits' in ri:
-                    ro.is_image = 'image' in ri['inherits'].split()
-                else: # workaround for old style layer index
-                    ro.is_image = "-image-" in ri['pn']
+                ro.up_date = index.recipes[id].updated
+                ro.name = index.recipes[id].pn
+                ro.version = index.recipes[id].pv
+                ro.summary = index.recipes[id].summary
+                ro.description = index.recipes[id].description
+                ro.section = index.recipes[id].section
+                ro.license = index.recipes[id].license
+                ro.homepage = index.recipes[id].homepage
+                ro.bugtracker = index.recipes[id].bugtracker
+                ro.file_path = index.recipes[id].fullpath
+                ro.is_image = 'image' in index.recipes[id].inherits.split()
                 ro.save()
             except Exception as e:
                 logger.warning("Failed saving recipe %s", e)
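
For reference, the layerindexlib flow that this patch adopts boils down to the sketch below. This is a minimal illustration, not part of the patch: it assumes bitbake's lib/ directory is already on sys.path (as the new module-level code arranges), uses only calls and attributes that appear in the diff, and the branch name is a placeholder.

# Minimal sketch of the layerindexlib usage pattern adopted above (illustrative only).
import layerindexlib

# layerindexlib expects a data store; like lsupdates.py, fake it with a dictionary.
layerindex = layerindexlib.LayerIndex({"DUMMY": "VALUE"})

# Optionally restrict the fetch to specific branches via a ";branch=" suffix.
apiurl = "http://layers.openembedded.org/layerindex/api/"
layerindex.load_layerindex("%s;branch=%s" % (apiurl, "master"))

# Only one URL was loaded, so only one index entry is expected.
index = layerindex.indexes[0]

# Collections (branches, layerItems, layerBranches, recipes, ...) are
# keyed by the layer index object id.
for branch_id in index.branches:
    print(index.branches[branch_id].name)
for layer_id in index.layerItems:
    print(index.layerItems[layer_id].name, index.layerItems[layer_id].vcs_url)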