author     Alexandru DAMIAN <alexandru.damian@intel.com>        2015-08-18 17:28:52 +0100
committer  Richard Purdie <richard.purdie@linuxfoundation.org>  2015-08-19 18:05:53 +0100
commit     12c9cca5b1de74d5662175f3bf127ab33a2a7441
tree       febde7e67e30697dac92844c5680e69dc24d3f2e
parent     d18e6303a0bbfcac14766749c4fc8e41478f6db6
download   poky-12c9cca5b1de74d5662175f3bf127ab33a2a7441.tar.gz
bitbake: toaster: use loggers instead of prints
Switch debugging output from print statements to loggers, so that
messages are processed through the logging infrastructure.

The messages are logged with the "toaster" logger, which is defined
in toastermain/settings.py.
(Bitbake rev: adf3bdcbe8b0b0bbe9de2800f2d20a53e8d88543)
Signed-off-by: Alexandru DAMIAN <alexandru.damian@intel.com>
Signed-off-by: Michael Wood <michael.g.wood@intel.com>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
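The pattern applied throughout the patch is the stock Python logging idiom: look up a module-level logger by name and emit at an appropriate severity instead of printing. A minimal, self-contained sketch of that idiom follows; the function and messages are illustrative only, and the basicConfig() call merely stands in for the real configuration in toastermain/settings.py:

    import logging

    # Named logger: its handlers, level and format come from the logging
    # configuration (for Toaster, the "toaster" entry in toastermain/settings.py).
    logger = logging.getLogger("toaster")

    def parse_config(text):
        """Toy operation standing in for the layer-index fetches in the patch."""
        logger.debug("Parsing configuration")       # progress detail, filterable by level
        try:
            return dict(line.split("=", 1) for line in text.splitlines() if line)
        except ValueError as exc:
            # Unlike print, the message carries a severity and is routed by handlers.
            logger.warning("could not parse configuration, skipping: %s", exc)
            return None

    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)    # stand-in for settings.py setup
        parse_config("a=1\nb=2")
        parse_config("broken-line")

Note that the patch itself builds its messages eagerly with the % operator; passing the arguments to the logger call, as above, defers formatting until a handler actually accepts the record.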
-rw-r--r--   bitbake/lib/toaster/orm/models.py        | 22
-rwxr-xr-x   bitbake/lib/toaster/toastergui/views.py  |  4
2 files changed, 17 insertions(+), 9 deletions(-)
diff --git a/bitbake/lib/toaster/orm/models.py b/bitbake/lib/toaster/orm/models.py
index 26abf370d9..c3fb766cf4 100644
--- a/bitbake/lib/toaster/orm/models.py
+++ b/bitbake/lib/toaster/orm/models.py
@@ -29,6 +29,11 @@ from django.core import validators
 from django.conf import settings
 import django.db.models.signals
 
+
+import logging
+logger = logging.getLogger("toaster")
+
+
 class GitURLValidator(validators.URLValidator):
     import re
     regex = re.compile(
@@ -855,8 +860,8 @@ class LayerIndexLayerSource(LayerSource):
         except Exception as e:
             import traceback
             if proxy_settings is not None:
-                print "EE: Using proxy ", proxy_settings
-            print "EE: could not connect to %s, skipping update: %s\n%s" % (self.apiurl, e, traceback.format_exc(e))
+                logger.info("EE: Using proxy %s" % proxy_settings)
+            logger.warning("EE: could not connect to %s, skipping update: %s\n%s" % (self.apiurl, e, traceback.format_exc(e)))
             return
 
         # update branches; only those that we already have names listed in the
@@ -865,7 +870,7 @@ class LayerIndexLayerSource(LayerSource):
         if len(whitelist_branch_names) == 0:
             raise Exception("Failed to make list of branches to fetch")
 
-        print "Fetching branches"
+        logger.debug("Fetching branches")
         branches_info = _get_json_response(apilinks['branches']
             + "?filter=name:%s" % "OR".join(whitelist_branch_names))
         for bi in branches_info:
@@ -895,7 +900,7 @@ class LayerIndexLayerSource(LayerSource):
         transaction.set_autocommit(True)
 
         # update layerbranches/layer_versions
-        print "Fetching layer information"
+        logger.debug("Fetching layer information")
         layerbranches_info = _get_json_response(apilinks['layerBranches']
             + "?filter=branch:%s" % "OR".join(map(lambda x: str(x.up_id), [i for i in Branch.objects.filter(layer_source = self) if i.up_id is not None] ))
             )
@@ -933,7 +938,7 @@ class LayerIndexLayerSource(LayerSource):
                 try:
                     dependlist[lv].append(Layer_Version.objects.get(layer_source = self, layer__up_id = ldi['dependency'], up_branch = lv.up_branch))
                 except Layer_Version.DoesNotExist:
-                    print "Cannot find layer version ", self, ldi['dependency'], lv.up_branch
+                    logger.warning("Cannot find layer version %s dep:%s up_brach:%s" % (self, ldi['dependency'], lv.up_branch))
 
         for lv in dependlist:
             LayerVersionDependency.objects.filter(layer_version = lv).delete()
@@ -944,7 +949,7 @@ class LayerIndexLayerSource(LayerSource):
 
 
         # update machines
-        print "Fetching machine information"
+        logger.debug("Fetching machine information")
         machines_info = _get_json_response(apilinks['machines']
             + "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
             )
@@ -962,7 +967,7 @@ class LayerIndexLayerSource(LayerSource):
         transaction.set_autocommit(True)
 
         # update recipes; paginate by layer version / layer branch
-        print "Fetching target information"
+        logger.debug("Fetching target information")
         recipes_info = _get_json_response(apilinks['recipes']
             + "?filter=layerbranch:%s" % "OR".join(map(lambda x: str(x.up_id), Layer_Version.objects.filter(layer_source = self)))
             )
@@ -1236,8 +1241,7 @@ def invalidate_cache(**kwargs):
     try:
         cache.clear()
     except Exception as e:
-        print "Problem with cache backend: Failed to clear cache"
-        pass
+        logger.warning("Problem with cache backend: Failed to clear cache: %s" % e)
 
 django.db.models.signals.post_save.connect(invalidate_cache)
 django.db.models.signals.post_delete.connect(invalidate_cache)
diff --git a/bitbake/lib/toaster/toastergui/views.py b/bitbake/lib/toaster/toastergui/views.py
index d6bd7c94ba..0f05955af4 100755
--- a/bitbake/lib/toaster/toastergui/views.py
+++ b/bitbake/lib/toaster/toastergui/views.py
@@ -42,6 +42,10 @@ import json
 from os.path import dirname
 import itertools
 
+import logging
+
+logger = logging.getLogger("toaster")
+
 # all new sessions should come through the landing page;
 # determine in which mode we are running in, and redirect appropriately
 def landing(request):
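The commit message points at toastermain/settings.py as the place where the "toaster" logger is defined. In a Django project that is normally done through the LOGGING dictConfig setting; the snippet below is a generic illustration of such an entry, not the actual contents of that file:

    # Illustrative only: a minimal Django LOGGING setting that defines a
    # "toaster" logger writing to the console. The real toastermain/settings.py
    # may declare different handlers, formatters and levels.
    LOGGING = {
        "version": 1,
        "disable_existing_loggers": False,
        "handlers": {
            "console": {"class": "logging.StreamHandler"},
        },
        "loggers": {
            "toaster": {
                "handlers": ["console"],
                "level": "DEBUG",   # DEBUG so the new logger.debug() calls are visible
            },
        },
    }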