diff options
author | Frazer Clews <frazer.clews@codethink.co.uk> | 2020-01-16 16:55:18 +0000 |
---|---|---|
committer | Richard Purdie <richard.purdie@linuxfoundation.org> | 2020-01-19 13:31:05 +0000 |
commit | 0ac5174c7d39a3e49893df0d517d47bec1935555 (patch) | |
tree | 479496afb1da7814071e39e888e8926cd03bec57 /bitbake/lib/bb/codeparser.py | |
parent | 444bcb6cb6be8d5205fc88790360d864e633a555 (diff) | |
download | poky-0ac5174c7d39a3e49893df0d517d47bec1935555.tar.gz |
bitbake: lib: remove unused imports
removed unused imports which made the code harder to read and slightly
less efficient
(Bitbake rev: 4367692a932ac135c5aa4f9f2a4e4f0150f76697)
Signed-off-by: Frazer Clews <frazer.clews@codethink.co.uk>
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Diffstat (limited to 'bitbake/lib/bb/codeparser.py')
-rw-r--r-- | bitbake/lib/bb/codeparser.py | 26 |
1 files changed, 2 insertions, 24 deletions
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py index fd2c4734f0..25a7ac69d3 100644 --- a/bitbake/lib/bb/codeparser.py +++ b/bitbake/lib/bb/codeparser.py | |||
@@ -25,13 +25,11 @@ import ast | |||
25 | import sys | 25 | import sys |
26 | import codegen | 26 | import codegen |
27 | import logging | 27 | import logging |
28 | import pickle | ||
29 | import bb.pysh as pysh | 28 | import bb.pysh as pysh |
30 | import os.path | ||
31 | import bb.utils, bb.data | 29 | import bb.utils, bb.data |
32 | import hashlib | 30 | import hashlib |
33 | from itertools import chain | 31 | from itertools import chain |
34 | from bb.pysh import pyshyacc, pyshlex, sherrors | 32 | from bb.pysh import pyshyacc, pyshlex |
35 | from bb.cache import MultiProcessCache | 33 | from bb.cache import MultiProcessCache |
36 | 34 | ||
37 | logger = logging.getLogger('BitBake.CodeParser') | 35 | logger = logging.getLogger('BitBake.CodeParser') |
@@ -58,30 +56,10 @@ def check_indent(codestr): | |||
58 | 56 | ||
59 | return codestr | 57 | return codestr |
60 | 58 | ||
61 | |||
62 | # Basically pickle, in python 2.7.3 at least, does badly with data duplication | ||
63 | # upon pickling and unpickling. Combine this with duplicate objects and things | ||
64 | # are a mess. | ||
65 | # | ||
66 | # When the sets are originally created, python calls intern() on the set keys | ||
67 | # which significantly improves memory usage. Sadly the pickle/unpickle process | ||
68 | # doesn't call intern() on the keys and results in the same strings being duplicated | ||
69 | # in memory. This also means pickle will save the same string multiple times in | ||
70 | # the cache file. | ||
71 | # | ||
72 | # By having shell and python cacheline objects with setstate/getstate, we force | ||
73 | # the object creation through our own routine where we can call intern (via internSet). | ||
74 | # | ||
75 | # We also use hashable frozensets and ensure we use references to these so that | ||
76 | # duplicates can be removed, both in memory and in the resulting pickled data. | ||
77 | # | ||
78 | # By playing these games, the size of the cache file shrinks dramatically | ||
79 | # meaning faster load times and the reloaded cache files also consume much less | ||
80 | # memory. Smaller cache files, faster load times and lower memory usage is good. | ||
81 | # | ||
82 | # A custom getstate/setstate using tuples is actually worth 15% cachesize by | 59 | # A custom getstate/setstate using tuples is actually worth 15% cachesize by |
83 | # avoiding duplication of the attribute names! | 60 | # avoiding duplication of the attribute names! |
84 | 61 | ||
62 | |||
85 | class SetCache(object): | 63 | class SetCache(object): |
86 | def __init__(self): | 64 | def __init__(self): |
87 | self.setcache = {} | 65 | self.setcache = {} |