path: root/bitbake/lib
Diffstat (limited to 'bitbake/lib')
-rw-r--r--  bitbake/lib/bb/__init__.py           |   2
-rw-r--r--  bitbake/lib/bb/asyncrpc/client.py    |  24
-rw-r--r--  bitbake/lib/bb/build.py              |   2
-rw-r--r--  bitbake/lib/bb/codeparser.py         |  31
-rw-r--r--  bitbake/lib/bb/cooker.py             |  14
-rw-r--r--  bitbake/lib/bb/fetch2/crate.py       |   9
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py         |   3
-rw-r--r--  bitbake/lib/bb/parse/__init__.py     |  12
-rw-r--r--  bitbake/lib/bb/runqueue.py           |   8
-rw-r--r--  bitbake/lib/bb/siggen.py             |   2
-rw-r--r--  bitbake/lib/bb/tests/codeparser.py   |  40
-rw-r--r--  bitbake/lib/bb/tests/fetch.py        |  26
-rw-r--r--  bitbake/lib/bblayers/action.py       |   4
-rw-r--r--  bitbake/lib/prserv/__init__.py       |  97
-rw-r--r--  bitbake/lib/prserv/client.py         |  15
-rw-r--r--  bitbake/lib/prserv/db.py             | 452
-rw-r--r--  bitbake/lib/prserv/serv.py           | 140
-rw-r--r--  bitbake/lib/prserv/tests.py          | 386
18 files changed, 942 insertions, 325 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
index 15013540c2..8b6ea2d8ed 100644
--- a/bitbake/lib/bb/__init__.py
+++ b/bitbake/lib/bb/__init__.py
@@ -9,7 +9,7 @@
9# SPDX-License-Identifier: GPL-2.0-only 9# SPDX-License-Identifier: GPL-2.0-only
10# 10#
11 11
12__version__ = "2.9.0" 12__version__ = "2.9.1"
13 13
14import sys 14import sys
15if sys.version_info < (3, 8, 0): 15if sys.version_info < (3, 8, 0):
diff --git a/bitbake/lib/bb/asyncrpc/client.py b/bitbake/lib/bb/asyncrpc/client.py
index a350b4fb12..b49de99313 100644
--- a/bitbake/lib/bb/asyncrpc/client.py
+++ b/bitbake/lib/bb/asyncrpc/client.py
@@ -24,6 +24,11 @@ ADDR_TYPE_UNIX = 0
24ADDR_TYPE_TCP = 1 24ADDR_TYPE_TCP = 1
25ADDR_TYPE_WS = 2 25ADDR_TYPE_WS = 2
26 26
27WEBSOCKETS_MIN_VERSION = (9, 1)
28# Need websockets 10 with python 3.10+
29if sys.version_info >= (3, 10, 0):
30 WEBSOCKETS_MIN_VERSION = (10, 0)
31
27def parse_address(addr): 32def parse_address(addr):
28 if addr.startswith(UNIX_PREFIX): 33 if addr.startswith(UNIX_PREFIX):
29 return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],)) 34 return (ADDR_TYPE_UNIX, (addr[len(UNIX_PREFIX) :],))
@@ -39,6 +44,7 @@ def parse_address(addr):
39 44
40 return (ADDR_TYPE_TCP, (host, int(port))) 45 return (ADDR_TYPE_TCP, (host, int(port)))
41 46
47
42class AsyncClient(object): 48class AsyncClient(object):
43 def __init__( 49 def __init__(
44 self, 50 self,
@@ -86,6 +92,24 @@ class AsyncClient(object):
86 async def connect_websocket(self, uri): 92 async def connect_websocket(self, uri):
87 import websockets 93 import websockets
88 94
95 try:
96 version = tuple(
97 int(v)
98 for v in websockets.__version__.split(".")[
99 0 : len(WEBSOCKETS_MIN_VERSION)
100 ]
101 )
102 except ValueError:
103 raise ImportError(
104 f"Unable to parse websockets version '{websockets.__version__}'"
105 )
106
107 if version < WEBSOCKETS_MIN_VERSION:
108 min_ver_str = ".".join(str(v) for v in WEBSOCKETS_MIN_VERSION)
109 raise ImportError(
110 f"Websockets version {websockets.__version__} is less than minimum required version {min_ver_str}"
111 )
112
89 async def connect_sock(): 113 async def connect_sock():
90 websocket = await websockets.connect(uri, ping_interval=None) 114 websocket = await websockets.connect(uri, ping_interval=None)
91 return WebsocketConnection(websocket, self.timeout) 115 return WebsocketConnection(websocket, self.timeout)
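
The new connect_websocket() guard parses websockets.__version__ into an integer tuple and raises ImportError when the installed module is older than the minimum (9.1, or 10.0 on Python 3.10 and later). A minimal standalone sketch of the same version-gate pattern, not part of the patch (the helper name is made up; the thresholds mirror the hunk above):

import sys

# Illustrative version gate: parse a dotted version string and compare it
# against a minimum tuple, as the patch does for the websockets module.
MIN_VERSION = (10, 0) if sys.version_info >= (3, 10, 0) else (9, 1)

def check_min_version(version_string, minimum=MIN_VERSION):
    try:
        version = tuple(int(v) for v in version_string.split(".")[:len(minimum)])
    except ValueError:
        raise ImportError("Unable to parse version '%s'" % version_string)
    if version < minimum:
        raise ImportError("Version %s is less than minimum required %s"
                          % (version_string, ".".join(str(v) for v in minimum)))

check_min_version("12.0")     # passes
# check_min_version("8.1")    # would raise ImportError
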
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
index 44d08f5c55..ab8bce3d57 100644
--- a/bitbake/lib/bb/build.py
+++ b/bitbake/lib/bb/build.py
@@ -197,6 +197,8 @@ def exec_func(func, d, dirs = None):
197 for cdir in d.expand(cleandirs).split(): 197 for cdir in d.expand(cleandirs).split():
198 bb.utils.remove(cdir, True) 198 bb.utils.remove(cdir, True)
199 bb.utils.mkdirhier(cdir) 199 bb.utils.mkdirhier(cdir)
200 if cdir == oldcwd:
201 os.chdir(cdir)
200 202
201 if flags and dirs is None: 203 if flags and dirs is None:
202 dirs = flags.get('dirs') 204 dirs = flags.get('dirs')
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
index 2e8b7ced3c..691bdff75e 100644
--- a/bitbake/lib/bb/codeparser.py
+++ b/bitbake/lib/bb/codeparser.py
@@ -484,19 +484,34 @@ class ShellParser():
484 """ 484 """
485 485
486 words = list(words) 486 words = list(words)
487 for word in list(words): 487 for word in words:
488 wtree = pyshlex.make_wordtree(word[1]) 488 wtree = pyshlex.make_wordtree(word[1])
489 for part in wtree: 489 for part in wtree:
490 if not isinstance(part, list): 490 if not isinstance(part, list):
491 continue 491 continue
492 492
493 if part[0] in ('`', '$('): 493 candidates = [part]
494 command = pyshlex.wordtree_as_string(part[1:-1]) 494
495 self._parse_shell(command) 495 # If command is of type:
496 496 #
497 if word[0] in ("cmd_name", "cmd_word"): 497 # var="... $(cmd [...]) ..."
498 if word in words: 498 #
499 words.remove(word) 499 # Then iterate on what's between the quotes and if we find a
500 # list, make that what we check for below.
501 if len(part) >= 3 and part[0] == '"':
502 for p in part[1:-1]:
503 if isinstance(p, list):
504 candidates.append(p)
505
506 for candidate in candidates:
507 if len(candidate) >= 2:
508 if candidate[0] in ('`', '$('):
509 command = pyshlex.wordtree_as_string(candidate[1:-1])
510 self._parse_shell(command)
511
512 if word[0] in ("cmd_name", "cmd_word"):
513 if word in words:
514 words.remove(word)
500 515
501 usetoken = False 516 usetoken = False
502 for word in words: 517 for word in words:
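
With this change, ShellParser descends into double-quoted words and checks any embedded lists for `...` and $(...) command substitutions, so assignments such as foo="$(echo bar)" record echo as an executed command (the new tests in bb/tests/codeparser.py below exercise this). A rough standalone approximation of the effect, using a regex instead of bitbake's pyshlex word trees (illustrative only, not the parser's mechanism):

import re

# Collect command names from $(...) and `...` substitutions, including ones
# nested inside double quotes. A crude stand-in for the word-tree walk.
SUBSHELL = re.compile(r'\$\(\s*([A-Za-z_][\w./-]*)|`\s*([A-Za-z_][\w./-]*)')

def guess_execs(shell_text):
    return {m.group(1) or m.group(2) for m in SUBSHELL.finditer(shell_text)}

print(guess_execs('foo="$(func1 "$(func2 bar$(func3))")"'))   # func1, func2, func3
print(guess_execs('foo="`echo bar` `func2`"'))                # echo, func2
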
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
index c5bfef55d6..076ddaa58d 100644
--- a/bitbake/lib/bb/cooker.py
+++ b/bitbake/lib/bb/cooker.py
@@ -315,13 +315,13 @@ class BBCooker:
315 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db" 315 dbfile = (self.data.getVar("PERSISTENT_DIR") or self.data.getVar("CACHE")) + "/hashserv.db"
316 upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None 316 upstream = self.data.getVar("BB_HASHSERVE_UPSTREAM") or None
317 if upstream: 317 if upstream:
318 import socket
319 try: 318 try:
320 sock = socket.create_connection(upstream.split(":"), 5) 319 with hashserv.create_client(upstream) as client:
321 sock.close() 320 client.ping()
322 except socket.error as e: 321 except (ConnectionError, ImportError) as e:
323 bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s" 322 bb.warn("BB_HASHSERVE_UPSTREAM is not valid, unable to connect hash equivalence server at '%s': %s"
324 % (upstream, repr(e))) 323 % (upstream, repr(e)))
324 upstream = None
325 325
326 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR") 326 self.hashservaddr = "unix://%s/hashserve.sock" % self.data.getVar("TOPDIR")
327 self.hashserv = hashserv.create_server( 327 self.hashserv = hashserv.create_server(
@@ -680,14 +680,14 @@ class BBCooker:
680 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data) 680 bb.event.fire(bb.event.TreeDataPreparationCompleted(len(fulltargetlist)), self.data)
681 return taskdata, runlist 681 return taskdata, runlist
682 682
683 def prepareTreeData(self, pkgs_to_build, task): 683 def prepareTreeData(self, pkgs_to_build, task, halt=False):
684 """ 684 """
685 Prepare a runqueue and taskdata object for iteration over pkgs_to_build 685 Prepare a runqueue and taskdata object for iteration over pkgs_to_build
686 """ 686 """
687 687
688 # We set halt to False here to prevent unbuildable targets raising 688 # We set halt to False here to prevent unbuildable targets raising
689 # an exception when we're just generating data 689 # an exception when we're just generating data
690 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, False, allowincomplete=True) 690 taskdata, runlist = self.buildTaskData(pkgs_to_build, task, halt, allowincomplete=True)
691 691
692 return runlist, taskdata 692 return runlist, taskdata
693 693
@@ -701,7 +701,7 @@ class BBCooker:
701 if not task.startswith("do_"): 701 if not task.startswith("do_"):
702 task = "do_%s" % task 702 task = "do_%s" % task
703 703
704 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task) 704 runlist, taskdata = self.prepareTreeData(pkgs_to_build, task, halt=True)
705 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist) 705 rq = bb.runqueue.RunQueue(self, self.data, self.recipecaches, taskdata, runlist)
706 rq.rqdata.prepare() 706 rq.rqdata.prepare()
707 return self.buildDependTree(rq, taskdata) 707 return self.buildDependTree(rq, taskdata)
diff --git a/bitbake/lib/bb/fetch2/crate.py b/bitbake/lib/bb/fetch2/crate.py
index 01d49435c3..e611736f06 100644
--- a/bitbake/lib/bb/fetch2/crate.py
+++ b/bitbake/lib/bb/fetch2/crate.py
@@ -70,6 +70,7 @@ class Crate(Wget):
70 host = 'crates.io/api/v1/crates' 70 host = 'crates.io/api/v1/crates'
71 71
72 ud.url = "https://%s/%s/%s/download" % (host, name, version) 72 ud.url = "https://%s/%s/%s/download" % (host, name, version)
73 ud.versionsurl = "https://%s/%s/versions" % (host, name)
73 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version) 74 ud.parm['downloadfilename'] = "%s-%s.crate" % (name, version)
74 if 'name' not in ud.parm: 75 if 'name' not in ud.parm:
75 ud.parm['name'] = '%s-%s' % (name, version) 76 ud.parm['name'] = '%s-%s' % (name, version)
@@ -139,3 +140,11 @@ class Crate(Wget):
139 mdpath = os.path.join(bbpath, cratepath, mdfile) 140 mdpath = os.path.join(bbpath, cratepath, mdfile)
140 with open(mdpath, "w") as f: 141 with open(mdpath, "w") as f:
141 json.dump(metadata, f) 142 json.dump(metadata, f)
143
144 def latest_versionstring(self, ud, d):
145 from functools import cmp_to_key
146 json_data = json.loads(self._fetch_index(ud.versionsurl, ud, d))
147 versions = [(0, i["num"], "") for i in json_data["versions"]]
148 versions = sorted(versions, key=cmp_to_key(bb.utils.vercmp))
149
150 return (versions[-1][1], "")
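
latest_versionstring() reuses the Wget fetcher's _fetch_index() to pull https://crates.io/api/v1/crates/<name>/versions and returns the highest "num" field after sorting with bb.utils.vercmp. A standalone sketch of the same lookup without bitbake; the simplified sort key only approximates vercmp, and the example call is illustrative:

import json, urllib.request

def latest_crate_version(name):
    # Fetch the crates.io versions index and pick the highest "num" field.
    url = "https://crates.io/api/v1/crates/%s/versions" % name
    with urllib.request.urlopen(url) as f:
        data = json.load(f)
    def key(num):
        # Compare numeric fields only; ignores build metadata such as "+cargo-0.69".
        return [int(p) if p.isdigit() else 0 for p in num.split("+")[0].split(".")]
    return max((v["num"] for v in data["versions"]), key=key)

# print(latest_crate_version("cargo-c"))   # e.g. "0.9.29" or newer
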
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
index d40e4d2909..0852108e7d 100644
--- a/bitbake/lib/bb/fetch2/svn.py
+++ b/bitbake/lib/bb/fetch2/svn.py
@@ -210,3 +210,6 @@ class Svn(FetchMethod):
210 210
211 def _build_revision(self, ud, d): 211 def _build_revision(self, ud, d):
212 return ud.revision 212 return ud.revision
213
214 def supports_checksum(self, urldata):
215 return False
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
index a4358f1374..7ffdaa6fd7 100644
--- a/bitbake/lib/bb/parse/__init__.py
+++ b/bitbake/lib/bb/parse/__init__.py
@@ -49,20 +49,23 @@ class SkipPackage(SkipRecipe):
49__mtime_cache = {} 49__mtime_cache = {}
50def cached_mtime(f): 50def cached_mtime(f):
51 if f not in __mtime_cache: 51 if f not in __mtime_cache:
52 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 52 res = os.stat(f)
53 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
53 return __mtime_cache[f] 54 return __mtime_cache[f]
54 55
55def cached_mtime_noerror(f): 56def cached_mtime_noerror(f):
56 if f not in __mtime_cache: 57 if f not in __mtime_cache:
57 try: 58 try:
58 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 59 res = os.stat(f)
60 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
59 except OSError: 61 except OSError:
60 return 0 62 return 0
61 return __mtime_cache[f] 63 return __mtime_cache[f]
62 64
63def check_mtime(f, mtime): 65def check_mtime(f, mtime):
64 try: 66 try:
65 current_mtime = os.stat(f)[stat.ST_MTIME] 67 res = os.stat(f)
68 current_mtime = (res.st_mtime_ns, res.st_size, res.st_ino)
66 __mtime_cache[f] = current_mtime 69 __mtime_cache[f] = current_mtime
67 except OSError: 70 except OSError:
68 current_mtime = 0 71 current_mtime = 0
@@ -70,7 +73,8 @@ def check_mtime(f, mtime):
70 73
71def update_mtime(f): 74def update_mtime(f):
72 try: 75 try:
73 __mtime_cache[f] = os.stat(f)[stat.ST_MTIME] 76 res = os.stat(f)
77 __mtime_cache[f] = (res.st_mtime_ns, res.st_size, res.st_ino)
74 except OSError: 78 except OSError:
75 if f in __mtime_cache: 79 if f in __mtime_cache:
76 del __mtime_cache[f] 80 del __mtime_cache[f]
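
The parse cache no longer keys file freshness on the coarse ST_MTIME seconds value; it now fingerprints each file with (st_mtime_ns, st_size, st_ino), so edits made within the same second, or replacements that preserve the mtime, are still noticed. The idea in isolation (helper names are made up):

import os

def file_fingerprint(path):
    # Matches what the patch caches per file: nanosecond mtime, size and inode.
    res = os.stat(path)
    return (res.st_mtime_ns, res.st_size, res.st_ino)

_cache = {}

def has_changed(path):
    current = file_fingerprint(path)
    previous = _cache.get(path)
    _cache[path] = current
    return previous is not None and previous != current
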
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
index bc7e18175d..6b43f303d5 100644
--- a/bitbake/lib/bb/runqueue.py
+++ b/bitbake/lib/bb/runqueue.py
@@ -1290,7 +1290,7 @@ class RunQueueData:
1290 return len(self.runtaskentries) 1290 return len(self.runtaskentries)
1291 1291
1292 def prepare_task_hash(self, tid): 1292 def prepare_task_hash(self, tid):
1293 bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) 1293 self.runtaskentries[tid].taskhash_deps = bb.parse.siggen.prep_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1294 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches) 1294 self.runtaskentries[tid].hash = bb.parse.siggen.get_taskhash(tid, self.runtaskentries[tid].depends, self.dataCaches)
1295 self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid) 1295 self.runtaskentries[tid].unihash = bb.parse.siggen.get_unihash(tid)
1296 1296
@@ -2445,7 +2445,8 @@ class RunQueueExecute:
2445 unihash = self.rqdata.runtaskentries[task].unihash 2445 unihash = self.rqdata.runtaskentries[task].unihash
2446 deps = self.filtermcdeps(task, mc, deps) 2446 deps = self.filtermcdeps(task, mc, deps)
2447 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] 2447 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
2448 taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] 2448 taskhash_deps = self.rqdata.runtaskentries[task].taskhash_deps
2449 taskdepdata_cache[task] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn, taskhash_deps]
2449 2450
2450 self.taskdepdata_cache = taskdepdata_cache 2451 self.taskdepdata_cache = taskdepdata_cache
2451 2452
@@ -2812,7 +2813,8 @@ class RunQueueExecute:
2812 taskhash = self.rqdata.runtaskentries[revdep].hash 2813 taskhash = self.rqdata.runtaskentries[revdep].hash
2813 unihash = self.rqdata.runtaskentries[revdep].unihash 2814 unihash = self.rqdata.runtaskentries[revdep].unihash
2814 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn] 2815 hashfn = self.rqdata.dataCaches[mc].hashfn[taskfn]
2815 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn] 2816 taskhash_deps = self.rqdata.runtaskentries[revdep].taskhash_deps
2817 taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn, taskhash_deps]
2816 for revdep2 in deps: 2818 for revdep2 in deps:
2817 if revdep2 not in taskdepdata: 2819 if revdep2 not in taskdepdata:
2818 additional.append(revdep2) 2820 additional.append(revdep2)
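
Entries handed out through taskdepdata (and the cached copy in taskdepdata_cache) gain a ninth element: the set of dependencies that actually fed into the task hash, as returned by prep_taskhash(). Code consuming taskdepdata would unpack the extended entry as follows (illustrative consumer, not from the patch):

def dump_taskhash_deps(taskdepdata):
    # taskdepdata[tid] layout after this change; index 8 (taskhash_deps) is new:
    # [pn, taskname, fn, deps, provides, taskhash, unihash, hashfn, taskhash_deps]
    for tid, entry in taskdepdata.items():
        pn, taskname, fn, deps, provides, taskhash, unihash, hashfn, taskhash_deps = entry
        print(tid, taskname, "hash-relevant deps:", sorted(taskhash_deps))
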
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
index 8ab08ec961..03dfda6f3c 100644
--- a/bitbake/lib/bb/siggen.py
+++ b/bitbake/lib/bb/siggen.py
@@ -381,7 +381,7 @@ class SignatureGeneratorBasic(SignatureGenerator):
381 self.taints[tid] = taint 381 self.taints[tid] = taint
382 logger.warning("%s is tainted from a forced run" % tid) 382 logger.warning("%s is tainted from a forced run" % tid)
383 383
384 return 384 return set(dep for _, dep in self.runtaskdeps[tid])
385 385
386 def get_taskhash(self, tid, deps, dataCaches): 386 def get_taskhash(self, tid, deps, dataCaches):
387 387
diff --git a/bitbake/lib/bb/tests/codeparser.py b/bitbake/lib/bb/tests/codeparser.py
index f6585fb3aa..c0d1362a0c 100644
--- a/bitbake/lib/bb/tests/codeparser.py
+++ b/bitbake/lib/bb/tests/codeparser.py
@@ -106,6 +106,46 @@ ${D}${libdir}/pkgconfig/*.pc
106 self.parseExpression("foo=$(echo bar)") 106 self.parseExpression("foo=$(echo bar)")
107 self.assertExecs(set(["echo"])) 107 self.assertExecs(set(["echo"]))
108 108
109 def test_assign_subshell_expansion_quotes(self):
110 self.parseExpression('foo="$(echo bar)"')
111 self.assertExecs(set(["echo"]))
112
113 def test_assign_subshell_expansion_nested(self):
114 self.parseExpression('foo="$(func1 "$(func2 bar$(func3))")"')
115 self.assertExecs(set(["func1", "func2", "func3"]))
116
117 def test_assign_subshell_expansion_multiple(self):
118 self.parseExpression('foo="$(func1 "$(func2)") $(func3)"')
119 self.assertExecs(set(["func1", "func2", "func3"]))
120
121 def test_assign_subshell_expansion_escaped_quotes(self):
122 self.parseExpression('foo="\\"fo\\"o$(func1)"')
123 self.assertExecs(set(["func1"]))
124
125 def test_assign_subshell_expansion_empty(self):
126 self.parseExpression('foo="bar$()foo"')
127 self.assertExecs(set())
128
129 def test_assign_subshell_backticks(self):
130 self.parseExpression("foo=`echo bar`")
131 self.assertExecs(set(["echo"]))
132
133 def test_assign_subshell_backticks_quotes(self):
134 self.parseExpression('foo="`echo bar`"')
135 self.assertExecs(set(["echo"]))
136
137 def test_assign_subshell_backticks_multiple(self):
138 self.parseExpression('foo="`func1 bar` `func2`"')
139 self.assertExecs(set(["func1", "func2"]))
140
141 def test_assign_subshell_backticks_escaped_quotes(self):
142 self.parseExpression('foo="\\"fo\\"o`func1`"')
143 self.assertExecs(set(["func1"]))
144
145 def test_assign_subshell_backticks_empty(self):
146 self.parseExpression('foo="bar``foo"')
147 self.assertExecs(set())
148
109 def test_shell_unexpanded(self): 149 def test_shell_unexpanded(self):
110 self.setEmptyVars(["QT_BASE_NAME"]) 150 self.setEmptyVars(["QT_BASE_NAME"])
111 self.parseExpression('echo "${QT_BASE_NAME}"') 151 self.parseExpression('echo "${QT_BASE_NAME}"')
diff --git a/bitbake/lib/bb/tests/fetch.py b/bitbake/lib/bb/tests/fetch.py
index 85c1f79ff3..ed7a39a723 100644
--- a/bitbake/lib/bb/tests/fetch.py
+++ b/bitbake/lib/bb/tests/fetch.py
@@ -511,7 +511,8 @@ class MirrorUriTest(FetcherTest):
511 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \ 511 mirrorvar = "http://.*/.* file:///somepath/downloads/ " \
512 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \ 512 "git://someserver.org/bitbake git://git.openembedded.org/bitbake " \
513 "https://.*/.* file:///someotherpath/downloads/ " \ 513 "https://.*/.* file:///someotherpath/downloads/ " \
514 "http://.*/.* file:///someotherpath/downloads/" 514 "http://.*/.* file:///someotherpath/downloads/ " \
515 "svn://svn.server1.com/ svn://svn.server2.com/"
515 516
516 def test_urireplace(self): 517 def test_urireplace(self):
517 self.d.setVar("FILESPATH", ".") 518 self.d.setVar("FILESPATH", ".")
@@ -535,6 +536,13 @@ class MirrorUriTest(FetcherTest):
535 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d) 536 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
536 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz']) 537 self.assertEqual(uris, ['file:///someotherpath/downloads/bitbake-1.0.tar.gz'])
537 538
539 def test_urilistsvn(self):
540 # Catch svn:// -> svn:// bug
541 fetcher = bb.fetch.FetchData("svn://svn.server1.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2", self.d)
542 mirrors = bb.fetch2.mirror_from_string(self.mirrorvar)
543 uris, uds = bb.fetch2.build_mirroruris(fetcher, mirrors, self.d)
544 self.assertEqual(uris, ['svn://svn.server2.com/isource/svnroot/reponame/tags/tagname;module=path_in_tagnamefolder;protocol=https;rev=2'])
545
538 def test_mirror_of_mirror(self): 546 def test_mirror_of_mirror(self):
539 # Test if mirror of a mirror works 547 # Test if mirror of a mirror works
540 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/" 548 mirrorvar = self.mirrorvar + " http://.*/.* http://otherdownloads.yoctoproject.org/downloads/"
@@ -1493,6 +1501,12 @@ class FetchLatestVersionTest(FetcherTest):
1493 : "2.8", 1501 : "2.8",
1494 } 1502 }
1495 1503
1504 test_crate_uris = {
1505 # basic example; version pattern "A.B.C+cargo-D.E.F"
1506 ("cargo-c", "crate://crates.io/cargo-c/0.9.18+cargo-0.69")
1507 : "0.9.29"
1508 }
1509
1496 @skipIfNoNetwork() 1510 @skipIfNoNetwork()
1497 def test_git_latest_versionstring(self): 1511 def test_git_latest_versionstring(self):
1498 for k, v in self.test_git_uris.items(): 1512 for k, v in self.test_git_uris.items():
@@ -1532,6 +1546,16 @@ class FetchLatestVersionTest(FetcherTest):
1532 finally: 1546 finally:
1533 server.stop() 1547 server.stop()
1534 1548
1549 @skipIfNoNetwork()
1550 def test_crate_latest_versionstring(self):
1551 for k, v in self.test_crate_uris.items():
1552 self.d.setVar("PN", k[0])
1553 ud = bb.fetch2.FetchData(k[1], self.d)
1554 pupver = ud.method.latest_versionstring(ud, self.d)
1555 verstring = pupver[0]
1556 self.assertTrue(verstring, msg="Could not find upstream version for %s" % k[0])
1557 r = bb.utils.vercmp_string(v, verstring)
1558 self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring))
1535 1559
1536class FetchCheckStatusTest(FetcherTest): 1560class FetchCheckStatusTest(FetcherTest):
1537 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz", 1561 test_wget_uris = ["https://downloads.yoctoproject.org/releases/sato/sato-engine-0.1.tar.gz",
diff --git a/bitbake/lib/bblayers/action.py b/bitbake/lib/bblayers/action.py
index a8f2699335..a14f19948e 100644
--- a/bitbake/lib/bblayers/action.py
+++ b/bitbake/lib/bblayers/action.py
@@ -50,8 +50,8 @@ class ActionPlugin(LayerPlugin):
50 50
51 try: 51 try:
52 notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None) 52 notadded, _ = bb.utils.edit_bblayers_conf(bblayers_conf, layerdirs, None)
53 self.tinfoil.modified_files()
54 if not (args.force or notadded): 53 if not (args.force or notadded):
54 self.tinfoil.modified_files()
55 try: 55 try:
56 self.tinfoil.run_command('parseConfiguration') 56 self.tinfoil.run_command('parseConfiguration')
57 except (bb.tinfoil.TinfoilUIException, bb.BBHandledException): 57 except (bb.tinfoil.TinfoilUIException, bb.BBHandledException):
@@ -83,6 +83,8 @@ class ActionPlugin(LayerPlugin):
83 layerdir = os.path.abspath(item) 83 layerdir = os.path.abspath(item)
84 layerdirs.append(layerdir) 84 layerdirs.append(layerdir)
85 (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs) 85 (_, notremoved) = bb.utils.edit_bblayers_conf(bblayers_conf, None, layerdirs)
86 if args.force > 1:
87 return 0
86 self.tinfoil.modified_files() 88 self.tinfoil.modified_files()
87 if notremoved: 89 if notremoved:
88 for item in notremoved: 90 for item in notremoved:
diff --git a/bitbake/lib/prserv/__init__.py b/bitbake/lib/prserv/__init__.py
index 0e0aa34d0e..a817b03c1e 100644
--- a/bitbake/lib/prserv/__init__.py
+++ b/bitbake/lib/prserv/__init__.py
@@ -4,17 +4,92 @@
4# SPDX-License-Identifier: GPL-2.0-only 4# SPDX-License-Identifier: GPL-2.0-only
5# 5#
6 6
7__version__ = "1.0.0"
8 7
9import os, time 8__version__ = "2.0.0"
10import sys, logging
11 9
12def init_logger(logfile, loglevel): 10import logging
13 numeric_level = getattr(logging, loglevel.upper(), None) 11logger = logging.getLogger("BitBake.PRserv")
14 if not isinstance(numeric_level, int):
15 raise ValueError("Invalid log level: %s" % loglevel)
16 FORMAT = "%(asctime)-15s %(message)s"
17 logging.basicConfig(level=numeric_level, filename=logfile, format=FORMAT)
18 12
19class NotFoundError(Exception): 13from bb.asyncrpc.client import parse_address, ADDR_TYPE_UNIX, ADDR_TYPE_WS
20 pass 14
15def create_server(addr, dbpath, upstream=None, read_only=False):
16 from . import serv
17
18 s = serv.PRServer(dbpath, upstream=upstream, read_only=read_only)
19 host, port = addr.split(":")
20 s.start_tcp_server(host, int(port))
21
22 return s
23
24def increase_revision(ver):
25 """Take a revision string such as "1" or "1.2.3" or even a number and increase its last number
26 This fails if the last number is not an integer"""
27
28 fields=str(ver).split('.')
29 last = fields[-1]
30
31 try:
32 val = int(last)
33 except Exception as e:
34 logger.critical("Unable to increase revision value %s: %s" % (ver, e))
35 raise e
36
37 return ".".join(fields[0:-1] + list(str(val + 1)))
38
39def _revision_greater_or_equal(rev1, rev2):
40 """Compares x.y.z revision numbers, using integer comparison
41 Returns True if rev1 is greater or equal to rev2"""
42
43 fields1 = rev1.split(".")
44 fields2 = rev2.split(".")
45 l1 = len(fields1)
46 l2 = len(fields2)
47
48 for i in range(l1):
49 val1 = int(fields1[i])
50 if i < l2:
51 val2 = int(fields2[i])
52 if val2 < val1:
53 return True
54 elif val2 > val1:
55 return False
56 else:
57 return True
58 return True
59
60def revision_smaller(rev1, rev2):
61 """Compares x.y.z revision numbers, using integer comparison
62 Returns True if rev1 is strictly smaller than rev2"""
63 return not(_revision_greater_or_equal(rev1, rev2))
64
65def revision_greater(rev1, rev2):
66 """Compares x.y.z revision numbers, using integer comparison
67 Returns True if rev1 is strictly greater than rev2"""
68 return _revision_greater_or_equal(rev1, rev2) and (rev1 != rev2)
69
70def create_client(addr):
71 from . import client
72
73 c = client.PRClient()
74
75 try:
76 (typ, a) = parse_address(addr)
77 c.connect_tcp(*a)
78 return c
79 except Exception as e:
80 c.close()
81 raise e
82
83async def create_async_client(addr):
84 from . import client
85
86 c = client.PRAsyncClient()
87
88 try:
89 (typ, a) = parse_address(addr)
90 await c.connect_tcp(*a)
91 return c
92
93 except Exception as e:
94 await c.close()
95 raise e
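
The helpers above treat PR values as dotted revision strings whose fields are compared as integers, so "1.10" ranks above "1.9" even though it sorts lower as a plain string. A short, simplified demonstration of that ordering rule (not the module's implementation):

def rev_fields(rev):
    return [int(f) for f in str(rev).split(".")]

def rev_greater(rev1, rev2):
    # Field-by-field integer comparison, the property the prserv helpers rely on.
    return rev_fields(rev1) > rev_fields(rev2)

print(rev_greater("1.10", "1.9"))   # True
print("1.10" > "1.9")               # False: plain string comparison gets this wrong
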
diff --git a/bitbake/lib/prserv/client.py b/bitbake/lib/prserv/client.py
index 8471ee3046..9f5794c433 100644
--- a/bitbake/lib/prserv/client.py
+++ b/bitbake/lib/prserv/client.py
@@ -6,6 +6,7 @@
6 6
7import logging 7import logging
8import bb.asyncrpc 8import bb.asyncrpc
9from . import create_async_client
9 10
10logger = logging.getLogger("BitBake.PRserv") 11logger = logging.getLogger("BitBake.PRserv")
11 12
@@ -13,16 +14,16 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient):
13 def __init__(self): 14 def __init__(self):
14 super().__init__("PRSERVICE", "1.0", logger) 15 super().__init__("PRSERVICE", "1.0", logger)
15 16
16 async def getPR(self, version, pkgarch, checksum): 17 async def getPR(self, version, pkgarch, checksum, history=False):
17 response = await self.invoke( 18 response = await self.invoke(
18 {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} 19 {"get-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}}
19 ) 20 )
20 if response: 21 if response:
21 return response["value"] 22 return response["value"]
22 23
23 async def test_pr(self, version, pkgarch, checksum): 24 async def test_pr(self, version, pkgarch, checksum, history=False):
24 response = await self.invoke( 25 response = await self.invoke(
25 {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum}} 26 {"test-pr": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "history": history}}
26 ) 27 )
27 if response: 28 if response:
28 return response["value"] 29 return response["value"]
@@ -48,9 +49,9 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient):
48 if response: 49 if response:
49 return response["value"] 50 return response["value"]
50 51
51 async def export(self, version, pkgarch, checksum, colinfo): 52 async def export(self, version, pkgarch, checksum, colinfo, history=False):
52 response = await self.invoke( 53 response = await self.invoke(
53 {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo}} 54 {"export": {"version": version, "pkgarch": pkgarch, "checksum": checksum, "colinfo": colinfo, "history": history}}
54 ) 55 )
55 if response: 56 if response:
56 return (response["metainfo"], response["datainfo"]) 57 return (response["metainfo"], response["datainfo"])
@@ -65,7 +66,7 @@ class PRAsyncClient(bb.asyncrpc.AsyncClient):
65class PRClient(bb.asyncrpc.Client): 66class PRClient(bb.asyncrpc.Client):
66 def __init__(self): 67 def __init__(self):
67 super().__init__() 68 super().__init__()
68 self._add_methods("getPR", "test_pr", "test_package", "importone", "export", "is_readonly") 69 self._add_methods("getPR", "test_pr", "test_package", "max_package_pr", "importone", "export", "is_readonly")
69 70
70 def _get_async_client(self): 71 def _get_async_client(self):
71 return PRAsyncClient() 72 return PRAsyncClient()
diff --git a/bitbake/lib/prserv/db.py b/bitbake/lib/prserv/db.py
index eb41508198..2da493ddf5 100644
--- a/bitbake/lib/prserv/db.py
+++ b/bitbake/lib/prserv/db.py
@@ -8,19 +8,13 @@ import logging
8import os.path 8import os.path
9import errno 9import errno
10import prserv 10import prserv
11import time 11import sqlite3
12 12
13try: 13from contextlib import closing
14 import sqlite3 14from . import increase_revision, revision_greater, revision_smaller
15except ImportError:
16 from pysqlite2 import dbapi2 as sqlite3
17 15
18logger = logging.getLogger("BitBake.PRserv") 16logger = logging.getLogger("BitBake.PRserv")
19 17
20sqlversion = sqlite3.sqlite_version_info
21if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
22 raise Exception("sqlite3 version 3.3.0 or later is required.")
23
24# 18#
25# "No History" mode - for a given query tuple (version, pkgarch, checksum), 19# "No History" mode - for a given query tuple (version, pkgarch, checksum),
26# the returned value will be the largest among all the values of the same 20# the returned value will be the largest among all the values of the same
@@ -29,287 +23,232 @@ if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
29# "History" mode - Return a new higher value for previously unseen query 23# "History" mode - Return a new higher value for previously unseen query
30# tuple (version, pkgarch, checksum), otherwise return historical value. 24# tuple (version, pkgarch, checksum), otherwise return historical value.
31# Value can decrement if returning to a previous build. 25# Value can decrement if returning to a previous build.
32#
33 26
34class PRTable(object): 27class PRTable(object):
35 def __init__(self, conn, table, nohist, read_only): 28 def __init__(self, conn, table, read_only):
36 self.conn = conn 29 self.conn = conn
37 self.nohist = nohist
38 self.read_only = read_only 30 self.read_only = read_only
39 self.dirty = False 31 self.table = table
40 if nohist: 32
41 self.table = "%s_nohist" % table 33 # Creating the table even if the server is read-only.
42 else: 34 # This avoids a race condition if a shared database
43 self.table = "%s_hist" % table 35 # is accessed by a read-only server first.
44 36
45 if self.read_only: 37 with closing(self.conn.cursor()) as cursor:
46 table_exists = self._execute( 38 cursor.execute("CREATE TABLE IF NOT EXISTS %s \
47 "SELECT count(*) FROM sqlite_master \
48 WHERE type='table' AND name='%s'" % (self.table))
49 if not table_exists:
50 raise prserv.NotFoundError
51 else:
52 self._execute("CREATE TABLE IF NOT EXISTS %s \
53 (version TEXT NOT NULL, \ 39 (version TEXT NOT NULL, \
54 pkgarch TEXT NOT NULL, \ 40 pkgarch TEXT NOT NULL, \
55 checksum TEXT NOT NULL, \ 41 checksum TEXT NOT NULL, \
56 value INTEGER, \ 42 value TEXT, \
57 PRIMARY KEY (version, pkgarch, checksum));" % self.table) 43 PRIMARY KEY (version, pkgarch, checksum, value));" % self.table)
58
59 def _execute(self, *query):
60 """Execute a query, waiting to acquire a lock if necessary"""
61 start = time.time()
62 end = start + 20
63 while True:
64 try:
65 return self.conn.execute(*query)
66 except sqlite3.OperationalError as exc:
67 if "is locked" in str(exc) and end > time.time():
68 continue
69 raise exc
70
71 def sync(self):
72 if not self.read_only:
73 self.conn.commit() 44 self.conn.commit()
74 self._execute("BEGIN EXCLUSIVE TRANSACTION")
75 45
76 def sync_if_dirty(self): 46 def _extremum_value(self, rows, is_max):
77 if self.dirty: 47 value = None
78 self.sync() 48
79 self.dirty = False 49 for row in rows:
50 current_value = row[0]
51 if value is None:
52 value = current_value
53 else:
54 if is_max:
55 is_new_extremum = revision_greater(current_value, value)
56 else:
57 is_new_extremum = revision_smaller(current_value, value)
58 if is_new_extremum:
59 value = current_value
60 return value
61
62 def _max_value(self, rows):
63 return self._extremum_value(rows, True)
64
65 def _min_value(self, rows):
66 return self._extremum_value(rows, False)
80 67
81 def test_package(self, version, pkgarch): 68 def test_package(self, version, pkgarch):
82 """Returns whether the specified package version is found in the database for the specified architecture""" 69 """Returns whether the specified package version is found in the database for the specified architecture"""
83 70
84 # Just returns the value if found or None otherwise 71 # Just returns the value if found or None otherwise
85 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table, 72 with closing(self.conn.cursor()) as cursor:
86 (version, pkgarch)) 73 data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=?;" % self.table,
87 row=data.fetchone() 74 (version, pkgarch))
88 if row is not None: 75 row=data.fetchone()
89 return True 76 if row is not None:
90 else: 77 return True
91 return False 78 else:
79 return False
80
81 def test_checksum_value(self, version, pkgarch, checksum, value):
82 """Returns whether the specified value is found in the database for the specified package, architecture and checksum"""
83
84 with closing(self.conn.cursor()) as cursor:
85 data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and checksum=? and value=?;" % self.table,
86 (version, pkgarch, checksum, value))
87 row=data.fetchone()
88 if row is not None:
89 return True
90 else:
91 return False
92 92
93 def test_value(self, version, pkgarch, value): 93 def test_value(self, version, pkgarch, value):
94 """Returns whether the specified value is found in the database for the specified package and architecture""" 94 """Returns whether the specified value is found in the database for the specified package and architecture"""
95 95
96 # Just returns the value if found or None otherwise 96 # Just returns the value if found or None otherwise
97 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table, 97 with closing(self.conn.cursor()) as cursor:
98 (version, pkgarch, value)) 98 data=cursor.execute("SELECT value FROM %s WHERE version=? AND pkgarch=? and value=?;" % self.table,
99 row=data.fetchone() 99 (version, pkgarch, value))
100 if row is not None: 100 row=data.fetchone()
101 return True 101 if row is not None:
102 else: 102 return True
103 return False 103 else:
104 return False
104 105
105 def find_value(self, version, pkgarch, checksum): 106
107 def find_package_max_value(self, version, pkgarch):
108 """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value"""
109
110 with closing(self.conn.cursor()) as cursor:
111 data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=?;" % (self.table),
112 (version, pkgarch))
113 rows = data.fetchall()
114 value = self._max_value(rows)
115 return value
116
117 def find_value(self, version, pkgarch, checksum, history=False):
106 """Returns the value for the specified checksum if found or None otherwise.""" 118 """Returns the value for the specified checksum if found or None otherwise."""
107 119
108 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, 120 if history:
109 (version, pkgarch, checksum)) 121 return self.find_min_value(version, pkgarch, checksum)
110 row=data.fetchone()
111 if row is not None:
112 return row[0]
113 else: 122 else:
114 return None 123 return self.find_max_value(version, pkgarch, checksum)
115 124
116 def find_max_value(self, version, pkgarch):
117 """Returns the greatest value for (version, pkgarch), or None if not found. Doesn't create a new value"""
118 125
119 data = self._execute("SELECT max(value) FROM %s where version=? AND pkgarch=?;" % (self.table), 126 def _find_extremum_value(self, version, pkgarch, checksum, is_max):
120 (version, pkgarch)) 127 """Returns the maximum (if is_max is True) or minimum (if is_max is False) value
121 row = data.fetchone() 128 for (version, pkgarch, checksum), or None if not found. Doesn't create a new value"""
122 if row is not None:
123 return row[0]
124 else:
125 return None
126
127 def _get_value_hist(self, version, pkgarch, checksum):
128 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
129 (version, pkgarch, checksum))
130 row=data.fetchone()
131 if row is not None:
132 return row[0]
133 else:
134 #no value found, try to insert
135 if self.read_only:
136 data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table),
137 (version, pkgarch))
138 row = data.fetchone()
139 if row is not None:
140 return row[0]
141 else:
142 return 0
143 129
144 try: 130 with closing(self.conn.cursor()) as cursor:
145 self._execute("INSERT INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));" 131 data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND checksum=?;" % (self.table),
146 % (self.table, self.table), 132 (version, pkgarch, checksum))
147 (version, pkgarch, checksum, version, pkgarch)) 133 rows = data.fetchall()
148 except sqlite3.IntegrityError as exc: 134 return self._extremum_value(rows, is_max)
149 logger.error(str(exc))
150 135
151 self.dirty = True 136 def find_max_value(self, version, pkgarch, checksum):
137 return self._find_extremum_value(version, pkgarch, checksum, True)
152 138
153 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, 139 def find_min_value(self, version, pkgarch, checksum):
154 (version, pkgarch, checksum)) 140 return self._find_extremum_value(version, pkgarch, checksum, False)
155 row=data.fetchone() 141
156 if row is not None: 142 def find_new_subvalue(self, version, pkgarch, base):
157 return row[0] 143 """Take and increase the greatest "<base>.y" value for (version, pkgarch), or return "<base>.0" if not found.
158 else: 144 This doesn't store a new value."""
159 raise prserv.NotFoundError 145
160 146 with closing(self.conn.cursor()) as cursor:
161 def _get_value_no_hist(self, version, pkgarch, checksum): 147 data = cursor.execute("SELECT value FROM %s where version=? AND pkgarch=? AND value LIKE '%s.%%';" % (self.table, base),
162 data=self._execute("SELECT value FROM %s \ 148 (version, pkgarch))
163 WHERE version=? AND pkgarch=? AND checksum=? AND \ 149 rows = data.fetchall()
164 value >= (select max(value) from %s where version=? AND pkgarch=?);" 150 value = self._max_value(rows)
165 % (self.table, self.table), 151
166 (version, pkgarch, checksum, version, pkgarch)) 152 if value is not None:
167 row=data.fetchone() 153 return increase_revision(value)
168 if row is not None:
169 return row[0]
170 else:
171 #no value found, try to insert
172 if self.read_only:
173 data = self._execute("SELECT ifnull(max(value)+1, 0) FROM %s where version=? AND pkgarch=?;" % (self.table),
174 (version, pkgarch))
175 return data.fetchone()[0]
176
177 try:
178 self._execute("INSERT OR REPLACE INTO %s VALUES (?, ?, ?, (select ifnull(max(value)+1, 0) from %s where version=? AND pkgarch=?));"
179 % (self.table, self.table),
180 (version, pkgarch, checksum, version, pkgarch))
181 except sqlite3.IntegrityError as exc:
182 logger.error(str(exc))
183 self.conn.rollback()
184
185 self.dirty = True
186
187 data=self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
188 (version, pkgarch, checksum))
189 row=data.fetchone()
190 if row is not None:
191 return row[0]
192 else: 154 else:
193 raise prserv.NotFoundError 155 return base + ".0"
194 156
195 def get_value(self, version, pkgarch, checksum): 157 def store_value(self, version, pkgarch, checksum, value):
196 if self.nohist: 158 """Store value in the database"""
197 return self._get_value_no_hist(version, pkgarch, checksum) 159
198 else: 160 if not self.read_only and not self.test_checksum_value(version, pkgarch, checksum, value):
199 return self._get_value_hist(version, pkgarch, checksum) 161 with closing(self.conn.cursor()) as cursor:
200 162 cursor.execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table),
201 def _import_hist(self, version, pkgarch, checksum, value):
202 if self.read_only:
203 return None
204
205 val = None
206 data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table,
207 (version, pkgarch, checksum))
208 row = data.fetchone()
209 if row is not None:
210 val=row[0]
211 else:
212 #no value found, try to insert
213 try:
214 self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table),
215 (version, pkgarch, checksum, value)) 163 (version, pkgarch, checksum, value))
216 except sqlite3.IntegrityError as exc: 164 self.conn.commit()
217 logger.error(str(exc))
218 165
219 self.dirty = True 166 def _get_value(self, version, pkgarch, checksum, history):
220 167
221 data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=?;" % self.table, 168 max_value = self.find_package_max_value(version, pkgarch)
222 (version, pkgarch, checksum))
223 row = data.fetchone()
224 if row is not None:
225 val = row[0]
226 return val
227 169
228 def _import_no_hist(self, version, pkgarch, checksum, value): 170 if max_value is None:
229 if self.read_only: 171 # version, pkgarch completely unknown. Return initial value.
230 return None 172 return "0"
231 173
232 try: 174 value = self.find_value(version, pkgarch, checksum, history)
233 #try to insert 175
234 self._execute("INSERT INTO %s VALUES (?, ?, ?, ?);" % (self.table), 176 if value is None:
235 (version, pkgarch, checksum, value)) 177 # version, pkgarch found but not checksum. Create a new value from the maximum one
236 except sqlite3.IntegrityError as exc: 178 return increase_revision(max_value)
237 #already have the record, try to update 179
238 try: 180 if history:
239 self._execute("UPDATE %s SET value=? WHERE version=? AND pkgarch=? AND checksum=? AND value<?" 181 return value
240 % (self.table), 182
241 (value, version, pkgarch, checksum, value)) 183 # "no history" mode - If the value is not the maximum value for the package, need to increase it.
242 except sqlite3.IntegrityError as exc: 184 if max_value > value:
243 logger.error(str(exc)) 185 return increase_revision(max_value)
244
245 self.dirty = True
246
247 data = self._execute("SELECT value FROM %s WHERE version=? AND pkgarch=? AND checksum=? AND value>=?;" % self.table,
248 (version, pkgarch, checksum, value))
249 row=data.fetchone()
250 if row is not None:
251 return row[0]
252 else: 186 else:
253 return None 187 return value
188
189 def get_value(self, version, pkgarch, checksum, history):
190 value = self._get_value(version, pkgarch, checksum, history)
191 if not self.read_only:
192 self.store_value(version, pkgarch, checksum, value)
193 return value
254 194
255 def importone(self, version, pkgarch, checksum, value): 195 def importone(self, version, pkgarch, checksum, value):
256 if self.nohist: 196 self.store_value(version, pkgarch, checksum, value)
257 return self._import_no_hist(version, pkgarch, checksum, value) 197 return value
258 else:
259 return self._import_hist(version, pkgarch, checksum, value)
260 198
261 def export(self, version, pkgarch, checksum, colinfo): 199 def export(self, version, pkgarch, checksum, colinfo, history=False):
262 metainfo = {} 200 metainfo = {}
263 #column info 201 with closing(self.conn.cursor()) as cursor:
264 if colinfo: 202 #column info
265 metainfo["tbl_name"] = self.table 203 if colinfo:
266 metainfo["core_ver"] = prserv.__version__ 204 metainfo["tbl_name"] = self.table
267 metainfo["col_info"] = [] 205 metainfo["core_ver"] = prserv.__version__
268 data = self._execute("PRAGMA table_info(%s);" % self.table) 206 metainfo["col_info"] = []
207 data = cursor.execute("PRAGMA table_info(%s);" % self.table)
208 for row in data:
209 col = {}
210 col["name"] = row["name"]
211 col["type"] = row["type"]
212 col["notnull"] = row["notnull"]
213 col["dflt_value"] = row["dflt_value"]
214 col["pk"] = row["pk"]
215 metainfo["col_info"].append(col)
216
217 #data info
218 datainfo = []
219
220 if history:
221 sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table
222 else:
223 sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \
224 (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \
225 WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table)
226 sqlarg = []
227 where = ""
228 if version:
229 where += "AND T1.version=? "
230 sqlarg.append(str(version))
231 if pkgarch:
232 where += "AND T1.pkgarch=? "
233 sqlarg.append(str(pkgarch))
234 if checksum:
235 where += "AND T1.checksum=? "
236 sqlarg.append(str(checksum))
237
238 sqlstmt += where + ";"
239
240 if len(sqlarg):
241 data = cursor.execute(sqlstmt, tuple(sqlarg))
242 else:
243 data = cursor.execute(sqlstmt)
269 for row in data: 244 for row in data:
270 col = {} 245 if row["version"]:
271 col["name"] = row["name"] 246 col = {}
272 col["type"] = row["type"] 247 col["version"] = row["version"]
273 col["notnull"] = row["notnull"] 248 col["pkgarch"] = row["pkgarch"]
274 col["dflt_value"] = row["dflt_value"] 249 col["checksum"] = row["checksum"]
275 col["pk"] = row["pk"] 250 col["value"] = row["value"]
276 metainfo["col_info"].append(col) 251 datainfo.append(col)
277
278 #data info
279 datainfo = []
280
281 if self.nohist:
282 sqlstmt = "SELECT T1.version, T1.pkgarch, T1.checksum, T1.value FROM %s as T1, \
283 (SELECT version, pkgarch, max(value) as maxvalue FROM %s GROUP BY version, pkgarch) as T2 \
284 WHERE T1.version=T2.version AND T1.pkgarch=T2.pkgarch AND T1.value=T2.maxvalue " % (self.table, self.table)
285 else:
286 sqlstmt = "SELECT * FROM %s as T1 WHERE 1=1 " % self.table
287 sqlarg = []
288 where = ""
289 if version:
290 where += "AND T1.version=? "
291 sqlarg.append(str(version))
292 if pkgarch:
293 where += "AND T1.pkgarch=? "
294 sqlarg.append(str(pkgarch))
295 if checksum:
296 where += "AND T1.checksum=? "
297 sqlarg.append(str(checksum))
298
299 sqlstmt += where + ";"
300
301 if len(sqlarg):
302 data = self._execute(sqlstmt, tuple(sqlarg))
303 else:
304 data = self._execute(sqlstmt)
305 for row in data:
306 if row["version"]:
307 col = {}
308 col["version"] = row["version"]
309 col["pkgarch"] = row["pkgarch"]
310 col["checksum"] = row["checksum"]
311 col["value"] = row["value"]
312 datainfo.append(col)
313 return (metainfo, datainfo) 252 return (metainfo, datainfo)
314 253
315 def dump_db(self, fd): 254 def dump_db(self, fd):
@@ -322,9 +261,8 @@ class PRTable(object):
322 261
323class PRData(object): 262class PRData(object):
324 """Object representing the PR database""" 263 """Object representing the PR database"""
325 def __init__(self, filename, nohist=True, read_only=False): 264 def __init__(self, filename, read_only=False):
326 self.filename=os.path.abspath(filename) 265 self.filename=os.path.abspath(filename)
327 self.nohist=nohist
328 self.read_only = read_only 266 self.read_only = read_only
329 #build directory hierarchy 267 #build directory hierarchy
330 try: 268 try:
@@ -334,14 +272,15 @@ class PRData(object):
334 raise e 272 raise e
335 uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "") 273 uri = "file:%s%s" % (self.filename, "?mode=ro" if self.read_only else "")
336 logger.debug("Opening PRServ database '%s'" % (uri)) 274 logger.debug("Opening PRServ database '%s'" % (uri))
337 self.connection=sqlite3.connect(uri, uri=True, isolation_level="EXCLUSIVE", check_same_thread = False) 275 self.connection=sqlite3.connect(uri, uri=True)
338 self.connection.row_factory=sqlite3.Row 276 self.connection.row_factory=sqlite3.Row
339 if not self.read_only: 277 self.connection.execute("PRAGMA synchronous = OFF;")
340 self.connection.execute("pragma synchronous = off;") 278 self.connection.execute("PRAGMA journal_mode = WAL;")
341 self.connection.execute("PRAGMA journal_mode = MEMORY;") 279 self.connection.commit()
342 self._tables={} 280 self._tables={}
343 281
344 def disconnect(self): 282 def disconnect(self):
283 self.connection.commit()
345 self.connection.close() 284 self.connection.close()
346 285
347 def __getitem__(self, tblname): 286 def __getitem__(self, tblname):
@@ -351,7 +290,7 @@ class PRData(object):
351 if tblname in self._tables: 290 if tblname in self._tables:
352 return self._tables[tblname] 291 return self._tables[tblname]
353 else: 292 else:
354 tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.nohist, self.read_only) 293 tableobj = self._tables[tblname] = PRTable(self.connection, tblname, self.read_only)
355 return tableobj 294 return tableobj
356 295
357 def __delitem__(self, tblname): 296 def __delitem__(self, tblname):
@@ -359,3 +298,4 @@ class PRData(object):
359 del self._tables[tblname] 298 del self._tables[tblname]
360 logger.info("drop table %s" % (tblname)) 299 logger.info("drop table %s" % (tblname))
361 self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname) 300 self.connection.execute("DROP TABLE IF EXISTS %s;" % tblname)
301 self.connection.commit()
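
The reworked PRTable stores values as TEXT revisions, keeps every (version, pkgarch, checksum, value) row instead of a single counter, and derives sub-revisions such as "5.0", "5.1" under a given base via find_new_subvalue(). A minimal in-memory sketch of that table shape and query (schema as in the patch; table name and helper logic simplified):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE IF NOT EXISTS PRMAIN "
             "(version TEXT NOT NULL, pkgarch TEXT NOT NULL, checksum TEXT NOT NULL, "
             "value TEXT, PRIMARY KEY (version, pkgarch, checksum, value))")
conn.executemany("INSERT INTO PRMAIN VALUES (?, ?, ?, ?)",
                 [("1.0", "core2-64", "aaa", "5"),
                  ("1.0", "core2-64", "bbb", "5.0"),
                  ("1.0", "core2-64", "ccc", "5.1")])

# find_new_subvalue("1.0", "core2-64", "5"): take the highest existing "5.y"
# and bump its last field, or start at "5.0" if nothing matches.
rows = conn.execute("SELECT value FROM PRMAIN WHERE version=? AND pkgarch=? "
                    "AND value LIKE '5.%'", ("1.0", "core2-64")).fetchall()
best = max((r[0] for r in rows),
           key=lambda v: [int(f) for f in v.split(".")], default=None)
print("5.0" if best is None else "5." + str(int(best.split(".")[-1]) + 1))   # -> 5.2
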
diff --git a/bitbake/lib/prserv/serv.py b/bitbake/lib/prserv/serv.py
index dc4be5b620..e175886308 100644
--- a/bitbake/lib/prserv/serv.py
+++ b/bitbake/lib/prserv/serv.py
@@ -12,6 +12,7 @@ import sqlite3
12import prserv 12import prserv
13import prserv.db 13import prserv.db
14import errno 14import errno
15from . import create_async_client, revision_smaller, increase_revision
15import bb.asyncrpc 16import bb.asyncrpc
16 17
17logger = logging.getLogger("BitBake.PRserv") 18logger = logging.getLogger("BitBake.PRserv")
@@ -41,18 +42,16 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection):
41 try: 42 try:
42 return await super().dispatch_message(msg) 43 return await super().dispatch_message(msg)
43 except: 44 except:
44 self.server.table.sync()
45 raise 45 raise
46 else:
47 self.server.table.sync_if_dirty()
48 46
49 async def handle_test_pr(self, request): 47 async def handle_test_pr(self, request):
50 '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value''' 48 '''Finds the PR value corresponding to the request. If not found, returns None and doesn't insert a new value'''
51 version = request["version"] 49 version = request["version"]
52 pkgarch = request["pkgarch"] 50 pkgarch = request["pkgarch"]
53 checksum = request["checksum"] 51 checksum = request["checksum"]
52 history = request["history"]
54 53
55 value = self.server.table.find_value(version, pkgarch, checksum) 54 value = self.server.table.find_value(version, pkgarch, checksum, history)
56 return {"value": value} 55 return {"value": value}
57 56
58 async def handle_test_package(self, request): 57 async def handle_test_package(self, request):
@@ -68,22 +67,110 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection):
68 version = request["version"] 67 version = request["version"]
69 pkgarch = request["pkgarch"] 68 pkgarch = request["pkgarch"]
70 69
71 value = self.server.table.find_max_value(version, pkgarch) 70 value = self.server.table.find_package_max_value(version, pkgarch)
72 return {"value": value} 71 return {"value": value}
73 72
74 async def handle_get_pr(self, request): 73 async def handle_get_pr(self, request):
75 version = request["version"] 74 version = request["version"]
76 pkgarch = request["pkgarch"] 75 pkgarch = request["pkgarch"]
77 checksum = request["checksum"] 76 checksum = request["checksum"]
77 history = request["history"]
78 78
79 response = None 79 if self.upstream_client is None:
80 try: 80 value = self.server.table.get_value(version, pkgarch, checksum, history)
81 value = self.server.table.get_value(version, pkgarch, checksum) 81 return {"value": value}
82 response = {"value": value}
83 except prserv.NotFoundError:
84 self.logger.error("failure storing value in database for (%s, %s)",version, checksum)
85 82
86 return response 83 # We have an upstream server.
84 # Check whether the local server already knows the requested configuration.
85 # If the configuration is a new one, the generated value we will add will
86 # depend on what's on the upstream server. That's why we're calling find_value()
87 # instead of get_value() directly.
88
89 value = self.server.table.find_value(version, pkgarch, checksum, history)
90 upstream_max = await self.upstream_client.max_package_pr(version, pkgarch)
91
92 if value is not None:
93
94 # The configuration is already known locally.
95
96 if history:
97 value = self.server.table.get_value(version, pkgarch, checksum, history)
98 else:
99 existing_value = value
100 # In "no history", we need to make sure the value doesn't decrease
101 # and is not lower than the maximum upstream value
102 # and the maximum local value
103
104 local_max = self.server.table.find_package_max_value(version, pkgarch)
105 if revision_smaller(value, local_max):
106 value = increase_revision(local_max)
107
108 if revision_smaller(value, upstream_max):
109 # Ask upstream whether it knows the checksum
110 upstream_value = await self.upstream_client.test_pr(version, pkgarch, checksum)
111 if upstream_value is None:
112 # Upstream doesn't have our checksum, let's create a new one
113 value = upstream_max + ".0"
114 else:
115 # Fine to take the same value as upstream
116 value = upstream_max
117
118 if not value == existing_value and not self.server.read_only:
119 self.server.table.store_value(version, pkgarch, checksum, value)
120
121 return {"value": value}
122
123 # The configuration is a new one for the local server
124 # Let's ask the upstream server whether it knows it
125
126 known_upstream = await self.upstream_client.test_package(version, pkgarch)
127
128 if not known_upstream:
129
130 # The package is not known upstream, must be a local-only package
131 # Let's compute the PR number using the local-only method
132
133 value = self.server.table.get_value(version, pkgarch, checksum, history)
134 return {"value": value}
135
136 # The package is known upstream, let's ask the upstream server
137 # whether it knows our new output hash
138
139 value = await self.upstream_client.test_pr(version, pkgarch, checksum)
140
141 if value is not None:
142
143 # Upstream knows this output hash, let's store it and use it too.
144
145 if not self.server.read_only:
146 self.server.table.store_value(version, pkgarch, checksum, value)
147 # If the local server is read-only, it won't be able to store the new
148 # value in the database and will have to keep asking the upstream server
149 return {"value": value}
150
151 # The output hash doesn't exist upstream, get the most recent number from upstream (x)
152 # Then, we want to have a new PR value for the local server: x.y
153
154 upstream_max = await self.upstream_client.max_package_pr(version, pkgarch)
155 # Here we know that the package is known upstream, so upstream_max can't be None
156 subvalue = self.server.table.find_new_subvalue(version, pkgarch, upstream_max)
157
158 if not self.server.read_only:
159 self.server.table.store_value(version, pkgarch, checksum, subvalue)
160
161 return {"value": subvalue}
162
163 async def process_requests(self):
164 if self.server.upstream is not None:
165 self.upstream_client = await create_async_client(self.server.upstream)
166 else:
167 self.upstream_client = None
168
169 try:
170 await super().process_requests()
171 finally:
172 if self.upstream_client is not None:
173 await self.upstream_client.close()
87 174
88 async def handle_import_one(self, request): 175 async def handle_import_one(self, request):
89 response = None 176 response = None
@@ -104,9 +191,10 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection):
104 pkgarch = request["pkgarch"] 191 pkgarch = request["pkgarch"]
105 checksum = request["checksum"] 192 checksum = request["checksum"]
106 colinfo = request["colinfo"] 193 colinfo = request["colinfo"]
194 history = request["history"]
107 195
108 try: 196 try:
109 (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo) 197 (metainfo, datainfo) = self.server.table.export(version, pkgarch, checksum, colinfo, history)
110 except sqlite3.Error as exc: 198 except sqlite3.Error as exc:
111 self.logger.error(str(exc)) 199 self.logger.error(str(exc))
112 metainfo = datainfo = None 200 metainfo = datainfo = None
@@ -117,11 +205,12 @@ class PRServerClient(bb.asyncrpc.AsyncServerConnection):
117 return {"readonly": self.server.read_only} 205 return {"readonly": self.server.read_only}
118 206
119class PRServer(bb.asyncrpc.AsyncServer): 207class PRServer(bb.asyncrpc.AsyncServer):
120 def __init__(self, dbfile, read_only=False): 208 def __init__(self, dbfile, read_only=False, upstream=None):
121 super().__init__(logger) 209 super().__init__(logger)
122 self.dbfile = dbfile 210 self.dbfile = dbfile
123 self.table = None 211 self.table = None
124 self.read_only = read_only 212 self.read_only = read_only
213 self.upstream = upstream
125 214
126 def accept_client(self, socket): 215 def accept_client(self, socket):
127 return PRServerClient(socket, self) 216 return PRServerClient(socket, self)
@@ -134,27 +223,25 @@ class PRServer(bb.asyncrpc.AsyncServer):
134 self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" % 223 self.logger.info("Started PRServer with DBfile: %s, Address: %s, PID: %s" %
135 (self.dbfile, self.address, str(os.getpid()))) 224 (self.dbfile, self.address, str(os.getpid())))
136 225
226 if self.upstream is not None:
227 self.logger.info("And upstream PRServer: %s " % (self.upstream))
228
137 return tasks 229 return tasks
138 230
139 async def stop(self): 231 async def stop(self):
140 self.table.sync_if_dirty()
141 self.db.disconnect() 232 self.db.disconnect()
142 await super().stop() 233 await super().stop()
143 234
144 def signal_handler(self):
145 super().signal_handler()
146 if self.table:
147 self.table.sync()
148
149class PRServSingleton(object): 235class PRServSingleton(object):
150 def __init__(self, dbfile, logfile, host, port): 236 def __init__(self, dbfile, logfile, host, port, upstream):
151 self.dbfile = dbfile 237 self.dbfile = dbfile
152 self.logfile = logfile 238 self.logfile = logfile
153 self.host = host 239 self.host = host
154 self.port = port 240 self.port = port
241 self.upstream = upstream
155 242
156 def start(self): 243 def start(self):
157 self.prserv = PRServer(self.dbfile) 244 self.prserv = PRServer(self.dbfile, upstream=self.upstream)
158 self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port) 245 self.prserv.start_tcp_server(socket.gethostbyname(self.host), self.port)
159 self.process = self.prserv.serve_as_process(log_level=logging.WARNING) 246 self.process = self.prserv.serve_as_process(log_level=logging.WARNING)
160 247
@@ -233,7 +320,7 @@ def run_as_daemon(func, pidfile, logfile):
233 os.remove(pidfile) 320 os.remove(pidfile)
234 os._exit(0) 321 os._exit(0)
235 322
236def start_daemon(dbfile, host, port, logfile, read_only=False): 323def start_daemon(dbfile, host, port, logfile, read_only=False, upstream=None):
237 ip = socket.gethostbyname(host) 324 ip = socket.gethostbyname(host)
238 pidfile = PIDPREFIX % (ip, port) 325 pidfile = PIDPREFIX % (ip, port)
239 try: 326 try:
@@ -249,7 +336,7 @@ def start_daemon(dbfile, host, port, logfile, read_only=False):
249 336
250 dbfile = os.path.abspath(dbfile) 337 dbfile = os.path.abspath(dbfile)
251 def daemon_main(): 338 def daemon_main():
252 server = PRServer(dbfile, read_only=read_only) 339 server = PRServer(dbfile, read_only=read_only, upstream=upstream)
253 server.start_tcp_server(ip, port) 340 server.start_tcp_server(ip, port)
254 server.serve_forever() 341 server.serve_forever()
255 342
@@ -336,6 +423,9 @@ def auto_start(d):
336 423
337 host = host_params[0].strip().lower() 424 host = host_params[0].strip().lower()
338 port = int(host_params[1]) 425 port = int(host_params[1])
426
427 upstream = d.getVar("PRSERV_UPSTREAM") or None
428
339 if is_local_special(host, port): 429 if is_local_special(host, port):
340 import bb.utils 430 import bb.utils
341 cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) 431 cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE"))
@@ -350,7 +440,7 @@ def auto_start(d):
350 auto_shutdown() 440 auto_shutdown()
351 if not singleton: 441 if not singleton:
352 bb.utils.mkdirhier(cachedir) 442 bb.utils.mkdirhier(cachedir)
353 singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port) 443 singleton = PRServSingleton(os.path.abspath(dbfile), os.path.abspath(logfile), host, port, upstream)
354 singleton.start() 444 singleton.start()
355 if singleton: 445 if singleton:
356 host = singleton.host 446 host = singleton.host
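
Taken together, the serv.py changes above let a local PR server chain to an upstream one (configured via PRSERV_UPSTREAM): output hashes already known upstream reuse the upstream PR value, while purely local hashes receive sub-revisions of the upstream maximum. As a rough, illustrative sketch of wiring two servers together with the create_server()/create_client() helpers exercised by the new tests below (the addresses, database paths and placeholder hash are invented for this example and are not part of the patch):

    from prserv import create_server, create_client

    # Hypothetical paths and addresses, for illustration only.
    upstream = create_server("127.0.0.1:0", "/tmp/upstream-prserv.sqlite3")
    upstream.serve_as_process()

    local = create_server("127.0.0.1:0", "/tmp/local-prserv.sqlite3",
                          upstream=upstream.address)
    local.serve_as_process()

    client = create_client(local.address)
    checksum = "0" * 64   # placeholder output hash
    # Returns "0" when the hash is new everywhere; "x.y" when only the upstream server knows the package.
    print(client.getPR("dummy-1.0-r0", "core2-64", checksum))
    client.close()
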
diff --git a/bitbake/lib/prserv/tests.py b/bitbake/lib/prserv/tests.py
new file mode 100644
index 0000000000..8765b129f2
--- /dev/null
+++ b/bitbake/lib/prserv/tests.py
@@ -0,0 +1,386 @@
1#! /usr/bin/env python3
2#
3# Copyright (C) 2024 BitBake Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8from . import create_server, create_client, increase_revision, revision_greater, revision_smaller, _revision_greater_or_equal
9import prserv.db as db
10from bb.asyncrpc import InvokeError
11import logging
12import os
13import sys
14import tempfile
15import unittest
16import socket
17import subprocess
18from pathlib import Path
19
20THIS_DIR = Path(__file__).parent
21BIN_DIR = THIS_DIR.parent.parent / "bin"
22
23version = "dummy-1.0-r0"
24pkgarch = "core2-64"
25other_arch = "aarch64"
26
27checksumX = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4f0"
28checksum0 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a0"
29checksum1 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a1"
30checksum2 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a2"
31checksum3 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a3"
32checksum4 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a4"
33checksum5 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a5"
34checksum6 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a6"
35checksum7 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a7"
36checksum8 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a8"
37checksum9 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4a9"
38checksum10 = "51bf8189dbe9ea81fa6dd89608bf19380c437a9cf12f6c6239887801ba4ab4aa"
39
40def server_prefunc(server, name):
41 logging.basicConfig(level=logging.DEBUG, filename='prserv-%s.log' % name, filemode='w',
42 format='%(levelname)s %(filename)s:%(lineno)d %(message)s')
43 server.logger.debug("Running server %s" % name)
44 sys.stdout = open('prserv-stdout-%s.log' % name, 'w')
45 sys.stderr = sys.stdout
46
47class PRTestSetup(object):
48
49 def start_server(self, name, dbfile, upstream=None, read_only=False, prefunc=server_prefunc):
50
51 def cleanup_server(server):
52 if server.process.exitcode is not None:
53 return
54 server.process.terminate()
55 server.process.join()
56
57 server = create_server(socket.gethostbyname("localhost") + ":0",
58 dbfile,
59 upstream=upstream,
60 read_only=read_only)
61
62 server.serve_as_process(prefunc=prefunc, args=(name,))
63 self.addCleanup(cleanup_server, server)
64
65 return server
66
67 def start_client(self, server_address):
68 def cleanup_client(client):
69 client.close()
70
71 client = create_client(server_address)
72 self.addCleanup(cleanup_client, client)
73
74 return client
75
76class FunctionTests(unittest.TestCase):
77
78 def setUp(self):
79 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
80 self.addCleanup(self.temp_dir.cleanup)
81
82 def test_increase_revision(self):
83 self.assertEqual(increase_revision("1"), "2")
84 self.assertEqual(increase_revision("1.0"), "1.1")
85 self.assertEqual(increase_revision("1.1.1"), "1.1.2")
86 self.assertEqual(increase_revision("1.1.1.3"), "1.1.1.4")
87 self.assertRaises(ValueError, increase_revision, "1.a")
88 self.assertRaises(ValueError, increase_revision, "1.")
89 self.assertRaises(ValueError, increase_revision, "")
90
91 def test_revision_greater_or_equal(self):
92 self.assertTrue(_revision_greater_or_equal("2", "2"))
93 self.assertTrue(_revision_greater_or_equal("2", "1"))
94 self.assertTrue(_revision_greater_or_equal("10", "2"))
95 self.assertTrue(_revision_greater_or_equal("1.10", "1.2"))
96 self.assertFalse(_revision_greater_or_equal("1.2", "1.10"))
97 self.assertTrue(_revision_greater_or_equal("1.10", "1"))
98 self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10"))
99 self.assertFalse(_revision_greater_or_equal("1.10.1", "1.10.2"))
100 self.assertTrue(_revision_greater_or_equal("1.10.1", "1.10.1"))
101 self.assertTrue(_revision_greater_or_equal("1.10.1", "1"))
102 self.assertTrue(revision_greater("1.20", "1.3"))
103 self.assertTrue(revision_smaller("1.3", "1.20"))
104
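
The assertions in the two tests above pin down the intended behaviour of these helpers: increase_revision() bumps the last dot-separated component, and revisions are compared component by component as integers, with a longer revision comparing greater than or equal to its prefix. A minimal sketch consistent with these tests (the real helpers live in lib/prserv/__init__.py and may be implemented differently):

    def increase_revision(rev):
        # "1" -> "2", "1.0" -> "1.1"; rejects empty or non-numeric components.
        parts = rev.split(".")
        if not all(p.isdigit() for p in parts):
            raise ValueError("Invalid revision: %s" % rev)
        parts[-1] = str(int(parts[-1]) + 1)
        return ".".join(parts)

    def _revision_greater_or_equal(a, b):
        # Compare dot-separated revisions numerically, component by component.
        a_parts = [int(p) for p in a.split(".")]
        b_parts = [int(p) for p in b.split(".")]
        for x, y in zip(a_parts, b_parts):
            if x != y:
                return x > y
        # All shared components are equal: "1.10.1" >= "1.10" and "1.10.1" >= "1.10.1"
        return len(a_parts) >= len(b_parts)

    def revision_greater(a, b):
        return a != b and _revision_greater_or_equal(a, b)

    def revision_smaller(a, b):
        return a != b and not _revision_greater_or_equal(a, b)
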
105 # DB tests
106
107 def test_db(self):
108 dbfile = os.path.join(self.temp_dir.name, "testtable.sqlite3")
109
110 self.db = db.PRData(dbfile)
111 self.table = self.db["PRMAIN"]
112
113 self.table.store_value(version, pkgarch, checksum0, "0")
114 self.table.store_value(version, pkgarch, checksum1, "1")
115 # "No history" mode supports multiple PRs for the same checksum
116 self.table.store_value(version, pkgarch, checksum0, "2")
117 self.table.store_value(version, pkgarch, checksum2, "1.0")
118
119 self.assertTrue(self.table.test_package(version, pkgarch))
120 self.assertFalse(self.table.test_package(version, other_arch))
121
122 self.assertTrue(self.table.test_value(version, pkgarch, "0"))
123 self.assertTrue(self.table.test_value(version, pkgarch, "1"))
124 self.assertTrue(self.table.test_value(version, pkgarch, "2"))
125
126 self.assertEqual(self.table.find_package_max_value(version, pkgarch), "2")
127
128 self.assertEqual(self.table.find_min_value(version, pkgarch, checksum0), "0")
129 self.assertEqual(self.table.find_max_value(version, pkgarch, checksum0), "2")
130
131 # Test history modes
132 self.assertEqual(self.table.find_value(version, pkgarch, checksum0, True), "0")
133 self.assertEqual(self.table.find_value(version, pkgarch, checksum0, False), "2")
134
135 self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "3"), "3.0")
136 self.assertEqual(self.table.find_new_subvalue(version, pkgarch, "1"), "1.1")
137
138 # Revision comparison tests
139 self.table.store_value(version, pkgarch, checksum1, "1.3")
140 self.table.store_value(version, pkgarch, checksum1, "1.20")
141 self.assertEqual(self.table.find_min_value(version, pkgarch, checksum1), "1")
142 self.assertEqual(self.table.find_max_value(version, pkgarch, checksum1), "1.20")
143
144class PRBasicTests(PRTestSetup, unittest.TestCase):
145
146 def setUp(self):
147 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
148 self.addCleanup(self.temp_dir.cleanup)
149
150 dbfile = os.path.join(self.temp_dir.name, "prtest-basic.sqlite3")
151
152 self.server1 = self.start_server("basic", dbfile)
153 self.client1 = self.start_client(self.server1.address)
154
155 def test_basic(self):
156
157        # Checks on a non-existing configuration
158
159 result = self.client1.test_pr(version, pkgarch, checksum0)
160 self.assertIsNone(result, "test_pr should return 'None' for a non existing PR")
161
162 result = self.client1.test_package(version, pkgarch)
163 self.assertFalse(result, "test_package should return 'False' for a non existing PR")
164
165 result = self.client1.max_package_pr(version, pkgarch)
166 self.assertIsNone(result, "max_package_pr should return 'None' for a non existing PR")
167
168 # Add a first configuration
169
170 result = self.client1.getPR(version, pkgarch, checksum0)
171 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'")
172
173 result = self.client1.test_pr(version, pkgarch, checksum0)
174 self.assertEqual(result, "0", "test_pr should return '0' here, matching the result of getPR")
175
176 result = self.client1.test_package(version, pkgarch)
177 self.assertTrue(result, "test_package should return 'True' for an existing PR")
178
179 result = self.client1.max_package_pr(version, pkgarch)
180 self.assertEqual(result, "0", "max_package_pr should return '0' in the current test series")
181
182 # Check that the same request gets the same value
183
184 result = self.client1.getPR(version, pkgarch, checksum0)
185 self.assertEqual(result, "0", "getPR: asking for the same PR a second time in a row should return the same value.")
186
187 # Add new configurations
188
189 result = self.client1.getPR(version, pkgarch, checksum1)
190 self.assertEqual(result, "1", "getPR: second PR of a package should be '1'")
191
192 result = self.client1.test_pr(version, pkgarch, checksum1)
193 self.assertEqual(result, "1", "test_pr should return '1' here, matching the result of getPR")
194
195 result = self.client1.max_package_pr(version, pkgarch)
196 self.assertEqual(result, "1", "max_package_pr should return '1' in the current test series")
197
198 result = self.client1.getPR(version, pkgarch, checksum2)
199        self.assertEqual(result, "2", "getPR: third PR of a package should be '2'")
200
201 result = self.client1.test_pr(version, pkgarch, checksum2)
202 self.assertEqual(result, "2", "test_pr should return '2' here, matching the result of getPR")
203
204 result = self.client1.max_package_pr(version, pkgarch)
205 self.assertEqual(result, "2", "max_package_pr should return '2' in the current test series")
206
207 result = self.client1.getPR(version, pkgarch, checksum3)
208        self.assertEqual(result, "3", "getPR: fourth PR of a package should be '3'")
209
210 result = self.client1.test_pr(version, pkgarch, checksum3)
211 self.assertEqual(result, "3", "test_pr should return '3' here, matching the result of getPR")
212
213 result = self.client1.max_package_pr(version, pkgarch)
214 self.assertEqual(result, "3", "max_package_pr should return '3' in the current test series")
215
216 # Ask again for the first configuration
217
218 result = self.client1.getPR(version, pkgarch, checksum0)
219 self.assertEqual(result, "4", "getPR: should return '4' in this configuration")
220
221 # Ask again with explicit "no history" mode
222
223 result = self.client1.getPR(version, pkgarch, checksum0, False)
224 self.assertEqual(result, "4", "getPR: should return '4' in this configuration")
225
226 # Ask again with explicit "history" mode. This should return the first recorded PR for checksum0
227
228 result = self.client1.getPR(version, pkgarch, checksum0, True)
229 self.assertEqual(result, "0", "getPR: should return '0' in this configuration")
230
231 # Check again that another pkgarg resets the counters
232
233 result = self.client1.test_pr(version, other_arch, checksum0)
234 self.assertIsNone(result, "test_pr should return 'None' for a non existing PR")
235
236 result = self.client1.test_package(version, other_arch)
237 self.assertFalse(result, "test_package should return 'False' for a non existing PR")
238
239 result = self.client1.max_package_pr(version, other_arch)
240 self.assertIsNone(result, "max_package_pr should return 'None' for a non existing PR")
241
242 # Now add the configuration
243
244 result = self.client1.getPR(version, other_arch, checksum0)
245 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'")
246
247 result = self.client1.test_pr(version, other_arch, checksum0)
248 self.assertEqual(result, "0", "test_pr should return '0' here, matching the result of getPR")
249
250 result = self.client1.test_package(version, other_arch)
251 self.assertTrue(result, "test_package should return 'True' for an existing PR")
252
253 result = self.client1.max_package_pr(version, other_arch)
254 self.assertEqual(result, "0", "max_package_pr should return '0' in the current test series")
255
256 result = self.client1.is_readonly()
257 self.assertFalse(result, "Server should not be described as 'read-only'")
258
259class PRUpstreamTests(PRTestSetup, unittest.TestCase):
260
261 def setUp(self):
262
263 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
264 self.addCleanup(self.temp_dir.cleanup)
265
266 dbfile2 = os.path.join(self.temp_dir.name, "prtest-upstream2.sqlite3")
267 self.server2 = self.start_server("upstream2", dbfile2)
268 self.client2 = self.start_client(self.server2.address)
269
270 dbfile1 = os.path.join(self.temp_dir.name, "prtest-upstream1.sqlite3")
271 self.server1 = self.start_server("upstream1", dbfile1, upstream=self.server2.address)
272 self.client1 = self.start_client(self.server1.address)
273
274 dbfile0 = os.path.join(self.temp_dir.name, "prtest-local.sqlite3")
275 self.server0 = self.start_server("local", dbfile0, upstream=self.server1.address)
276 self.client0 = self.start_client(self.server0.address)
277 self.shared_db = dbfile0
278
279 def test_upstream_and_readonly(self):
280
281 # For identical checksums, all servers should return the same PR
282
283 result = self.client2.getPR(version, pkgarch, checksum0)
284 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0'")
285
286 result = self.client1.getPR(version, pkgarch, checksum0)
287 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)")
288
289 result = self.client0.getPR(version, pkgarch, checksum0)
290 self.assertEqual(result, "0", "getPR: initial PR of a package should be '0' (same as upstream)")
291
292        # Now introduce new checksums on server1 for the same version
293
294 result = self.client1.getPR(version, pkgarch, checksum1)
295 self.assertEqual(result, "0.0", "getPR: first PR of a package which has a different checksum upstream should be '0.0'")
296
297 result = self.client1.getPR(version, pkgarch, checksum2)
298 self.assertEqual(result, "0.1", "getPR: second PR of a package that has a different checksum upstream should be '0.1'")
299
300        # Now introduce checksums on server0 for the same version
301
302 result = self.client1.getPR(version, pkgarch, checksum1)
303        self.assertEqual(result, "0.2", "getPR: the PR of an already-known checksum must not decrease")
304
305 result = self.client1.getPR(version, pkgarch, checksum2)
306 self.assertEqual(result, "0.3")
307
308 result = self.client1.max_package_pr(version, pkgarch)
309 self.assertEqual(result, "0.3")
310
311 result = self.client0.getPR(version, pkgarch, checksum3)
312 self.assertEqual(result, "0.3.0", "getPR: first PR of a package that doesn't exist upstream should be '0.3.0'")
313
314 result = self.client0.getPR(version, pkgarch, checksum4)
315 self.assertEqual(result, "0.3.1", "getPR: second PR of a package that doesn't exist upstream should be '0.3.1'")
316
317 result = self.client0.getPR(version, pkgarch, checksum3)
318 self.assertEqual(result, "0.3.2")
319
320 # More upstream updates
321        # Here, we assume no direct communication between server2 and server0; server2 only impacts server0
322 # after impacting server1
323
324 self.assertEqual(self.client2.getPR(version, pkgarch, checksum5), "1")
325 self.assertEqual(self.client1.getPR(version, pkgarch, checksum6), "1.0")
326 self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "1.1")
327 self.assertEqual(self.client0.getPR(version, pkgarch, checksum8), "1.1.0")
328 self.assertEqual(self.client0.getPR(version, pkgarch, checksum9), "1.1.1")
329
330 # "history" mode tests
331
332 self.assertEqual(self.client2.getPR(version, pkgarch, checksum0, True), "0")
333 self.assertEqual(self.client1.getPR(version, pkgarch, checksum2, True), "0.1")
334 self.assertEqual(self.client0.getPR(version, pkgarch, checksum3, True), "0.3.0")
335
336 # More "no history" mode tests
337
338 self.assertEqual(self.client2.getPR(version, pkgarch, checksum0), "2")
339 self.assertEqual(self.client1.getPR(version, pkgarch, checksum0), "2") # Same as upstream
340 self.assertEqual(self.client0.getPR(version, pkgarch, checksum0), "2") # Same as upstream
341 self.assertEqual(self.client1.getPR(version, pkgarch, checksum7), "3") # This could be surprising, but since the previous revision was "2", increasing it yields "3".
342 # We don't know how many upstream servers we have
343 # Start read-only server with server1 as upstream
344 self.server_ro = self.start_server("local-ro", self.shared_db, upstream=self.server1.address, read_only=True)
345 self.client_ro = self.start_client(self.server_ro.address)
346
347        self.assertTrue(self.client_ro.is_readonly(), "Server should be described as 'read-only'")
348
349        # Checks on non-existing configurations
350 self.assertIsNone(self.client_ro.test_pr(version, pkgarch, checksumX))
351 self.assertFalse(self.client_ro.test_package("unknown", pkgarch))
352
353 # Look up existing configurations
354 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0), "3") # "no history" mode
355 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum0, True), "0") # "history" mode
356 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3), "3")
357 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum3, True), "0.3.0")
358        self.assertEqual(self.client_ro.max_package_pr(version, pkgarch), "2") # expected, as "3" was never stored (the server is read-only)
359
360        # Try to insert a new value. This one is known upstream.
361 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum7), "3")
362 # Try to insert a completely new value. As the max upstream value is already "3", it should be "3.0"
363 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum10), "3.0")
364        # Same with another value, which only exists in the upstream's upstream server
365        # This time, as the direct upstream server doesn't know it, it will ask its own upstream server, where the value is known.
366 self.assertEqual(self.client_ro.getPR(version, pkgarch, checksum9), "3")
367
368class ScriptTests(unittest.TestCase):
369
370 def setUp(self):
371
372 self.temp_dir = tempfile.TemporaryDirectory(prefix='bb-prserv')
373 self.addCleanup(self.temp_dir.cleanup)
374 self.dbfile = os.path.join(self.temp_dir.name, "prtest.sqlite3")
375
376 def test_1_start_bitbake_prserv(self):
377 try:
378 subprocess.check_call([BIN_DIR / "bitbake-prserv", "--start", "-f", self.dbfile])
379 except subprocess.CalledProcessError as e:
380 self.fail("Failed to start bitbake-prserv: %s" % e.returncode)
381
382 def test_2_stop_bitbake_prserv(self):
383 try:
384 subprocess.check_call([BIN_DIR / "bitbake-prserv", "--stop"])
385 except subprocess.CalledProcessError as e:
386 self.fail("Failed to stop bitbake-prserv: %s" % e.returncode)
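
Assuming bitbake/lib is importable and the run happens inside a BitBake checkout (so that bitbake/bin/bitbake-prserv exists for ScriptTests), the new suite can presumably be driven by the standard unittest runner, for example:

    import sys
    import unittest

    sys.path.insert(0, "bitbake/lib")   # adjust to the local checkout layout
    from prserv import tests

    unittest.main(module=tests, argv=["prserv-tests", "-v"], exit=False)

Running python3 -m unittest -v prserv.tests from within bitbake/lib should work equivalently.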