Diffstat (limited to 'scripts/lib/recipetool')
-rw-r--r--  scripts/lib/recipetool/append.py     |  14
-rw-r--r--  scripts/lib/recipetool/create.py     | 253
-rw-r--r--  scripts/lib/recipetool/create_go.py  | 677
-rw-r--r--  scripts/lib/recipetool/create_npm.py |  96
-rw-r--r--  scripts/lib/recipetool/licenses.csv  |  37
5 files changed, 109 insertions, 968 deletions
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 341e893305..041d79f162 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -101,7 +101,7 @@ def determine_file_source(targetpath, rd):
101 import oe.recipeutils 101 import oe.recipeutils
102 102
103 # See if it's in do_install for the recipe 103 # See if it's in do_install for the recipe
104 workdir = rd.getVar('WORKDIR') 104 unpackdir = rd.getVar('UNPACKDIR')
105 src_uri = rd.getVar('SRC_URI') 105 src_uri = rd.getVar('SRC_URI')
106 srcfile = '' 106 srcfile = ''
107 modpatches = [] 107 modpatches = []
@@ -113,9 +113,9 @@ def determine_file_source(targetpath, rd):
113 if not srcpath.startswith('/'): 113 if not srcpath.startswith('/'):
114 # Handle non-absolute path 114 # Handle non-absolute path
115 srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) 115 srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
116 if srcpath.startswith(workdir): 116 if srcpath.startswith(unpackdir):
117 # OK, now we have the source file name, look for it in SRC_URI 117 # OK, now we have the source file name, look for it in SRC_URI
118 workdirfile = os.path.relpath(srcpath, workdir) 118 workdirfile = os.path.relpath(srcpath, unpackdir)
119 # FIXME this is where we ought to have some code in the fetcher, because this is naive 119 # FIXME this is where we ought to have some code in the fetcher, because this is naive
120 for item in src_uri.split(): 120 for item in src_uri.split():
121 localpath = bb.fetch2.localpath(item, rd) 121 localpath = bb.fetch2.localpath(item, rd)
@@ -317,7 +317,7 @@ def appendsrc(args, files, rd, extralines=None):
317 import oe.recipeutils 317 import oe.recipeutils
318 318
319 srcdir = rd.getVar('S') 319 srcdir = rd.getVar('S')
320 workdir = rd.getVar('WORKDIR') 320 unpackdir = rd.getVar('UNPACKDIR')
321 321
322 import bb.fetch 322 import bb.fetch
323 simplified = {} 323 simplified = {}
@@ -336,10 +336,10 @@ def appendsrc(args, files, rd, extralines=None):
336 src_destdir = os.path.dirname(srcfile) 336 src_destdir = os.path.dirname(srcfile)
337 if not args.use_workdir: 337 if not args.use_workdir:
338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): 338 if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'):
339 srcdir = os.path.join(workdir, 'git') 339 srcdir = os.path.join(unpackdir, rd.getVar('BB_GIT_DEFAULT_DESTSUFFIX'))
340 if not bb.data.inherits_class('kernel-yocto', rd): 340 if not bb.data.inherits_class('kernel-yocto', rd):
341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') 341 logger.warning('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${UNPACKDIR}/${BB_GIT_DEFAULT_DESTSUFFIX}')
342 src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir) 342 src_destdir = os.path.join(os.path.relpath(srcdir, unpackdir), src_destdir)
343 src_destdir = os.path.normpath(src_destdir) 343 src_destdir = os.path.normpath(src_destdir)
344 344
345 if src_destdir and src_destdir != '.': 345 if src_destdir and src_destdir != '.':
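
The append.py changes above swap WORKDIR for UNPACKDIR when working out whether an installed file originates from a fetched source file, and when computing the prefix prepended for kernel recipes. A minimal sketch of the path test involved, not part of the patch and using made-up paths rather than values read from the recipe datastore:

    import os

    def relative_to_unpackdir(srcpath, unpackdir):
        # Mirror of the check in determine_file_source(): only paths below
        # the unpack directory can be matched against SRC_URI entries.
        if srcpath.startswith(unpackdir):
            return os.path.relpath(srcpath, unpackdir)
        return None

    print(relative_to_unpackdir("/build/work/pkg/sources-unpack/defconfig",
                                "/build/work/pkg/sources-unpack"))   # defconfig
    print(relative_to_unpackdir("/build/work/pkg/image/etc/defconfig",
                                "/build/work/pkg/sources-unpack"))   # None
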
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 8e9ff38db6..ef0ba974a9 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -18,6 +18,8 @@ from urllib.parse import urlparse, urldefrag, urlsplit
18import hashlib 18import hashlib
19import bb.fetch2 19import bb.fetch2
20logger = logging.getLogger('recipetool') 20logger = logging.getLogger('recipetool')
21from oe.license import tidy_licenses
22from oe.license_finder import find_licenses
21 23
22tinfoil = None 24tinfoil = None
23plugins = None 25plugins = None
@@ -528,7 +530,7 @@ def create_recipe(args):
528 if ftmpdir and args.keep_temp: 530 if ftmpdir and args.keep_temp:
529 logger.info('Fetch temp directory is %s' % ftmpdir) 531 logger.info('Fetch temp directory is %s' % ftmpdir)
530 532
531 dirlist = scriptutils.filter_src_subdirs(srctree) 533 dirlist = os.listdir(srctree)
532 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist)) 534 logger.debug('Directory listing (excluding filtered out):\n %s' % '\n '.join(dirlist))
533 if len(dirlist) == 1: 535 if len(dirlist) == 1:
534 singleitem = os.path.join(srctree, dirlist[0]) 536 singleitem = os.path.join(srctree, dirlist[0])
@@ -637,7 +639,6 @@ def create_recipe(args):
637 if len(splitline) > 1: 639 if len(splitline) > 1:
638 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]): 640 if splitline[0] == 'origin' and scriptutils.is_src_url(splitline[1]):
639 srcuri = reformat_git_uri(splitline[1]) + ';branch=master' 641 srcuri = reformat_git_uri(splitline[1]) + ';branch=master'
640 srcsubdir = 'git'
641 break 642 break
642 643
643 if args.src_subdir: 644 if args.src_subdir:
@@ -735,7 +736,7 @@ def create_recipe(args):
735 if srcsubdir and not args.binary: 736 if srcsubdir and not args.binary:
736 # (for binary packages we explicitly specify subdir= when fetching to 737 # (for binary packages we explicitly specify subdir= when fetching to
737 # match the default value of S, so we don't need to set it in that case) 738 # match the default value of S, so we don't need to set it in that case)
738 lines_before.append('S = "${WORKDIR}/%s"' % srcsubdir) 739 lines_before.append('S = "${UNPACKDIR}/%s"' % srcsubdir)
739 lines_before.append('') 740 lines_before.append('')
740 741
741 if pkgarch: 742 if pkgarch:
@@ -764,6 +765,7 @@ def create_recipe(args):
764 extrafiles = extravalues.pop('extrafiles', {}) 765 extrafiles = extravalues.pop('extrafiles', {})
765 extra_pn = extravalues.pop('PN', None) 766 extra_pn = extravalues.pop('PN', None)
766 extra_pv = extravalues.pop('PV', None) 767 extra_pv = extravalues.pop('PV', None)
768 run_tasks = extravalues.pop('run_tasks', "").split()
767 769
768 if extra_pv and not realpv: 770 if extra_pv and not realpv:
769 realpv = extra_pv 771 realpv = extra_pv
@@ -824,7 +826,8 @@ def create_recipe(args):
824 extraoutdir = os.path.join(os.path.dirname(outfile), pn) 826 extraoutdir = os.path.join(os.path.dirname(outfile), pn)
825 bb.utils.mkdirhier(extraoutdir) 827 bb.utils.mkdirhier(extraoutdir)
826 for destfn, extrafile in extrafiles.items(): 828 for destfn, extrafile in extrafiles.items():
827 shutil.move(extrafile, os.path.join(extraoutdir, destfn)) 829 fn = destfn.format(pn=pn, pv=realpv)
830 shutil.move(extrafile, os.path.join(extraoutdir, fn))
828 831
829 lines = lines_before 832 lines = lines_before
830 lines_before = [] 833 lines_before = []
@@ -839,7 +842,7 @@ def create_recipe(args):
839 line = line.replace(realpv, '${PV}') 842 line = line.replace(realpv, '${PV}')
840 if pn: 843 if pn:
841 line = line.replace(pn, '${BPN}') 844 line = line.replace(pn, '${BPN}')
842 if line == 'S = "${WORKDIR}/${BPN}-${PV}"': 845 if line == 'S = "${UNPACKDIR}/${BPN}-${PV}"' or 'tmp-recipetool-' in line:
843 skipblank = True 846 skipblank = True
844 continue 847 continue
845 elif line.startswith('SRC_URI = '): 848 elif line.startswith('SRC_URI = '):
@@ -917,6 +920,10 @@ def create_recipe(args):
917 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) 920 log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
918 tinfoil.modified_files() 921 tinfoil.modified_files()
919 922
923 for task in run_tasks:
924 logger.info("Running task %s" % task)
925 tinfoil.build_file_sync(outfile, task)
926
920 if tempsrc: 927 if tempsrc:
921 if args.keep_temp: 928 if args.keep_temp:
922 logger.info('Preserving temporary directory %s' % tempsrc) 929 logger.info('Preserving temporary directory %s' % tempsrc)
@@ -944,23 +951,13 @@ def fixup_license(value):
944 return '(' + value + ')' 951 return '(' + value + ')'
945 return value 952 return value
946 953
947def tidy_licenses(value):
948 """Flat, split and sort licenses"""
949 from oe.license import flattened_licenses
950 def _choose(a, b):
951 str_a, str_b = sorted((" & ".join(a), " & ".join(b)), key=str.casefold)
952 return ["(%s | %s)" % (str_a, str_b)]
953 if not isinstance(value, str):
954 value = " & ".join(value)
955 return sorted(list(set(flattened_licenses(value, _choose))), key=str.casefold)
956
957def handle_license_vars(srctree, lines_before, handled, extravalues, d): 954def handle_license_vars(srctree, lines_before, handled, extravalues, d):
958 lichandled = [x for x in handled if x[0] == 'license'] 955 lichandled = [x for x in handled if x[0] == 'license']
959 if lichandled: 956 if lichandled:
960 # Someone else has already handled the license vars, just return their value 957 # Someone else has already handled the license vars, just return their value
961 return lichandled[0][1] 958 return lichandled[0][1]
962 959
963 licvalues = guess_license(srctree, d) 960 licvalues = find_licenses(srctree, d)
964 licenses = [] 961 licenses = []
965 lic_files_chksum = [] 962 lic_files_chksum = []
966 lic_unknown = [] 963 lic_unknown = []
@@ -1040,222 +1037,9 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d):
1040 handled.append(('license', licvalues)) 1037 handled.append(('license', licvalues))
1041 return licvalues 1038 return licvalues
1042 1039
1043def get_license_md5sums(d, static_only=False, linenumbers=False):
1044 import bb.utils
1045 import csv
1046 md5sums = {}
1047 if not static_only and not linenumbers:
1048 # Gather md5sums of license files in common license dir
1049 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1050 for fn in os.listdir(commonlicdir):
1051 md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn))
1052 md5sums[md5value] = fn
1053
1054 # The following were extracted from common values in various recipes
1055 # (double checking the license against the license file itself, not just
1056 # the LICENSE value in the recipe)
1057
1058 # Read license md5sums from csv file
1059 scripts_path = os.path.dirname(os.path.realpath(__file__))
1060 for path in (d.getVar('BBPATH').split(':')
1061 + [os.path.join(scripts_path, '..', '..')]):
1062 csv_path = os.path.join(path, 'lib', 'recipetool', 'licenses.csv')
1063 if os.path.isfile(csv_path):
1064 with open(csv_path, newline='') as csv_file:
1065 fieldnames = ['md5sum', 'license', 'beginline', 'endline', 'md5']
1066 reader = csv.DictReader(csv_file, delimiter=',', fieldnames=fieldnames)
1067 for row in reader:
1068 if linenumbers:
1069 md5sums[row['md5sum']] = (
1070 row['license'], row['beginline'], row['endline'], row['md5'])
1071 else:
1072 md5sums[row['md5sum']] = row['license']
1073
1074 return md5sums
1075
1076def crunch_known_licenses(d):
1077 '''
1078 Calculate the MD5 checksums for the crunched versions of all common
1079 licenses. Also add additional known checksums.
1080 '''
1081
1082 crunched_md5sums = {}
1083
1084 # common licenses
1085 crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
1086 crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
1087 crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'
1088
1089 # The following two were gleaned from the "forever" npm package
1090 crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
1091 # https://github.com/waffle-gl/waffle/blob/master/LICENSE.txt
1092 crunched_md5sums['50fab24ce589d69af8964fdbfe414c60'] = 'BSD-2-Clause'
1093 # https://github.com/spigwitmer/fakeds1963s/blob/master/LICENSE
1094 crunched_md5sums['88a4355858a1433fea99fae34a44da88'] = 'GPL-2.0-only'
1095 # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
1096 crunched_md5sums['063b5c3ebb5f3aa4c85a2ed18a31fbe7'] = 'GPL-2.0-only'
1097 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv2.1
1098 crunched_md5sums['7f5202f4d44ed15dcd4915f5210417d8'] = 'LGPL-2.1-only'
1099 # unixODBC-2.3.4 COPYING
1100 crunched_md5sums['3debde09238a8c8e1f6a847e1ec9055b'] = 'LGPL-2.1-only'
1101 # https://github.com/FFmpeg/FFmpeg/blob/master/COPYING.LGPLv3
1102 crunched_md5sums['f90c613c51aa35da4d79dd55fc724ceb'] = 'LGPL-3.0-only'
1103 # https://raw.githubusercontent.com/eclipse/mosquitto/v1.4.14/epl-v10
1104 crunched_md5sums['efe2cb9a35826992b9df68224e3c2628'] = 'EPL-1.0'
1105
1106 # https://raw.githubusercontent.com/jquery/esprima/3.1.3/LICENSE.BSD
1107 crunched_md5sums['80fa7b56a28e8c902e6af194003220a5'] = 'BSD-2-Clause'
1108 # https://raw.githubusercontent.com/npm/npm-install-checks/master/LICENSE
1109 crunched_md5sums['e659f77bfd9002659e112d0d3d59b2c1'] = 'BSD-2-Clause'
1110 # https://raw.githubusercontent.com/silverwind/default-gateway/4.2.0/LICENSE
1111 crunched_md5sums['4c641f2d995c47f5cb08bdb4b5b6ea05'] = 'BSD-2-Clause'
1112 # https://raw.githubusercontent.com/tad-lispy/node-damerau-levenshtein/v1.0.5/LICENSE
1113 crunched_md5sums['2b8c039b2b9a25f0feb4410c4542d346'] = 'BSD-2-Clause'
1114 # https://raw.githubusercontent.com/terser/terser/v3.17.0/LICENSE
1115 crunched_md5sums['8bd23871802951c9ad63855151204c2c'] = 'BSD-2-Clause'
1116 # https://raw.githubusercontent.com/alexei/sprintf.js/1.0.3/LICENSE
1117 crunched_md5sums['008c22318c8ea65928bf730ddd0273e3'] = 'BSD-3-Clause'
1118 # https://raw.githubusercontent.com/Caligatio/jsSHA/v3.2.0/LICENSE
1119 crunched_md5sums['0e46634a01bfef056892949acaea85b1'] = 'BSD-3-Clause'
1120 # https://raw.githubusercontent.com/d3/d3-path/v1.0.9/LICENSE
1121 crunched_md5sums['b5f72aef53d3b2b432702c30b0215666'] = 'BSD-3-Clause'
1122 # https://raw.githubusercontent.com/feross/ieee754/v1.1.13/LICENSE
1123 crunched_md5sums['a39327c997c20da0937955192d86232d'] = 'BSD-3-Clause'
1124 # https://raw.githubusercontent.com/joyent/node-extsprintf/v1.3.0/LICENSE
1125 crunched_md5sums['721f23a96ff4161ca3a5f071bbe18108'] = 'MIT'
1126 # https://raw.githubusercontent.com/pvorb/clone/v0.2.0/LICENSE
1127 crunched_md5sums['b376d29a53c9573006b9970709231431'] = 'MIT'
1128 # https://raw.githubusercontent.com/andris9/encoding/v0.1.12/LICENSE
1129 crunched_md5sums['85d8a977ee9d7c5ab4ac03c9b95431c4'] = 'MIT-0'
1130 # https://raw.githubusercontent.com/faye/websocket-driver-node/0.7.3/LICENSE.md
1131 crunched_md5sums['b66384e7137e41a9b1904ef4d39703b6'] = 'Apache-2.0'
1132 # https://raw.githubusercontent.com/less/less.js/v4.1.1/LICENSE
1133 crunched_md5sums['b27575459e02221ccef97ec0bfd457ae'] = 'Apache-2.0'
1134 # https://raw.githubusercontent.com/microsoft/TypeScript/v3.5.3/LICENSE.txt
1135 crunched_md5sums['a54a1a6a39e7f9dbb4a23a42f5c7fd1c'] = 'Apache-2.0'
1136 # https://raw.githubusercontent.com/request/request/v2.87.0/LICENSE
1137 crunched_md5sums['1034431802e57486b393d00c5d262b8a'] = 'Apache-2.0'
1138 # https://raw.githubusercontent.com/dchest/tweetnacl-js/v0.14.5/LICENSE
1139 crunched_md5sums['75605e6bdd564791ab698fca65c94a4f'] = 'Unlicense'
1140 # https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
1141 crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
1142
1143 commonlicdir = d.getVar('COMMON_LICENSE_DIR')
1144 for fn in sorted(os.listdir(commonlicdir)):
1145 md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
1146 if md5value not in crunched_md5sums:
1147 crunched_md5sums[md5value] = fn
1148 elif fn != crunched_md5sums[md5value]:
1149 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
1150 else:
1151 bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))
1152
1153 return crunched_md5sums
1154
1155def crunch_license(licfile):
1156 '''
1157 Remove non-material text from a license file and then calculate its
1158 md5sum. This works well for licenses that contain a copyright statement,
1159 but is also a useful way to handle people's insistence upon reformatting
1160 the license text slightly (with no material difference to the text of the
1161 license).
1162 '''
1163
1164 import oe.utils
1165
1166 # Note: these are carefully constructed!
1167 license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
1168 license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
1169 copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
1170 disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
1171 email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
1172 header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
1173 tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
1174 url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
1175
1176 lictext = []
1177 with open(licfile, 'r', errors='surrogateescape') as f:
1178 for line in f:
1179 # Drop opening statements
1180 if copyright_re.match(line):
1181 continue
1182 elif disclaimer_re.match(line):
1183 continue
1184 elif email_re.match(line):
1185 continue
1186 elif header_re.match(line):
1187 continue
1188 elif tag_re.match(line):
1189 continue
1190 elif url_re.match(line):
1191 continue
1192 elif license_title_re.match(line):
1193 continue
1194 elif license_statement_re.match(line):
1195 continue
1196 # Strip comment symbols
1197 line = line.replace('*', '') \
1198 .replace('#', '')
1199 # Unify spelling
1200 line = line.replace('sub-license', 'sublicense')
1201 # Squash spaces
1202 line = oe.utils.squashspaces(line.strip())
1203 # Replace smart quotes, double quotes and backticks with single quotes
1204 line = line.replace(u"\u2018", "'").replace(u"\u2019", "'").replace(u"\u201c","'").replace(u"\u201d", "'").replace('"', '\'').replace('`', '\'')
1205 # Unify brackets
1206 line = line.replace("{", "[").replace("}", "]")
1207 if line:
1208 lictext.append(line)
1209
1210 m = hashlib.md5()
1211 try:
1212 m.update(' '.join(lictext).encode('utf-8'))
1213 md5val = m.hexdigest()
1214 except UnicodeEncodeError:
1215 md5val = None
1216 lictext = ''
1217 return md5val, lictext
1218
1219def guess_license(srctree, d):
1220 import bb
1221 md5sums = get_license_md5sums(d)
1222
1223 crunched_md5sums = crunch_known_licenses(d)
1224
1225 licenses = []
1226 licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
1227 skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
1228 licfiles = []
1229 for root, dirs, files in os.walk(srctree):
1230 for fn in files:
1231 if fn.endswith(skip_extensions):
1232 continue
1233 for spec in licspecs:
1234 if fnmatch.fnmatch(fn, spec):
1235 fullpath = os.path.join(root, fn)
1236 if not fullpath in licfiles:
1237 licfiles.append(fullpath)
1238 for licfile in sorted(licfiles):
1239 md5value = bb.utils.md5_file(licfile)
1240 license = md5sums.get(md5value, None)
1241 if not license:
1242 crunched_md5, lictext = crunch_license(licfile)
1243 license = crunched_md5sums.get(crunched_md5, None)
1244 if lictext and not license:
1245 license = 'Unknown'
1246 logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
1247 "and replace `Unknown` with the license:\n" \
1248 "%s,Unknown" % (os.path.relpath(licfile, srctree), md5value))
1249 if license:
1250 licenses.append((license, os.path.relpath(licfile, srctree), md5value))
1251
1252 # FIXME should we grab at least one source file with a license header and add that too?
1253
1254 return licenses
1255
1256def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'): 1040def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn='${PN}'):
1257 """ 1041 """
1258 Given a list of (license, path, md5sum) as returned by guess_license(), 1042 Given a list of (license, path, md5sum) as returned by match_licenses(),
1259 a dict of package name to path mappings, write out a set of 1043 a dict of package name to path mappings, write out a set of
1260 package-specific LICENSE values. 1044 package-specific LICENSE values.
1261 """ 1045 """
@@ -1284,6 +1068,14 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn
1284 outlicenses[pkgname] = licenses 1068 outlicenses[pkgname] = licenses
1285 return outlicenses 1069 return outlicenses
1286 1070
1071def generate_common_licenses_chksums(common_licenses, d):
1072 lic_files_chksums = []
1073 for license in tidy_licenses(common_licenses):
1074 licfile = '${COMMON_LICENSE_DIR}/' + license
1075 md5value = bb.utils.md5_file(d.expand(licfile))
1076 lic_files_chksums.append('file://%s;md5=%s' % (licfile, md5value))
1077 return lic_files_chksums
1078
1287def read_pkgconfig_provides(d): 1079def read_pkgconfig_provides(d):
1288 pkgdatadir = d.getVar('PKGDATA_DIR') 1080 pkgdatadir = d.getVar('PKGDATA_DIR')
1289 pkgmap = {} 1081 pkgmap = {}
@@ -1418,4 +1210,3 @@ def register_commands(subparsers):
1418 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) 1210 parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
1419 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).') 1211 parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
1420 parser_create.set_defaults(func=create_recipe) 1212 parser_create.set_defaults(func=create_recipe)
1421
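
Among the create.py changes above, extra files registered by a handler can now carry templated names that are expanded with the recipe's PN and PV before being moved next to the generated recipe (the destfn.format(pn=pn, pv=realpv) hunk). A minimal sketch of that expansion, with made-up values; the template names match those used by the new Go handler later in this patch:

    pn, realpv = "hello", "1.0"   # hypothetical recipe name and version
    for destfn in ("{pn}-licenses.inc", "{pn}-go-mods.inc"):
        print(destfn.format(pn=pn, pv=realpv))
    # hello-licenses.inc
    # hello-go-mods.inc
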
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
index a85a2f2786..4b1fa39d13 100644
--- a/scripts/lib/recipetool/create_go.py
+++ b/scripts/lib/recipetool/create_go.py
@@ -10,13 +10,7 @@
10# 10#
11 11
12 12
13from collections import namedtuple
14from enum import Enum
15from html.parser import HTMLParser
16from recipetool.create import RecipeHandler, handle_license_vars 13from recipetool.create import RecipeHandler, handle_license_vars
17from recipetool.create import guess_license, tidy_licenses, fixup_license
18from recipetool.create import determine_from_url
19from urllib.error import URLError, HTTPError
20 14
21import bb.utils 15import bb.utils
22import json 16import json
@@ -25,33 +19,20 @@ import os
25import re 19import re
26import subprocess 20import subprocess
27import sys 21import sys
28import shutil
29import tempfile 22import tempfile
30import urllib.parse
31import urllib.request
32 23
33 24
34GoImport = namedtuple('GoImport', 'root vcs url suffix')
35logger = logging.getLogger('recipetool') 25logger = logging.getLogger('recipetool')
36CodeRepo = namedtuple(
37 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
38 26
39tinfoil = None 27tinfoil = None
40 28
41# Regular expression to parse pseudo semantic version
42# see https://go.dev/ref/mod#pseudo-versions
43re_pseudo_semver = re.compile(
44 r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
45# Regular expression to parse semantic version
46re_semver = re.compile(
47 r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
48
49 29
50def tinfoil_init(instance): 30def tinfoil_init(instance):
51 global tinfoil 31 global tinfoil
52 tinfoil = instance 32 tinfoil = instance
53 33
54 34
35
55class GoRecipeHandler(RecipeHandler): 36class GoRecipeHandler(RecipeHandler):
56 """Class to handle the go recipe creation""" 37 """Class to handle the go recipe creation"""
57 38
@@ -83,578 +64,6 @@ class GoRecipeHandler(RecipeHandler):
83 64
84 return bindir 65 return bindir
85 66
86 def __resolve_repository_static(self, modulepath):
87 """Resolve the repository in a static manner
88
89 The method is based on the go implementation of
90 `repoRootFromVCSPaths` in
91 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
92 """
93
94 url = urllib.parse.urlparse("https://" + modulepath)
95 req = urllib.request.Request(url.geturl())
96
97 try:
98 resp = urllib.request.urlopen(req)
99 # Some modulepath are just redirects to github (or some other vcs
100 # hoster). Therefore, we check if this modulepath redirects to
101 # somewhere else
102 if resp.geturl() != url.geturl():
103 bb.debug(1, "%s is redirectred to %s" %
104 (url.geturl(), resp.geturl()))
105 url = urllib.parse.urlparse(resp.geturl())
106 modulepath = url.netloc + url.path
107
108 except URLError as url_err:
109 # This is probably because the module path
110 # contains the subdir and major path. Thus,
111 # we ignore this error for now
112 logger.debug(
113 1, "Failed to fetch page from [%s]: %s" % (url, str(url_err)))
114
115 host, _, _ = modulepath.partition('/')
116
117 class vcs(Enum):
118 pathprefix = "pathprefix"
119 regexp = "regexp"
120 type = "type"
121 repo = "repo"
122 check = "check"
123 schemelessRepo = "schemelessRepo"
124
125 # GitHub
126 vcsGitHub = {}
127 vcsGitHub[vcs.pathprefix] = "github.com"
128 vcsGitHub[vcs.regexp] = re.compile(
129 r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
130 vcsGitHub[vcs.type] = "git"
131 vcsGitHub[vcs.repo] = "https://\\g<root>"
132
133 # Bitbucket
134 vcsBitbucket = {}
135 vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
136 vcsBitbucket[vcs.regexp] = re.compile(
137 r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
138 vcsBitbucket[vcs.type] = "git"
139 vcsBitbucket[vcs.repo] = "https://\\g<root>"
140
141 # IBM DevOps Services (JazzHub)
142 vcsIBMDevOps = {}
143 vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
144 vcsIBMDevOps[vcs.regexp] = re.compile(
145 r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
146 vcsIBMDevOps[vcs.type] = "git"
147 vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
148
149 # Git at Apache
150 vcsApacheGit = {}
151 vcsApacheGit[vcs.pathprefix] = "git.apache.org"
152 vcsApacheGit[vcs.regexp] = re.compile(
153 r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
154 vcsApacheGit[vcs.type] = "git"
155 vcsApacheGit[vcs.repo] = "https://\\g<root>"
156
157 # Git at OpenStack
158 vcsOpenStackGit = {}
159 vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
160 vcsOpenStackGit[vcs.regexp] = re.compile(
161 r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
162 vcsOpenStackGit[vcs.type] = "git"
163 vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
164
165 # chiselapp.com for fossil
166 vcsChiselapp = {}
167 vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
168 vcsChiselapp[vcs.regexp] = re.compile(
169 r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
170 vcsChiselapp[vcs.type] = "fossil"
171 vcsChiselapp[vcs.repo] = "https://\\g<root>"
172
173 # General syntax for any server.
174 # Must be last.
175 vcsGeneralServer = {}
176 vcsGeneralServer[vcs.regexp] = re.compile(
177 "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
178 vcsGeneralServer[vcs.schemelessRepo] = True
179
180 vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
181 vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
182 vcsGeneralServer]
183
184 if modulepath.startswith("example.net") or modulepath == "rsc.io":
185 logger.warning("Suspicious module path %s" % modulepath)
186 return None
187 if modulepath.startswith("http:") or modulepath.startswith("https:"):
188 logger.warning("Import path should not start with %s %s" %
189 ("http", "https"))
190 return None
191
192 rootpath = None
193 vcstype = None
194 repourl = None
195 suffix = None
196
197 for srv in vcsPaths:
198 m = srv[vcs.regexp].match(modulepath)
199 if vcs.pathprefix in srv:
200 if host == srv[vcs.pathprefix]:
201 rootpath = m.group('root')
202 vcstype = srv[vcs.type]
203 repourl = m.expand(srv[vcs.repo])
204 suffix = m.group('suffix')
205 break
206 elif m and srv[vcs.schemelessRepo]:
207 rootpath = m.group('root')
208 vcstype = m[vcs.type]
209 repourl = m[vcs.repo]
210 suffix = m.group('suffix')
211 break
212
213 return GoImport(rootpath, vcstype, repourl, suffix)
214
215 def __resolve_repository_dynamic(self, modulepath):
216 """Resolve the repository root in a dynamic manner.
217
218 The method is based on the go implementation of
219 `repoRootForImportDynamic` in
220 https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
221 """
222 url = urllib.parse.urlparse("https://" + modulepath)
223
224 class GoImportHTMLParser(HTMLParser):
225
226 def __init__(self):
227 super().__init__()
228 self.__srv = {}
229
230 def handle_starttag(self, tag, attrs):
231 if tag == 'meta' and list(
232 filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
233 content = list(
234 filter(lambda a: (a[0] == 'content'), attrs))
235 if content:
236 srv = content[0][1].split()
237 self.__srv[srv[0]] = srv
238
239 def go_import(self, modulepath):
240 if modulepath in self.__srv:
241 srv = self.__srv[modulepath]
242 return GoImport(srv[0], srv[1], srv[2], None)
243 return None
244
245 url = url.geturl() + "?go-get=1"
246 req = urllib.request.Request(url)
247
248 try:
249 body = urllib.request.urlopen(req).read()
250 except HTTPError as http_err:
251 logger.warning(
252 "Unclean status when fetching page from [%s]: %s", url, str(http_err))
253 body = http_err.fp.read()
254 except URLError as url_err:
255 logger.warning(
256 "Failed to fetch page from [%s]: %s", url, str(url_err))
257 return None
258
259 parser = GoImportHTMLParser()
260 parser.feed(body.decode('utf-8'))
261 parser.close()
262
263 return parser.go_import(modulepath)
264
265 def __resolve_from_golang_proxy(self, modulepath, version):
266 """
267 Resolves repository data from golang proxy
268 """
269 url = urllib.parse.urlparse("https://proxy.golang.org/"
270 + modulepath
271 + "/@v/"
272 + version
273 + ".info")
274
275 # Transform url to lower case, golang proxy doesn't like mixed case
276 req = urllib.request.Request(url.geturl().lower())
277
278 try:
279 resp = urllib.request.urlopen(req)
280 except URLError as url_err:
281 logger.warning(
282 "Failed to fetch page from [%s]: %s", url, str(url_err))
283 return None
284
285 golang_proxy_res = resp.read().decode('utf-8')
286 modinfo = json.loads(golang_proxy_res)
287
288 if modinfo and 'Origin' in modinfo:
289 origin = modinfo['Origin']
290 _root_url = urllib.parse.urlparse(origin['URL'])
291
292 # We normalize the repo URL since we don't want the scheme in it
293 _subdir = origin['Subdir'] if 'Subdir' in origin else None
294 _root, _, _ = self.__split_path_version(modulepath)
295 if _subdir:
296 _root = _root[:-len(_subdir)].strip('/')
297
298 _commit = origin['Hash']
299 _vcs = origin['VCS']
300 return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
301
302 return None
303
304 def __resolve_repository(self, modulepath):
305 """
306 Resolves src uri from go module-path
307 """
308 repodata = self.__resolve_repository_static(modulepath)
309 if not repodata or not repodata.url:
310 repodata = self.__resolve_repository_dynamic(modulepath)
311 if not repodata or not repodata.url:
312 logger.error(
313 "Could not resolve repository for module path '%s'" % modulepath)
314 # There is no way to recover from this
315 sys.exit(14)
316 if repodata:
317 logger.debug(1, "Resolved download path for import '%s' => %s" % (
318 modulepath, repodata.url))
319 return repodata
320
321 def __split_path_version(self, path):
322 i = len(path)
323 dot = False
324 for j in range(i, 0, -1):
325 if path[j - 1] < '0' or path[j - 1] > '9':
326 break
327 if path[j - 1] == '.':
328 dot = True
329 break
330 i = j - 1
331
332 if i <= 1 or i == len(
333 path) or path[i - 1] != 'v' or path[i - 2] != '/':
334 return path, "", True
335
336 prefix, pathMajor = path[:i - 2], path[i - 2:]
337 if dot or len(
338 pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
339 return path, "", False
340
341 return prefix, pathMajor, True
342
343 def __get_path_major(self, pathMajor):
344 if not pathMajor:
345 return ""
346
347 if pathMajor[0] != '/' and pathMajor[0] != '.':
348 logger.error(
349 "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
350
351 if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
352 pathMajor = pathMajor[:len("-unstable") - 2]
353
354 return pathMajor[1:]
355
356 def __build_coderepo(self, repo, path):
357 codedir = ""
358 pathprefix, pathMajor, _ = self.__split_path_version(path)
359 if repo.root == path:
360 pathprefix = path
361 elif path.startswith(repo.root):
362 codedir = pathprefix[len(repo.root):].strip('/')
363
364 pseudoMajor = self.__get_path_major(pathMajor)
365
366 logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
367 repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
368
369 return CodeRepo(path, repo.root, codedir,
370 pathMajor, pathprefix, pseudoMajor)
371
372 def __resolve_version(self, repo, path, version):
373 hash = None
374 coderoot = self.__build_coderepo(repo, path)
375
376 def vcs_fetch_all():
377 tmpdir = tempfile.mkdtemp()
378 clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
379 bb.process.run(clone_cmd)
380 log_cmd = "git log --all --pretty='%H %d' --decorate=short"
381 output, _ = bb.process.run(
382 log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
383 bb.utils.prunedir(tmpdir)
384 return output.strip().split('\n')
385
386 def vcs_fetch_remote(tag):
387 # add * to grab ^{}
388 refs = {}
389 ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
390 repo.url, tag)
391 output, _ = bb.process.run(ls_remote_cmd)
392 output = output.strip().split('\n')
393 for line in output:
394 f = line.split(maxsplit=1)
395 if len(f) != 2:
396 continue
397
398 for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
399 if f[1].startswith(prefix):
400 refs[f[1][len(prefix):]] = f[0]
401
402 for key, hash in refs.items():
403 if key.endswith(r"^{}"):
404 refs[key.strip(r"^{}")] = hash
405
406 return refs[tag]
407
408 m_pseudo_semver = re_pseudo_semver.match(version)
409
410 if m_pseudo_semver:
411 remote_refs = vcs_fetch_all()
412 short_commit = m_pseudo_semver.group('commithash')
413 for l in remote_refs:
414 r = l.split(maxsplit=1)
415 sha1 = r[0] if len(r) else None
416 if not sha1:
417 logger.error(
418 "Ups: could not resolve abbref commit for %s" % short_commit)
419
420 elif sha1.startswith(short_commit):
421 hash = sha1
422 break
423 else:
424 m_semver = re_semver.match(version)
425 if m_semver:
426
427 def get_sha1_remote(re):
428 rsha1 = None
429 for line in remote_refs:
430 # Split lines of the following format:
431 # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
432 lineparts = line.split(maxsplit=1)
433 sha1 = lineparts[0] if len(lineparts) else None
434 refstring = lineparts[1] if len(
435 lineparts) == 2 else None
436 if refstring:
437 # Normalize tag string and split in case of multiple
438 # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
439 refs = refstring.strip('(), ').split(',')
440 for ref in refs:
441 if re.match(ref.strip()):
442 rsha1 = sha1
443 return rsha1
444
445 semver = "v" + m_semver.group('major') + "."\
446 + m_semver.group('minor') + "."\
447 + m_semver.group('patch') \
448 + (("-" + m_semver.group('prerelease'))
449 if m_semver.group('prerelease') else "")
450
451 tag = os.path.join(
452 coderoot.codeDir, semver) if coderoot.codeDir else semver
453
454 # probe tag using 'ls-remote', which is faster than fetching
455 # complete history
456 hash = vcs_fetch_remote(tag)
457 if not hash:
458 # backup: fetch complete history
459 remote_refs = vcs_fetch_all()
460 hash = get_sha1_remote(
461 re.compile(fr"(tag:|HEAD ->) ({tag})"))
462
463 logger.debug(
464 "Resolving commit for tag '%s' -> '%s'", tag, hash)
465 return hash
466
467 def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
468 """Generate SRC_URI functions for go imports"""
469
470 logger.info("Resolving repository for module %s", path)
471 # First try to resolve repo and commit from golang proxy
472 # Most info is already there and we don't have to go through the
473 # repository or even perform the version resolve magic
474 golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
475 if golang_proxy_info:
476 repo = golang_proxy_info[0]
477 commit = golang_proxy_info[1]
478 else:
479 # Fallback
480 # Resolve repository by 'hand'
481 repo = self.__resolve_repository(path)
482 commit = self.__resolve_version(repo, path, version)
483
484 url = urllib.parse.urlparse(repo.url)
485 repo_url = url.netloc + url.path
486
487 coderoot = self.__build_coderepo(repo, path)
488
489 inline_fcn = "${@go_src_uri("
490 inline_fcn += f"'{repo_url}','{version}'"
491 if repo_url != path:
492 inline_fcn += f",path='{path}'"
493 if coderoot.codeDir:
494 inline_fcn += f",subdir='{coderoot.codeDir}'"
495 if repo.vcs != 'git':
496 inline_fcn += f",vcs='{repo.vcs}'"
497 if replaces:
498 inline_fcn += f",replaces='{replaces}'"
499 if coderoot.pathMajor:
500 inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
501 inline_fcn += ")}"
502
503 return inline_fcn, commit
504
505 def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
506
507 import re
508 src_uris = []
509 src_revs = []
510
511 def generate_src_rev(path, version, commithash):
512 src_rev = f"# {path}@{version} => {commithash}\n"
513 # Ups...maybe someone manipulated the source repository and the
514 # version or commit could not be resolved. This is a sign of
515 # a) the supply chain was manipulated (bad)
516 # b) the implementation for the version resolving didn't work
517 # anymore (less bad)
518 if not commithash:
519 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
520 src_rev += f"#!!! Could not resolve version !!!\n"
521 src_rev += f"#!!! Possible supply chain attack !!!\n"
522 src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
523 src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
524
525 return src_rev
526
527 # we first go over replacement list, because we are essentialy
528 # interested only in the replaced path
529 if go_mod['Replace']:
530 for replacement in go_mod['Replace']:
531 oldpath = replacement['Old']['Path']
532 path = replacement['New']['Path']
533 version = ''
534 if 'Version' in replacement['New']:
535 version = replacement['New']['Version']
536
537 if os.path.exists(os.path.join(srctree, path)):
538 # the module refers to the local path, remove it from requirement list
539 # because it's a local module
540 go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
541 else:
542 # Replace the path and the version, so we don't iterate replacement list anymore
543 for require in go_mod['Require']:
544 if require['Path'] == oldpath:
545 require.update({'Path': path, 'Version': version})
546 break
547
548 for require in go_mod['Require']:
549 path = require['Path']
550 version = require['Version']
551
552 inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
553 path, version)
554 src_uris.append(inline_fcn)
555 src_revs.append(generate_src_rev(path, version, commithash))
556
557 # strip version part from module URL /vXX
558 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
559 pn, _ = determine_from_url(baseurl)
560 go_mods_basename = "%s-modules.inc" % pn
561
562 go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
563 with open(go_mods_filename, "w") as f:
564 # We introduce this indirection to make the tests a little easier
565 f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
566 f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
567 for uri in src_uris:
568 f.write(" " + uri + " \\\n")
569 f.write("\"\n\n")
570 for rev in src_revs:
571 f.write(rev + "\n")
572
573 extravalues['extrafiles'][go_mods_basename] = go_mods_filename
574
575 def __go_run_cmd(self, cmd, cwd, d):
576 return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
577 shell=True, cwd=cwd)
578
579 def __go_native_version(self, d):
580 stdout, _ = self.__go_run_cmd("go version", None, d)
581 m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
582 major = int(m.group(2))
583 minor = int(m.group(3))
584 patch = int(m.group(4))
585
586 return major, minor, patch
587
588 def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
589
590 patchfilename = "go.mod.patch"
591 go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
592 d)
593 self.__go_run_cmd("go mod tidy -go=%d.%d" %
594 (go_native_version_major, go_native_version_minor), srctree, d)
595 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
596
597 # Create patch in order to upgrade go version
598 self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
599 # Restore original state
600 self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
601
602 go_mod = json.loads(stdout)
603 tmpfile = os.path.join(localfilesdir, patchfilename)
604 shutil.move(os.path.join(srctree, patchfilename), tmpfile)
605
606 extravalues['extrafiles'][patchfilename] = tmpfile
607
608 return go_mod, patchfilename
609
610 def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
611 # Perform vendoring to retrieve the correct modules.txt
612 tmp_vendor_dir = tempfile.mkdtemp()
613
614 # -v causes to go to print modules.txt to stderr
615 _, stderr = self.__go_run_cmd(
616 "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
617
618 modules_txt_basename = "modules.txt"
619 modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
620 with open(modules_txt_filename, "w") as f:
621 f.write(stderr)
622
623 extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
624
625 licenses = []
626 lic_files_chksum = []
627 licvalues = guess_license(tmp_vendor_dir, d)
628 shutil.rmtree(tmp_vendor_dir)
629
630 if licvalues:
631 for licvalue in licvalues:
632 license = licvalue[0]
633 lics = tidy_licenses(fixup_license(license))
634 lics = [lic for lic in lics if lic not in licenses]
635 if len(lics):
636 licenses.extend(lics)
637 lic_files_chksum.append(
638 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
639
640 # strip version part from module URL /vXX
641 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
642 pn, _ = determine_from_url(baseurl)
643 licenses_basename = "%s-licenses.inc" % pn
644
645 licenses_filename = os.path.join(localfilesdir, licenses_basename)
646 with open(licenses_filename, "w") as f:
647 f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
648 ' & '.join(sorted(licenses, key=str.casefold)))
649 # We introduce this indirection to make the tests a little easier
650 f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
651 f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
652 for lic in lic_files_chksum:
653 f.write(" " + lic + " \\\n")
654 f.write("\"\n")
655
656 extravalues['extrafiles'][licenses_basename] = licenses_filename
657
658 def process(self, srctree, classes, lines_before, 67 def process(self, srctree, classes, lines_before,
659 lines_after, handled, extravalues): 68 lines_after, handled, extravalues):
660 69
@@ -665,63 +74,52 @@ class GoRecipeHandler(RecipeHandler):
665 if not files: 74 if not files:
666 return False 75 return False
667 76
668 d = bb.data.createCopy(tinfoil.config_data)
669 go_bindir = self.__ensure_go() 77 go_bindir = self.__ensure_go()
670 if not go_bindir: 78 if not go_bindir:
671 sys.exit(14) 79 sys.exit(14)
672 80
673 d.prependVar('PATH', '%s:' % go_bindir)
674 handled.append('buildsystem') 81 handled.append('buildsystem')
675 classes.append("go-vendor") 82 classes.append("go-mod")
676 83
677 stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d) 84 # Use go-mod-update-modules to set the full SRC_URI and LICENSE
85 classes.append("go-mod-update-modules")
86 extravalues["run_tasks"] = "update_modules"
678 87
679 go_mod = json.loads(stdout) 88 with tempfile.TemporaryDirectory(prefix="go-mod-") as tmp_mod_dir:
680 go_import = go_mod['Module']['Path'] 89 env = dict(os.environ)
681 go_version_match = re.match("([0-9]+).([0-9]+)", go_mod['Go']) 90 env["PATH"] += f":{go_bindir}"
682 go_version_major = int(go_version_match.group(1)) 91 env['GOMODCACHE'] = tmp_mod_dir
683 go_version_minor = int(go_version_match.group(2))
684 src_uris = []
685 92
686 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-') 93 stdout = subprocess.check_output(["go", "mod", "edit", "-json"], cwd=srctree, env=env, text=True)
687 extravalues.setdefault('extrafiles', {}) 94 go_mod = json.loads(stdout)
95 go_import = re.sub(r'/v([0-9]+)$', '', go_mod['Module']['Path'])
688 96
689 # Use an explicit name determined from the module name because it 97 localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
690 # might differ from the actual URL for replaced modules 98 extravalues.setdefault('extrafiles', {})
691 # strip version part from module URL /vXX
692 baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
693 pn, _ = determine_from_url(baseurl)
694 99
695 # go.mod files with version < 1.17 may not include all indirect 100 # Write the stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc files
696 # dependencies. Thus, we have to upgrade the go version. 101 basename = "{pn}-licenses.inc"
697 if go_version_major == 1 and go_version_minor < 17: 102 filename = os.path.join(localfilesdir, basename)
698 logger.warning( 103 with open(filename, "w") as f:
699 "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.") 104 f.write("# FROM RECIPETOOL\n")
700 go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir, 105 extravalues['extrafiles'][f"../{basename}"] = filename
701 extravalues, d)
702 src_uris.append(
703 "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
704 106
705 # Check whether the module is vendored. If so, we have nothing to do. 107 basename = "{pn}-go-mods.inc"
706 # Otherwise we gather all dependencies and add them to the recipe 108 filename = os.path.join(localfilesdir, basename)
707 if not os.path.exists(os.path.join(srctree, "vendor")): 109 with open(filename, "w") as f:
110 f.write("# FROM RECIPETOOL\n")
111 extravalues['extrafiles'][f"../{basename}"] = filename
708 112
709 # Write additional $BPN-modules.inc file 113 # Do generic license handling
710 self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d) 114 d = bb.data.createCopy(tinfoil.config_data)
711 lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"") 115 handle_license_vars(srctree, lines_before, handled, extravalues, d)
712 lines_before.append("require %s-licenses.inc" % (pn)) 116 self.__rewrite_lic_vars(lines_before)
713 117
714 self.__rewrite_src_uri(lines_before, ["file://modules.txt"]) 118 self.__rewrite_src_uri(lines_before)
715 119
716 self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d) 120 lines_before.append('require ${BPN}-licenses.inc')
717 lines_before.append("require %s-modules.inc" % (pn)) 121 lines_before.append('require ${BPN}-go-mods.inc')
718 122 lines_before.append(f'GO_IMPORT = "{go_import}"')
719 # Do generic license handling
720 handle_license_vars(srctree, lines_before, handled, extravalues, d)
721 self.__rewrite_lic_uri(lines_before)
722
723 lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
724 lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
725 123
726 def __update_lines_before(self, updated, newlines, lines_before): 124 def __update_lines_before(self, updated, newlines, lines_before):
727 if updated: 125 if updated:
@@ -733,9 +131,9 @@ class GoRecipeHandler(RecipeHandler):
733 lines_before.append(line) 131 lines_before.append(line)
734 return updated 132 return updated
735 133
736 def __rewrite_lic_uri(self, lines_before): 134 def __rewrite_lic_vars(self, lines_before):
737
738 def varfunc(varname, origvalue, op, newlines): 135 def varfunc(varname, origvalue, op, newlines):
136 import urllib.parse
739 if varname == 'LIC_FILES_CHKSUM': 137 if varname == 'LIC_FILES_CHKSUM':
740 new_licenses = [] 138 new_licenses = []
741 licenses = origvalue.split('\\') 139 licenses = origvalue.split('\\')
@@ -760,12 +158,11 @@ class GoRecipeHandler(RecipeHandler):
760 lines_before, ['LIC_FILES_CHKSUM'], varfunc) 158 lines_before, ['LIC_FILES_CHKSUM'], varfunc)
761 return self.__update_lines_before(updated, newlines, lines_before) 159 return self.__update_lines_before(updated, newlines, lines_before)
762 160
763 def __rewrite_src_uri(self, lines_before, additional_uris = []): 161 def __rewrite_src_uri(self, lines_before):
764 162
765 def varfunc(varname, origvalue, op, newlines): 163 def varfunc(varname, origvalue, op, newlines):
766 if varname == 'SRC_URI': 164 if varname == 'SRC_URI':
767 src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"] 165 src_uri = ['git://${GO_IMPORT};protocol=https;nobranch=1;destsuffix=${GO_SRCURI_DESTSUFFIX}']
768 src_uri.extend(additional_uris)
769 return src_uri, None, -1, True 166 return src_uri, None, -1, True
770 return origvalue, None, 0, True 167 return origvalue, None, 0, True
771 168
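
The rewritten Go handler above no longer resolves module repositories itself: it adds the go-mod and go-mod-update-modules classes, writes stub ${BPN}-licenses.inc and ${BPN}-go-mods.inc include files, and has create_recipe() run the update_modules task to fill them in. The one value it still derives from go.mod is GO_IMPORT, obtained by stripping a trailing /vN major-version suffix from the module path. A minimal sketch of that substitution, not part of the patch, with example module paths:

    import re

    def go_import_from_module_path(module_path):
        # Same substitution as in the handler: drop a trailing /v2, /v3, ...
        return re.sub(r'/v([0-9]+)$', '', module_path)

    print(go_import_from_module_path("github.com/example/project/v3"))
    # github.com/example/project
    print(go_import_from_module_path("golang.org/x/sys"))
    # golang.org/x/sys
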
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 113a89f6a6..8c4cdd5234 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -15,9 +15,9 @@ import bb
15from bb.fetch2.npm import NpmEnvironment 15from bb.fetch2.npm import NpmEnvironment
16from bb.fetch2.npm import npm_package 16from bb.fetch2.npm import npm_package
17from bb.fetch2.npmsw import foreach_dependencies 17from bb.fetch2.npmsw import foreach_dependencies
18from oe.license_finder import match_licenses, find_license_files
18from recipetool.create import RecipeHandler 19from recipetool.create import RecipeHandler
19from recipetool.create import get_license_md5sums 20from recipetool.create import generate_common_licenses_chksums
20from recipetool.create import guess_license
21from recipetool.create import split_pkg_licenses 21from recipetool.create import split_pkg_licenses
22logger = logging.getLogger('recipetool') 22logger = logging.getLogger('recipetool')
23 23
@@ -112,40 +112,54 @@ class NpmRecipeHandler(RecipeHandler):
112 """Return the extra license files and the list of packages""" 112 """Return the extra license files and the list of packages"""
113 licfiles = [] 113 licfiles = []
114 packages = {} 114 packages = {}
115 # Licenses from package.json will point to COMMON_LICENSE_DIR so we need
116 # to associate them explicitely to packages for split_pkg_licenses()
117 fallback_licenses = dict()
118
119 def _find_package_licenses(destdir):
120 """Either find license files, or use package.json metadata"""
121 def _get_licenses_from_package_json(package_json):
122 with open(os.path.join(srctree, package_json), "r") as f:
123 data = json.load(f)
124 if "license" in data:
125 licenses = data["license"].split(" ")
126 licenses = [license.strip("()") for license in licenses if license != "OR" and license != "AND"]
127 return [], licenses
128 else:
129 return [package_json], None
115 130
116 # Handle the parent package
117 packages["${PN}"] = ""
118
119 def _licfiles_append_fallback_readme_files(destdir):
120 """Append README files as fallback to license files if a license files is missing"""
121
122 fallback = True
123 readmes = []
124 basedir = os.path.join(srctree, destdir) 131 basedir = os.path.join(srctree, destdir)
125 for fn in os.listdir(basedir): 132 licfiles = find_license_files(basedir)
126 upper = fn.upper() 133 if len(licfiles) > 0:
127 if upper.startswith("README"): 134 return licfiles, None
128 fullpath = os.path.join(basedir, fn) 135 else:
129 readmes.append(fullpath) 136 # A license wasn't found in the package directory, so we'll use the package.json metadata
130 if upper.startswith("COPYING") or "LICENCE" in upper or "LICENSE" in upper: 137 pkg_json = os.path.join(basedir, "package.json")
131 fallback = False 138 return _get_licenses_from_package_json(pkg_json)
132 if fallback: 139
133 for readme in readmes: 140 def _get_package_licenses(destdir, package):
134 licfiles.append(os.path.relpath(readme, srctree)) 141 (package_licfiles, package_licenses) = _find_package_licenses(destdir)
142 if package_licfiles:
143 licfiles.extend(package_licfiles)
144 else:
145 fallback_licenses[package] = package_licenses
135 146
136 # Handle the dependencies 147 # Handle the dependencies
137 def _handle_dependency(name, params, destdir): 148 def _handle_dependency(name, params, destdir):
138 deptree = destdir.split('node_modules/') 149 deptree = destdir.split('node_modules/')
139 suffix = "-".join([npm_package(dep) for dep in deptree]) 150 suffix = "-".join([npm_package(dep) for dep in deptree])
140 packages["${PN}" + suffix] = destdir 151 packages["${PN}" + suffix] = destdir
141 _licfiles_append_fallback_readme_files(destdir) 152 _get_package_licenses(destdir, "${PN}" + suffix)
142 153
143 with open(shrinkwrap_file, "r") as f: 154 with open(shrinkwrap_file, "r") as f:
144 shrinkwrap = json.load(f) 155 shrinkwrap = json.load(f)
145
146 foreach_dependencies(shrinkwrap, _handle_dependency, dev) 156 foreach_dependencies(shrinkwrap, _handle_dependency, dev)
147 157
148 return licfiles, packages 158 # Handle the parent package
159 packages["${PN}"] = ""
160 _get_package_licenses(srctree, "${PN}")
161
162 return licfiles, packages, fallback_licenses
149 163
150 # Handle the peer dependencies 164 # Handle the peer dependencies
151 def _handle_peer_dependency(self, shrinkwrap_file): 165 def _handle_peer_dependency(self, shrinkwrap_file):
@@ -266,36 +280,12 @@ class NpmRecipeHandler(RecipeHandler):
266 fetcher.unpack(srctree) 280 fetcher.unpack(srctree)
267 281
268 bb.note("Handling licences ...") 282 bb.note("Handling licences ...")
269 (licfiles, packages) = self._handle_licenses(srctree, shrinkwrap_file, dev) 283 (licfiles, packages, fallback_licenses) = self._handle_licenses(srctree, shrinkwrap_file, dev)
270 284 licvalues = match_licenses(licfiles, srctree, d)
271 def _guess_odd_license(licfiles): 285 split_pkg_licenses(licvalues, packages, lines_after, fallback_licenses)
272 import bb 286 fallback_licenses_flat = [license for sublist in fallback_licenses.values() for license in sublist]
273 287 extravalues["LIC_FILES_CHKSUM"] = generate_common_licenses_chksums(fallback_licenses_flat, d)
274 md5sums = get_license_md5sums(d, linenumbers=True) 288 extravalues["LICENSE"] = fallback_licenses_flat
275
276 chksums = []
277 licenses = []
278 for licfile in licfiles:
279 f = os.path.join(srctree, licfile)
280 md5value = bb.utils.md5_file(f)
281 (license, beginline, endline, md5) = md5sums.get(md5value,
282 (None, "", "", ""))
283 if not license:
284 license = "Unknown"
285 logger.info("Please add the following line for '%s' to a "
286 "'lib/recipetool/licenses.csv' and replace `Unknown`, "
287 "`X`, `Y` and `MD5` with the license, begin line, "
288 "end line and partial MD5 checksum:\n" \
289 "%s,Unknown,X,Y,MD5" % (licfile, md5value))
290 chksums.append("file://%s%s%s;md5=%s" % (licfile,
291 ";beginline=%s" % (beginline) if beginline else "",
292 ";endline=%s" % (endline) if endline else "",
293 md5 if md5 else md5value))
294 licenses.append((license, licfile, md5value))
295 return (licenses, chksums)
296
297 (licenses, extravalues["LIC_FILES_CHKSUM"]) = _guess_odd_license(licfiles)
298 split_pkg_licenses([*licenses, *guess_license(srctree, d)], packages, lines_after)
299 289
300 classes.append("npm") 290 classes.append("npm")
301 handled.append("buildsystem") 291 handled.append("buildsystem")
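
The npm handler now delegates license-file detection to oe.license_finder and, for packages that ship no license file at all, falls back to the "license" field in package.json; those fallback licenses are attached to the per-package split and turned into ${COMMON_LICENSE_DIR} entries for LIC_FILES_CHKSUM via generate_common_licenses_chksums(). A minimal sketch of the package.json fallback parsing shown above, with made-up metadata:

    import json

    # Hypothetical package metadata; "license" may be an SPDX expression.
    package_json = json.loads('{"name": "example", "license": "(MIT OR Apache-2.0)"}')
    licenses = package_json["license"].split(" ")
    licenses = [lic.strip("()") for lic in licenses if lic not in ("OR", "AND")]
    print(licenses)  # ['MIT', 'Apache-2.0']
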
diff --git a/scripts/lib/recipetool/licenses.csv b/scripts/lib/recipetool/licenses.csv
deleted file mode 100644
index 80851111b3..0000000000
--- a/scripts/lib/recipetool/licenses.csv
+++ /dev/null
@@ -1,37 +0,0 @@
10636e73ff0215e8d672dc4c32c317bb3,GPL-2.0-only
212f884d2ae1ff87c09e5b7ccc2c4ca7e,GPL-2.0-only
318810669f13b87348459e611d31ab760,GPL-2.0-only
4252890d9eee26aab7b432e8b8a616475,LGPL-2.0-only
52d5025d4aa3495befef8f17206a5b0a1,LGPL-2.1-only
63214f080875748938ba060314b4f727d,LGPL-2.0-only
7385c55653886acac3821999a3ccd17b3,Artistic-1.0 | GPL-2.0-only
8393a5ca445f6965873eca0259a17f833,GPL-2.0-only
93b83ef96387f14655fc854ddc3c6bd57,Apache-2.0
103bf50002aefd002f49e7bb854063f7e7,LGPL-2.0-only
114325afd396febcb659c36b49533135d4,GPL-2.0-only
124fbd65380cdd255951079008b364516c,LGPL-2.1-only
1354c7042be62e169199200bc6477f04d1,BSD-3-Clause
1455ca817ccb7d5b5b66355690e9abc605,LGPL-2.0-only
1559530bdf33659b29e73d4adb9f9f6552,GPL-2.0-only
165f30f0716dfdd0d91eb439ebec522ec2,LGPL-2.0-only
176a6a8e020838b23406c81b19c1d46df6,LGPL-3.0-only
18751419260aa954499f7abaabaa882bbe,GPL-2.0-only
197fbc338309ac38fefcd64b04bb903e34,LGPL-2.1-only
208ca43cbc842c2336e835926c2166c28b,GPL-2.0-only
2194d55d512a9ba36caa9b7df079bae19f,GPL-2.0-only
229ac2e7cff1ddaf48b6eab6028f23ef88,GPL-2.0-only
239f604d8a4f8e74f4f5140845a21b6674,LGPL-2.0-only
24a6f89e2100d9b6cdffcea4f398e37343,LGPL-2.1-only
25b234ee4d69f5fce4486a80fdaf4a4263,GPL-2.0-only
26bbb461211a33b134d42ed5ee802b37ff,LGPL-2.1-only
27bfe1f75d606912a4111c90743d6c7325,MPL-1.1-only
28c93c0550bd3173f4504b2cbd8991e50b,GPL-2.0-only
29d32239bcb673463ab874e80d47fae504,GPL-3.0-only
30d7810fab7487fb0aad327b76f1be7cd7,GPL-2.0-only
31d8045f3b8f929c1cb29a1e3fd737b499,LGPL-2.1-only
32db979804f025cf55aabec7129cb671ed,LGPL-2.0-only
33eb723b61539feef013de476e68b5c50a,GPL-2.0-only
34ebb5c50ab7cab4baeffba14977030c07,GPL-2.0-only
35f27defe1e96c2e1ecd4e0c9be8967949,GPL-3.0-only
36fad9b3332be894bab9bc501572864b29,LGPL-2.1-only
37fbc093901857fcd118f065f900982c24,LGPL-2.1-only