 meta/classes/spdx.bbclass | 254 ++++++++++++++++++++++++--------------------------
 1 file changed, 119 insertions(+), 135 deletions(-)

diff --git a/meta/classes/spdx.bbclass b/meta/classes/spdx.bbclass
index c0050f394d..454c53e96f 100644
--- a/meta/classes/spdx.bbclass
+++ b/meta/classes/spdx.bbclass
@@ -15,7 +15,6 @@
 # SPDX file will be output to the path which is defined as [SPDX_MANIFEST_DIR]
 # in ./meta/conf/licenses.conf.
 
-SPDXOUTPUTDIR = "${WORKDIR}/spdx_output_dir"
 SPDXSSTATEDIR = "${WORKDIR}/spdx_sstate_dir"
 
 # If ${S} isn't actually the top-level source directory, set SPDX_S to point at
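
Review note: the SPDX_S knob described in the comment above is a plain BitBake
variable, so a recipe whose real sources live below ${S} can simply reassign it.
A minimal sketch (the "src" subdirectory name is hypothetical, not something the
class defines):

    SPDX_S = "${S}/src"
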
@@ -24,49 +23,50 @@ SPDX_S ?= "${S}"
 
 python do_spdx () {
     import os, sys
-    import json
+    import json, shutil
 
     info = {}
-    info['workdir'] = (d.getVar('WORKDIR', True) or "")
-    info['sourcedir'] = (d.getVar('SPDX_S', True) or "")
-    info['pn'] = (d.getVar( 'PN', True ) or "")
-    info['pv'] = (d.getVar( 'PV', True ) or "")
-    info['src_uri'] = (d.getVar( 'SRC_URI', True ) or "")
-    info['spdx_version'] = (d.getVar('SPDX_VERSION', True) or '')
-    info['data_license'] = (d.getVar('DATA_LICENSE', True) or '')
-
-    spdx_sstate_dir = (d.getVar('SPDXSSTATEDIR', True) or "")
-    manifest_dir = (d.getVar('SPDX_MANIFEST_DIR', True) or "")
+    info['workdir'] = d.getVar('WORKDIR', True)
+    info['sourcedir'] = d.getVar('SPDX_S', True)
+    info['pn'] = d.getVar('PN', True)
+    info['pv'] = d.getVar('PV', True)
+    info['spdx_version'] = d.getVar('SPDX_VERSION', True)
+    info['data_license'] = d.getVar('DATA_LICENSE', True)
+
+    sstatedir = d.getVar('SPDXSSTATEDIR', True)
+    sstatefile = os.path.join(sstatedir, info['pn'] + info['pv'] + ".spdx")
+
+    manifest_dir = d.getVar('SPDX_MANIFEST_DIR', True)
     info['outfile'] = os.path.join(manifest_dir, info['pn'] + ".spdx" )
-    sstatefile = os.path.join(spdx_sstate_dir,
-            info['pn'] + info['pv'] + ".spdx" )
-    info['spdx_temp_dir'] = (d.getVar('SPDX_TEMP_DIR', True) or "")
-    info['tar_file'] = os.path.join( info['workdir'], info['pn'] + ".tar.gz" )
 
-    # Make sure manifest dir exists
-    if not os.path.exists( manifest_dir ):
-        bb.utils.mkdirhier( manifest_dir )
+    info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR', True)
+    info['tar_file'] = os.path.join(info['workdir'], info['pn'] + ".tar.gz" )
+
+    # Make sure important dirs exist
+    try:
+        bb.utils.mkdirhier(manifest_dir)
+        bb.utils.mkdirhier(sstatedir)
+        bb.utils.mkdirhier(info['spdx_temp_dir'])
+    except OSError as e:
+        bb.error("SPDX: Could not set up required directories: " + str(e))
+        return
 
     ## get everything from cache. use it to decide if
     ## something needs to be rerun
-    cur_ver_code = get_ver_code( info['sourcedir'] )
+    cur_ver_code = get_ver_code(info['sourcedir'])
     cache_cur = False
-    if not os.path.exists( spdx_sstate_dir ):
-        bb.utils.mkdirhier( spdx_sstate_dir )
-    if not os.path.exists( info['spdx_temp_dir'] ):
-        bb.utils.mkdirhier( info['spdx_temp_dir'] )
-    if os.path.exists( sstatefile ):
+    if os.path.exists(sstatefile):
         ## cache for this package exists. read it in
-        cached_spdx = get_cached_spdx( sstatefile )
+        cached_spdx = get_cached_spdx(sstatefile)
 
         if cached_spdx['PackageVerificationCode'] == cur_ver_code:
-            bb.warn(info['pn'] + "'s ver code same as cache's. do nothing")
+            bb.warn("SPDX: Verification code for " + info['pn']
+                    + " is same as cache's. do nothing")
             cache_cur = True
         else:
-            local_file_info = setup_foss_scan( info,
-                    True, cached_spdx['Files'] )
+            local_file_info = setup_foss_scan(info, True, cached_spdx['Files'])
     else:
-        local_file_info = setup_foss_scan( info, False, None )
+        local_file_info = setup_foss_scan(info, False, None)
 
     if cache_cur:
         spdx_file_info = cached_spdx['Files']
@@ -74,28 +74,33 @@ python do_spdx () {
         foss_license_info = cached_spdx['Licenses']
     else:
         ## setup fossology command
-        foss_server = (d.getVar('FOSS_SERVER', True) or "")
-        foss_flags = (d.getVar('FOSS_WGET_FLAGS', True) or "")
-        foss_full_spdx = (d.getVar('FOSS_FULL_SPDX', True) == "true" or false)
+        foss_server = d.getVar('FOSS_SERVER', True)
+        foss_flags = d.getVar('FOSS_WGET_FLAGS', True)
+        foss_full_spdx = d.getVar('FOSS_FULL_SPDX', True) == "true" or False
         foss_command = "wget %s --post-file=%s %s"\
-            % (foss_flags,info['tar_file'],foss_server)
-
-        (foss_package_info, foss_file_info, foss_license_info) = run_fossology( foss_command, foss_full_spdx )
-        spdx_file_info = create_spdx_doc( local_file_info, foss_file_info )
-        ## write to cache
-        write_cached_spdx(sstatefile, cur_ver_code, foss_package_info,
-                spdx_file_info, foss_license_info)
+            % (foss_flags, info['tar_file'], foss_server)
+
+        foss_result = run_fossology(foss_command, foss_full_spdx)
+        if foss_result is not None:
+            (foss_package_info, foss_file_info, foss_license_info) = foss_result
+            spdx_file_info = create_spdx_doc(local_file_info, foss_file_info)
+            ## write to cache
+            write_cached_spdx(sstatefile, cur_ver_code, foss_package_info,
+                    spdx_file_info, foss_license_info)
+        else:
+            bb.error("SPDX: Could not communicate with FOSSology server. Command was: " + foss_command)
+            return
 
     ## Get document and package level information
     spdx_header_info = get_header_info(info, cur_ver_code, foss_package_info)
 
     ## CREATE MANIFEST
-    create_manifest(info,spdx_header_info,spdx_file_info, foss_license_info)
+    create_manifest(info, spdx_header_info, spdx_file_info, foss_license_info)
 
     ## clean up the temp stuff
-    remove_dir_tree( info['spdx_temp_dir'] )
+    shutil.rmtree(info['spdx_temp_dir'], ignore_errors=True)
     if os.path.exists(info['tar_file']):
-        remove_file( info['tar_file'] )
+        remove_file(info['tar_file'])
 }
 addtask spdx after do_patch before do_configure
 
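
Review note: with the addtask line above, the scan runs between do_patch and
do_configure for every recipe that inherits the class. A minimal local.conf
sketch to enable it (the FOSS_SERVER URL is a placeholder for a running
FOSSology+SPDX instance, not a value shipped with the class):

    INHERIT += "spdx"
    FOSS_SERVER = "http://localhost/?mod=spdx_license_once"
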
@@ -120,18 +125,18 @@ def create_manifest(info, header, files, licenses):
             f.write(key + ": " + value + '\n')
         f.write('\n')
 
-def get_cached_spdx( sstatefile ):
+def get_cached_spdx(sstatefile):
     import json
     import codecs
     cached_spdx_info = {}
-    with codecs.open( sstatefile, mode='r', encoding='utf-8' ) as f:
+    with codecs.open(sstatefile, mode='r', encoding='utf-8') as f:
         try:
             cached_spdx_info = json.load(f)
         except ValueError as e:
             cached_spdx_info = None
     return cached_spdx_info
 
-def write_cached_spdx( sstatefile, ver_code, package_info, files, license_info):
+def write_cached_spdx(sstatefile, ver_code, package_info, files, license_info):
     import json
     import codecs
     spdx_doc = {}
@@ -142,105 +147,85 @@ def write_cached_spdx( sstatefile, ver_code, package_info, files, license_info):
     spdx_doc['Package'] = package_info
     spdx_doc['Licenses'] = {}
     spdx_doc['Licenses'] = license_info
-    with codecs.open( sstatefile, mode='w', encoding='utf-8' ) as f:
+    with codecs.open(sstatefile, mode='w', encoding='utf-8') as f:
         f.write(json.dumps(spdx_doc))
 
-def setup_foss_scan( info, cache, cached_files ):
+def setup_foss_scan(info, cache, cached_files):
     import errno, shutil
     import tarfile
     file_info = {}
     cache_dict = {}
 
-    for f_dir, f in list_files( info['sourcedir'] ):
-        full_path = os.path.join( f_dir, f )
+    for f_dir, f in list_files(info['sourcedir']):
+        full_path = os.path.join(f_dir, f)
         abs_path = os.path.join(info['sourcedir'], full_path)
-        dest_dir = os.path.join( info['spdx_temp_dir'], f_dir )
-        dest_path = os.path.join( info['spdx_temp_dir'], full_path )
-        try:
-            stats = os.stat(abs_path)
-        except OSError as e:
-            bb.warn( "Stat failed" + str(e) + "\n")
-            continue
+        dest_dir = os.path.join(info['spdx_temp_dir'], f_dir)
+        dest_path = os.path.join(info['spdx_temp_dir'], full_path)
 
-        checksum = hash_file( abs_path )
+        checksum = hash_file(abs_path)
         if not checksum is None:
-            mtime = time.asctime(time.localtime(stats.st_mtime))
-
-            ## retain cache information if it exists
             file_info[checksum] = {}
+            ## retain cache information if it exists
             if cache and checksum in cached_files:
                 file_info[checksum] = cached_files[checksum]
+            ## have the file included in what's sent to the FOSSology server
             else:
                 file_info[checksum]['FileName'] = full_path
                 try:
-                    os.makedirs(dest_dir)
+                    bb.utils.mkdirhier(dest_dir)
+                    shutil.copyfile(abs_path, dest_path)
                 except OSError as e:
-                    if e.errno == errno.EEXIST and os.path.isdir(dest_dir):
-                        pass
-                    else:
-                        bb.warn( "mkdir failed " + str(e) + "\n" )
-                        continue
-
-        if (cache and checksum not in cached_files) or not cache:
-            try:
-                shutil.copyfile( abs_path, dest_path )
-            except shutil.Error as e:
-                bb.warn( str(e) + "\n" )
-            except IOError as e:
-                bb.warn( str(e) + "\n" )
+                    bb.warn("SPDX: mkdirhier failed: " + str(e))
+                except shutil.Error as e:
+                    bb.warn("SPDX: copyfile failed: " + str(e))
+                except IOError as e:
+                    bb.warn("SPDX: copyfile failed: " + str(e))
+        else:
+            bb.warn("SPDX: Could not get checksum for file: " + f)
 
-    with tarfile.open( info['tar_file'], "w:gz" ) as tar:
-        tar.add( info['spdx_temp_dir'], arcname=os.path.basename(info['spdx_temp_dir']) )
+    with tarfile.open(info['tar_file'], "w:gz") as tar:
+        tar.add(info['spdx_temp_dir'], arcname=os.path.basename(info['spdx_temp_dir']))
 
     return file_info
 
-
-def remove_dir_tree( dir_name ):
-    import shutil
-    try:
-        shutil.rmtree( dir_name )
-    except:
-        pass
-
-def remove_file( file_name ):
+def remove_file(file_name):
     try:
-        os.remove( file_name )
+        os.remove(file_name)
     except OSError as e:
         pass
 
-def list_files( dir ):
-    for root, subFolders, files in os.walk( dir ):
+def list_files(dir):
+    for root, subFolders, files in os.walk(dir):
         for f in files:
-            rel_root = os.path.relpath( root, dir )
+            rel_root = os.path.relpath(root, dir)
             yield rel_root, f
     return
 
-def hash_file( file_name ):
-    f = None
+def hash_file(file_name):
     try:
-        f = open( file_name, 'rb' )
-        data_string = f.read()
+        with open(file_name, 'rb') as f:
+            data_string = f.read()
+            sha1 = hash_string(data_string)
+            return sha1
     except:
         return None
-    finally:
-        if not f is None:
-            f.close()
-    sha1 = hash_string( data_string )
-    return sha1
-
-def hash_string( data ):
+
+def hash_string(data):
     import hashlib
     sha1 = hashlib.sha1()
-    sha1.update( data )
+    sha1.update(data)
     return sha1.hexdigest()
 
-def run_fossology( foss_command, full_spdx ):
+def run_fossology(foss_command, full_spdx):
     import string, re
     import subprocess
 
     p = subprocess.Popen(foss_command.split(),
         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     foss_output, foss_error = p.communicate()
+    if p.returncode != 0:
+        return None
+
     foss_output = unicode(foss_output, "utf-8")
     foss_output = string.replace(foss_output, '\r', '')
 
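
Review note: hash_file() still slurps each file with a single f.read(), so a very
large source file is held in memory whole. A chunked variant (a sketch, not part
of this patch; Python 2 string semantics assumed, matching the unicode()/
iteritems() usage elsewhere in the class) would bound memory use:

    def hash_file(file_name):
        import hashlib
        sha1 = hashlib.sha1()
        try:
            with open(file_name, 'rb') as f:
                # feed the digest 128 KiB at a time instead of reading it all
                for block in iter(lambda: f.read(131072), ''):
                    sha1.update(block)
            return sha1.hexdigest()
        except IOError:
            return None
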
@@ -265,11 +250,10 @@ def run_fossology( foss_command, full_spdx ):
     records = []
     # FileName is also in PackageFileName, so we match on FileType as well.
     records = re.findall('FileName:.*?FileType:.*?</text>', foss_output, re.S)
-
     for rec in records:
-        chksum = re.findall( 'FileChecksum: SHA1: (.*)\n', rec)[0]
+        chksum = re.findall('FileChecksum: SHA1: (.*)\n', rec)[0]
         file_info[chksum] = {}
-        file_info[chksum]['FileCopyrightText'] = re.findall( 'FileCopyrightText: '
+        file_info[chksum]['FileCopyrightText'] = re.findall('FileCopyrightText: '
             + '(.*?</text>)', rec, re.S )[0]
         fields = ['FileName', 'FileType', 'LicenseConcluded', 'LicenseInfoInFile']
         for field in fields:
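
Review note: the re.findall() patterns above and below pick fields out of the
tag-value report the server returns. A hand-written, abbreviated record of the
kind they match (file name, checksum, and license values invented for
illustration):

    FileName: ./src/main.c
    FileType: SOURCE
    FileChecksum: SHA1: da39a3ee5e6b4b0d3255bfef95601890afd80709
    LicenseConcluded: GPL-2.0
    LicenseInfoInFile: GPL-2.0
    FileCopyrightText: <text>Copyright (C) 2013 Example Author</text>
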
@@ -282,44 +266,40 @@ def run_fossology( foss_command, full_spdx ):
     for lic in licenses:
         license_id = re.findall('LicenseID: (.*)\n', lic)[0]
         license_info[license_id] = {}
-        license_info[license_id]['ExtractedText'] = re.findall('ExtractedText: (.*?</text>)',lic, re.S)[0]
+        license_info[license_id]['ExtractedText'] = re.findall('ExtractedText: (.*?</text>)', lic, re.S)[0]
         license_info[license_id]['LicenseName'] = re.findall('LicenseName: (.*)', lic)[0]
 
     return (package_info, file_info, license_info)
 
-def create_spdx_doc( file_info, scanned_files ):
+def create_spdx_doc(file_info, scanned_files):
     import json
     ## push foss changes back into cache
     for chksum, lic_info in scanned_files.iteritems():
         if chksum in file_info:
-            file_info[chksum]['FileName'] = file_info[chksum]['FileName']
             file_info[chksum]['FileType'] = lic_info['FileType']
             file_info[chksum]['FileChecksum: SHA1'] = chksum
             file_info[chksum]['LicenseInfoInFile'] = lic_info['LicenseInfoInFile']
             file_info[chksum]['LicenseConcluded'] = lic_info['LicenseConcluded']
             file_info[chksum]['FileCopyrightText'] = lic_info['FileCopyrightText']
         else:
-            bb.warn(lic_info['FileName'] + " : " + chksum
+            bb.warn("SPDX: " + lic_info['FileName'] + " : " + chksum
                 + " : is not in the local file info: "
-                + json.dumps(lic_info,indent=1))
+                + json.dumps(lic_info, indent=1))
     return file_info
 
-def get_ver_code( dirname ):
+def get_ver_code(dirname):
     chksums = []
-    for f_dir, f in list_files( dirname ):
-        try:
-            stats = os.stat(os.path.join(dirname,f_dir,f))
-        except OSError as e:
-            bb.warn( "Stat failed" + str(e) + "\n")
-            continue
-        hash = hash_file(os.path.join(dirname,f_dir,f))
+    for f_dir, f in list_files(dirname):
+        hash = hash_file(os.path.join(dirname, f_dir, f))
         if not hash is None:
             chksums.append(hash)
-    ver_code_string = ''.join( chksums ).lower()
-    ver_code = hash_string( ver_code_string )
+        else:
+            bb.warn("SPDX: Could not hash file: " + os.path.join(dirname, f_dir, f))
+    ver_code_string = ''.join(chksums).lower()
+    ver_code = hash_string(ver_code_string)
     return ver_code
 
 def get_header_info( info, spdx_verification_code, package_info):
     """
     Put together the header SPDX information.
     Eventually this needs to become a lot less
@@ -330,14 +310,12 @@ def get_header_info( info, spdx_verification_code, package_info):
     head = []
     DEFAULT = "NOASSERTION"
 
-    #spdx_verification_code = get_ver_code( info['sourcedir'] )
-    package_checksum = ''
-    if os.path.exists(info['tar_file']):
-        package_checksum = hash_file( info['tar_file'] )
-    else:
+    package_checksum = hash_file(info['tar_file'])
+    if package_checksum is None:
         package_checksum = DEFAULT
 
     ## document level information
+    head.append("## SPDX Document Information")
     head.append("SPDXVersion: " + info['spdx_version'])
     head.append("DataLicense: " + info['data_license'])
     head.append("DocumentComment: <text>SPDX for "
@@ -345,9 +323,11 @@ def get_header_info( info, spdx_verification_code, package_info):
     head.append("")
 
     ## Creator information
+    ## Note that this does not give time in UTC.
     now = datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
     head.append("## Creation Information")
-    head.append("Creator: Tool: fossology-spdx")
+    ## Tools are supposed to have a version, but FOSSology+SPDX provides none.
+    head.append("Creator: Tool: FOSSology+SPDX")
     head.append("Created: " + now)
     head.append("CreatorComment: <text>UNO</text>")
     head.append("")
@@ -366,10 +346,14 @@ def get_header_info( info, spdx_verification_code, package_info):
     head.append("PackageDescription: <text>" + info['pn']
         + " version " + info['pv'] + "</text>")
     head.append("")
-    head.append("PackageCopyrightText: " + package_info['PackageCopyrightText'])
+    head.append("PackageCopyrightText: "
+        + package_info['PackageCopyrightText'])
     head.append("")
-    head.append("PackageLicenseDeclared: " + package_info['PackageLicenseDeclared'])
-    head.append("PackageLicenseConcluded: " + package_info['PackageLicenseConcluded'])
+    head.append("PackageLicenseDeclared: "
+        + package_info['PackageLicenseDeclared'])
+    head.append("PackageLicenseConcluded: "
+        + package_info['PackageLicenseConcluded'])
+
     for licref in package_info['PackageLicenseInfoFromFiles']:
         head.append("PackageLicenseInfoFromFiles: " + licref)
     head.append("")
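
Review note: get_ver_code() concatenates the per-file SHA1s in directory-walk
order, but the SPDX spec defines the package verification code as the SHA1 of
the sorted concatenation of file checksums, so two trees walked in different
orders can disagree. A spec-conformant sketch (same helpers as above, not part
of this patch):

    def get_ver_code(dirname):
        chksums = []
        for f_dir, f in list_files(dirname):
            hash = hash_file(os.path.join(dirname, f_dir, f))
            if hash is not None:
                chksums.append(hash)
        # SPDX: sort the checksums, then hash their concatenation
        return hash_string(''.join(sorted(chksums)).lower())
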