Diffstat (limited to 'scripts/contrib')
-rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix-plot.sh                            124
-rwxr-xr-x  scripts/contrib/bb-perf/bb-matrix.sh                                  66
-rwxr-xr-x  scripts/contrib/bb-perf/buildstats-plot.sh                           160
-rwxr-xr-x  scripts/contrib/bb-perf/buildstats.sh                                167
-rwxr-xr-x  scripts/contrib/bbvars.py                                            168
-rwxr-xr-x  scripts/contrib/build-perf-test-wrapper.sh                           247
-rwxr-xr-x  scripts/contrib/convert-overrides.py                                 155
-rwxr-xr-x  scripts/contrib/convert-spdx-licenses.py                             145
-rwxr-xr-x  scripts/contrib/convert-srcuri.py                                     77
-rwxr-xr-x  scripts/contrib/convert-variable-renames.py                          116
-rwxr-xr-x  scripts/contrib/ddimage                                              172
-rwxr-xr-x  scripts/contrib/devtool-stress.py                                    245
-rwxr-xr-x  scripts/contrib/dialog-power-control                                  57
-rwxr-xr-x  scripts/contrib/documentation-audit.sh                                97
-rwxr-xr-x  scripts/contrib/graph-tool                                           118
-rwxr-xr-x  scripts/contrib/image-manifest                                       523
-rwxr-xr-x  scripts/contrib/improve_kernel_cve_report.py                         500
-rwxr-xr-x  scripts/contrib/list-packageconfig-flags.py                          167
-rwxr-xr-x  scripts/contrib/make-spdx-bindings.sh                                 12
-rwxr-xr-x  scripts/contrib/oe-build-perf-report-email.py                        121
-rw-r--r--  scripts/contrib/oe-image-files-spdx/.gitignore                         8
-rw-r--r--  scripts/contrib/oe-image-files-spdx/README.md                         24
-rw-r--r--  scripts/contrib/oe-image-files-spdx/pyproject.toml                    23
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py     1
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py        86
-rw-r--r--  scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py      1
-rwxr-xr-x  scripts/contrib/patchreview.py                                       280
-rwxr-xr-x  scripts/contrib/patchtest.sh                                         104
-rwxr-xr-x  scripts/contrib/serdevtry                                             61
-rwxr-xr-x  scripts/contrib/test_build_time.sh                                   223
-rwxr-xr-x  scripts/contrib/test_build_time_worker.sh                             41
-rwxr-xr-x  scripts/contrib/uncovered                                             26
-rwxr-xr-x  scripts/contrib/verify-homepage.py                                    66
33 files changed, 0 insertions, 4381 deletions
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh
deleted file mode 100755
index 6672189c95..0000000000
--- a/scripts/contrib/bb-perf/bb-matrix-plot.sh
+++ /dev/null
@@ -1,124 +0,0 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script operates on the .dat file generated by bb-matrix.sh. It tolerates
# the header by skipping the first line, but error messages and bad data records
# need to be removed first. It will generate three views of the plot, and leave
# an interactive view open for further analysis.
#
# AUTHORS
# Darren Hart <dvhart@linux.intel.com>
#

# Setup the defaults
DATFILE="bb-matrix.dat"
XLABEL="BB\\\\_NUMBER\\\\_THREADS"
YLABEL="PARALLEL\\\\_MAKE"
FIELD=3
DEF_TITLE="Elapsed Time (seconds)"
PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"
SIZE="640,480"

function usage {
CMD=$(basename $0)
cat <<EOM
Usage: $CMD [-d datfile] [-f field] [-h] [-s W,H] [-t title] [-w]
  -d datfile    The data file generated by bb-matrix.sh (default: $DATFILE)
  -f field      The field index to plot as the Z axis from the data file
                (default: $FIELD, "$DEF_TITLE")
  -h            Display this help message
  -s W,H        PNG and window size in pixels (default: $SIZE)
  -t title      The title to display, should describe the field (-f) and units
                (default: "$DEF_TITLE")
  -w            Render the plot as wireframe with a 2D colormap projected on the
                XY plane rather than as the texture for the surface
EOM
}

# Parse and validate arguments
while getopts "d:f:hs:t:w" OPT; do
    case $OPT in
    d)
        DATFILE="$OPTARG"
        ;;
    f)
        FIELD="$OPTARG"
        ;;
    h)
        usage
        exit 0
        ;;
    s)
        SIZE="$OPTARG"
        ;;
    t)
        TITLE="$OPTARG"
        ;;
    w)
        PM3D_FRAGMENT="set pm3d at b"
        W="-w"
        ;;
    *)
        usage
        exit 1
        ;;
    esac
done

# Ensure the data file exists
if [ ! -f "$DATFILE" ]; then
    echo "ERROR: $DATFILE does not exist"
    usage
    exit 1
fi
PLOT_BASENAME=${DATFILE%.*}-f$FIELD$W

# Set a sane title
# TODO: parse the header and define titles for each format parameter for TIME(1)
if [ -z "$TITLE" ]; then
    if [ ! "$FIELD" == "3" ]; then
        TITLE="Field $FIELD"
    else
        TITLE="$DEF_TITLE"
    fi
fi

# Determine the dgrid3d mesh dimensions size
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | tail -n1)
BB_CNT=$[${MAX} - $MIN + 1]
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | tail -n1)
PM_CNT=$[${MAX} - $MIN + 1]


(cat <<EOF
set title "$TITLE"
set xlabel "$XLABEL"
set ylabel "$YLABEL"
set style line 100 lt 5 lw 1.5
$PM3D_FRAGMENT
set dgrid3d $PM_CNT,$BB_CNT splines
set ticslevel 0.2

set term png size $SIZE
set output "$PLOT_BASENAME.png"
splot "$DATFILE" every ::1 using 1:2:$FIELD with lines ls 100

set view 90,0
set output "$PLOT_BASENAME-bb.png"
replot

set view 90,90
set output "$PLOT_BASENAME-pm.png"
replot

set view 60,30
set term wxt size $SIZE
replot
EOF
) | gnuplot --persist
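
A typical invocation, assuming a bb-matrix.dat file produced by bb-matrix.sh in the
current directory, would be:

    $ ./bb-matrix-plot.sh -d bb-matrix.dat -s 800,600
    $ ./bb-matrix-plot.sh -f 4 -t "System Time (seconds)"   # field 4 is %S in the dat header
    $ ./bb-matrix-plot.sh -w                                # wireframe with projected 2D colormap
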
diff --git a/scripts/contrib/bb-perf/bb-matrix.sh b/scripts/contrib/bb-perf/bb-matrix.sh
deleted file mode 100755
index b1fff0f344..0000000000
--- a/scripts/contrib/bb-perf/bb-matrix.sh
+++ /dev/null
@@ -1,66 +0,0 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# This script runs BB_CMD (typically building core-image-sato) for all
# combinations of BB_RANGE and PM_RANGE values. It saves off all the console
# logs, the buildstats directories, and creates a bb-pm-runtime.dat file which
# can be used to postprocess the results with a plotting tool, spreadsheet, etc.
# Before running this script, it is recommended that you pre-download all the
# necessary sources by performing the BB_CMD once manually. It is also a good
# idea to disable cron to avoid runtime variations caused by things like the
# locate process. Be sure to sanitize the dat file prior to post-processing as
# it may contain error messages or bad runs that should be removed.
#
# AUTHORS
# Darren Hart <dvhart@linux.intel.com>
#

# The following ranges are appropriate for a 4 core system with 8 logical units
# Use leading 0s to ensure all digits are the same string length, this results
# in nice log file names and columnar dat files.
BB_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"
PM_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"

DATADIR="bb-matrix-$$"
BB_CMD="bitbake core-image-minimal"
RUNTIME_LOG="$DATADIR/bb-matrix.dat"

# See TIME(1) for a description of the time format parameters
# The following all report 0: W K r s t w
TIME_STR="%e %S %U %P %c %w %R %F %M %x"

# Prepare the DATADIR
mkdir $DATADIR
if [ $? -ne 0 ]; then
    echo "Failed to create $DATADIR."
    exit 1
fi

# Add a simple header
echo "BB PM $TIME_STR" > $RUNTIME_LOG
for BB in $BB_RANGE; do
    for PM in $PM_RANGE; do
        RUNDIR="$DATADIR/$BB-$PM-build"
        mkdir $RUNDIR
        BB_LOG=$RUNDIR/$BB-$PM-bitbake.log
        date
        echo "BB=$BB PM=$PM Logging to $BB_LOG"

        echo -n " Preparing the work directory... "
        rm -rf pseudodone tmp sstate-cache tmp-eglibc &> /dev/null
        echo "done"

        # Export the variables under test and run the bitbake command
        # Strip any leading zeroes before passing to bitbake
        export BB_NUMBER_THREADS=$(echo $BB | sed 's/^0*//')
        export PARALLEL_MAKE="-j $(echo $PM | sed 's/^0*//')"
        /usr/bin/time -f "$BB $PM $TIME_STR" -a -o $RUNTIME_LOG $BB_CMD &> $BB_LOG

        echo " $(tail -n1 $RUNTIME_LOG)"
        cp -a tmp/buildstats $RUNDIR/$BB-$PM-buildstats
    done
done
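
The resulting bb-matrix.dat contains the header row followed by one record per
BB/PM combination, suitable as input for bb-matrix-plot.sh; an abbreviated
example (values hypothetical):

    BB PM %e %S %U %P %c %w %R %F %M %x
    04 04 4523.06 529.41 6238.82 149% 293512 2147610 0 376 1018764 0
    04 05 4443.79 532.87 6295.11 153% 294523 2102042 0 392 1021348 0
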
diff --git a/scripts/contrib/bb-perf/buildstats-plot.sh b/scripts/contrib/bb-perf/buildstats-plot.sh
deleted file mode 100755
index 45c27d0b97..0000000000
--- a/scripts/contrib/bb-perf/buildstats-plot.sh
+++ /dev/null
@@ -1,160 +0,0 @@
#!/usr/bin/env bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
#
# Produces script data to be consumed by gnuplot. There are two possible plots,
# depending on whether the -S parameter is present:
#
#  * without -S: Produces a histogram listing the top N recipes/tasks versus
#      stats. The first stat defined in the -s parameter is the one used
#      for ranking.
#  * with -S: Produces a histogram listing tasks versus stats. In this case,
#      the value of each stat is the sum for that particular stat across all recipes found.
#      Stat values are sorted in descending order of the first stat defined in -s.
#
# EXAMPLES
#
# 1. Top recipes' tasks taking into account utime
#
# $ buildstats-plot.sh -s utime | gnuplot -p
#
# 2. Tasks versus utime:stime
#
# $ buildstats-plot.sh -s utime:stime -S | gnuplot -p
#
# 3. Tasks versus IO write_bytes:IO read_bytes
#
# $ buildstats-plot.sh -s 'IO write_bytes:IO read_bytes' -S | gnuplot -p
#
# AUTHORS
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
#

set -o nounset
set -o errexit

BS_DIR="tmp/buildstats"
N=10
RECIPE=""
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
STATS="utime"
ACCUMULATE=""
SUM=""
OUTDATA_FILE="$PWD/buildstats-plot.out"

function usage {
    CMD=$(basename $0)
    cat <<EOM
Usage: $CMD [-b buildstats_dir] [-t do_task]
  -b buildstats  The path where the folder resides
                 (default: "$BS_DIR")
  -n N           Top N recipes to display. Ignored if -S is present
                 (default: "$N")
  -r recipe      The recipe mask to be searched
  -t tasks       The tasks to be computed
                 (default: "$TASKS")
  -s stats       The stats to be matched. If more than one stat is given, units
                 should be the same because data is plotted as a histogram.
                 (see buildstats.sh -h for all options) or any other defined
                 (build)stat separated by colons, i.e. stime:utime
                 (default: "$STATS")
  -a             Accumulate all stats values for found recipes
  -S             Sum values for a particular stat for found recipes
  -o             Output data file.
                 (default: "$OUTDATA_FILE")
  -h             Display this help message
EOM
}

# Parse and validate arguments
while getopts "b:n:r:t:s:o:aSh" OPT; do
    case $OPT in
    b)
        BS_DIR="$OPTARG"
        ;;
    n)
        N="$OPTARG"
        ;;
    r)
        RECIPE="-r $OPTARG"
        ;;
    t)
        TASKS="$OPTARG"
        ;;
    s)
        STATS="$OPTARG"
        ;;
    a)
        ACCUMULATE="-a"
        ;;
    S)
        SUM="y"
        ;;
    o)
        OUTDATA_FILE="$OPTARG"
        ;;
    h)
        usage
        exit 0
        ;;
    *)
        usage
        exit 1
        ;;
    esac
done

# Get number of stats
IFS=':'; statsarray=(${STATS}); unset IFS
nstats=${#statsarray[@]}

# Get script folder, use to run buildstats.sh
CD=$(dirname $0)

# Parse buildstats recipes to produce a single table
OUTBUILDSTATS="$PWD/buildstats.log"
$CD/buildstats.sh -b "$BS_DIR" -s "$STATS" -t "$TASKS" $RECIPE $ACCUMULATE -H > $OUTBUILDSTATS

# Get headers
HEADERS=$(cat $OUTBUILDSTATS | sed -n -e 's/\(.*\)/"\1"/' -e '1s/ /\\\\\\\\ /g' -e 's/_/\\\\\\\\_/g' -e '1s/:/" "/gp')

echo -e "set boxwidth 0.9 relative"
echo -e "set style data histograms"
echo -e "set style fill solid 1.0 border lt -1"
echo -e "set xtics rotate by 45 right"

# Get output data
if [ -z "$SUM" ]; then
    cat $OUTBUILDSTATS | sed -e '1d' -e 's/_/\\\\_/g' | sort -k3 -n -r | head -$N > $OUTDATA_FILE
    # include task at recipe column
    sed -i -e "1i\
${HEADERS}" $OUTDATA_FILE
    echo -e "set title \"Top task/recipes\""
    echo -e "plot for [COL=3:`expr 3 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(stringcolumn(1).' '.stringcolumn(2)) title columnheader(COL)"
else

    # Construct the datamash sum arguments (sum 3 sum 4 ...)
    declare -a sumargs
    j=0
    for i in `seq $nstats`; do
        sumargs[j]=sum; j=$(( $j + 1 ))
        sumargs[j]=`expr 3 + $i - 1`; j=$(( $j + 1 ))
    done

    # Do the processing with datamash
    cat $OUTBUILDSTATS | sed -e '1d' | datamash -t ' ' -g1 ${sumargs[*]} | sort -k2 -n -r > $OUTDATA_FILE

    # Include headers in the resulting file so we can use them for gnuplot xtics
    HEADERS=$(echo $HEADERS | sed -e 's/recipe//1')
    sed -i -e "1i\
${HEADERS}" $OUTDATA_FILE

    # Plot
    echo -e "set title \"Sum stats values per task for all recipes\""
    echo -e "plot for [COL=2:`expr 2 + ${nstats} - 1`] '${OUTDATA_FILE}' using COL:xtic(1) title columnheader(COL)"
fi

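Because the script only writes gnuplot statements to stdout, the output can also
be rendered to a file by prepending terminal settings instead of piping to an
interactive session; a sketch (terminal and file name illustrative):

    $ { echo 'set term pngcairo size 1024,768'; echo 'set output "tasks.png"'; \
        ./buildstats-plot.sh -s utime:stime -S; } | gnuplot
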
diff --git a/scripts/contrib/bb-perf/buildstats.sh b/scripts/contrib/bb-perf/buildstats.sh
deleted file mode 100755
index e45cfc146d..0000000000
--- a/scripts/contrib/bb-perf/buildstats.sh
+++ /dev/null
@@ -1,167 +0,0 @@
#!/bin/bash
#
# Copyright (c) 2011, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# DESCRIPTION
# Given 'buildstats' data (generated by bitbake when setting
# USER_CLASSES ?= "buildstats" in local.conf), task names and stat values
# (those present in the buildstats files), outputs
# '<task> <recipe> <value_1> <value_2> ... <value_n>'. The units are the ones
# defined in buildstats, which in turn takes data from /proc/[pid] files
#
# Some useful pipelines
#
# 1. Tasks with largest stime (Amount of time that this process has been scheduled
#    in kernel mode) values
# $ buildstats.sh -b <buildstats> -s stime | sort -k3 -n -r | head
#
# 2. Min, max, sum utime (Amount of time that this process has been scheduled
#    in user mode) per task (needs GNU datamash)
# $ buildstats.sh -b <buildstats> -s utime | datamash -t' ' -g1 min 3 max 3 sum 3 | sort -k4 -n -r
#
# AUTHORS
# Leonardo Sandoval <leonardo.sandoval.gonzalez@linux.intel.com>
#

# Stats, by type
TIME="utime:stime:cutime:cstime"
IO="IO wchar:IO write_bytes:IO syscr:IO read_bytes:IO rchar:IO syscw:IO cancelled_write_bytes"
RUSAGE="rusage ru_utime:rusage ru_stime:rusage ru_maxrss:rusage ru_minflt:rusage ru_majflt:\
rusage ru_inblock:rusage ru_oublock:rusage ru_nvcsw:rusage ru_nivcsw"

CHILD_RUSAGE="Child rusage ru_utime:Child rusage ru_stime:Child rusage ru_maxrss:Child rusage ru_minflt:\
Child rusage ru_majflt:Child rusage ru_inblock:Child rusage ru_oublock:Child rusage ru_nvcsw:\
Child rusage ru_nivcsw"

BS_DIR="tmp/buildstats"
RECIPE=""
TASKS="compile:configure:fetch:install:patch:populate_lic:populate_sysroot:unpack"
STATS="$TIME"
ACCUMULATE=""
HEADER="" # No header by default

function usage {
CMD=$(basename $0)
cat <<EOM
Usage: $CMD [-b buildstats_dir] [-t do_task]
  -b buildstats  The path where the folder resides
                 (default: "$BS_DIR")
  -r recipe      The recipe to be computed
  -t tasks       The tasks to be computed
                 (default: "$TASKS")
  -s stats       The stats to be matched. Options: TIME, IO, RUSAGE, CHILD_RUSAGE
                 or any other defined buildstat separated by colons, i.e. stime:utime
                 (default: "$STATS")
                 Default stat sets:
                     TIME=$TIME
                     IO=$IO
                     RUSAGE=$RUSAGE
                     CHILD_RUSAGE=$CHILD_RUSAGE
  -a             Accumulate all stats values for found recipes
  -H             Include a header line naming the selected stats
  -h             Display this help message
EOM
}

# Parse and validate arguments
while getopts "b:r:t:s:aHh" OPT; do
    case $OPT in
    b)
        BS_DIR="$OPTARG"
        ;;
    r)
        RECIPE="$OPTARG"
        ;;
    t)
        TASKS="$OPTARG"
        ;;
    s)
        STATS="$OPTARG"
        ;;
    a)
        ACCUMULATE="y"
        ;;
    H)
        HEADER="y"
        ;;
    h)
        usage
        exit 0
        ;;
    *)
        usage
        exit 1
        ;;
    esac
done

# Ensure the buildstats folder exists
if [ ! -d "$BS_DIR" ]; then
    echo "ERROR: $BS_DIR does not exist"
    usage
    exit 1
fi

stats=""
IFS=":"
for stat in ${STATS}; do
    case $stat in
    TIME)
        stats="${stats}:${TIME}"
        ;;
    IO)
        stats="${stats}:${IO}"
        ;;
    RUSAGE)
        stats="${stats}:${RUSAGE}"
        ;;
    CHILD_RUSAGE)
        stats="${stats}:${CHILD_RUSAGE}"
        ;;
    *)
        stats="${STATS}"
        ;;
    esac
done

# remove possible colon at the beginning
stats="$(echo "$stats" | sed -e 's/^://1')"

# Provide a header if required by the user
if [ -n "$HEADER" ] ; then
    if [ -n "$ACCUMULATE" ]; then
        echo "task:recipe:accumulated(${stats//:/;})"
    else
        echo "task:recipe:$stats"
    fi
fi

for task in ${TASKS}; do
    task="do_${task}"
    for file in $(find ${BS_DIR} -type f -path *${RECIPE}*/${task} | awk 'BEGIN{ ORS=""; OFS=":" } { print $0,"" }'); do
        recipe="$(basename $(dirname $file))"
        times=""
        for stat in ${stats}; do
            [ -z "$stat" ] && { echo "empty stats"; }
            time=$(sed -n -e "s/^\($stat\): \\(.*\\)/\\2/p" $file)
            # in case the stat is not present, set the value as NA
            [ -z "$time" ] && { time="NA"; }
            # Append it to times
            if [ -z "$times" ]; then
                times="${time}"
            else
                times="${times} ${time}"
            fi
        done
        if [ -n "$ACCUMULATE" ]; then
            IFS=' '; valuesarray=(${times}); IFS=':'
            times=0
            for value in "${valuesarray[@]}"; do
                [ "$value" == "NA" ] && { echo "ERROR: stat is not present."; usage; exit 1; }
                times=$(( $times + $value ))
            done
        fi
        echo "${task} ${recipe} ${times}"
    done
done
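
With -H, the output is a colon-separated header line followed by space-separated
records, one per task instance; for example (recipe names and values hypothetical):

    $ ./buildstats.sh -b tmp/buildstats -s utime -H
    task:recipe:utime
    do_compile busybox-1.36.1-r0 1250
    do_configure zlib-1.3-r0 87
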
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py
deleted file mode 100755
index a9cdf082ab..0000000000
--- a/scripts/contrib/bbvars.py
+++ /dev/null
@@ -1,168 +0,0 @@
#!/usr/bin/env python3
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
# Copyright (C) Darren Hart <dvhart@linux.intel.com>, 2010


import sys
import getopt
import os
import os.path
import re

# Set up sys.path to let us import tinfoil
scripts_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
lib_path = scripts_path + '/lib'
sys.path.insert(0, lib_path)
import scriptpath
scriptpath.add_bitbake_lib_path()
import bb.tinfoil

def usage():
    print('Usage: %s -d FILENAME [-d FILENAME]*' % os.path.basename(sys.argv[0]))
    print(' -d FILENAME documentation file to search')
    print(' -h, --help display this help and exit')
    print(' -t FILENAME documentation config file (for doc tags)')
    print(' -T Only display variables with doc tags (requires -t)')

def bbvar_is_documented(var, documented_vars):
    ''' Check if variable (var) is in the list of documented variables (documented_vars) '''
    return var in documented_vars

def collect_documented_vars(docfiles):
    ''' Walk the docfiles and collect the documented variables '''
    documented_vars = []
    prog = re.compile(r".*($|[^A-Z_])<glossentry id=\'var-")
    var_prog = re.compile(r'<glossentry id=\'var-(.*)\'>')
    for d in docfiles:
        with open(d) as f:
            documented_vars += var_prog.findall(f.read())

    return documented_vars

def bbvar_doctag(var, docconf):
    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
    if docconf == "":
        return "?"

    try:
        # Use a context manager so the file is closed on all paths
        with open(docconf) as f:
            for line in f:
                m = prog.search(line)
                if m:
                    return m.group(1)
    except IOError as err:
        return err.args[1]

    return ""

def main():
    docfiles = []
    bbvars = set()
    undocumented = []
    docconf = ""
    onlydoctags = False

    # Collect and validate input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:ht:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)

    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % a)
                sys.exit(3)
        elif o == "-t":
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"

    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)

    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)

    prog = re.compile("^[^a-z]*$")
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        parser = bb.codeparser.PythonParser('parser', None)
        datastore = tinfoil.config_data

        def bbvars_update(data):
            if prog.match(data):
                bbvars.add(data)
            if tinfoil.config_data.getVarFlag(data, 'python'):
                try:
                    parser.parse_python(tinfoil.config_data.getVar(data))
                except bb.data_smart.ExpansionError:
                    pass
                for var in parser.references:
                    if prog.match(var):
                        bbvars.add(var)
            else:
                try:
                    expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
                    for var in expandedVar.references:
                        if prog.match(var):
                            bbvars.add(var)
                except bb.data_smart.ExpansionError:
                    pass

        # Use tinfoil to collect all the variable names globally
        for data in datastore:
            bbvars_update(data)

        # Collect variables from all recipes
        for recipe in tinfoil.all_recipe_files(variants=False):
            print("Checking %s" % recipe)
            for data in tinfoil.parse_recipe_file(recipe):
                bbvars_update(data)

    documented_vars = collect_documented_vars(docfiles)

    # Check each var for documentation
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, documented_vars):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1

    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        if not onlydoctags or not doctag == "":
            print('%s%s' % (v.ljust(varlen), doctag))


if __name__ == "__main__":
    main()
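
An illustrative invocation from an initialized build directory (paths
hypothetical; the -d file must contain DocBook <glossentry id='var-...'>
entries and the -t file the corresponding VAR[doc] tags):

    $ ../scripts/contrib/bbvars.py \
          -d ../documentation/ref-manual/variables.xml \
          -t ../meta/conf/documentation.conf -T
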
diff --git a/scripts/contrib/build-perf-test-wrapper.sh b/scripts/contrib/build-perf-test-wrapper.sh
deleted file mode 100755
index 0a85e6e708..0000000000
--- a/scripts/contrib/build-perf-test-wrapper.sh
+++ /dev/null
@@ -1,247 +0,0 @@
#!/bin/bash
#
# Build performance test script wrapper
#
# Copyright (c) 2016, Intel Corporation.
#
# SPDX-License-Identifier: GPL-2.0-only
#
# This script is a simple wrapper around the actual build performance tester
# script. This script initializes the build environment, runs
# oe-build-perf-test and archives the results.

script=`basename $0`
script_dir=$(realpath $(dirname $0))
archive_dir=~/perf-results/archives

usage () {
cat << EOF
Usage: $script [-h] [-c COMMITISH] [-C GIT_REPO]

Optional arguments:
  -h                show this help and exit.
  -a ARCHIVE_DIR    archive results tarball here, give an empty string to
                    disable tarball archiving (default: $archive_dir)
  -c COMMITISH      test (checkout) this commit, <branch>:<commit> can be
                    specified to test specific commit of certain branch
  -C GIT_REPO       commit results into Git
  -d DOWNLOAD_DIR   directory to store downloaded sources in
  -E EMAIL_ADDR     send email report
  -g GLOBALRES_DIR  where to place the globalres file
  -P GIT_REMOTE     push results to a remote Git repository
  -R DEST           rsync reports to a remote destination
  -w WORK_DIR       work dir for this script
                    (default: GIT_TOP_DIR/build-perf-test)
  -x                create xml report (instead of json)
EOF
}

get_os_release_var () {
    ( source /etc/os-release; eval echo '$'$1 )
}


# Parse command line arguments
commitish=""
oe_build_perf_test_extra_opts=()
oe_git_archive_extra_opts=()
while getopts "ha:c:C:d:E:g:P:R:w:x" opt; do
    case $opt in
        h)  usage
            exit 0
            ;;
        a)  mkdir -p "$OPTARG"
            archive_dir=`realpath -s "$OPTARG"`
            ;;
        c)  commitish=$OPTARG
            ;;
        C)  mkdir -p "$OPTARG"
            results_repo=`realpath -s "$OPTARG"`
            ;;
        d)  download_dir=`realpath -s "$OPTARG"`
            ;;
        E)  email_to="$OPTARG"
            ;;
        g)  mkdir -p "$OPTARG"
            globalres_dir=`realpath -s "$OPTARG"`
            ;;
        P)  oe_git_archive_extra_opts+=("--push" "$OPTARG")
            ;;
        R)  rsync_dst="$OPTARG"
            ;;
        w)  base_dir=`realpath -s "$OPTARG"`
            ;;
        x)  oe_build_perf_test_extra_opts+=("--xml")
            ;;
        *)  usage
            exit 1
            ;;
    esac
done

# Check positional args
shift "$((OPTIND - 1))"
if [ $# -ne 0 ]; then
    echo "ERROR: No positional args are accepted."
    usage
    exit 1
fi

# Open a file descriptor for flock and acquire lock
LOCK_FILE="/tmp/oe-build-perf-test-wrapper.lock"
if ! exec 3> "$LOCK_FILE"; then
    echo "ERROR: Unable to open lock file"
    exit 1
fi
if ! flock -n 3; then
    echo "ERROR: Another instance of this script is running"
    exit 1
fi

echo "Running on `uname -n`"
if ! git_topdir=$(git rev-parse --show-toplevel); then
    echo "The current working dir doesn't seem to be a git clone. Please cd there before running `basename $0`"
    exit 1
fi

cd "$git_topdir"

if [ -n "$commitish" ]; then
    echo "Running git fetch"
    git fetch &> /dev/null
    git checkout HEAD^0 &> /dev/null

    # Handle <branch>:<commit> format
    if echo "$commitish" | grep -q ":"; then
        commit=`echo "$commitish" | cut -d":" -f2`
        branch=`echo "$commitish" | cut -d":" -f1`
    else
        commit="$commitish"
        branch="$commitish"
    fi

    echo "Checking out $commitish"
    git branch -D $branch &> /dev/null
    if ! git checkout -f $branch &> /dev/null; then
        echo "ERROR: Git checkout failed"
        exit 1
    fi

    # Check that the specified branch really contains the commit
    commit_hash=`git rev-parse --revs-only $commit --`
    if [ -z "$commit_hash" -o "`git merge-base $branch $commit`" != "$commit_hash" ]; then
        echo "ERROR: branch $branch does not contain commit $commit"
        exit 1
    fi
    git reset --hard $commit > /dev/null
fi

# Determine name of the current branch
branch=`git symbolic-ref HEAD 2> /dev/null`
# Strip refs/heads/
branch=${branch:11}

# Setup build environment
if [ -z "$base_dir" ]; then
    base_dir="$git_topdir/build-perf-test"
fi
echo "Using working dir $base_dir"

if [ -z "$download_dir" ]; then
    download_dir="$base_dir/downloads"
fi
if [ -z "$globalres_dir" ]; then
    globalres_dir="$base_dir"
fi

timestamp=`date "+%Y%m%d%H%M%S"`
git_rev=$(git rev-parse --short HEAD) || exit 1
build_dir="$base_dir/build-$git_rev-$timestamp"
results_dir="$base_dir/results-$git_rev-$timestamp"
globalres_log="$globalres_dir/globalres.log"
machine="qemux86"

mkdir -p "$base_dir"
source ./oe-init-build-env $build_dir >/dev/null || exit 1

# Additional config
auto_conf="$build_dir/conf/auto.conf"
echo "MACHINE = \"$machine\"" > "$auto_conf"
echo 'BB_NUMBER_THREADS = "8"' >> "$auto_conf"
echo 'PARALLEL_MAKE = "-j 8"' >> "$auto_conf"
echo "DL_DIR = \"$download_dir\"" >> "$auto_conf"
# Disabling network sanity check slightly reduces the variance of timing results
echo 'CONNECTIVITY_CHECK_URIS = ""' >> "$auto_conf"
# Possibility to define extra settings
if [ -f "$base_dir/auto.conf.extra" ]; then
    cat "$base_dir/auto.conf.extra" >> "$auto_conf"
fi

# Run actual test script
oe-build-perf-test --out-dir "$results_dir" \
    --globalres-file "$globalres_log" \
    "${oe_build_perf_test_extra_opts[@]}" \
    --lock-file "$base_dir/oe-build-perf.lock"

case $? in
    1)  echo "ERROR: oe-build-perf-test script failed!"
        exit 1
        ;;
    2)  echo "NOTE: some tests failed!"
        ;;
esac

# Commit results to git
if [ -n "$results_repo" ]; then
    echo -e "\nArchiving results in $results_repo"
    oe-git-archive \
        --git-dir "$results_repo" \
        --branch-name "{hostname}/{branch}/{machine}" \
        --tag-name "{hostname}/{branch}/{machine}/{commit_count}-g{commit}/{tag_number}" \
        --exclude "buildstats.json" \
        --notes "buildstats/{branch_name}" "$results_dir/buildstats.json" \
        "${oe_git_archive_extra_opts[@]}" \
        "$results_dir"

    # Generate test reports
    sanitized_branch=`echo $branch | tr / _`
    report_txt=`hostname`_${sanitized_branch}_${machine}.txt
    report_html=`hostname`_${sanitized_branch}_${machine}.html
    echo -e "\nGenerating test report"
    oe-build-perf-report -r "$results_repo" > $report_txt
    oe-build-perf-report -r "$results_repo" --html > $report_html

    # Send email report
    if [ -n "$email_to" ]; then
        echo "Emailing test report"
        os_name=`get_os_release_var PRETTY_NAME`
        "$script_dir"/oe-build-perf-report-email.py --to "$email_to" --subject "Build Perf Test Report for $os_name" --text $report_txt "${OE_BUILD_PERF_REPORT_EMAIL_EXTRA_ARGS[@]}"
    fi

    # Upload report files, unless we're on detached head
    if [ -n "$rsync_dst" -a -n "$branch" ]; then
        echo "Uploading test report"
        rsync $report_txt $report_html $rsync_dst
    fi
fi


echo -ne "\n\n-----------------\n"
echo "Global results file:"
echo -ne "\n"

cat "$globalres_log"

if [ -n "$archive_dir" ]; then
    echo -ne "\n\n-----------------\n"
    echo "Archiving results in $archive_dir"
    mkdir -p "$archive_dir"
    results_basename=`basename "$results_dir"`
    results_dirname=`dirname "$results_dir"`
    tar -czf "$archive_dir/`uname -n`-${results_basename}.tar.gz" -C "$results_dirname" "$results_basename"
fi

rm -rf "$build_dir"
rm -rf "$results_dir"

echo "DONE"
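
A representative run from the top of a git clone (paths and address
hypothetical):

    $ cd ~/poky
    $ ./scripts/contrib/build-perf-test-wrapper.sh -c master:HEAD \
          -C ~/perf-results/repo -d ~/downloads -E perf@example.com
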
diff --git a/scripts/contrib/convert-overrides.py b/scripts/contrib/convert-overrides.py
deleted file mode 100755
index c69acb4095..0000000000
--- a/scripts/contrib/convert-overrides.py
+++ /dev/null
@@ -1,155 +0,0 @@
#!/usr/bin/env python3
#
# Conversion script to add new override syntax to existing bitbake metadata
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

#
# To use this script on a new layer you need to list the overrides the
# layer is known to use in the list below.
#
# Known constraint: Matching is 'loose' and in particular will find variable
# and function names with "_append" and "_remove" in them. Those need to be
# filtered out manually or in the skip list below.
#

import re
import os
import sys
import tempfile
import shutil
import mimetypes
import argparse

parser = argparse.ArgumentParser(description="Convert override syntax")
parser.add_argument("--override", "-o", action="append", default=[], help="Add additional strings to consider as an override (e.g. custom machines/distros)")
parser.add_argument("--skip", "-s", action="append", default=[], help="Add additional strings to skip and not consider an override")
parser.add_argument("--skip-ext", "-e", action="append", default=[], help="Additional file suffixes to skip when processing (e.g. '.foo')")
parser.add_argument("--package-vars", action="append", default=[], help="Additional variables to treat as package variables")
parser.add_argument("--image-vars", action="append", default=[], help="Additional variables to treat as image variables")
parser.add_argument("--short-override", action="append", default=[], help="Additional strings to treat as short overrides")
parser.add_argument("path", nargs="+", help="Paths to convert")

args = parser.parse_args()

# List of strings to treat as overrides
vars = args.override
vars += ["append", "prepend", "remove"]
vars += ["qemuarm", "qemux86", "qemumips", "qemuppc", "qemuriscv", "qemuall"]
vars += ["genericx86", "edgerouter", "beaglebone-yocto"]
vars += ["armeb", "arm", "armv5", "armv6", "armv4", "powerpc64", "aarch64", "riscv32", "riscv64", "x86", "mips64", "powerpc"]
vars += ["mipsarch", "x86-x32", "mips16e", "microblaze", "e5500-64b", "mipsisa32", "mipsisa64"]
vars += ["class-native", "class-target", "class-cross-canadian", "class-cross", "class-devupstream"]
vars += ["tune-", "pn-", "forcevariable"]
vars += ["libc-musl", "libc-glibc", "libc-newlib","libc-baremetal"]
vars += ["task-configure", "task-compile", "task-install", "task-clean", "task-image-qa", "task-rm_work", "task-image-complete", "task-populate-sdk"]
vars += ["toolchain-clang", "mydistro", "nios2", "sdkmingw32", "overrideone", "overridetwo"]
vars += ["linux-gnux32", "linux-muslx32", "linux-gnun32", "mingw32", "poky", "darwin", "linuxstdbase"]
vars += ["linux-gnueabi", "eabi"]
vars += ["virtclass-multilib", "virtclass-mcextend"]

# List of strings to treat as overrides but only with whitespace following or another override (more restricted matching).
# Handles issues with arc matching arch.
shortvars = ["arc", "mips", "mipsel", "sh4"] + args.short_override

# Variables which take packagenames as an override
packagevars = ["FILES", "RDEPENDS", "RRECOMMENDS", "SUMMARY", "DESCRIPTION", "RSUGGESTS", "RPROVIDES", "RCONFLICTS", "PKG", "ALLOW_EMPTY",
               "pkg_postrm", "pkg_postinst_ontarget", "pkg_postinst", "INITSCRIPT_NAME", "INITSCRIPT_PARAMS", "DEBIAN_NOAUTONAME", "ALTERNATIVE",
               "PKGE", "PKGV", "PKGR", "USERADD_PARAM", "GROUPADD_PARAM", "CONFFILES", "SYSTEMD_SERVICE", "LICENSE", "SECTION", "pkg_preinst",
               "pkg_prerm", "RREPLACES", "GROUPMEMS_PARAM", "SYSTEMD_AUTO_ENABLE", "SKIP_FILEDEPS", "PRIVATE_LIBS", "PACKAGE_ADD_METADATA",
               "INSANE_SKIP", "DEBIANNAME", "SYSTEMD_SERVICE_ESCAPED"] + args.package_vars

# Expressions to skip if encountered, these are not overrides
skips = args.skip
skips += ["parser_append", "recipe_to_append", "extra_append", "to_remove", "show_appends", "applied_appends", "file_appends", "handle_remove"]
skips += ["expanded_removes", "color_remove", "test_remove", "empty_remove", "toaster_prepend", "num_removed", "licfiles_append", "_write_append"]
skips += ["no_report_remove", "test_prepend", "test_append", "multiple_append", "test_remove", "shallow_remove", "do_remove_layer", "first_append"]
skips += ["parser_remove", "to_append", "no_remove", "bblayers_add_remove", "bblayers_remove", "apply_append", "is_x86", "base_dep_prepend"]
skips += ["autotools_dep_prepend", "go_map_arm", "alt_remove_links", "systemd_append_file", "file_append", "process_file_darwin"]
skips += ["run_loaddata_poky", "determine_if_poky_env", "do_populate_poky_src", "libc_cv_include_x86_isa_level", "test_rpm_remove", "do_install_armmultilib"]
skips += ["get_appends_for_files", "test_doubleref_remove", "test_bitbakelayers_add_remove", "elf32_x86_64", "colour_remove", "revmap_remove"]
skips += ["test_rpm_remove", "test_bitbakelayers_add_remove", "recipe_append_file", "log_data_removed", "recipe_append", "systemd_machine_unit_append"]
skips += ["recipetool_append", "changetype_remove", "try_appendfile_wc", "test_qemux86_directdisk", "test_layer_appends", "tgz_removed"]

imagevars = ["IMAGE_CMD", "EXTRA_IMAGECMD", "IMAGE_TYPEDEP", "CONVERSION_CMD", "COMPRESS_CMD"] + args.image_vars
packagevars += imagevars

skip_ext = [".html", ".patch", ".m4", ".diff"] + args.skip_ext

vars_re = {}
for exp in vars:
    vars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp), r"\1:" + exp)

shortvars_re = {}
for exp in shortvars:
    shortvars_re[exp] = (re.compile(r'((^|[#\'"\s\-\+])[A-Za-z0-9_\-:${}\.]+)_' + exp + r'([\(\'"\s:])'), r"\1:" + exp + r"\3")

package_re = {}
for exp in packagevars:
    package_re[exp] = (re.compile(r'(^|[#\'"\s\-\+]+)' + exp + r'_' + r'([$a-z"\'\s%\[<{\\\*].)'), r"\1" + exp + r":\2")

# Other substitutions to make
subs = {
    'r = re.compile(r"([^:]+):\s*(.*)")' : 'r = re.compile(r"(^.+?):\s+(.*)")',
    "val = d.getVar('%s_%s' % (var, pkg))" : "val = d.getVar('%s:%s' % (var, pkg))",
    "f.write('%s_%s: %s\\n' % (var, pkg, encode(val)))" : "f.write('%s:%s: %s\\n' % (var, pkg, encode(val)))",
    "d.getVar('%s_%s' % (scriptlet_name, pkg))" : "d.getVar('%s:%s' % (scriptlet_name, pkg))",
    'ret.append(v + "_" + p)' : 'ret.append(v + ":" + p)',
}

def processfile(fn):
    print("processing file '%s'" % fn)
    try:
        fh, abs_path = tempfile.mkstemp()
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    skip = False
                    for s in skips:
                        if s in line:
                            skip = True
                    if "ptest_append" in line or "ptest_remove" in line or "ptest_prepend" in line:
                        skip = False
                    for sub in subs:
                        if sub in line:
                            line = line.replace(sub, subs[sub])
                            skip = True
                    if not skip:
                        for pvar in packagevars:
                            line = package_re[pvar][0].sub(package_re[pvar][1], line)
                        for var in vars:
                            line = vars_re[var][0].sub(vars_re[var][1], line)
                        for shortvar in shortvars:
                            line = shortvars_re[shortvar][0].sub(shortvars_re[shortvar][1], line)
                        if "pkg_postinst:ontarget" in line:
                            line = line.replace("pkg_postinst:ontarget", "pkg_postinst_ontarget")
                    new_file.write(line)
        shutil.copymode(fn, abs_path)
        os.remove(fn)
        shutil.move(abs_path, fn)
    except UnicodeDecodeError:
        pass

ourname = os.path.basename(sys.argv[0])
ourversion = "0.9.3"

for p in args.path:
    if os.path.isfile(p):
        processfile(p)
    else:
        print("processing directory '%s'" % p)
        for root, dirs, files in os.walk(p):
            for name in files:
                if name == ourname:
                    continue
                fn = os.path.join(root, name)
                if os.path.islink(fn):
                    continue
                if "/.git/" in fn or any(fn.endswith(ext) for ext in skip_ext):
                    continue
                processfile(fn)

print("All files processed with version %s" % ourversion)
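
The rewrite itself is mechanical; on a hypothetical recipe fragment:

    # before
    SRC_URI_append = " file://fix.patch"
    RDEPENDS_${PN}_append_libc-musl = " musl-utils"

    # after running convert-overrides.py on the layer
    SRC_URI:append = " file://fix.patch"
    RDEPENDS:${PN}:append:libc-musl = " musl-utils"
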
diff --git a/scripts/contrib/convert-spdx-licenses.py b/scripts/contrib/convert-spdx-licenses.py
deleted file mode 100755
index 13cf12a33f..0000000000
--- a/scripts/contrib/convert-spdx-licenses.py
+++ /dev/null
@@ -1,145 +0,0 @@
#!/usr/bin/env python3
#
# Conversion script to change LICENSE entries to SPDX identifiers
#
# Copyright (C) 2021-2022 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import re
import os
import sys
import tempfile
import shutil
import mimetypes

if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)

license_map = {
"AGPL-3" : "AGPL-3.0-only",
"AGPL-3+" : "AGPL-3.0-or-later",
"AGPLv3" : "AGPL-3.0-only",
"AGPLv3+" : "AGPL-3.0-or-later",
"AGPLv3.0" : "AGPL-3.0-only",
"AGPLv3.0+" : "AGPL-3.0-or-later",
"AGPL-3.0" : "AGPL-3.0-only",
"AGPL-3.0+" : "AGPL-3.0-or-later",
"BSD-0-Clause" : "0BSD",
"GPL-1" : "GPL-1.0-only",
"GPL-1+" : "GPL-1.0-or-later",
"GPLv1" : "GPL-1.0-only",
"GPLv1+" : "GPL-1.0-or-later",
"GPLv1.0" : "GPL-1.0-only",
"GPLv1.0+" : "GPL-1.0-or-later",
"GPL-1.0" : "GPL-1.0-only",
"GPL-1.0+" : "GPL-1.0-or-later",
"GPL-2" : "GPL-2.0-only",
"GPL-2+" : "GPL-2.0-or-later",
"GPLv2" : "GPL-2.0-only",
"GPLv2+" : "GPL-2.0-or-later",
"GPLv2.0" : "GPL-2.0-only",
"GPLv2.0+" : "GPL-2.0-or-later",
"GPL-2.0" : "GPL-2.0-only",
"GPL-2.0+" : "GPL-2.0-or-later",
"GPL-3" : "GPL-3.0-only",
"GPL-3+" : "GPL-3.0-or-later",
"GPLv3" : "GPL-3.0-only",
"GPLv3+" : "GPL-3.0-or-later",
"GPLv3.0" : "GPL-3.0-only",
"GPLv3.0+" : "GPL-3.0-or-later",
"GPL-3.0" : "GPL-3.0-only",
"GPL-3.0+" : "GPL-3.0-or-later",
"LGPLv2" : "LGPL-2.0-only",
"LGPLv2+" : "LGPL-2.0-or-later",
"LGPLv2.0" : "LGPL-2.0-only",
"LGPLv2.0+" : "LGPL-2.0-or-later",
"LGPL-2.0" : "LGPL-2.0-only",
"LGPL-2.0+" : "LGPL-2.0-or-later",
"LGPL2.1" : "LGPL-2.1-only",
"LGPL2.1+" : "LGPL-2.1-or-later",
"LGPLv2.1" : "LGPL-2.1-only",
"LGPLv2.1+" : "LGPL-2.1-or-later",
"LGPL-2.1" : "LGPL-2.1-only",
"LGPL-2.1+" : "LGPL-2.1-or-later",
"LGPLv3" : "LGPL-3.0-only",
"LGPLv3+" : "LGPL-3.0-or-later",
"LGPL-3.0" : "LGPL-3.0-only",
"LGPL-3.0+" : "LGPL-3.0-or-later",
"MPL-1" : "MPL-1.0",
"MPLv1" : "MPL-1.0",
"MPLv1.1" : "MPL-1.1",
"MPLv2" : "MPL-2.0",
"MIT-X" : "MIT",
"MIT-style" : "MIT",
"openssl" : "OpenSSL",
"PSF" : "PSF-2.0",
"PSFv2" : "PSF-2.0",
"Python-2" : "Python-2.0",
"Apachev2" : "Apache-2.0",
"Apache-2" : "Apache-2.0",
"Artisticv1" : "Artistic-1.0",
"Artistic-1" : "Artistic-1.0",
"AFL-2" : "AFL-2.0",
"AFL-1" : "AFL-1.2",
"AFLv2" : "AFL-2.0",
"AFLv1" : "AFL-1.2",
"CDDLv1" : "CDDL-1.0",
"CDDL-1" : "CDDL-1.0",
"EPLv1.0" : "EPL-1.0",
"FreeType" : "FTL",
"Nauman" : "Naumen",
"tcl" : "TCL",
"vim" : "Vim",
"SGIv1" : "SGI-OpenGL",
}

def processfile(fn):
    print("processing file '%s'" % fn)
    try:
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    if not line.startswith("LICENSE"):
                        new_file.write(line)
                        continue
                    orig = line
                    for license in sorted(license_map, key=len, reverse=True):
                        for ending in ['"', "'", " ", ")"]:
                            line = line.replace(license + ending, license_map[license] + ending)
                    if orig != line:
                        modified = True
                    new_file.write(line)
        new_file.close()
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
    except UnicodeDecodeError:
        pass

ourname = os.path.basename(sys.argv[0])
ourversion = "0.01"

if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            if os.path.islink(fn):
                continue
            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)
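
For example, a LICENSE line would be rewritten in place as follows (recipe
fragment hypothetical):

    # before
    LICENSE = "GPLv2+ & LGPLv2.1"

    # after
    LICENSE = "GPL-2.0-or-later & LGPL-2.1-only"
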
diff --git a/scripts/contrib/convert-srcuri.py b/scripts/contrib/convert-srcuri.py
deleted file mode 100755
index 587392334f..0000000000
--- a/scripts/contrib/convert-srcuri.py
+++ /dev/null
@@ -1,77 +0,0 @@
#!/usr/bin/env python3
#
# Conversion script to update SRC_URI to add branch to git urls
#
# Copyright (C) 2021 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

import re
import os
import sys
import tempfile
import shutil
import mimetypes

if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)

def processfile(fn):
    def matchline(line):
        if "MIRROR" in line or ".*" in line or "GNOME_GIT" in line:
            return False
        return True
    print("processing file '%s'" % fn)
    try:
        if "distro_alias.inc" in fn or "linux-yocto-custom.bb" in fn:
            return
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                for line in old_file:
                    if ("git://" in line or "gitsm://" in line) and "branch=" not in line and matchline(line):
                        if line.endswith('"\n'):
                            line = line.replace('"\n', ';branch=master"\n')
                        elif re.search('\s*\\\\$', line):
                            line = re.sub('\s*\\\\$', ';branch=master \\\\', line)
                        modified = True
                    if ("git://" in line or "gitsm://" in line) and "github.com" in line and "protocol=https" not in line and matchline(line):
                        if "protocol=git" in line:
                            line = line.replace('protocol=git', 'protocol=https')
                        elif line.endswith('"\n'):
                            line = line.replace('"\n', ';protocol=https"\n')
                        elif re.search('\s*\\\\$', line):
                            line = re.sub('\s*\\\\$', ';protocol=https \\\\', line)
                        modified = True
                    new_file.write(line)
        if modified:
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
    except UnicodeDecodeError:
        pass

ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"

if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            if os.path.islink(fn):
                continue
            if "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff"):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)
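
On a hypothetical recipe, both rules combine to give:

    # before
    SRC_URI = "git://github.com/example/project;protocol=git"

    # after
    SRC_URI = "git://github.com/example/project;protocol=https;branch=master"
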
diff --git a/scripts/contrib/convert-variable-renames.py b/scripts/contrib/convert-variable-renames.py
deleted file mode 100755
index eded90ca61..0000000000
--- a/scripts/contrib/convert-variable-renames.py
+++ /dev/null
@@ -1,116 +0,0 @@
#!/usr/bin/env python3
#
# Conversion script to rename variables to versions with improved terminology.
# Also highlights potentially problematic language and removed variables.
#
# Copyright (C) 2021 Richard Purdie
# Copyright (C) 2022 Wind River Systems, Inc.
#
# SPDX-License-Identifier: GPL-2.0-only
#

import re
import os
import sys
import tempfile
import shutil
import mimetypes

if len(sys.argv) < 2:
    print("Please specify a directory to run the conversion script against.")
    sys.exit(1)

renames = {
"BB_ENV_WHITELIST" : "BB_ENV_PASSTHROUGH",
"BB_ENV_EXTRAWHITE" : "BB_ENV_PASSTHROUGH_ADDITIONS",
"BB_HASHCONFIG_WHITELIST" : "BB_HASHCONFIG_IGNORE_VARS",
"BB_SETSCENE_ENFORCE_WHITELIST" : "BB_SETSCENE_ENFORCE_IGNORE_TASKS",
"BB_HASHBASE_WHITELIST" : "BB_BASEHASH_IGNORE_VARS",
"BB_HASHTASK_WHITELIST" : "BB_TASKHASH_IGNORE_TASKS",
"CVE_CHECK_PN_WHITELIST" : "CVE_CHECK_SKIP_RECIPE",
"CVE_CHECK_WHITELIST" : "CVE_CHECK_IGNORE",
"MULTI_PROVIDER_WHITELIST" : "BB_MULTI_PROVIDER_ALLOWED",
"PNBLACKLIST" : "SKIP_RECIPE",
"SDK_LOCAL_CONF_BLACKLIST" : "ESDK_LOCALCONF_REMOVE",
"SDK_LOCAL_CONF_WHITELIST" : "ESDK_LOCALCONF_ALLOW",
"SDK_INHERIT_BLACKLIST" : "ESDK_CLASS_INHERIT_DISABLE",
"SSTATE_DUPWHITELIST" : "SSTATE_ALLOW_OVERLAP_FILES",
"SYSROOT_DIRS_BLACKLIST" : "SYSROOT_DIRS_IGNORE",
"UNKNOWN_CONFIGURE_WHITELIST" : "UNKNOWN_CONFIGURE_OPT_IGNORE",
"ICECC_USER_CLASS_BL" : "ICECC_CLASS_DISABLE",
"ICECC_SYSTEM_CLASS_BL" : "ICECC_CLASS_DISABLE",
"ICECC_USER_PACKAGE_WL" : "ICECC_RECIPE_ENABLE",
"ICECC_USER_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
"ICECC_SYSTEM_PACKAGE_BL" : "ICECC_RECIPE_DISABLE",
"LICENSE_FLAGS_WHITELIST" : "LICENSE_FLAGS_ACCEPTED",
}

removed_list = [
"BB_STAMP_WHITELIST",
"BB_STAMP_POLICY",
"INHERIT_BLACKLIST",
"TUNEABI_WHITELIST",
]

context_check_list = [
"blacklist",
"whitelist",
"abort",
]

def processfile(fn):

    print("processing file '%s'" % fn)
    try:
        fh, abs_path = tempfile.mkstemp()
        modified = False
        with os.fdopen(fh, 'w') as new_file:
            with open(fn, "r") as old_file:
                lineno = 0
                for line in old_file:
                    lineno += 1
                    if not line or "BB_RENAMED_VARIABLE" in line:
                        continue
                    # Do the renames
                    for old_name, new_name in renames.items():
                        if old_name in line:
                            line = line.replace(old_name, new_name)
                            modified = True
                    # Find removed names
                    for removed_name in removed_list:
                        if removed_name in line:
                            print("%s needs further work at line %s because %s has been deprecated" % (fn, lineno, removed_name))
                    for check_word in context_check_list:
                        if re.search(check_word, line, re.IGNORECASE):
                            print("%s needs further work at line %s since it contains %s"% (fn, lineno, check_word))
                    new_file.write(line)
        new_file.close()
        if modified:
            print("*** Modified file '%s'" % (fn))
            shutil.copymode(fn, abs_path)
            os.remove(fn)
            shutil.move(abs_path, fn)
    except UnicodeDecodeError:
        pass

ourname = os.path.basename(sys.argv[0])
ourversion = "0.1"

if os.path.isfile(sys.argv[1]):
    processfile(sys.argv[1])
    sys.exit(0)

for targetdir in sys.argv[1:]:
    print("processing directory '%s'" % targetdir)
    for root, dirs, files in os.walk(targetdir):
        for name in files:
            if name == ourname:
                continue
            fn = os.path.join(root, name)
            if os.path.islink(fn):
                continue
            if "ChangeLog" in fn or "/.git/" in fn or fn.endswith(".html") or fn.endswith(".patch") or fn.endswith(".m4") or fn.endswith(".diff") or fn.endswith(".orig"):
                continue
            processfile(fn)

print("All files processed with version %s" % ourversion)
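
A hypothetical local.conf fragment illustrates the rename; note that lines
matching the context words (blacklist, whitelist, abort) are additionally
reported for manual review:

    # before
    LICENSE_FLAGS_WHITELIST += "commercial"

    # after
    LICENSE_FLAGS_ACCEPTED += "commercial"
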
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage
deleted file mode 100755
index 70eee8ebea..0000000000
--- a/scripts/contrib/ddimage
+++ /dev/null
@@ -1,172 +0,0 @@
#!/bin/sh
#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

# 1MB blocksize
BLOCKSIZE=1048576

usage() {
    echo "Usage: $(basename $0) IMAGE DEVICE"
}

image_details() {
    IMG=$1
    echo "Image details"
    echo "============="
    echo " image: $(basename $IMG)"
    # stat format is different on Mac OS and Linux
    if [ "$(uname)" = "Darwin" ]; then
        echo " size: $(stat -L -f '%z bytes' $IMG)"
        echo " modified: $(stat -L -f '%Sm' $IMG)"
    else
        echo " size: $(stat -L -c '%s bytes' $IMG)"
        echo " modified: $(stat -L -c '%y' $IMG)"
    fi
    echo " type: $(file -L -b $IMG)"
    echo ""
}

device_details() {
    BLOCK_SIZE=512

    echo "Device details"
    echo "=============="

    # Collect disk info using diskutil on Mac OS
    if [ "$(uname)" = "Darwin" ]; then
        diskutil info $DEVICE | egrep "(Device Node|Media Name|Total Size)"
        return
    fi

    # Default / Linux information collection
    ACTUAL_DEVICE=`readlink -f $DEVICE`
    DEV=`basename $ACTUAL_DEVICE`
    if [ "$ACTUAL_DEVICE" != "$DEVICE" ] ; then
        echo " device: $DEVICE -> $ACTUAL_DEVICE"
    else
        echo " device: $DEVICE"
    fi
    if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
        echo " vendor: $(cat /sys/class/block/$DEV/device/vendor)"
    else
        echo " vendor: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/device/model" ]; then
        echo " model: $(cat /sys/class/block/$DEV/device/model)"
    else
        echo " model: UNKNOWN"
    fi
    if [ -f "/sys/class/block/$DEV/size" ]; then
        echo " size: $(($(cat /sys/class/block/$DEV/size) * $BLOCK_SIZE)) bytes"
    else
        echo " size: UNKNOWN"
    fi
    echo ""
}

check_mount_device() {
    if cat /proc/self/mounts | awk '{ print $1 }' | grep /dev/ | grep -q -E "^$1$" ; then
        return 0
    fi
    return 1
}

is_mounted() {
    if [ "$(uname)" = "Darwin" ]; then
        if df | awk '{ print $1 }' | grep /dev/ | grep -q -E "^$1(s[0-9]+)?$" ; then
            return 0
        fi
    else
        if check_mount_device $1 ; then
            return 0
        fi
        DEV=`basename $1`
        if [ -d /sys/class/block/$DEV/ ] ; then
            PARENT_BLKDEV=`basename $(readlink -f "/sys/class/block/$DEV/..")`
            if [ "$PARENT_BLKDEV" != "block" ] ; then
                if check_mount_device $PARENT_BLKDEV ; then
                    return 0
                fi
            fi
            for CHILD_BLKDEV in `find /sys/class/block/$DEV/ -mindepth 1 -maxdepth 1 -name "$DEV*" -type d`
            do
                if check_mount_device /dev/`basename $CHILD_BLKDEV` ; then
                    return 0
                fi
            done
        fi
    fi
    return 1
}

is_inuse() {
    HOLDERS_DIR="/sys/class/block/`basename $1`/holders"
    if [ -d $HOLDERS_DIR ] && [ `ls -A $HOLDERS_DIR` ] ; then
        return 0
    fi
    return 1
}

if [ $# -ne 2 ]; then
    usage
    exit 1
fi

IMAGE=$1
DEVICE=$2

if [ ! -e "$IMAGE" ]; then
    echo "ERROR: Image $IMAGE does not exist"
    usage
    exit 1
fi

if [ ! -e "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE does not exist"
    usage
    exit 1
fi

if [ "$(uname)" = "Darwin" ]; then
    # readlink doesn't support -f on MacOS, just assume it isn't a symlink
    ACTUAL_DEVICE=$DEVICE
else
    ACTUAL_DEVICE=`readlink -f $DEVICE`
fi
if is_mounted $ACTUAL_DEVICE ; then
    echo "ERROR: Device $DEVICE is currently mounted - check if this is the right device, and unmount it first if so"
    device_details
    exit 1
fi
if is_inuse $ACTUAL_DEVICE ; then
    echo "ERROR: Device $DEVICE is currently in use (possibly part of LVM) - check if this is the right device!"
    device_details
    exit 1
fi

if [ ! -w "$DEVICE" ]; then
    echo "ERROR: Device $DEVICE is not writable - possibly use sudo?"
    usage
    exit 1
fi

image_details $IMAGE
device_details

printf "Write $IMAGE to $DEVICE [y/N]? "
read RESPONSE
if [ "$RESPONSE" != "y" ]; then
    echo "Write aborted"
    exit 0
fi

echo "Writing image..."
if which pv >/dev/null 2>&1; then
    pv "$IMAGE" | dd of="$DEVICE" bs="$BLOCKSIZE"
else
    dd if="$IMAGE" of="$DEVICE" bs="$BLOCKSIZE"
fi
sync
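
Typical usage, with a device name that is illustrative only (verify it against
the printed device details before confirming the write):

    $ sudo ./ddimage core-image-minimal-genericx86-64.wic /dev/sdX
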
diff --git a/scripts/contrib/devtool-stress.py b/scripts/contrib/devtool-stress.py
deleted file mode 100755
index 81046ecf49..0000000000
--- a/scripts/contrib/devtool-stress.py
+++ /dev/null
@@ -1,245 +0,0 @@
1#!/usr/bin/env python3
2
3# devtool stress tester
4#
5# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
6#
7# Copyright 2015 Intel Corporation
8#
9# SPDX-License-Identifier: GPL-2.0-only
10#
11
12import sys
13import os
14import os.path
15import subprocess
16import re
17import argparse
18import logging
19import tempfile
20import shutil
21import signal
22import fnmatch
23
24scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
25sys.path.insert(0, scripts_lib_path)
26import scriptutils
27import argparse_oe
28logger = scriptutils.logger_create('devtool-stress')
29
30def select_recipes(args):
31 import bb.tinfoil
32 tinfoil = bb.tinfoil.Tinfoil()
33 tinfoil.prepare(False)
34
35 pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
36 (latest_versions, preferred_versions) = bb.providers.findProviders(tinfoil.config_data, tinfoil.cooker.recipecaches[''], pkg_pn)
37
38 skip_classes = args.skip_classes.split(',')
39
40 recipelist = []
41 for pn in sorted(pkg_pn):
42 pref = preferred_versions[pn]
43 inherits = [os.path.splitext(os.path.basename(f))[0] for f in tinfoil.cooker.recipecaches[''].inherits[pref[1]]]
44 for cls in skip_classes:
45 if cls in inherits:
46 break
47 else:
48 recipelist.append(pn)
49
50 tinfoil.shutdown()
51
52 resume_from = args.resume_from
53 if resume_from:
54 if not resume_from in recipelist:
55 print('%s is not a testable recipe' % resume_from)
56 return 1
57 if args.only:
58 only = args.only.split(',')
59 for onlyitem in only:
60 for pn in recipelist:
61 if fnmatch.fnmatch(pn, onlyitem):
62 break
63 else:
64 print('%s does not match any testable recipe' % onlyitem)
65 return 1
66 else:
67 only = None
68 if args.skip:
69 skip = args.skip.split(',')
70 else:
71 skip = []
72
73 recipes = []
74 for pn in recipelist:
75 if resume_from:
76 if pn == resume_from:
77 resume_from = None
78 else:
79 continue
80
81 if args.only:
82 for item in only:
83 if fnmatch.fnmatch(pn, item):
84 break
85 else:
86 continue
87
88 skipit = False
89 for item in skip:
90 if fnmatch.fnmatch(pn, item):
91 skipit = True
92 if skipit:
93 continue
94
95 recipes.append(pn)
96
97 return recipes
98
99
100def stress_extract(args):
101 import bb.process
102
103 recipes = select_recipes(args)
104
105 failures = 0
106 tmpdir = tempfile.mkdtemp()
107 os.setpgrp()
108 try:
109 for pn in recipes:
110 sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
111 sys.stdout.flush()
112 failed = False
113 skipped = None
114
115 srctree = os.path.join(tmpdir, pn)
116 try:
117 bb.process.run('devtool extract %s %s' % (pn, srctree))
118 except bb.process.ExecutionError as exc:
119 if exc.exitcode == 4:
120 skipped = 'incompatible'
121 else:
122 failed = True
123 with open('stress_%s_extract.log' % pn, 'w') as f:
124 f.write(str(exc))
125
126 if os.path.exists(srctree):
127 shutil.rmtree(srctree)
128
129 if failed:
130 print('failed')
131 failures += 1
132 elif skipped:
133 print('skipped (%s)' % skipped)
134 else:
135 print('ok')
136 except KeyboardInterrupt:
137 # We want any child processes killed. This is crude, but effective.
138 os.killpg(0, signal.SIGTERM)
139
140 if failures:
141 return 1
142 else:
143 return 0
144
145
146def stress_modify(args):
147 import bb.process
148
149 recipes = select_recipes(args)
150
151 failures = 0
152 tmpdir = tempfile.mkdtemp()
153 os.setpgrp()
154 try:
155 for pn in recipes:
156 sys.stdout.write('Testing %s ' % (pn + ' ').ljust(40, '.'))
157 sys.stdout.flush()
158 failed = False
159 reset = True
160 skipped = None
161
162 srctree = os.path.join(tmpdir, pn)
163 try:
164 bb.process.run('devtool modify -x %s %s' % (pn, srctree))
165 except bb.process.ExecutionError as exc:
166 if exc.exitcode == 4:
167 skipped = 'incompatible'
168 else:
169 with open('stress_%s_modify.log' % pn, 'w') as f:
170 f.write(str(exc))
171 failed = 'modify'
172 reset = False
173
174 if not skipped:
175 if not failed:
176 try:
177 bb.process.run('bitbake -c install %s' % pn)
178 except bb.process.CmdError as exc:
179 with open('stress_%s_install.log' % pn, 'w') as f:
180 f.write(str(exc))
181 failed = 'build'
182 if reset:
183 try:
184 bb.process.run('devtool reset %s' % pn)
185 except bb.process.CmdError as exc:
186 print('devtool reset failed: %s' % str(exc))
187 break
188
189 if os.path.exists(srctree):
190 shutil.rmtree(srctree)
191
192 if failed:
193 print('failed (%s)' % failed)
194 failures += 1
195 elif skipped:
196 print('skipped (%s)' % skipped)
197 else:
198 print('ok')
199 except KeyboardInterrupt:
200 # We want any child processes killed. This is crude, but effective.
201 os.killpg(0, signal.SIGTERM)
202
203 if failures:
204 return 1
205 else:
206 return 0
207
208
209def main():
210 parser = argparse_oe.ArgumentParser(description="devtool stress tester",
211 epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
212 parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
213 parser.add_argument('-r', '--resume-from', help='Resume from specified recipe', metavar='PN')
214 parser.add_argument('-o', '--only', help='Only test specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST')
215 parser.add_argument('-s', '--skip', help='Skip specified recipes (comma-separated without spaces, wildcards allowed)', metavar='PNLIST', default='gcc-source-*,kernel-devsrc,package-index,perf,meta-world-pkgdata,glibc-locale,glibc-mtrace,glibc-scripts,os-release')
216 parser.add_argument('-c', '--skip-classes', help='Skip recipes inheriting specified classes (comma-separated) - default %(default)s', metavar='CLASSLIST', default='native,nativesdk,cross,cross-canadian,image,populate_sdk,meta,packagegroup')
217 subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
218 subparsers.required = True
219
220 parser_modify = subparsers.add_parser('modify',
221 help='Run "devtool modify" followed by a build with bitbake on matching recipes',
222 description='Runs "devtool modify" followed by a build with bitbake on matching recipes')
223 parser_modify.set_defaults(func=stress_modify)
224
225 parser_extract = subparsers.add_parser('extract',
226 help='Run "devtool extract" on matching recipes',
227 description='Runs "devtool extract" on matching recipes')
228 parser_extract.set_defaults(func=stress_extract)
229
230 args = parser.parse_args()
231
232 if args.debug:
233 logger.setLevel(logging.DEBUG)
234
235 import scriptpath
236 bitbakepath = scriptpath.add_bitbake_lib_path()
237 if not bitbakepath:
238 logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
239 return 1
240 logger.debug('Found bitbake path: %s' % bitbakepath)
241
242 return args.func(args)
243
244if __name__ == "__main__":
245 sys.exit(main())
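
The select_recipes() function above relies twice on Python's for/else idiom combined with fnmatch wildcards: the else branch runs only when the inner loop completes without hitting break. A minimal standalone sketch of that filtering pattern (the recipe names are invented for the example, not taken from a real build):

    import fnmatch

    # Invented sample data; the real names come from tinfoil's recipe cache.
    recipes = ['busybox', 'gcc-source-13.2', 'glibc-locale', 'zlib']
    skip_patterns = ['gcc-source-*', 'glibc-*']

    selected = []
    for pn in recipes:
        for pattern in skip_patterns:
            if fnmatch.fnmatch(pn, pattern):
                break              # matched a skip pattern
        else:
            selected.append(pn)    # no pattern matched

    print(selected)                # prints ['busybox', 'zlib']
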
diff --git a/scripts/contrib/dialog-power-control b/scripts/contrib/dialog-power-control
deleted file mode 100755
index 82c84baa1d..0000000000
--- a/scripts/contrib/dialog-power-control
+++ /dev/null
@@ -1,57 +0,0 @@
1#!/bin/sh
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7# Simple script to show a manual power prompt for when you want to use
8# automated hardware testing with testimage.bbclass but you don't have a
9# web-enabled power strip or similar to do the power on/off/cycle.
10#
11# You can enable it by enabling testimage (see the Yocto Project
12# Development manual "Performing Automated Runtime Testing" section)
13# and setting the following in your local.conf:
14#
15# TEST_POWERCONTROL_CMD = "${COREBASE}/scripts/contrib/dialog-power-control"
16#
17
18PROMPT=""
19while true; do
20 case $1 in
21 on)
22 PROMPT="Please turn device power on";;
23 off)
24 PROMPT="Please turn device power off";;
25 cycle)
26 PROMPT="Please click Done, then turn the device power off then on";;
27 "")
28 break;;
29 esac
30 shift
31done
32
33if [ "$PROMPT" = "" ] ; then
34 echo "ERROR: no power action specified on command line"
35 exit 2
36fi
37
38if [ "`which kdialog 2>/dev/null`" != "" ] ; then
39 DIALOGUTIL="kdialog"
40elif [ "`which zenity 2>/dev/null`" != "" ] ; then
41 DIALOGUTIL="zenity"
42else
43 echo "ERROR: couldn't find program to display a message, install kdialog or zenity"
44 exit 3
45fi
46
47if [ "$DIALOGUTIL" = "kdialog" ] ; then
48 kdialog --yesno "$PROMPT" --title "TestImage Power Control" --yes-label "Done" --no-label "Cancel test"
49elif [ "$DIALOGUTIL" = "zenity" ] ; then
50 zenity --question --text="$PROMPT" --title="TestImage Power Control" --ok-label="Done" --cancel-label="Cancel test"
51fi
52
53if [ "$?" != "0" ] ; then
54 echo "User cancelled test at power prompt"
55 exit 1
56fi
57
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh
deleted file mode 100755
index 7197a2fcea..0000000000
--- a/scripts/contrib/documentation-audit.sh
+++ /dev/null
@@ -1,97 +0,0 @@
1#!/bin/bash
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7# Perform an audit of which packages provide documentation and which
8# are missing -doc packages.
9#
10# Setup requirements: be sure to be building for MACHINE=qemux86. Run
11# this script after source'ing the build environment script, so you're
12# running it from build/ directory.
13#
14
15REPORT_DOC_SIMPLE="documentation_exists.txt"
16REPORT_DOC_DETAIL="documentation_exists_detail.txt"
17REPORT_MISSING_SIMPLE="documentation_missing.txt"
18REPORT_MISSING_DETAIL="documentation_missing_detail.txt"
19REPORT_BUILD_ERRORS="build_errors.txt"
20
21rm -f $REPORT_DOC_SIMPLE $REPORT_DOC_DETAIL $REPORT_MISSING_SIMPLE $REPORT_MISSING_DETAIL $REPORT_BUILD_ERRORS
22
23BITBAKE=`which bitbake`
24if [ -z "$BITBAKE" ]; then
25 echo "Error: bitbake command not found."
26 echo "Did you forget to source the build environment script?"
27 exit 1
28fi
29
30echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
31echo "REMINDER: you need to set LICENSE_FLAGS_ACCEPTED appropriately in local.conf or "
32echo " you'll get false positives. For example, LICENSE_FLAGS_ACCEPTED = \"commercial\""
33
34for pkg in `bitbake -s | awk '{ print \$1 }'`; do
35 if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
36 "$pkg" == "Recipe" ||
37 "$pkg" == "Parsing" || "$pkg" == "Package" ||
38 "$pkg" == "NOTE:" || "$pkg" == "WARNING:" ||
39 "$pkg" == "done." || "$pkg" == "===========" ]]
40 then
41 # Skip initial bitbake output
42 continue
43 fi
44 if [[ "$pkg" =~ -native$ || "$pkg" =~ -nativesdk$ ||
45 "$pkg" =~ -cross-canadian ]]; then
46 # Skip native/nativesdk/cross-canadian recipes
47 continue
48 fi
49 if [[ "$pkg" =~ ^meta- || "$pkg" =~ ^packagegroup- || "$pkg" =~ -image ]]; then
50 # Skip meta, task and image recipes
51 continue
52 fi
53 if [[ "$pkg" =~ ^glibc- || "$pkg" =~ ^libiconv$ ||
54 "$pkg" =~ -toolchain$ || "$pkg" =~ ^package-index$ ||
55 "$pkg" =~ ^linux- || "$pkg" =~ ^adt-installer$ ||
56 "$pkg" =~ ^eds-tools$ || "$pkg" =~ ^external-python-tarball$ ||
57 "$pkg" =~ ^qt4-embedded$ || "$pkg" =~ ^qt-mobility ]]; then
58 # Skip glibc, libiconv, -toolchain, and other recipes known
59 # to cause build conflicts or trigger false positives.
60 continue
61 fi
62
63 echo "Building package $pkg..."
64 bitbake $pkg > /dev/null
65 if [ $? -ne 0 ]; then
66 echo "There was an error building package $pkg" >> "$REPORT_MISSING_DETAIL"
67 echo "$pkg" >> $REPORT_BUILD_ERRORS
68
69 # Do not skip the remaining tests, as sometimes the
70 # exit status is 1 due to QA errors, and we can still
71 # perform the -doc checks.
72 fi
73
74 echo "$pkg built successfully, checking for a documentation package..."
75 WORKDIR=`bitbake -e $pkg | grep ^WORKDIR | awk -F '=' '{ print \$2 }' | awk -F '"' '{ print \$2 }'`
76 FIND_DOC_PKG=`find $WORKDIR/packages-split/*-doc -maxdepth 0 -type d`
77 if [ -z "$FIND_DOC_PKG" ]; then
78 # No -doc package was generated:
79 echo "No -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
80 echo "$pkg" >> $REPORT_MISSING_SIMPLE
81 continue
82 fi
83
84 FIND_DOC_FILES=`find $FIND_DOC_PKG -type f`
85 if [ -z "$FIND_DOC_FILES" ]; then
86 # No files shipped with the -doc package:
87 echo "No files shipped with the -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
88 echo "$pkg" >> $REPORT_MISSING_SIMPLE
89 continue
90 fi
91
92 echo "Documentation shipped with $pkg:" >> "$REPORT_DOC_DETAIL"
93 echo "$FIND_DOC_FILES" >> "$REPORT_DOC_DETAIL"
94 echo "" >> "$REPORT_DOC_DETAIL"
95
96 echo "$pkg" >> "$REPORT_DOC_SIMPLE"
97done
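
The find-based -doc check in the loop above maps to a few lines of Python; this sketch assumes an invented WORKDIR path purely for illustration (the script extracts the real one via bitbake -e):

    import glob
    import os

    # Hypothetical WORKDIR for a qemux86 build
    workdir = 'tmp/work/core2-32-poky-linux/curl/8.7.1'

    doc_pkgs = glob.glob(os.path.join(workdir, 'packages-split', '*-doc'))
    if not doc_pkgs:
        print('No -doc package')
    for pkgdir in doc_pkgs:
        files = [os.path.join(root, name)
                 for root, _, names in os.walk(pkgdir) for name in names]
        print('%s ships %d file(s)' % (os.path.basename(pkgdir), len(files)))
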
diff --git a/scripts/contrib/graph-tool b/scripts/contrib/graph-tool
deleted file mode 100755
index 26488930e0..0000000000
--- a/scripts/contrib/graph-tool
+++ /dev/null
@@ -1,118 +0,0 @@
1#!/usr/bin/env python3
2
3# Simple graph query utility
4# useful for getting answers from .dot files produced by bitbake -g
5#
6# Written by: Paul Eggleton <paul.eggleton@linux.intel.com>
7#
8# Copyright 2013 Intel Corporation
9#
10# SPDX-License-Identifier: GPL-2.0-only
11#
12
13import sys
14import os
15import argparse
16
17scripts_lib_path = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'lib'))
18sys.path.insert(0, scripts_lib_path)
19import argparse_oe
20
21
22def get_path_networkx(dotfile, fromnode, tonode):
23 try:
24 import networkx
25 except ImportError:
26 print('ERROR: Please install the networkx python module')
27 sys.exit(1)
28
29 graph = networkx.DiGraph(networkx.nx_pydot.read_dot(dotfile))
30 def node_missing(node):
31 import difflib
32 close_matches = difflib.get_close_matches(node, graph.nodes(), cutoff=0.7)
33 if close_matches:
34 print('ERROR: no node "%s" in graph. Close matches:\n %s' % (node, '\n '.join(close_matches)))
35 sys.exit(1)
36
37 if fromnode not in graph:
38 node_missing(fromnode)
39 if tonode not in graph:
40 node_missing(tonode)
41 return networkx.all_simple_paths(graph, source=fromnode, target=tonode)
42
43
44def find_paths(args):
45 path = None
46 for path in get_path_networkx(args.dotfile, args.fromnode, args.tonode):
47 print(" -> ".join(map(str, path)))
48 if not path:
49 print("ERROR: no path from %s to %s in graph" % (args.fromnode, args.tonode))
50 return 1
51
52
53def filter_graph(args):
54 import fnmatch
55
56 exclude_tasks = []
57 if args.exclude_tasks:
58 for task in args.exclude_tasks.split(','):
59 if not task.startswith('do_'):
60 task = 'do_%s' % task
61 exclude_tasks.append(task)
62
63 def checkref(strval):
64 strval = strval.strip().strip('"')
65 target, taskname = strval.rsplit('.', 1)
66 if exclude_tasks:
67 for extask in exclude_tasks:
68 if fnmatch.fnmatch(taskname, extask):
69 return False
70 if strval in args.ref or target in args.ref:
71 return True
72 return False
73
74 with open(args.infile, 'r') as f:
75 for line in f:
76 line = line.rstrip()
77 if line.startswith(('digraph', '}')):
78 print(line)
79 elif '->' in line:
80 linesplit = line.split('->')
81 if checkref(linesplit[0]) and checkref(linesplit[1]):
82 print(line)
83 elif (not args.no_nodes) and checkref(line.split()[0]):
84 print(line)
85
86
87def main():
88 parser = argparse_oe.ArgumentParser(description='Small utility for working with .dot graph files')
89
90 subparsers = parser.add_subparsers(title='subcommands', metavar='<subcommand>')
91 subparsers.required = True
92
93 parser_find_paths = subparsers.add_parser('find-paths',
94 help='Find all of the paths between two nodes in a dot graph',
95 description='Finds all of the paths between two nodes in a dot graph')
96 parser_find_paths.add_argument('dotfile', help='.dot graph to search in')
97 parser_find_paths.add_argument('fromnode', help='starting node name')
98 parser_find_paths.add_argument('tonode', help='ending node name')
99 parser_find_paths.set_defaults(func=find_paths)
100
101 parser_filter = subparsers.add_parser('filter',
102 help='Pare down a task graph to contain only the specified references',
103 description='Pares down a task-depends.dot graph produced by bitbake -g to contain only the specified references')
104 parser_filter.add_argument('infile', help='Input file')
105 parser_filter.add_argument('ref', nargs='+', help='Reference to include (either recipe/target name or full target.taskname specification)')
106 parser_filter.add_argument('-n', '--no-nodes', action='store_true', help='Skip node formatting lines')
107 parser_filter.add_argument('-x', '--exclude-tasks', help='Comma-separated list of tasks to exclude (do_ prefix optional, wildcards allowed)')
108 parser_filter.set_defaults(func=filter_graph)
109
110 args = parser.parse_args()
111
112 ret = args.func(args)
113 return ret
114
115
116if __name__ == "__main__":
117 ret = main()
118 sys.exit(ret)
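
get_path_networkx() above delegates the graph work to the networkx library. A self-contained sketch of the same all_simple_paths() call, building a tiny graph in memory instead of reading a task-depends.dot produced by bitbake -g (the edge names are invented):

    import networkx

    graph = networkx.DiGraph()
    graph.add_edges_from([
        ('core-image-minimal.do_rootfs', 'busybox.do_package_write_rpm'),
        ('busybox.do_package_write_rpm', 'busybox.do_package'),
        ('busybox.do_package', 'busybox.do_install'),
    ])

    for path in networkx.all_simple_paths(graph,
                                          source='core-image-minimal.do_rootfs',
                                          target='busybox.do_install'):
        print(' -> '.join(path))
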
diff --git a/scripts/contrib/image-manifest b/scripts/contrib/image-manifest
deleted file mode 100755
index 4d65a99258..0000000000
--- a/scripts/contrib/image-manifest
+++ /dev/null
@@ -1,523 +0,0 @@
1#!/usr/bin/env python3
2
3# Script to extract information from image manifests
4#
5# Copyright (C) 2018 Intel Corporation
6# Copyright (C) 2021 Wind River Systems, Inc.
7#
8# SPDX-License-Identifier: GPL-2.0-only
9#
10
11import sys
12import os
13import argparse
14import logging
15import json
16import shutil
17import tempfile
18import tarfile
19from collections import OrderedDict
20
21scripts_path = os.path.dirname(__file__)
22lib_path = scripts_path + '/../lib'
23sys.path = sys.path + [lib_path]
24
25import scriptutils
26logger = scriptutils.logger_create(os.path.basename(__file__))
27
28import argparse_oe
29import scriptpath
30bitbakepath = scriptpath.add_bitbake_lib_path()
31if not bitbakepath:
32 logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
33 sys.exit(1)
34logger.debug('Using standard bitbake path %s' % bitbakepath)
35scriptpath.add_oe_lib_path()
36
37import bb.tinfoil
38import bb.utils
39import oe.utils
40import oe.recipeutils
41
42def get_pkg_list(manifest):
43 pkglist = []
44 with open(manifest, 'r') as f:
45 for line in f:
46 linesplit = line.split()
47 if len(linesplit) == 3:
48 # manifest file
49 pkglist.append(linesplit[0])
50 elif len(linesplit) == 1:
51 # build dependency file
52 pkglist.append(linesplit[0])
53 return sorted(pkglist)
54
55def list_packages(args):
56 pkglist = get_pkg_list(args.manifest)
57 for pkg in pkglist:
58 print('%s' % pkg)
59
60def pkg2recipe(tinfoil, pkg):
61 if "-native" in pkg:
62 logger.info('skipping %s' % pkg)
63 return None
64
65 pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
66 pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
67 logger.debug('pkgdatafile %s' % pkgdatafile)
68 try:
69 with open(pkgdatafile, 'r') as f:
70 for line in f:
71 if line.startswith('PN:'):
72 recipe = line.split(':', 1)[1].strip()
73 return recipe
74 except OSError:
75 logger.warning('%s is missing' % pkgdatafile)
76 return None
77
78def get_recipe_list(manifest, tinfoil):
79 pkglist = get_pkg_list(manifest)
80 recipelist = []
81 for pkg in pkglist:
82 recipe = pkg2recipe(tinfoil, pkg)
83 if recipe:
84 if recipe not in recipelist:
85 recipelist.append(recipe)
86
87 return sorted(recipelist)
88
89def list_recipes(args):
90 import bb.tinfoil
91 with bb.tinfoil.Tinfoil() as tinfoil:
92 tinfoil.logger.setLevel(logger.getEffectiveLevel())
93 tinfoil.prepare(config_only=True)
94 recipelist = get_recipe_list(args.manifest, tinfoil)
95 for recipe in sorted(recipelist):
96 print('%s' % recipe)
97
98def list_layers(args):
99
100 def find_git_repo(pth):
101 checkpth = pth
102 while checkpth != os.sep:
103 if os.path.exists(os.path.join(checkpth, '.git')):
104 return checkpth
105 checkpth = os.path.dirname(checkpth)
106 return None
107
108 def get_git_remote_branch(repodir):
109 try:
110 stdout, _ = bb.process.run(['git', 'rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{u}'], cwd=repodir)
111 except bb.process.ExecutionError as e:
112 stdout = None
113 if stdout:
114 return stdout.strip()
115 else:
116 return None
117
118 def get_git_head_commit(repodir):
119 try:
120 stdout, _ = bb.process.run(['git', 'rev-parse', 'HEAD'], cwd=repodir)
121 except bb.process.ExecutionError as e:
122 stdout = None
123 if stdout:
124 return stdout.strip()
125 else:
126 return None
127
128 def get_git_repo_url(repodir, remote='origin'):
129 import bb.process
130 # Try to get upstream repo location from origin remote
131 try:
132 stdout, _ = bb.process.run(['git', 'remote', '-v'], cwd=repodir)
133 except bb.process.ExecutionError as e:
134 stdout = None
135 if stdout:
136 for line in stdout.splitlines():
137 splitline = line.split()
138 if len(splitline) > 1:
139 if splitline[0] == remote and scriptutils.is_src_url(splitline[1]):
140 return splitline[1]
141 return None
142
143 with bb.tinfoil.Tinfoil() as tinfoil:
144 tinfoil.logger.setLevel(logger.getEffectiveLevel())
145 tinfoil.prepare(config_only=False)
146 layers = OrderedDict()
147 for layerdir in tinfoil.config_data.getVar('BBLAYERS').split():
148 layerdata = OrderedDict()
149 layername = os.path.basename(layerdir)
150 logger.debug('layername %s, layerdir %s' % (layername, layerdir))
151 if layername in layers:
152 logger.warning('layername %s is not unique in configuration' % layername)
153 layername = os.path.basename(os.path.dirname(layerdir)) + '_' + os.path.basename(layerdir)
154 logger.debug('trying layername %s' % layername)
155 if layername in layers:
156 logger.error('Layer name %s is not unique in configuration' % layername)
157 sys.exit(2)
158 repodir = find_git_repo(layerdir)
159 if repodir:
160 remotebranch = get_git_remote_branch(repodir)
161 remote = 'origin'
162 if remotebranch and '/' in remotebranch:
163 rbsplit = remotebranch.split('/', 1)
164 layerdata['actual_branch'] = rbsplit[1]
165 remote = rbsplit[0]
166 layerdata['vcs_url'] = get_git_repo_url(repodir, remote)
167 if os.path.abspath(repodir) != os.path.abspath(layerdir):
168 layerdata['vcs_subdir'] = os.path.relpath(layerdir, repodir)
169 commit = get_git_head_commit(repodir)
170 if commit:
171 layerdata['vcs_commit'] = commit
172 layers[layername] = layerdata
173
174 json.dump(layers, args.output, indent=2)
175
176def get_recipe(args):
177 with bb.tinfoil.Tinfoil() as tinfoil:
178 tinfoil.logger.setLevel(logger.getEffectiveLevel())
179 tinfoil.prepare(config_only=True)
180
181 recipe = pkg2recipe(tinfoil, args.package)
182 print(' %s package provided by %s' % (args.package, recipe))
183
184def pkg_dependencies(args):
185 def get_recipe_info(tinfoil, recipe):
186 try:
187 info = tinfoil.get_recipe_info(recipe)
188 except Exception:
189 logger.error('Failed to get recipe info for: %s' % recipe)
190 sys.exit(1)
191 if not info:
192 logger.warning('No recipe info found for: %s' % recipe)
193 sys.exit(1)
194 append_files = tinfoil.get_file_appends(info.fn)
195 appends = True
196 data = tinfoil.parse_recipe_file(info.fn, appends, append_files)
197 data.pn = info.pn
198 data.pv = info.pv
199 return data
200
201 def find_dependencies(tinfoil, assume_provided, recipe_info, packages, rn, order):
202 spaces = ' ' * order
203 data = recipe_info[rn]
204 if args.native:
205 logger.debug('%s- %s' % (spaces, data.pn))
206 elif "-native" not in data.pn:
207 if "cross" not in data.pn:
208 logger.debug('%s- %s' % (spaces, data.pn))
209
210 depends = []
211 for dep in data.depends:
212 if dep not in assume_provided:
213 depends.append(dep)
214
215 # First find all dependencies not in package list.
216 for dep in depends:
217 if dep not in packages:
218 packages.append(dep)
219 dep_data = get_recipe_info(tinfoil, dep)
220 # Do this once now to reduce the number of bitbake calls.
221 dep_data.depends = dep_data.getVar('DEPENDS').split()
222 recipe_info[dep] = dep_data
223
224 # Then recursively analyze all of the dependencies for the current recipe.
225 for dep in depends:
226 find_dependencies(tinfoil, assume_provided, recipe_info, packages, dep, order + 1)
227
228 with bb.tinfoil.Tinfoil() as tinfoil:
229 tinfoil.logger.setLevel(logger.getEffectiveLevel())
230 tinfoil.prepare()
231
232 assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
233 logger.debug('assumed provided:')
234 for ap in sorted(assume_provided):
235 logger.debug(' - %s' % ap)
236
237 recipe = pkg2recipe(tinfoil, args.package)
238 data = get_recipe_info(tinfoil, recipe)
239 data.depends = []
240 depends = data.getVar('DEPENDS').split()
241 for dep in depends:
242 if dep not in assume_provided:
243 data.depends.append(dep)
244
245 recipe_info = dict([(recipe, data)])
246 packages = []
247 find_dependencies(tinfoil, assume_provided, recipe_info, packages, recipe, order=1)
248
249 print('\nThe following packages are required to build %s' % recipe)
250 for p in sorted(packages):
251 data = recipe_info[p]
252 if "-native" not in data.pn:
253 if "cross" not in data.pn:
254 print(" %s (%s)" % (data.pn,p))
255
256 if args.native:
257 print('\nThe following native packages are required to build %s' % recipe)
258 for p in sorted(packages):
259 data = recipe_info[p]
260 if "-native" in data.pn:
261 print(" %s(%s)" % (data.pn,p))
262 if "cross" in data.pn:
263 print(" %s(%s)" % (data.pn,p))
264
265def default_config():
266 vlist = OrderedDict()
267 vlist['PV'] = 'yes'
268 vlist['SUMMARY'] = 'no'
269 vlist['DESCRIPTION'] = 'no'
270 vlist['SECTION'] = 'no'
271 vlist['LICENSE'] = 'yes'
272 vlist['HOMEPAGE'] = 'no'
273 vlist['BUGTRACKER'] = 'no'
274 vlist['PROVIDES'] = 'no'
275 vlist['BBCLASSEXTEND'] = 'no'
276 vlist['DEPENDS'] = 'no'
277 vlist['PACKAGECONFIG'] = 'no'
278 vlist['SRC_URI'] = 'yes'
279 vlist['SRCREV'] = 'yes'
280 vlist['EXTRA_OECONF'] = 'no'
281 vlist['EXTRA_OESCONS'] = 'no'
282 vlist['EXTRA_OECMAKE'] = 'no'
283 vlist['EXTRA_OEMESON'] = 'no'
284
285 clist = OrderedDict()
286 clist['variables'] = vlist
287 clist['filepath'] = 'no'
288 clist['sha256sum'] = 'no'
289 clist['layerdir'] = 'no'
290 clist['layer'] = 'no'
291 clist['inherits'] = 'no'
292 clist['source_urls'] = 'no'
293 clist['packageconfig_opts'] = 'no'
294 clist['patches'] = 'no'
295 clist['packagedir'] = 'no'
296 return clist
297
298def dump_config(args):
299 config = default_config()
300 with open('default_config.json', 'w') as f:
301 json.dump(config, f, indent=2)
302 logger.info('Default config list dumped to default_config.json')
303
304def export_manifest_info(args):
305
306 def handle_value(value):
307 if value:
308 return oe.utils.squashspaces(value)
309 else:
310 return value
311
312 if args.config:
313 logger.debug('config: %s' % args.config)
314 with open(args.config, 'r') as f:
315 config = json.load(f, object_pairs_hook=OrderedDict)
316 else:
317 config = default_config()
318 if logger.isEnabledFor(logging.DEBUG):
319 print('Configuration:')
320 json.dump(config, sys.stdout, indent=2)
321 print('')
322
323 tmpoutdir = tempfile.mkdtemp(prefix=os.path.basename(__file__)+'-')
324 logger.debug('tmp dir: %s' % tmpoutdir)
325
326 # export manifest
327 shutil.copy2(args.manifest, os.path.join(tmpoutdir, "manifest"))
328
329 with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
330 tinfoil.logger.setLevel(logger.getEffectiveLevel())
331 tinfoil.prepare(config_only=False)
332
333 pkglist = get_pkg_list(args.manifest)
334 # export pkg list
335 f = open(os.path.join(tmpoutdir, "pkgs"), 'w')
336 for pkg in pkglist:
337 f.write('%s\n' % pkg)
338 f.close()
339
340 recipelist = []
341 for pkg in pkglist:
342 recipe = pkg2recipe(tinfoil, pkg)
343 if recipe:
344 if recipe not in recipelist:
345 recipelist.append(recipe)
346 recipelist.sort()
347 # export recipe list
348 f = open(os.path.join(tmpoutdir, "recipes"), 'w')
349 for recipe in recipelist:
350 f.write('%s\n' % recipe)
351 f.close()
352
353 try:
354 rvalues = OrderedDict()
355 for pn in sorted(recipelist):
356 logger.debug('Package: %s' % pn)
357 rd = tinfoil.parse_recipe(pn)
358
359 rvalues[pn] = OrderedDict()
360
361 for varname in config['variables']:
362 if config['variables'][varname] == 'yes':
363 rvalues[pn][varname] = handle_value(rd.getVar(varname))
364
365 fpth = rd.getVar('FILE')
366 layerdir = oe.recipeutils.find_layerdir(fpth)
367 if config['filepath'] == 'yes':
368 rvalues[pn]['filepath'] = os.path.relpath(fpth, layerdir)
369 if config['sha256sum'] == 'yes':
370 rvalues[pn]['sha256sum'] = bb.utils.sha256_file(fpth)
371
372 if config['layerdir'] == 'yes':
373 rvalues[pn]['layerdir'] = layerdir
374
375 if config['layer'] == 'yes':
376 rvalues[pn]['layer'] = os.path.basename(layerdir)
377
378 if config['inherits'] == 'yes':
379 gr = set(tinfoil.config_data.getVar("__inherit_cache") or [])
380 lr = set(rd.getVar("__inherit_cache") or [])
381 rvalues[pn]['inherits'] = sorted({os.path.splitext(os.path.basename(r))[0] for r in lr if r not in gr})
382
383 if config['source_urls'] == 'yes':
384 rvalues[pn]['source_urls'] = []
385 for url in (rd.getVar('SRC_URI') or '').split():
386 if not url.startswith('file://'):
387 url = url.split(';')[0]
388 rvalues[pn]['source_urls'].append(url)
389
390 if config['packageconfig_opts'] == 'yes':
391 rvalues[pn]['packageconfig_opts'] = OrderedDict()
392 for key in rd.getVarFlags('PACKAGECONFIG').keys():
393 if key == 'doc':
394 continue
395 rvalues[pn]['packageconfig_opts'][key] = rd.getVarFlag('PACKAGECONFIG', key)
396
397 if config['patches'] == 'yes':
398 patches = oe.recipeutils.get_recipe_patches(rd)
399 rvalues[pn]['patches'] = []
400 if patches:
401 recipeoutdir = os.path.join(tmpoutdir, pn, 'patches')
402 bb.utils.mkdirhier(recipeoutdir)
403 for patch in patches:
404 # Patches may be in other layers too
405 patchlayerdir = oe.recipeutils.find_layerdir(patch)
406 # patchlayerdir will be None for remote patches, which we ignore
407 # (since currently they are considered as part of sources)
408 if patchlayerdir:
409 rvalues[pn]['patches'].append((os.path.basename(patchlayerdir), os.path.relpath(patch, patchlayerdir)))
410 shutil.copy(patch, recipeoutdir)
411
412 if config['packagedir'] == 'yes':
413 pn_dir = os.path.join(tmpoutdir, pn)
414 bb.utils.mkdirhier(pn_dir)
415 f = open(os.path.join(pn_dir, 'recipe.json'), 'w')
416 json.dump(rvalues[pn], f, indent=2)
417 f.close()
418
419 with open(os.path.join(tmpoutdir, 'recipes.json'), 'w') as f:
420 json.dump(rvalues, f, indent=2)
421
422 if args.output:
423 outname = os.path.basename(args.output)
424 else:
425 outname = os.path.splitext(os.path.basename(args.manifest))[0]
426 if outname.endswith('.tar.gz'):
427 outname = outname[:-7]
428 elif outname.endswith('.tgz'):
429 outname = outname[:-4]
430
431 tarfn = outname
432 if tarfn.endswith(os.sep):
433 tarfn = tarfn[:-1]
434 if not tarfn.endswith(('.tar.gz', '.tgz')):
435 tarfn += '.tar.gz'
436 with open(tarfn, 'wb') as f:
437 with tarfile.open(None, "w:gz", f) as tar:
438 tar.add(tmpoutdir, outname)
439 finally:
440 shutil.rmtree(tmpoutdir)
441
442
443def main():
444 parser = argparse_oe.ArgumentParser(description="Image manifest utility",
445 epilog="Use %(prog)s <subcommand> --help to get help on a specific command")
446 parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true')
447 parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true')
448 subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='<subcommand>')
449 subparsers.required = True
450
451 # get recipe info
452 parser_get_recipes = subparsers.add_parser('recipe-info',
453 help='Get recipe info',
454 description='Get recipe information for a package')
455 parser_get_recipes.add_argument('package', help='Package name')
456 parser_get_recipes.set_defaults(func=get_recipe)
457
458 # list runtime dependencies
459 parser_pkg_dep = subparsers.add_parser('list-depends',
460 help='List dependencies',
461 description='List dependencies required to build the package')
462 parser_pkg_dep.add_argument('--native', help='also print native and cross packages', action='store_true')
463 parser_pkg_dep.add_argument('package', help='Package name')
464 parser_pkg_dep.set_defaults(func=pkg_dependencies)
465
466 # list recipes
467 parser_recipes = subparsers.add_parser('list-recipes',
468 help='List recipes producing packages within an image',
469 description='Lists recipes producing the packages that went into an image, using the manifest and pkgdata')
470 parser_recipes.add_argument('manifest', help='Manifest file')
471 parser_recipes.set_defaults(func=list_recipes)
472
473 # list packages
474 parser_packages = subparsers.add_parser('list-packages',
475 help='List packages within an image',
476 description='Lists packages that went into an image, using the manifest')
477 parser_packages.add_argument('manifest', help='Manifest file')
478 parser_packages.set_defaults(func=list_packages)
479
480 # list layers
481 parser_layers = subparsers.add_parser('list-layers',
482 help='List included layers',
483 description='Lists included layers')
484 parser_layers.add_argument('-o', '--output', help='Output file - defaults to stdout if not specified',
485 default=sys.stdout, type=argparse.FileType('w'))
486 parser_layers.set_defaults(func=list_layers)
487
488 # dump default configuration file
489 parser_dconfig = subparsers.add_parser('dump-config',
490 help='Dump default config',
491 description='Dump default config to default_config.json')
492 parser_dconfig.set_defaults(func=dump_config)
493
494 # export recipe info for packages in manifest
495 parser_export = subparsers.add_parser('manifest-info',
496 help='Export recipe info for a manifest',
497 description='Export recipe information using the manifest')
498 parser_export.add_argument('-c', '--config', help='load config from json file')
499 parser_export.add_argument('-o', '--output', help='Output file (tarball) - defaults to manifest name if not specified')
500 parser_export.add_argument('manifest', help='Manifest file')
501 parser_export.set_defaults(func=export_manifest_info)
502
503 args = parser.parse_args()
504
505 if args.debug:
506 logger.setLevel(logging.DEBUG)
507 logger.debug("Debug Enabled")
508 elif args.quiet:
509 logger.setLevel(logging.ERROR)
510
511 ret = args.func(args)
512
513 return ret
514
515
516if __name__ == "__main__":
517 try:
518 ret = main()
519 except Exception:
520 ret = 1
521 import traceback
522 traceback.print_exc()
523 sys.exit(ret)
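
pkg2recipe() above boils down to reading the pkgdata runtime-reverse file for a package and extracting its PN: line. A rough standalone version; the pkgdata path here is an assumption for illustration, where the script itself obtains it from tinfoil's PKGDATA_DIR:

    import os

    def pkg_to_recipe(pkgdata_dir, pkg):
        pkgdatafile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
        try:
            with open(pkgdatafile) as f:
                for line in f:
                    if line.startswith('PN:'):
                        return line.split(':', 1)[1].strip()
        except OSError:
            return None

    # Hypothetical pkgdata location for a qemux86-64 build:
    print(pkg_to_recipe('tmp/pkgdata/qemux86_64', 'libc6'))
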
diff --git a/scripts/contrib/improve_kernel_cve_report.py b/scripts/contrib/improve_kernel_cve_report.py
deleted file mode 100755
index 3a15b1ed26..0000000000
--- a/scripts/contrib/improve_kernel_cve_report.py
+++ /dev/null
@@ -1,500 +0,0 @@
1#! /usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# The script uses another source of CVE information from linux-vulns
6# to enrich the cve-summary from cve-check or vex.
7# It can also use the list of compiled files from the kernel spdx to ignore CVEs
8# that are not affected since the files are not compiled.
9#
10# It creates a new json file with updated CVE information
11#
12# Compiled files can be extracted adding the following in local.conf
13# SPDX_INCLUDE_COMPILED_SOURCES:pn-linux-yocto = "1"
14#
15# Tested with the following CVE sources:
16# - https://git.kernel.org/pub/scm/linux/security/vulns.git
17# - https://github.com/CVEProject/cvelistV5
18#
19# Example:
20# python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --kernel-version 6.12.27 --datadir ./vulns
21# python3 ./openembedded-core/scripts/contrib/improve_kernel_cve_report.py --spdx tmp/deploy/spdx/3.0.1/qemux86_64/recipes/recipe-linux-yocto.spdx.json --datadir ./vulns --old-cve-report build/tmp/log/cve/cve-summary.json
22#
23# SPDX-License-Identifier: GPL-2.0-only
24
25import argparse
26import json
27import sys
28import logging
29import glob
30import os
31import pathlib
32from packaging.version import Version
33
34def is_linux_cve(cve_info):
35 '''Return True if the CVE belongs to Linux'''
36 if "affected" not in cve_info["containers"]["cna"]:
37 return False
38 for affected in cve_info["containers"]["cna"]["affected"]:
39 if "product" not in affected:
40 return False
41 if affected["product"] == "Linux" and affected["vendor"] == "Linux":
42 return True
43 return False
44
45def get_kernel_cves(datadir, compiled_files, version):
46 """
47 Get CVEs for the kernel
48 """
49 cves = {}
50
51 check_config = len(compiled_files) > 0
52
53 base_version = Version(f"{version.major}.{version.minor}")
54
55 # Check all CVEs from kernel vulns
56 pattern = os.path.join(datadir, '**', "CVE-*.json")
57 cve_files = glob.glob(pattern, recursive=True)
58 not_applicable_config = 0
59 fixed_as_later_backport = 0
60 vulnerable = 0
61 not_vulnerable = 0
62 for cve_file in sorted(cve_files):
63 cve_info = {}
64 with open(cve_file, "r", encoding='ISO-8859-1') as f:
65 cve_info = json.load(f)
66
67 if len(cve_info) == 0:
68 logging.error("Not valid data in %s. Aborting", cve_file)
69 break
70
71 if not is_linux_cve(cve_info):
72 continue
73 cve_id = os.path.basename(cve_file)[:-5]
74 description = cve_info["containers"]["cna"]["descriptions"][0]["value"]
75 if cve_file.find("rejected") >= 0:
76 logging.debug("%s is rejected by the CNA", cve_id)
77 cves[cve_id] = {
78 "id": cve_id,
79 "status": "Ignored",
80 "detail": "rejected",
81 "summary": description,
82 "description": f"Rejected by CNA"
83 }
84 continue
85 if any(elem in cve_file for elem in ["review", "reserved", "testing"]):
86 continue
87
88 is_vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected_versions = get_cpe_applicability(cve_info, version)
89
90 logging.debug("%s: %s (%s - %s) (%s - %s)", cve_id, is_vulnerable, better_match_first, better_match_last, first_affected, last_affected)
91
92 if is_vulnerable is None:
93 logging.warning("%s doesn't have good metadata", cve_id)
94 if is_vulnerable:
95 is_affected = True
96 affected_files = []
97 if check_config:
98 is_affected, affected_files = check_kernel_compiled_files(compiled_files, cve_info)
99
100 if not is_affected and len(affected_files) > 0:
101 logging.debug(
102 "%s - not applicable configuration since affected files not compiled: %s",
103 cve_id, affected_files)
104 cves[cve_id] = {
105 "id": cve_id,
106 "status": "Ignored",
107 "detail": "not-applicable-config",
108 "summary": description,
109 "description": f"Source code not compiled by config. {affected_files}"
110 }
111 not_applicable_config += 1
112 # Check if we have backport
113 else:
114 if not better_match_last:
115 fixed_in = last_affected
116 else:
117 fixed_in = better_match_last
118 logging.debug("%s needs backporting (fixed from %s)", cve_id, fixed_in)
119 cves[cve_id] = {
120 "id": cve_id,
121 "status": "Unpatched",
122 "detail": "version-in-range",
123 "summary": description,
124 "description": f"Needs backporting (fixed from {fixed_in})"
125 }
126 vulnerable += 1
127 if (better_match_last and
128 Version(f"{better_match_last.major}.{better_match_last.minor}") == base_version):
129 fixed_as_later_backport += 1
130 # Not vulnerable
131 else:
132 if not first_affected:
133 logging.debug("%s - not known affected %s",
134 cve_id,
135 better_match_last)
136 cves[cve_id] = {
137 "id": cve_id,
138 "status": "Patched",
139 "detail": "version-not-in-range",
140 "summary": description,
141 "description": "No CPE match"
142 }
143 not_vulnerable += 1
144 continue
145 backport_base = Version(f"{better_match_last.major}.{better_match_last.minor}")
146 if version < first_affected:
147 logging.debug('%s - fixed-version: only affects %s onwards',
148 cve_id,
149 first_affected)
150 cves[cve_id] = {
151 "id": cve_id,
152 "status": "Patched",
153 "detail": "fixed-version",
154 "summary": description,
155 "description": f"only affects {first_affected} onwards"
156 }
157 not_vulnerable += 1
158 elif last_affected <= version:
159 logging.debug("%s - fixed-version: Fixed from version %s",
160 cve_id,
161 last_affected)
162 cves[cve_id] = {
163 "id": cve_id,
164 "status": "Patched",
165 "detail": "fixed-version",
166 "summary": description,
167 "description": f"fixed-version: Fixed from version {last_affected}"
168 }
169 not_vulnerable += 1
170 elif backport_base == base_version:
171 logging.debug("%s - cpe-stable-backport: Backported in %s",
172 cve_id,
173 better_match_last)
174 cves[cve_id] = {
175 "id": cve_id,
176 "status": "Patched",
177 "detail": "cpe-stable-backport",
178 "summary": description,
179 "description": f"Backported in {better_match_last}"
180 }
181 not_vulnerable += 1
182 else:
183 logging.debug("%s - version not affected %s", cve_id, str(affected_versions))
184 cves[cve_id] = {
185 "id": cve_id,
186 "status": "Patched",
187 "detail": "version-not-in-range",
188 "summary": description,
189 "description": f"Range {affected_versions}"
190 }
191 not_vulnerable += 1
192
193 logging.info("Total CVEs ignored due to not applicable config: %d", not_applicable_config)
194 logging.info("Total CVEs not vulnerable due version-not-in-range: %d", not_vulnerable)
195 logging.info("Total vulnerable CVEs: %d", vulnerable)
196
197 logging.info("Total CVEs already backported in %s: %s", base_version,
198 fixed_as_later_backport)
199 return cves
200
201def read_spdx(spdx_file):
202 '''Open SPDX file and extract compiled files'''
203 with open(spdx_file, 'r', encoding='ISO-8859-1') as f:
204 spdx = json.load(f)
205 if "spdxVersion" in spdx:
206 if spdx["spdxVersion"] == "SPDX-2.2":
207 return read_spdx2(spdx)
208 if "@graph" in spdx:
209 return read_spdx3(spdx)
210 return []
211
212def read_spdx2(spdx):
213 '''
214 Read spdx2 compiled files from spdx
215 '''
216 cfiles = set()
217 if 'files' not in spdx:
218 return cfiles
219 for item in spdx['files']:
220 for ftype in item['fileTypes']:
221 if ftype == "SOURCE":
222 filename = item["fileName"][item["fileName"].find("/")+1:]
223 cfiles.add(filename)
224 return cfiles
225
226def read_spdx3(spdx):
227 '''
228 Read spdx3 compiled files from spdx
229 '''
230 cfiles = set()
231 for item in spdx["@graph"]:
232 if "software_primaryPurpose" not in item:
233 continue
234 if item["software_primaryPurpose"] == "source":
235 filename = item['name'][item['name'].find("/")+1:]
236 cfiles.add(filename)
237 return cfiles
238
239def read_debugsources(file_path):
240 '''
241 Read zstd file from pkgdata to extract sources
242 '''
243 import zstandard as zstd
244 import itertools
245 # Decompress the .zst file
246 cfiles = set()
247 with open(file_path, 'rb') as fh:
248 dctx = zstd.ZstdDecompressor()
249 with dctx.stream_reader(fh) as reader:
250 decompressed_bytes = reader.read()
251 json_data = json.loads(decompressed_bytes)
252 # We need to remove one level from the debug sources
253 for source_list in json_data.values():
254 for source in source_list:
255 src = source.split("/",1)[1]
256 cfiles.add(src)
257 return cfiles
258
259def check_kernel_compiled_files(compiled_files, cve_info):
260 """
261 Return if a CVE affected us depending on compiled files
262 """
263 files_affected = set()
264 is_affected = False
265
266 for item in cve_info['containers']['cna']['affected']:
267 if "programFiles" in item:
268 for f in item['programFiles']:
269 if f not in files_affected:
270 files_affected.add(f)
271
272 if len(files_affected) > 0:
273 for f in files_affected:
274 if f in compiled_files:
275 logging.debug("File match: %s", f)
276 is_affected = True
277 return is_affected, files_affected
278
279def get_cpe_applicability(cve_info, v):
280 '''
281 Check if version is affected and return affected versions
282 '''
283 base_branch = Version(f"{v.major}.{v.minor}")
284 affected = []
285 if 'cpeApplicability' not in cve_info["containers"]["cna"]:
286 return None, None, None, None, None, None
287
288 for nodes in cve_info["containers"]["cna"]["cpeApplicability"]:
289 for node in nodes.values():
290 vulnerable = False
291 matched_branch = False
292 first_affected = Version("5000")
293 last_affected = Version("0")
294 better_match_first = Version("0")
295 better_match_last = Version("5000")
296
297 if len(node[0]['cpeMatch']) == 0:
298 first_affected = None
299 last_affected = None
300 better_match_first = None
301 better_match_last = None
302
303 for cpe_match in node[0]['cpeMatch']:
304 version_start_including = Version("0")
305 version_end_excluding = Version("0")
306 if 'versionStartIncluding' in cpe_match:
307 version_start_including = Version(cpe_match['versionStartIncluding'])
308 else:
309 version_start_including = Version("0")
310 # if versionEndExcluding is missing we are in a branch, which is not fixed.
311 if "versionEndExcluding" in cpe_match:
312 version_end_excluding = Version(cpe_match["versionEndExcluding"])
313 else:
314 # not fixed in this branch: synthesize a high end bound
315 version_end_excluding = Version(
316 f"{version_start_including.major}.{version_start_including.minor}.5000"
317 )
318 affected.append(f" {version_start_including}-{version_end_excluding}")
319 # Detect if versionEnd is in fixed in base branch. It has precedence over the rest
320 branch_end = Version(f"{version_end_excluding.major}.{version_end_excluding.minor}")
321 if branch_end == base_branch:
322 if version_start_including <= v < version_end_excluding:
323 vulnerable = cpe_match['vulnerable']
324 # If we don't match in our branch, we are not vulnerable,
325 # since we have a backport
326 matched_branch = True
327 better_match_first = version_start_including
328 better_match_last = version_end_excluding
329 if version_start_including <= v < version_end_excluding and not matched_branch:
330 if version_end_excluding < better_match_last:
331 better_match_first = max(version_start_including, better_match_first)
332 better_match_last = min(better_match_last, version_end_excluding)
333 vulnerable = cpe_match['vulnerable']
334 matched_branch = True
335
336 first_affected = min(version_start_including, first_affected)
337 last_affected = max(version_end_excluding, last_affected)
338 # Not a better match, we use the first and last affected instead of the fake .5000
339 if vulnerable and better_match_last == Version(f"{base_branch}.5000"):
340 better_match_last = last_affected
341 better_match_first = first_affected
342 return vulnerable, first_affected, last_affected, better_match_first, better_match_last, affected
343
344def copy_data(old, new):
345 '''Update dictionary with new entries, while keeping the old ones'''
346 for k in new.keys():
347 old[k] = new[k]
348 return old
349
350# Function taken from cve_check.bbclass. Adapted to cve fields
351def cve_update(cve_data, cve, entry):
352 # If no entry, just add it
353 if cve not in cve_data:
354 cve_data[cve] = entry
355 return
356 # If we are updating, there might be change in the status
357 if cve_data[cve]['status'] == "Unknown":
358 cve_data[cve] = copy_data(cve_data[cve], entry)
359 return
360 if cve_data[cve]['status'] == entry['status']:
361 return
362 if entry['status'] == "Unpatched" and cve_data[cve]['status'] == "Patched":
363 # Backported-patch (e.g. vendor kernel repo with cherry-picked CVE patch)
364 # has priority over unpatch from CNA
365 if cve_data[cve]['detail'] == "backported-patch":
366 return
367 logging.warning("CVE entry %s update from Patched to Unpatched from the scan result", cve)
368 cve_data[cve] = copy_data(cve_data[cve], entry)
369 return
370 if entry['status'] == "Patched" and cve_data[cve]['status'] == "Unpatched":
371 logging.warning("CVE entry %s update from Unpatched to Patched from the scan result", cve)
372 cve_data[cve] = copy_data(cve_data[cve], entry)
373 return
374 # If we have an "Ignored", it has a priority
375 if cve_data[cve]['status'] == "Ignored":
376 logging.debug("CVE %s not updating because Ignored", cve)
377 return
378 # If we have an "Ignored", it has a priority
379 if entry['status'] == "Ignored":
380 cve_data[cve] = copy_data(cve_data[cve], entry)
381 logging.debug("CVE entry %s updated from Unpatched to Ignored", cve)
382 return
383 logging.warning("Unhandled CVE entry update for %s %s from %s %s to %s",
384 cve, cve_data[cve]['status'], cve_data[cve]['detail'], entry['status'], entry['detail'])
385
386def main():
387 parser = argparse.ArgumentParser(
388 description="Update cve-summary with kernel compiled files and kernel CVE information"
389 )
390 parser.add_argument(
391 "-s",
392 "--spdx",
393 help="SPDX2/3 for the kernel. Needs to include compiled sources",
394 )
395 parser.add_argument(
396 "--debug-sources-file",
397 help="Debug sources zstd file generated from Yocto",
398 )
399 parser.add_argument(
400 "--datadir",
401 type=pathlib.Path,
402 help="Directory where CVE data is",
403 required=True
404 )
405 parser.add_argument(
406 "--old-cve-report",
407 help="CVE report to update. (Optional)",
408 )
409 parser.add_argument(
410 "--kernel-version",
411 help="Kernel version. Needed if old cve_report is not provided (Optional)",
412 type=Version
413 )
414 parser.add_argument(
415 "--new-cve-report",
416 help="Output file",
417 default="cve-summary-enhance.json"
418 )
419 parser.add_argument(
420 "-D",
421 "--debug",
422 help='Enable debug output',
423 action="store_true")
424
425 args = parser.parse_args()
426
427 if args.debug:
428 log_level = logging.DEBUG
429 else:
430 log_level = logging.INFO
431 logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=log_level)
432
433 if not args.kernel_version and not args.old_cve_report:
434 parser.error("either --kernel-version or --old-cve-report are needed")
435 return -1
436
437 # by default we don't check the compiled files, unless provided
438 compiled_files = []
439 if args.spdx:
440 compiled_files = read_spdx(args.spdx)
441 logging.info("Total compiled files %d", len(compiled_files))
442 if args.debug_sources_file:
443 compiled_files = read_debugsources(args.debug_sources_file)
444 logging.info("Total compiled files %d", len(compiled_files))
445
446 if args.old_cve_report:
447 with open(args.old_cve_report, encoding='ISO-8859-1') as f:
448 cve_report = json.load(f)
449 else:
450 # If no summary is provided, we create one
451 cve_report = {
452 "version": "1",
453 "package": [
454 {
455 "name": "linux-yocto",
456 "version": str(args.kernel_version),
457 "products": [
458 {
459 "product": "linux_kernel",
460 "cvesInRecord": "Yes"
461 }
462 ],
463 "issue": []
464 }
465 ]
466 }
467
468 for pkg in cve_report['package']:
469 is_kernel = False
470 for product in pkg['products']:
471 if product['product'] == "linux_kernel":
472 is_kernel = True
473 if not is_kernel:
474 continue
475 # We remove custom versions after -
476 upstream_version = Version(pkg["version"].split("-")[0])
477 logging.info("Checking kernel %s", upstream_version)
478 kernel_cves = get_kernel_cves(args.datadir,
479 compiled_files,
480 upstream_version)
481 logging.info("Total kernel cves from kernel CNA: %s", len(kernel_cves))
482 cves = {issue["id"]: issue for issue in pkg["issue"]}
483 logging.info("Total kernel before processing cves: %s", len(cves))
484
485 for cve in kernel_cves:
486 cve_update(cves, cve, kernel_cves[cve])
487
488 pkg["issue"] = []
489 for cve in sorted(cves):
490 pkg["issue"].extend([cves[cve]])
491 logging.info("Total kernel cves after processing: %s", len(pkg['issue']))
492
493 with open(args.new_cve_report, "w", encoding='ISO-8859-1') as f:
494 json.dump(cve_report, f, indent=2)
495
496 return 0
497
498if __name__ == "__main__":
499 sys.exit(main())
500
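
The heart of get_cpe_applicability() above is an inclusive-start, exclusive-end range test on packaging.version.Version objects. A standalone sketch of that comparison with hand-written cpeMatch data (not taken from a real kernel CVE record):

    from packaging.version import Version

    kernel = Version('6.12.27')
    cpe_matches = [
        # (versionStartIncluding, versionEndExcluding, vulnerable)
        ('5.10', '6.12.30', True),
        ('6.13', '6.13.2', True),
    ]

    for start, end, vulnerable in cpe_matches:
        in_range = Version(start) <= kernel < Version(end)
        print('%s - %s: %s' % (start, end,
              'affected' if in_range and vulnerable else 'not affected'))
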
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py
deleted file mode 100755
index bb288e9099..0000000000
--- a/scripts/contrib/list-packageconfig-flags.py
+++ /dev/null
@@ -1,167 +0,0 @@
1#!/usr/bin/env python3
2
3# Copyright (C) 2013 Wind River Systems, Inc.
4# Copyright (C) 2014 Intel Corporation
5#
6# SPDX-License-Identifier: GPL-2.0-or-later
7#
8# - list available recipes which have PACKAGECONFIG flags
9# - list available PACKAGECONFIG flags and all affected recipes
10# - list all recipes and PACKAGECONFIG information
11
12import sys
13import optparse
14import os
15
16
17scripts_path = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])))
18lib_path = os.path.abspath(scripts_path + '/../lib')
19sys.path = sys.path + [lib_path]
20
21import scriptpath
22
23# For importing the following modules
24bitbakepath = scriptpath.add_bitbake_lib_path()
25if not bitbakepath:
26 sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
27 sys.exit(1)
28
29import bb.cooker
30import bb.providers
31import bb.tinfoil
32
33def get_fnlist(bbhandler, pkg_pn, preferred):
34 ''' Get all recipe file names '''
35 if preferred:
36 (latest_versions, preferred_versions, required_versions) = bb.providers.findProviders(bbhandler.config_data, bbhandler.cooker.recipecaches[''], pkg_pn)
37
38 fn_list = []
39 for pn in sorted(pkg_pn):
40 if preferred:
41 fn_list.append(preferred_versions[pn][1])
42 else:
43 fn_list.extend(pkg_pn[pn])
44
45 return fn_list
46
47def get_recipesdata(bbhandler, preferred):
48 ''' Get data of all available recipes which have PACKAGECONFIG flags '''
49 pkg_pn = bbhandler.cooker.recipecaches[''].pkg_pn
50
51 data_dict = {}
52 for fn in get_fnlist(bbhandler, pkg_pn, preferred):
53 data = bbhandler.parse_recipe_file(fn)
54 flags = data.getVarFlags("PACKAGECONFIG")
55 flags.pop('doc', None)
56 if flags:
57 data_dict[fn] = data
58
59 return data_dict
60
61def collect_pkgs(data_dict):
62 ''' Collect available pkgs which have PACKAGECONFIG flags '''
63 # pkg_dict = {'pkg1': ['flag1', 'flag2',...]}
64 pkg_dict = {}
65 for fn in data_dict:
66 pkgconfigflags = data_dict[fn].getVarFlags("PACKAGECONFIG")
67 pkgconfigflags.pop('doc', None)
68 pkgname = data_dict[fn].getVar("PN")
69 pkg_dict[pkgname] = sorted(pkgconfigflags.keys())
70
71 return pkg_dict
72
73def collect_flags(pkg_dict):
74 ''' Collect available PACKAGECONFIG flags and all affected pkgs '''
75 # flag_dict = {'flag': ['pkg1', 'pkg2',...]}
76 flag_dict = {}
77 for pkgname, flaglist in pkg_dict.items():
78 for flag in flaglist:
79 if flag in flag_dict:
80 flag_dict[flag].append(pkgname)
81 else:
82 flag_dict[flag] = [pkgname]
83
84 return flag_dict
85
86def display_pkgs(pkg_dict):
87 ''' Display available pkgs which have PACKAGECONFIG flags '''
88 pkgname_len = len("RECIPE NAME") + 1
89 for pkgname in pkg_dict:
90 if pkgname_len < len(pkgname):
91 pkgname_len = len(pkgname)
92 pkgname_len += 1
93
94 header = '%-*s%s' % (pkgname_len, str("RECIPE NAME"), str("PACKAGECONFIG FLAGS"))
95 print(header)
96 print(str("").ljust(len(header), '='))
97 for pkgname in sorted(pkg_dict):
98 print('%-*s%s' % (pkgname_len, pkgname, ' '.join(pkg_dict[pkgname])))
99
100
101def display_flags(flag_dict):
102 ''' Display available PACKAGECONFIG flags and all affected pkgs '''
103 flag_len = len("PACKAGECONFIG FLAG") + 5
104
105 header = '%-*s%s' % (flag_len, str("PACKAGECONFIG FLAG"), str("RECIPE NAMES"))
106 print(header)
107 print(str("").ljust(len(header), '='))
108
109 for flag in sorted(flag_dict):
110 print('%-*s%s' % (flag_len, flag, ' '.join(sorted(flag_dict[flag]))))
111
112def display_all(data_dict):
113 ''' Display all pkgs and PACKAGECONFIG information '''
114 print(str("").ljust(50, '='))
115 for fn in data_dict:
116 print('%s' % data_dict[fn].getVar("P"))
117 print(fn)
118 packageconfig = data_dict[fn].getVar("PACKAGECONFIG") or ''
119 if packageconfig.strip() == '':
120 packageconfig = 'None'
121 print('PACKAGECONFIG %s' % packageconfig)
122
123 for flag,flag_val in data_dict[fn].getVarFlags("PACKAGECONFIG").items():
124 if flag == "doc":
125 continue
126 print('PACKAGECONFIG[%s] %s' % (flag, flag_val))
127 print('')
128
129def main():
130 pkg_dict = {}
131 flag_dict = {}
132
133 # Collect and validate input
134 parser = optparse.OptionParser(
135 description = "Lists recipes and PACKAGECONFIG flags. Without -a or -f, recipes and their available PACKAGECONFIG flags are listed.",
136 usage = """
137 %prog [options]""")
138
139 parser.add_option("-f", "--flags",
140 help = "list available PACKAGECONFIG flags and affected recipes",
141 action="store_const", dest="listtype", const="flags", default="recipes")
142 parser.add_option("-a", "--all",
143 help = "list all recipes and PACKAGECONFIG information",
144 action="store_const", dest="listtype", const="all")
145 parser.add_option("-p", "--preferred-only",
146 help = "where multiple recipe versions are available, list only the preferred version",
147 action="store_true", dest="preferred", default=False)
148
149 options, args = parser.parse_args(sys.argv)
150
151 with bb.tinfoil.Tinfoil() as bbhandler:
152 bbhandler.prepare()
153 print("Gathering recipe data...")
154 data_dict = get_recipesdata(bbhandler, options.preferred)
155
156 if options.listtype == 'flags':
157 pkg_dict = collect_pkgs(data_dict)
158 flag_dict = collect_flags(pkg_dict)
159 display_flags(flag_dict)
160 elif options.listtype == 'recipes':
161 pkg_dict = collect_pkgs(data_dict)
162 display_pkgs(pkg_dict)
163 elif options.listtype == 'all':
164 display_all(data_dict)
165
166if __name__ == "__main__":
167 main()
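
collect_flags() above inverts the recipe-to-flags mapping into a flag-to-recipes one; dict.setdefault gives a slightly more compact variant of the same inversion. The sample data is invented:

    pkg_dict = {
        'curl': ['ssl', 'zlib'],
        'wget': ['ssl'],
    }

    flag_dict = {}
    for pkgname, flaglist in pkg_dict.items():
        for flag in flaglist:
            flag_dict.setdefault(flag, []).append(pkgname)

    print(flag_dict)  # {'ssl': ['curl', 'wget'], 'zlib': ['curl']}
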
diff --git a/scripts/contrib/make-spdx-bindings.sh b/scripts/contrib/make-spdx-bindings.sh
deleted file mode 100755
index 31caaf339d..0000000000
--- a/scripts/contrib/make-spdx-bindings.sh
+++ /dev/null
@@ -1,12 +0,0 @@
1#! /bin/sh
2#
3# SPDX-License-Identifier: MIT
4
5THIS_DIR="$(dirname "$0")"
6
7VERSION="3.0.1"
8
9shacl2code generate --input https://spdx.org/rdf/$VERSION/spdx-model.ttl \
10 --input https://spdx.org/rdf/$VERSION/spdx-json-serialize-annotations.ttl \
11 --context https://spdx.org/rdf/$VERSION/spdx-context.jsonld \
12 python -o "$THIS_DIR/../../meta/lib/oe/spdx30.py"
diff --git a/scripts/contrib/oe-build-perf-report-email.py b/scripts/contrib/oe-build-perf-report-email.py
deleted file mode 100755
index 7192113c28..0000000000
--- a/scripts/contrib/oe-build-perf-report-email.py
+++ /dev/null
@@ -1,121 +0,0 @@
1#!/usr/bin/python3
2#
3# Send build performance test report emails
4#
5# Copyright (c) 2017, Intel Corporation.
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9
10import argparse
11import base64
12import logging
13import os
14import pwd
15import re
16import shutil
17import smtplib
18import socket
19import subprocess
20import sys
21import tempfile
22from email.mime.text import MIMEText
23
24
25# Setup logging
26logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
27log = logging.getLogger('oe-build-perf-report')
28
29
30def parse_args(argv):
31 """Parse command line arguments"""
32 description = """Email build perf test report"""
33 parser = argparse.ArgumentParser(
34 formatter_class=argparse.ArgumentDefaultsHelpFormatter,
35 description=description)
36
37 parser.add_argument('--debug', '-d', action='store_true',
38 help="Verbose logging")
39 parser.add_argument('--quiet', '-q', action='store_true',
40 help="Only print errors")
41 parser.add_argument('--to', action='append',
42 help="Recipients of the email")
43 parser.add_argument('--cc', action='append',
44 help="Carbon copy recipients of the email")
45 parser.add_argument('--bcc', action='append',
46 help="Blind carbon copy recipients of the email")
47 parser.add_argument('--subject', default="Yocto build perf test report",
48 help="Email subject")
49 parser.add_argument('--outdir', '-o',
50 help="Store files in OUTDIR. Can be used to preserve "
51 "the email parts")
52 parser.add_argument('--text',
53 help="Plain text message")
54
55 args = parser.parse_args(argv)
56
57 if not args.text:
58 parser.error("Please specify --text")
59
60 return args
61
62
63def send_email(text_fn, subject, recipients, copy=[], blind_copy=[]):
64 # Generate email message
65 with open(text_fn) as f:
66 msg = MIMEText("Yocto build performance test report.\n" + f.read(), 'plain')
67
68 pw_data = pwd.getpwuid(os.getuid())
69 full_name = pw_data.pw_gecos.split(',')[0]
70 email = os.environ.get('EMAIL',
71 '{}@{}'.format(pw_data.pw_name, socket.getfqdn()))
72 msg['From'] = "{} <{}>".format(full_name, email)
73 msg['To'] = ', '.join(recipients)
74 if copy:
75 msg['Cc'] = ', '.join(copy)
76 if blind_copy:
77 msg['Bcc'] = ', '.join(blind_copy)
78 msg['Subject'] = subject
79
80 # Send email
81 with smtplib.SMTP('localhost') as smtp:
82 smtp.send_message(msg)
83
84
85def main(argv=None):
86 """Script entry point"""
87 args = parse_args(argv)
88 if args.quiet:
89 log.setLevel(logging.ERROR)
90 if args.debug:
91 log.setLevel(logging.DEBUG)
92
93 if args.outdir:
94 outdir = args.outdir
95 if not os.path.exists(outdir):
96 os.mkdir(outdir)
97 else:
98 outdir = tempfile.mkdtemp(dir='.')
99
100 try:
101 log.debug("Storing email parts in %s", outdir)
102 if args.to:
103 log.info("Sending email to %s", ', '.join(args.to))
104 if args.cc:
105 log.info("Copying to %s", ', '.join(args.cc))
106 if args.bcc:
107 log.info("Blind copying to %s", ', '.join(args.bcc))
108 send_email(args.text, args.subject, args.to, args.cc, args.bcc)
109 except subprocess.CalledProcessError as err:
110 log.error("%s, with output:\n%s", str(err), err.output.decode())
111 return 1
112 finally:
113 if not args.outdir:
114 log.debug("Wiping %s", outdir)
115 shutil.rmtree(outdir)
116
117 return 0
118
119
120if __name__ == "__main__":
121 sys.exit(main())
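
send_email() above is a thin wrapper around email.mime.text and smtplib. A minimal sketch with placeholder addresses; the actual send is commented out since it requires an SMTP server listening on localhost:

    import smtplib
    from email.mime.text import MIMEText

    msg = MIMEText('Yocto build performance test report.\n...body...', 'plain')
    msg['From'] = 'Builder <builder@example.com>'
    msg['To'] = 'dev@example.com'
    msg['Subject'] = 'Yocto build perf test report'

    # with smtplib.SMTP('localhost') as smtp:
    #     smtp.send_message(msg)
    print(msg.as_string())
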
diff --git a/scripts/contrib/oe-image-files-spdx/.gitignore b/scripts/contrib/oe-image-files-spdx/.gitignore
deleted file mode 100644
index 285851c984..0000000000
--- a/scripts/contrib/oe-image-files-spdx/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
1*.spdx.json
2*.pyc
3*.bak
4*.swp
5*.swo
6*.swn
7venv/*
8.venv/*
diff --git a/scripts/contrib/oe-image-files-spdx/README.md b/scripts/contrib/oe-image-files-spdx/README.md
deleted file mode 100644
index 44f76eacd8..0000000000
--- a/scripts/contrib/oe-image-files-spdx/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
1# OE Image Files from SBoM
2
3This is an example Python script that lists the packaged files and their
4checksums based on the SPDX 3.0.1 SBoM.
5
6It can be used as a template for other programs that investigate output based
7on OE SPDX SBoMs.
8
9## Installation
10
11This project can be installed in a virtual environment:
12```
13python3 -m venv .venv
14source .venv/bin/activate
15python3 -m pip install -e '.[dev]'
16```
17
18## Usage
19
20After installing, the `oe-image-files` program can be used to show the files, e.g.:
21
22```
23oe-image-files core-image-minimal-qemux86-64.rootfs.spdx.json
24```
diff --git a/scripts/contrib/oe-image-files-spdx/pyproject.toml b/scripts/contrib/oe-image-files-spdx/pyproject.toml
deleted file mode 100644
index 3fab5dd605..0000000000
--- a/scripts/contrib/oe-image-files-spdx/pyproject.toml
+++ /dev/null
@@ -1,23 +0,0 @@
1[project]
2name = "oe-image-files"
3description = "Displays all packaged files on the root file system"
4dynamic = ["version"]
5requires-python = ">= 3.8"
6readme = "README.md"
7
8dependencies = [
9 "spdx_python_model @ git+https://github.com/spdx/spdx-python-model.git@aa40861f11d1b5d20edba7101835341a70d91179",
10]
11
12[project.scripts]
13oe-image-files = "oe_image_files:main"
14
15[build-system]
16requires = ["hatchling"]
17build-backend = "hatchling.build"
18
19[tool.hatch.version]
20path = "src/oe_image_files/version.py"
21
22[tool.hatch.metadata]
23allow-direct-references = true
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
deleted file mode 100644
index c28a133f2d..0000000000
--- a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
1from .main import main
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
deleted file mode 100644
index 8476bf6369..0000000000
--- a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/main.py
+++ /dev/null
@@ -1,86 +0,0 @@
1# SPDX-License-Identifier: MIT
2
3import argparse
4from pathlib import Path
5
6
7from spdx_python_model import v3_0_1 as spdx_3_0_1
8from .version import VERSION
9
10
11def main():
12 parser = argparse.ArgumentParser(
13 description="Show the packaged files and checksums in an OE image from the SPDX SBoM"
14 )
15 parser.add_argument("file", help="SPDX 3 input file", type=Path)
16 parser.add_argument("--version", "-V", action="version", version=VERSION)
17
18 args = parser.parse_args()
19
20 # Load SPDX data from file into a new object set
21 objset = spdx_3_0_1.SHACLObjectSet()
22 with args.file.open("r") as f:
23 d = spdx_3_0_1.JSONLDDeserializer()
24 d.read(f, objset)
25
26 # Find the top level SPDX Document object
27 for o in objset.foreach_type(spdx_3_0_1.SpdxDocument):
28 doc = o
29 break
30 else:
31 print("ERROR: No SPDX Document found!")
32 return 1
33
34 # Find the root SBoM in the document
35 for o in doc.rootElement:
36 if isinstance(o, spdx_3_0_1.software_Sbom):
37 sbom = o
38 break
39 else:
40 print("ERROR: SBoM not found in document")
41 return 1
42
43 # Find the root file system package in the SBoM
44 for o in sbom.rootElement:
45 if (
46 isinstance(o, spdx_3_0_1.software_Package)
47 and o.software_primaryPurpose == spdx_3_0_1.software_SoftwarePurpose.archive
48 ):
49 root_package = o
50 break
51 else:
52 print("ERROR: Package not found in document")
53 return 1
54
55 # Find all relationships of type "contains" that go FROM the root file
56 # system
57 files = []
58 for rel in objset.foreach_type(spdx_3_0_1.Relationship):
59 if not rel.relationshipType == spdx_3_0_1.RelationshipType.contains:
60 continue
61
62 if not rel.from_ is root_package:
63 continue
64
65 # Iterate over all files in the TO of the relationship
66 for o in rel.to:
67 if not isinstance(o, spdx_3_0_1.software_File):
68 continue
69
70 # Find the SHA 256 hash of the file (if any)
71 for h in o.verifiedUsing:
72 if (
73 isinstance(h, spdx_3_0_1.Hash)
74 and h.algorithm == spdx_3_0_1.HashAlgorithm.sha256
75 ):
76 files.append((o.name, h.hashValue))
77 break
78 else:
79 files.append((o.name, ""))
80
81 # Print files
82 files.sort(key=lambda x: x[0])
83 for name, hash_val in files:
84 print(f"{name} - {hash_val}")
85
86 return 0
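Note: main() above finds the first object of each required type with Python's for/else construct, where the else branch runs only when the loop completes without hitting break. A standalone illustration of the idiom, with made-up data:

```
objects = [1, "a", 2.5]

for o in objects:
    if isinstance(o, float):
        found = o
        break
else:
    # Runs only if no break fired, i.e. nothing matched
    raise SystemExit("ERROR: no float found")

print(found)  # 2.5
```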
diff --git a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py b/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
deleted file mode 100644
index 901e5110b2..0000000000
--- a/scripts/contrib/oe-image-files-spdx/src/oe_image_files/version.py
+++ /dev/null
@@ -1 +0,0 @@
1VERSION = "0.0.1"
diff --git a/scripts/contrib/patchreview.py b/scripts/contrib/patchreview.py
deleted file mode 100755
index d8d7b214e5..0000000000
--- a/scripts/contrib/patchreview.py
+++ /dev/null
@@ -1,280 +0,0 @@
1#! /usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7
8import argparse
9import collections
10import json
11import os
12import os.path
13import pathlib
14import re
15import subprocess
16
17import sys
18sys.path.append(os.path.join(sys.path[0], '../../meta/lib'))
19import oe.qa
20
21# TODO
22# - option to just list all broken files
23# - test suite
24# - validate signed-off-by
25
26status_values = ("accepted", "pending", "inappropriate", "backport", "submitted", "denied", "inactive-upstream")
27
28class Result:
29 # Whether the patch has an Upstream-Status or not
30 missing_upstream_status = False
31 # If the Upstream-Status tag is malformed in some way (string for bad bit)
32 malformed_upstream_status = None
33 # If the Upstream-Status value is unknown (boolean)
34 unknown_upstream_status = False
35 # The upstream status value (Pending, etc)
36 upstream_status = None
37 # Whether the patch has a Signed-off-by or not
38 missing_sob = False
39 # Whether the Signed-off-by tag is malformed in some way
40 malformed_sob = False
41 # The Signed-off-by tag value
42 sob = None
43 # Whether a patch looks like a CVE but doesn't have a CVE tag
44 missing_cve = False
45
46def blame_patch(patch):
47 """
48 From a patch filename, return a list of "commit summary (author name <author
49 email>)" strings representing the history.
50 """
51 return subprocess.check_output(("git", "log",
52 "--follow", "--find-renames", "--diff-filter=A",
53 "--format=%s (%aN <%aE>)",
54 "--", patch), cwd=os.path.dirname(patch)).decode("utf-8").splitlines()
55
56def patchreview(patches):
57
58 # General pattern: start of line, optional whitespace, tag with optional
59 # hyphen or spaces, maybe a colon, some whitespace, then the value, all case
60 # insensitive.
61 sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)", re.IGNORECASE | re.MULTILINE)
62 status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)", re.IGNORECASE | re.MULTILINE)
63 cve_tag_re = re.compile(r"^[\t ]*(CVE:)[\t ]*(.*)", re.IGNORECASE | re.MULTILINE)
64 cve_re = re.compile(r"cve-[0-9]{4}-[0-9]{4,6}", re.IGNORECASE)
65
66 results = {}
67
68 for patch in patches:
69
70 result = Result()
71 results[patch] = result
72
73 content = open(patch, encoding='ascii', errors='ignore').read()
74
75 # Find the Signed-off-by tag
76 match = sob_re.search(content)
77 if match:
78 value = match.group(1)
79 if value != "Signed-off-by:":
80 result.malformed_sob = value
81 result.sob = match.group(2)
82 else:
83 result.missing_sob = True
84
85 # Find the Upstream-Status tag
86 match = status_re.search(content)
87 if match:
88 value = oe.qa.check_upstream_status(patch)
89 if value:
90 result.malformed_upstream_status = value
91
92 value = match.group(2).lower()
93 # TODO: check case
94 if value not in status_values:
95 result.unknown_upstream_status = True
96 result.upstream_status = value
97 else:
98 result.missing_upstream_status = True
99
100 # Check that patches which look like CVEs have CVE tags
101 if cve_re.search(patch) or cve_re.search(content):
102 if not cve_tag_re.search(content):
103 result.missing_cve = True
104 # TODO: extract CVE list
105
106 return results
107
108
109def analyse(results, want_blame=False, verbose=True):
110 """
111 want_blame: display blame data for each malformed patch
112 verbose: display per-file results instead of just summary
113 """
114
115 # want_blame requires verbose, so disable blame if we're not verbose
116 if want_blame and not verbose:
117 want_blame = False
118
119 total_patches = 0
120 missing_sob = 0
121 malformed_sob = 0
122 missing_status = 0
123 malformed_status = 0
124 missing_cve = 0
125 pending_patches = 0
126
127 for patch in sorted(results):
128 r = results[patch]
129 total_patches += 1
130 need_blame = False
131
132 # Build statistics
133 if r.missing_sob:
134 missing_sob += 1
135 if r.malformed_sob:
136 malformed_sob += 1
137 if r.missing_upstream_status:
138 missing_status += 1
139 if r.malformed_upstream_status or r.unknown_upstream_status:
140 malformed_status += 1
141 # Count patches with malformed or unknown status as pending
142 pending_patches +=1
143 if r.missing_cve:
144 missing_cve += 1
145 if r.upstream_status == "pending":
146 pending_patches += 1
147
148 # Output warnings
149 if r.missing_sob:
150 need_blame = True
151 if verbose:
152 print("Missing Signed-off-by tag (%s)" % patch)
153 if r.malformed_sob:
154 need_blame = True
155 if verbose:
156 print("Malformed Signed-off-by '%s' (%s)" % (r.malformed_sob, patch))
157 if r.missing_cve:
158 need_blame = True
159 if verbose:
160 print("Missing CVE tag (%s)" % patch)
161 if r.missing_upstream_status:
162 need_blame = True
163 if verbose:
164 print("Missing Upstream-Status tag (%s)" % patch)
165 if r.malformed_upstream_status:
166 need_blame = True
167 if verbose:
168 print("Malformed Upstream-Status '%s' (%s)" % (r.malformed_upstream_status, patch))
169 if r.unknown_upstream_status:
170 need_blame = True
171 if verbose:
172 print("Unknown Upstream-Status value '%s' (%s)" % (r.upstream_status, patch))
173
174 if want_blame and need_blame:
175 print("\n".join(blame_patch(patch)) + "\n")
176
177 def percent(num):
178 try:
179 return "%d (%d%%)" % (num, round(num * 100.0 / total_patches))
180 except ZeroDivisionError:
181 return "N/A"
182
183 if verbose:
184 print()
185
186 print("""Total patches found: %d
187Patches missing Signed-off-by: %s
188Patches with malformed Signed-off-by: %s
189Patches missing CVE: %s
190Patches missing Upstream-Status: %s
191Patches with malformed Upstream-Status: %s
192Patches in Pending state: %s""" % (total_patches,
193 percent(missing_sob),
194 percent(malformed_sob),
195 percent(missing_cve),
196 percent(missing_status),
197 percent(malformed_status),
198 percent(pending_patches)))
199
200
201
202def histogram(results):
203 from toolz import recipes, dicttoolz
204 import math
205
206 counts = recipes.countby(lambda r: r.upstream_status, results.values())
207 bars = dicttoolz.valmap(lambda v: "#" * int(math.ceil(float(v) / len(results) * 100)), counts)
208 for k in bars:
209 print("%-20s %s (%d)" % (k.capitalize() if k else "No status", bars[k], counts[k]))
210
211def find_layers(candidate):
212 # candidate can either be the path to a layer directly (e.g. meta-intel), or a
213 # repository that contains other layers (e.g. meta-arm). We can determine which
214 # by looking for a conf/layer.conf file. If that file exists then it's a layer,
215 # otherwise it's a repository of layers and we can assume they're called
216 # meta-*.
217
218 if (candidate / "conf" / "layer.conf").exists():
219 return [candidate.absolute()]
220 else:
221 return [d.absolute() for d in candidate.iterdir() if d.is_dir() and (d.name == "meta" or d.name.startswith("meta-"))]
222
223# TODO these don't actually handle dynamic-layers/
224
225def gather_patches(layers):
226 patches = []
227 for directory in layers:
228 filenames = subprocess.check_output(("git", "-C", directory, "ls-files", "recipes-*/**/*.patch", "recipes-*/**/*.diff"), universal_newlines=True).split()
229 patches += [os.path.join(directory, f) for f in filenames]
230 return patches
231
232def count_recipes(layers):
233 count = 0
234 for directory in layers:
235 output = subprocess.check_output(["git", "-C", directory, "ls-files", "recipes-*/**/*.bb"], universal_newlines=True)
236 count += len(output.splitlines())
237 return count
238
239if __name__ == "__main__":
240 args = argparse.ArgumentParser(description="Patch Review Tool")
241 args.add_argument("-b", "--blame", action="store_true", help="show blame for malformed patches")
242 args.add_argument("-v", "--verbose", action="store_true", help="show per-patch results")
243 args.add_argument("-g", "--histogram", action="store_true", help="show patch histogram")
244 args.add_argument("-j", "--json", help="update JSON")
245 args.add_argument("directory", type=pathlib.Path, metavar="DIRECTORY", help="directory to scan (layer, or repository of layers)")
246 args = args.parse_args()
247
248 layers = find_layers(args.directory)
249 print(f"Found layers {' '.join((d.name for d in layers))}")
250 patches = gather_patches(layers)
251 results = patchreview(patches)
252 analyse(results, want_blame=args.blame, verbose=args.verbose)
253
254 if args.json:
255 if os.path.isfile(args.json):
256 data = json.load(open(args.json))
257 else:
258 data = []
259
260 row = collections.Counter()
261 row["total"] = len(results)
262 row["date"] = subprocess.check_output(["git", "-C", args.directory, "show", "-s", "--pretty=format:%cd", "--date=format:%s"], universal_newlines=True).strip()
263 row["commit"] = subprocess.check_output(["git", "-C", args.directory, "rev-parse", "HEAD"], universal_newlines=True).strip()
264 row['commit_count'] = subprocess.check_output(["git", "-C", args.directory, "rev-list", "--count", "HEAD"], universal_newlines=True).strip()
265 row['recipe_count'] = count_recipes(layers)
266
267 for r in results.values():
268 if r.upstream_status in status_values:
269 row[r.upstream_status] += 1
270 if r.malformed_upstream_status or r.missing_upstream_status:
271 row['malformed-upstream-status'] += 1
272 if r.malformed_sob or r.missing_sob:
273 row['malformed-sob'] += 1
274
275 data.append(row)
276 json.dump(data, open(args.json, "w"), sort_keys=True, indent="\t")
277
278 if args.histogram:
279 print()
280 histogram(results)
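Note: the tag regexes in patchreview() are deliberately permissive, so near-miss spellings are captured and flagged as malformed rather than silently skipped. A short demonstration against a fabricated patch header (the regexes are copied verbatim from the script above):

```
import re

sob_re = re.compile(r"^[\t ]*(Signed[-_ ]off[-_ ]by:?)[\t ]*(.+)",
                    re.IGNORECASE | re.MULTILINE)
status_re = re.compile(r"^[\t ]*(Upstream[-_ ]Status:?)[\t ]*([\w-]*)",
                       re.IGNORECASE | re.MULTILINE)

content = """Upstream-Status: Pending
Signed_off_by: A Developer <dev@example.com>
"""

m = sob_re.search(content)
print(m.group(1))  # 'Signed_off_by:' != 'Signed-off-by:', so malformed
print(m.group(2))  # 'A Developer <dev@example.com>'

m = status_re.search(content)
print(m.group(2).lower())  # 'pending', a known status value
```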
diff --git a/scripts/contrib/patchtest.sh b/scripts/contrib/patchtest.sh
deleted file mode 100755
index b1e1ea334b..0000000000
--- a/scripts/contrib/patchtest.sh
+++ /dev/null
@@ -1,104 +0,0 @@
1#!/bin/bash
2#
3# patchtest: Run patchtest on each commit from master to HEAD
4#
5# Copyright (c) 2017, Intel Corporation.
6#
7# SPDX-License-Identifier: GPL-2.0-or-later
8#
9
10set -o errexit
11
12# Default values
13pokydir=''
14
15usage() {
16CMD=$(basename $0)
17cat <<EOM >&2
18Usage: $CMD [-h] [-p pokydir]
19 -p pokydir Defaults to current directory
20EOM
21
22 exit 1
23}
24
25function clone() {
26 local REPOREMOTE=$1
27 local REPODIR=$2
28 if [ ! -d $REPODIR ]; then
29 git clone $REPOREMOTE $REPODIR --quiet
30 else
31 ( cd $REPODIR; git pull --quiet )
32 fi
33}
34
35while getopts ":p:h" opt; do
36 case $opt in
37 p)
38 pokydir=$OPTARG
39 ;;
40 h)
41 usage
42 ;;
43 \?)
44 echo "Invalid option: -$OPTARG" >&2
45 usage
46 ;;
47 :)
48 echo "Option -$OPTARG requires an argument." >&2
49 usage
50 ;;
51 esac
52done
53shift $((OPTIND-1))
54
55CDIR="$PWD"
56
57# default pokydir to current directory if user did not specify one
58if [ -z "$pokydir" ]; then
59 pokydir="$CDIR"
60fi
61
62PTENV="$PWD/patchtest"
63PT="$PTENV/patchtest"
64PTOE="$PTENV/patchtest-oe"
65
66if ! which virtualenv > /dev/null; then
67 echo "Install virtualenv before proceeding"
68 exit 1;
69fi
70
71# activate the virtual env
72virtualenv $PTENV --quiet
73source $PTENV/bin/activate
74
75cd $PTENV
76
77# clone or pull
78clone git://git.yoctoproject.org/patchtest $PT
79clone git://git.yoctoproject.org/patchtest-oe $PTOE
80
81# install requirements
82pip install -r $PT/requirements.txt --quiet
83pip install -r $PTOE/requirements.txt --quiet
84
85PATH="$PT:$PT/scripts:$PATH"
86
87# loop through parent to HEAD and execute patchtest on each commit
88for commit in $(git rev-list master..HEAD --reverse)
89do
90 shortlog="$(git log "$commit^1..$commit" --pretty='%h: %aN: %cd: %s')"
91 log="$(git format-patch "$commit^1..$commit" --stdout | patchtest - -r $pokydir -s $PTOE/tests --base-commit $commit^1 --json 2>/dev/null | create-summary --fail --only-results)"
92 if [ -z "$log" ]; then
93 shortlog="$shortlog: OK"
94 else
95 shortlog="$shortlog: FAIL"
96 fi
97 echo "$shortlog"
98 echo "$log" | sed -n -e '/Issue/p' -e '/Suggested fix/p'
99 echo ""
100done
101
102deactivate
103
104cd $CDIR
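Note: the commit walk in the loop above is plain git plumbing. The same iteration sketched in Python with subprocess (run from inside the repository; git commands as used in the script):

```
import subprocess

def git_lines(*args):
    return subprocess.check_output(("git",) + args,
                                   universal_newlines=True).splitlines()

# Oldest-first commits on the current branch that are not on master
for commit in git_lines("rev-list", "master..HEAD", "--reverse"):
    shortlog = git_lines("log", "{0}^1..{0}".format(commit),
                         "--pretty=%h: %aN: %cd: %s")[0]
    print(shortlog)
```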
diff --git a/scripts/contrib/serdevtry b/scripts/contrib/serdevtry
deleted file mode 100755
index 9144730e7e..0000000000
--- a/scripts/contrib/serdevtry
+++ /dev/null
@@ -1,61 +0,0 @@
1#!/bin/sh
2
3# Copyright (C) 2014 Intel Corporation
4#
5# SPDX-License-Identifier: MIT
6#
7
8if [ "$1" = "" -o "$1" = "--help" ] ; then
9 echo "Usage: $0 <serial terminal command>"
10 echo
11 echo "Simple script to handle maintaining a terminal for serial devices that"
12 echo "disappear when a device is powered down or reset, such as the USB"
13 echo "serial console on the original BeagleBone (white version)."
14 echo
15 echo "e.g. $0 picocom -b 115200 /dev/ttyUSB0"
16 echo
17 exit
18fi
19
20args="$@"
21DEVICE=""
22while [ "$1" != "" ]; do
23 case "$1" in
24 /dev/*)
25 DEVICE=$1
26 break;;
27 esac
28 shift
29done
30
31if [ "$DEVICE" != "" ] ; then
32 while true; do
33 if [ ! -e $DEVICE ] ; then
34 echo "serdevtry: waiting for $DEVICE to exist..."
35 while [ ! -e $DEVICE ]; do
36 sleep 0.1
37 done
38 fi
39 if [ ! -w $DEVICE ] ; then
40 # Sometimes (presumably because of a race with udev) we get to
41 # the device before its permissions have been set up
42 RETRYNUM=0
43 while [ ! -w $DEVICE ]; do
44 if [ "$RETRYNUM" = "2" ] ; then
45 echo "Device $DEVICE exists but is not writable!"
46 exit 1
47 fi
48 RETRYNUM=$((RETRYNUM+1))
49 sleep 0.1
50 done
51 fi
52 $args
53 if [ -e $DEVICE ] ; then
54 break
55 fi
56 done
57else
58 echo "Unable to determine device node from command: $args"
59 exit 1
60fi
61
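Note: the permissions retry above guards against a udev race where the device node appears before its permissions are applied. The same wait-then-retry loop, sketched in Python with a hypothetical device path:

```
import os
import time

def wait_for_writable(device, retries=3, delay=0.1):
    # Wait for the node to appear at all
    while not os.path.exists(device):
        time.sleep(delay)
    # Then give udev a few chances to make it writable
    for _ in range(retries):
        if os.access(device, os.W_OK):
            return True
        time.sleep(delay)
    return False

if not wait_for_writable("/dev/ttyUSB0"):  # hypothetical device
    raise SystemExit("Device exists but is not writable!")
```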
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh
deleted file mode 100755
index 4012ac7ba7..0000000000
--- a/scripts/contrib/test_build_time.sh
+++ /dev/null
@@ -1,223 +0,0 @@
1#!/bin/bash
2
3# Build performance regression test script
4#
5# Copyright 2011 Intel Corporation
6#
7# SPDX-License-Identifier: GPL-2.0-or-later
8#
9# DESCRIPTION
10# This script is intended to be used in conjunction with "git bisect run"
11# in order to find regressions in build time, however it can also be used
12# independently. It cleans out the build output directories, runs a
13# specified worker script (an example is test_build_time_worker.sh) under
14# TIME(1), logs the results to TEST_LOGDIR (default /tmp) and returns a
15# value telling "git bisect run" whether the build time is good (under
16# the specified threshold) or bad (over it). There is also a tolerance
17# option but it is not particularly useful as it only subtracts the
18# tolerance from the given threshold and uses it as the actual threshold.
19#
20# It is also capable of taking a file listing git revision hashes to be
21# test-applied to the repository in order to get past build failures that
22# would otherwise cause certain revisions to have to be skipped; if a
23# revision does not apply cleanly then the script assumes it does not
24# need to be applied and ignores it.
25#
26# Please see the help output (syntax below) for some important setup
27# instructions.
28#
29# AUTHORS
30# Paul Eggleton <paul.eggleton@linux.intel.com>
31
32
33syntax() {
34 echo "syntax: $0 <script> <time> <tolerance> [patchrevlist]"
35 echo ""
36 echo " script - worker script file (if in current dir, prefix with ./)"
37 echo " time - time threshold (in seconds, suffix m for minutes)"
38 echo " tolerance - tolerance (in seconds, suffix m for minutes or % for"
39 echo " percentage, can be 0)"
40 echo " patchrevlist - optional file listing revisions to apply as patches on top"
41 echo ""
42 echo "You must set TEST_BUILDDIR to point to a previously created build directory,"
43 echo "however please note that this script will wipe out the TMPDIR defined in"
44 echo "TEST_BUILDDIR/conf/local.conf as part of its initial setup (as well as your"
45 echo "~/.ccache)"
46 echo ""
47 echo "To get rid of the sudo prompt, please add the following line to /etc/sudoers"
48 echo "(use 'visudo' to edit this; also it is assumed that the user you are running"
49 echo "as is a member of the 'wheel' group):"
50 echo ""
51 echo "%wheel ALL=(ALL) NOPASSWD: /sbin/sysctl -w vm.drop_caches=[1-3]"
52 echo ""
53 echo "Note: it is recommended that you disable crond and any other process that"
54 echo "may cause significant CPU or I/O usage during build performance tests."
55}
56
57# Note - we exit with 250 here because that will tell git bisect run that
58# something bad happened and stop
59if [ "$1" = "" ] ; then
60 syntax
61 exit 250
62fi
63
64if [ "$2" = "" ] ; then
65 syntax
66 exit 250
67fi
68
69if [ "$3" = "" ] ; then
70 syntax
71 exit 250
72fi
73
74if ! [[ "$2" =~ ^[0-9][0-9m.]*$ ]] ; then
75 echo "'$2' is not a valid number for threshold"
76 exit 250
77fi
78
79if ! [[ "$3" =~ ^[0-9][0-9m.%]*$ ]] ; then
80 echo "'$3' is not a valid number for tolerance"
81 exit 250
82fi
83
84if [ "$TEST_BUILDDIR" = "" ] ; then
85 echo "Please set TEST_BUILDDIR to a previously created build directory"
86 exit 250
87fi
88
89if [ ! -d "$TEST_BUILDDIR" ] ; then
90 echo "TEST_BUILDDIR $TEST_BUILDDIR not found"
91 exit 250
92fi
93
94git diff --quiet
95if [ $? != 0 ] ; then
96 echo "Working tree is dirty, cannot proceed"
97 exit 251
98fi
99
100if [ "BB_ENV_PASSTHROUGH_ADDITIONS" != "" ] ; then
101 echo "WARNING: you are running after sourcing the build environment script, this is not recommended"
102fi
103
104runscript=$1
105timethreshold=$2
106tolerance=$3
107
108if [ "$4" != "" ] ; then
109 patchrevlist=`cat $4`
110else
111 patchrevlist=""
112fi
113
114if [[ $timethreshold == *m* ]] ; then
115 timethreshold=`echo $timethreshold | sed s/m/*60/ | bc`
116fi
117
118if [[ $tolerance == *m* ]] ; then
119 tolerance=`echo $tolerance | sed s/m/*60/ | bc`
120elif [[ $tolerance == *%* ]] ; then
121 tolerance=`echo $tolerance | sed s/%//`
122 tolerance=`echo "scale = 2; (($tolerance * $timethreshold) / 100)" | bc`
123fi
124
125tmpdir=`grep "^TMPDIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/TMPDIR[ \t]*=[ \t\?]*"//' -e 's/"//'`
126if [ "x$tmpdir" = "x" ]; then
127 echo "Unable to determine TMPDIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
128 exit 250
129fi
130sstatedir=`grep "^SSTATE_DIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/SSTATE_DIR[ \t\?]*=[ \t]*"//' -e 's/"//'`
131if [ "x$sstatedir" = "x" ]; then
132 echo "Unable to determine SSTATE_DIR from $TEST_BUILDDIR/conf/local.conf, bailing out"
133 exit 250
134fi
135
136if [ `expr length $tmpdir` -lt 4 ] ; then
137 echo "TMPDIR $tmpdir is less than 4 characters, bailing out"
138 exit 250
139fi
140
141if [ `expr length $sstatedir` -lt 4 ] ; then
142 echo "SSTATE_DIR $sstatedir is less than 4 characters, bailing out"
143 exit 250
144fi
145
146echo -n "About to wipe out TMPDIR $tmpdir, press Ctrl+C to break out... "
147for i in 9 8 7 6 5 4 3 2 1
148do
149 echo -ne "\x08$i"
150 sleep 1
151done
152echo
153
154pushd . > /dev/null
155
156rm -f pseudodone
157echo "Removing TMPDIR $tmpdir..."
158rm -rf $tmpdir
159echo "Removing TMPDIR $tmpdir-*libc..."
160rm -rf $tmpdir-*libc
161echo "Removing SSTATE_DIR $sstatedir..."
162rm -rf $sstatedir
163echo "Removing ~/.ccache..."
164rm -rf ~/.ccache
165
166echo "Syncing..."
167sync
168sync
169echo "Dropping VM cache..."
170#echo 3 > /proc/sys/vm/drop_caches
171sudo /sbin/sysctl -w vm.drop_caches=3 > /dev/null
172
173if [ "$TEST_LOGDIR" = "" ] ; then
174 logdir="/tmp"
175else
176 logdir="$TEST_LOGDIR"
177fi
178rev=`git rev-parse HEAD`
179logfile="$logdir/timelog_$rev.log"
180echo -n > $logfile
181
182gitroot=`git rev-parse --show-toplevel`
183cd $gitroot
184for patchrev in $patchrevlist ; do
185 echo "Applying $patchrev"
186 patchfile=`mktemp`
187 git show $patchrev > $patchfile
188 git apply --check $patchfile &> /dev/null
189 if [ $? != 0 ] ; then
190 echo " ... patch does not apply without errors, ignoring"
191 else
192 echo "Applied $patchrev" >> $logfile
193 git apply $patchfile &> /dev/null
194 fi
195 rm $patchfile
196done
197
198sync
199echo "Quiescing for 5s..."
200sleep 5
201
202echo "Running $runscript at $rev..."
203timeoutfile=`mktemp`
204/usr/bin/time -o $timeoutfile -f "%e\nreal\t%E\nuser\t%Us\nsys\t%Ss\nmaxm\t%Mk" $runscript 2>&1 | tee -a $logfile
205exitstatus=$PIPESTATUS
206
207git reset --hard HEAD > /dev/null
208popd > /dev/null
209
210timeresult=`head -n1 $timeoutfile`
211cat $timeoutfile | tee -a $logfile
212rm $timeoutfile
213
214if [ $exitstatus != 0 ] ; then
215 # Build failed, exit with 125 to tell git bisect run to skip this rev
216 echo "*** Build failed (exit code $exitstatus), skipping..." | tee -a $logfile
217 exit 125
218fi
219
220ret=`echo "scale = 2; $timeresult > $timethreshold - $tolerance" | bc`
221echo "Returning $ret" | tee -a $logfile
222exit $ret
223
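Note: the suffix handling and final comparison above reduce to a little arithmetic: strip an optional m (minutes) suffix from the threshold and tolerance, interpret a % tolerance relative to the threshold, then report bad when the measured time exceeds threshold minus tolerance. A sketch of the same computation in Python, with made-up numbers:

```
def parse_seconds(value, threshold=None):
    # 'm' suffix means minutes; '%' means percent of the threshold
    if value.endswith('m'):
        return float(value[:-1]) * 60
    if value.endswith('%') and threshold is not None:
        return float(value[:-1]) * threshold / 100
    return float(value)

threshold = parse_seconds("45m")             # 2700 seconds
tolerance = parse_seconds("10%", threshold)  # 270 seconds

elapsed = 2500.0  # first line of the TIME(1) output file
# git bisect run convention: 0 = good (fast enough), 1 = bad (too slow)
ret = 1 if elapsed > threshold - tolerance else 0
print(ret)  # 1, since 2500 > 2430
```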
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh
deleted file mode 100755
index a2879d2336..0000000000
--- a/scripts/contrib/test_build_time_worker.sh
+++ /dev/null
@@ -1,41 +0,0 @@
1#!/bin/bash
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7# This is an example script to be used in conjunction with test_build_time.sh
8
9if [ "$TEST_BUILDDIR" = "" ] ; then
10 echo "TEST_BUILDDIR is not set"
11 exit 1
12fi
13
14buildsubdir=`basename $TEST_BUILDDIR`
15if [ ! -d $buildsubdir ] ; then
16 echo "Unable to find build subdir $buildsubdir in current directory"
17 exit 1
18fi
19
20if [ -f oe-init-build-env ] ; then
21 . ./oe-init-build-env $buildsubdir
22elif [ -f poky-init-build-env ] ; then
23 . ./poky-init-build-env $buildsubdir
24else
25 echo "Unable to find build environment setup script"
26 exit 1
27fi
28
29if [ -f ../meta/recipes-sato/images/core-image-sato.bb ] ; then
30 target="core-image-sato"
31else
32 target="poky-image-sato"
33fi
34
35echo "Build started at `date "+%Y-%m-%d %H:%M:%S"`"
36echo "bitbake $target"
37bitbake $target
38ret=$?
39echo "Build finished at `date "+%Y-%m-%d %H:%M:%S"`"
40exit $ret
41
diff --git a/scripts/contrib/uncovered b/scripts/contrib/uncovered
deleted file mode 100755
index f16128cb7a..0000000000
--- a/scripts/contrib/uncovered
+++ /dev/null
@@ -1,26 +0,0 @@
1#!/bin/bash -eur
2#
3# Find Python modules uncovered by oe-selftest
4#
5# Copyright (c) 2016, Intel Corporation
6#
7# SPDX-License-Identifier: GPL-2.0-only
8#
9# Author: Ed Bartosh <ed.bartosh@linux.intel.com>
10#
11
12if [ ! "$#" -eq 1 -o -t 0 ] ; then
13 echo 'Usage: coverage report | ./scripts/contrib/uncovered <dir>' 1>&2
14 exit 1
15fi
16
17path=$(readlink -ev $1)
18
19if [ ! -d "$path" ] ; then
20 echo "directory $1 doesn't exist" 1>&2
21 exit 1
22fi
23
24diff -u <(grep "$path" | grep -v '0%$' | cut -f1 -d: | sort) \
25 <(find $path | xargs file | grep 'Python script' | cut -f1 -d:| sort) | \
26 grep "^+$path" | cut -c2-
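Note: the pipeline above is a set difference: every file under the tree that file(1) identifies as a Python script, minus every file the coverage report lists with non-zero coverage. A rough Python equivalent, with the report format and paths assumed:

```
def uncovered(report_lines, all_scripts, prefix):
    # Files the coverage report saw with non-zero coverage
    # (mirroring grep -v '0%$', which also drops 10%, 20%, ...)
    covered = {line.split()[0] for line in report_lines
               if line.startswith(prefix)
               and not line.rstrip().endswith('0%')}
    # Python scripts in the tree that the report never covered
    return sorted(set(all_scripts) - covered)

report = ["/repo/lib/a.py  13  2  85%",
          "/repo/lib/b.py  10 10   0%"]         # hypothetical report lines
scripts = ["/repo/lib/a.py", "/repo/lib/b.py"]  # hypothetical file list
print(uncovered(report, scripts, "/repo"))      # ['/repo/lib/b.py']
```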
diff --git a/scripts/contrib/verify-homepage.py b/scripts/contrib/verify-homepage.py
deleted file mode 100755
index a90b5010bc..0000000000
--- a/scripts/contrib/verify-homepage.py
+++ /dev/null
@@ -1,66 +0,0 @@
1#!/usr/bin/env python3
2#
3# Copyright OpenEmbedded Contributors
4#
5# SPDX-License-Identifier: GPL-2.0-only
6#
7# This script can be used to verify HOMEPAGE values for all recipes in
8# the current configuration.
9# The result is influenced by the network environment, since each URL is checked with a default connection timeout of 5 seconds.
10
11import sys
12import os
13import subprocess
14import urllib.request
15
16
17# Allow importing scripts/lib modules
18scripts_path = os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + '/..')
19lib_path = scripts_path + '/lib'
20sys.path = sys.path + [lib_path]
21import scriptpath
22import scriptutils
23
24# Allow importing bitbake modules
25bitbakepath = scriptpath.add_bitbake_lib_path()
26
27import bb.tinfoil
28
29logger = scriptutils.logger_create('verify_homepage')
30
31def wgetHomepage(pn, homepage):
32 result = subprocess.call('wget ' + '-q -T 5 -t 1 --spider ' + homepage, shell = True)
33 if result:
34 logger.warning("%s: failed to verify HOMEPAGE: %s " % (pn, homepage))
35 return 1
36 else:
37 return 0
38
39def verifyHomepage(bbhandler):
40 pkg_pn = bbhandler.cooker.recipecaches[''].pkg_pn
41 pnlist = sorted(pkg_pn)
42 count = 0
43 checked = []
44 for pn in pnlist:
45 for fn in pkg_pn[pn]:
46 # There's no point checking multiple BBCLASSEXTENDed variants of the same recipe
47 realfn, _, _ = bb.cache.virtualfn2realfn(fn)
48 if realfn in checked:
49 continue
50 data = bbhandler.parse_recipe_file(realfn)
51 homepage = data.getVar("HOMEPAGE")
52 if homepage:
53 try:
54 urllib.request.urlopen(homepage, timeout=5)
55 except Exception:
56 count = count + wgetHomepage(os.path.basename(realfn), homepage)
57 checked.append(realfn)
58 return count
59
60if __name__=='__main__':
61 with bb.tinfoil.Tinfoil() as bbhandler:
62 bbhandler.prepare()
63 logger.info("Start verifying HOMEPAGE:")
64 failcount = verifyHomepage(bbhandler)
65 logger.info("Finished verifying HOMEPAGE.")
66 logger.info("Summary: %s failed" % failcount)