diff options
Diffstat (limited to 'scripts/contrib')
| -rwxr-xr-x | scripts/contrib/bb-perf/bb-matrix-plot.sh | 137 | ||||
| -rwxr-xr-x | scripts/contrib/bb-perf/bb-matrix.sh | 79 | ||||
| -rwxr-xr-x | scripts/contrib/bbvars.py | 186 | ||||
| -rwxr-xr-x | scripts/contrib/build-perf-test.sh | 369 | ||||
| -rwxr-xr-x | scripts/contrib/ddimage | 89 | ||||
| -rwxr-xr-x | scripts/contrib/documentation-audit.sh | 94 | ||||
| -rwxr-xr-x | scripts/contrib/graph-tool | 92 | ||||
| -rwxr-xr-x | scripts/contrib/list-packageconfig-flags.py | 209 | ||||
| -rwxr-xr-x | scripts/contrib/mkefidisk.sh | 286 | ||||
| -rwxr-xr-x | scripts/contrib/python/generate-manifest-2.7.py | 388 | ||||
| -rwxr-xr-x | scripts/contrib/python/generate-manifest-3.3.py | 380 | ||||
| -rwxr-xr-x | scripts/contrib/test_build_time.sh | 237 | ||||
| -rwxr-xr-x | scripts/contrib/test_build_time_worker.sh | 37 |
13 files changed, 2583 insertions, 0 deletions
diff --git a/scripts/contrib/bb-perf/bb-matrix-plot.sh b/scripts/contrib/bb-perf/bb-matrix-plot.sh new file mode 100755 index 0000000000..136a25570d --- /dev/null +++ b/scripts/contrib/bb-perf/bb-matrix-plot.sh | |||
| @@ -0,0 +1,137 @@ | |||
| 1 | #!/bin/bash | ||
| 2 | # | ||
| 3 | # Copyright (c) 2011, Intel Corporation. | ||
| 4 | # All rights reserved. | ||
| 5 | # | ||
| 6 | # This program is free software; you can redistribute it and/or modify | ||
| 7 | # it under the terms of the GNU General Public License as published by | ||
| 8 | # the Free Software Foundation; either version 2 of the License, or | ||
| 9 | # (at your option) any later version. | ||
| 10 | # | ||
| 11 | # This program is distributed in the hope that it will be useful, | ||
| 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 14 | # GNU General Public License for more details. | ||
| 15 | # | ||
| 16 | # You should have received a copy of the GNU General Public License | ||
| 17 | # along with this program; if not, write to the Free Software | ||
| 18 | # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. | ||
| 19 | # | ||
| 20 | # DESCRIPTION | ||
| 21 | # This script operates on the .dat file generated by bb-matrix.sh. It tolerates | ||
| 22 | # the header by skipping the first line, but error messages and bad data records | ||
| 23 | # need to be removed first. It will generate three views of the plot, and leave | ||
| 24 | # an interactive view open for further analysis. | ||
| 25 | # | ||
| 26 | # AUTHORS | ||
| 27 | # Darren Hart <dvhart@linux.intel.com> | ||
| 28 | # | ||
| 29 | |||
# Default settings; each may be overridden by the command line options
# parsed below (-d/-f/-s/-t/-w).
SIZE="640,480"                        # -s: PNG and interactive window size
DATFILE="bb-matrix.dat"               # -d: input data produced by bb-matrix.sh
FIELD=3                               # -f: data file column plotted on Z
DEF_TITLE="Elapsed Time (seconds)"    # title used when plotting field 3
XLABEL="BB_NUMBER_THREADS"            # X axis label (data file column 1)
YLABEL="PARALLEL_MAKE"                # Y axis label (data file column 2)
PM3D_FRAGMENT="unset surface; set pm3d at s hidden3d 100"  # -w replaces this
| 38 | |||
# Print usage help to stdout.  The defaults defined at the top of the
# script ($DATFILE, $FIELD, $DEF_TITLE, $SIZE) are expanded into the text
# so the help always reflects the current default values.
function usage {
	# Quote "$0": unquoted it word-splits if the script path has spaces.
	CMD=$(basename "$0")
	cat <<EOM
Usage: $CMD [-d datfile] [-f field] [-h] [-t title] [-w]
  -d datfile  The data file generated by bb-matrix.sh (default: $DATFILE)
  -f field    The field index to plot as the Z axis from the data file
              (default: $FIELD, "$DEF_TITLE")
  -h          Display this help message
  -s W,H      PNG and window size in pixels (default: $SIZE)
  -t title    The title to display, should describe the field (-f) and units
              (default: "$DEF_TITLE")
  -w          Render the plot as wireframe with a 2D colormap projected on the
              XY plane rather than as the texture for the surface
EOM
}
| 54 | |||
# Parse and validate arguments
while getopts "d:f:hs:t:w" OPT; do
	case $OPT in
	d)
		DATFILE="$OPTARG"
		;;
	f)
		FIELD="$OPTARG"
		;;
	h)
		usage
		exit 0
		;;
	s)
		SIZE="$OPTARG"
		;;
	t)
		TITLE="$OPTARG"
		;;
	w)
		# Wireframe mode: draw the colormap at the bottom of the plot
		# instead of on the surface; $W also tags the output filenames
		# so both render modes can coexist on disk.
		PM3D_FRAGMENT="set pm3d at b"
		W="-w"
		;;
	*)
		usage
		exit 1
		;;
	esac
done

# Ensure the data file exists
if [ ! -f "$DATFILE" ]; then
	echo "ERROR: $DATFILE does not exist"
	usage
	exit 1
fi
# e.g. "bb-matrix.dat" with -f 4 -w  ->  "bb-matrix-f4-w"
PLOT_BASENAME=${DATFILE%.*}-f$FIELD$W

# Set a sane title
# TODO: parse the header and define titles for each format parameter for TIME(1)
if [ -z "$TITLE" ]; then
	# "[ x != y ]" replaces the awkward "[ ! x == y ]" double negation
	if [ "$FIELD" != "3" ]; then
		TITLE="Field $FIELD"
	else
		TITLE="$DEF_TITLE"
	fi
fi

# Determine the dgrid3d mesh dimensions size.  The first line (header) is
# skipped; leading zeros (used for nice columnar files) are stripped so the
# values cannot be misread as octal.
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 1 | sed 's/^0*//' | sort -n | uniq | tail -n1)
# $((...)) replaces the deprecated $[...] arithmetic syntax
BB_CNT=$((MAX - MIN + 1))
MIN=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | head -n1)
MAX=$(tail -n +2 "$DATFILE" | cut -d ' ' -f 2 | sed 's/^0*//' | sort -n | uniq | tail -n1)
PM_CNT=$((MAX - MIN + 1))
| 110 | |||
| 111 | |||
# Render three PNG views (perspective, BB-axis side view, PM-axis side
# view) and leave an interactive wxt window open for further analysis.
# The here-document is fed to gnuplot directly; the original
# "(cat <<EOF ...) | gnuplot" subshell was an unnecessary extra process.
gnuplot --persist <<EOF
set title "$TITLE"
set xlabel "$XLABEL"
set ylabel "$YLABEL"
set style line 100 lt 5 lw 1.5
$PM3D_FRAGMENT
set dgrid3d $PM_CNT,$BB_CNT splines
set ticslevel 0.2

set term png size $SIZE
set output "$PLOT_BASENAME.png"
splot "$DATFILE" every ::1 using 1:2:$FIELD with lines ls 100

set view 90,0
set output "$PLOT_BASENAME-bb.png"
replot

set view 90,90
set output "$PLOT_BASENAME-pm.png"
replot

set view 60,30
set term wxt size $SIZE
replot
EOF
diff --git a/scripts/contrib/bb-perf/bb-matrix.sh b/scripts/contrib/bb-perf/bb-matrix.sh new file mode 100755 index 0000000000..106456584d --- /dev/null +++ b/scripts/contrib/bb-perf/bb-matrix.sh | |||
| @@ -0,0 +1,79 @@ | |||
| 1 | #!/bin/bash | ||
| 2 | # | ||
| 3 | # Copyright (c) 2011, Intel Corporation. | ||
| 4 | # All rights reserved. | ||
| 5 | # | ||
| 6 | # This program is free software; you can redistribute it and/or modify | ||
| 7 | # it under the terms of the GNU General Public License as published by | ||
| 8 | # the Free Software Foundation; either version 2 of the License, or | ||
| 9 | # (at your option) any later version. | ||
| 10 | # | ||
| 11 | # This program is distributed in the hope that it will be useful, | ||
| 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 14 | # GNU General Public License for more details. | ||
| 15 | # | ||
| 16 | # You should have received a copy of the GNU General Public License | ||
| 17 | # along with this program; if not, write to the Free Software | ||
| 18 | # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. | ||
| 19 | # | ||
| 20 | # DESCRIPTION | ||
| 21 | # This script runs BB_CMD (typically building core-image-sato) for all | ||
| 22 | # combinations of BB_RANGE and PM_RANGE values. It saves off all the console | ||
| 23 | # logs, the buildstats directories, and creates a bb-pm-runtime.dat file which | ||
| 24 | # can be used to postprocess the results with a plotting tool, spreadsheet, etc. | ||
| 25 | # Before running this script, it is recommended that you pre-download all the | ||
| 26 | # necessary sources by performing the BB_CMD once manually. It is also a good | ||
| 27 | # idea to disable cron to avoid runtime variations caused by things like the | ||
| 28 | # locate process. Be sure to sanitize the dat file prior to post-processing as | ||
| 29 | # it may contain error messages or bad runs that should be removed. | ||
| 30 | # | ||
| 31 | # AUTHORS | ||
| 32 | # Darren Hart <dvhart@linux.intel.com> | ||
| 33 | # | ||
| 34 | |||
# The following ranges are appropriate for a 4 core system with 8 logical units
# Use leading 0s to ensure all digits are the same string length, this results
# in nice log file names and columnar dat files.
BB_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"
PM_RANGE="04 05 06 07 08 09 10 11 12 13 14 15 16"

# Per-run output directory; $$ keeps repeated runs from colliding.
DATADIR="bb-matrix-$$"
BB_CMD="bitbake core-image-minimal"
RUNTIME_LOG="$DATADIR/bb-matrix.dat"

# See TIME(1) for a description of the time format parameters
# The following all report 0: W K r s t w
TIME_STR="%e %S %U %P %c %w %R %F %M %x"

# Prepare the DATADIR.  Test mkdir directly instead of inspecting $?
# afterwards, and quote all expansions.
if ! mkdir "$DATADIR"; then
	echo "Failed to create $DATADIR."
	exit 1
fi

# Add a simple header so the dat file is self-describing
echo "BB PM $TIME_STR" > "$RUNTIME_LOG"
# Sweep every (BB_NUMBER_THREADS, PARALLEL_MAKE) combination, timing one
# full $BB_CMD build per pair and appending one record per run to the log.
for BB in $BB_RANGE; do
	for PM in $PM_RANGE; do
		RUNDIR="$DATADIR/$BB-$PM-build"
		mkdir "$RUNDIR"
		BB_LOG="$RUNDIR/$BB-$PM-bitbake.log"
		date
		echo "BB=$BB PM=$PM Logging to $BB_LOG"

		echo -n " Preparing the work directory... "
		# Start every run from a cold build tree so timings are comparable
		rm -rf pseudodone tmp sstate-cache tmp-eglibc &> /dev/null
		echo "done"

		# Export the variables under test and run the bitbake command
		# Strip any leading zeroes before passing to bitbake
		export BB_NUMBER_THREADS=$(echo "$BB" | sed 's/^0*//')
		export PARALLEL_MAKE="-j $(echo "$PM" | sed 's/^0*//')"
		# $BB_CMD is intentionally unquoted: it must word-split into the
		# command and its arguments.
		/usr/bin/time -f "$BB $PM $TIME_STR" -a -o "$RUNTIME_LOG" $BB_CMD &> "$BB_LOG"

		# Echo the record just captured and archive this run's buildstats
		echo " $(tail -n1 "$RUNTIME_LOG")"
		cp -a tmp/buildstats "$RUNDIR/$BB-$PM-buildstats"
	done
done
diff --git a/scripts/contrib/bbvars.py b/scripts/contrib/bbvars.py new file mode 100755 index 0000000000..0896d64445 --- /dev/null +++ b/scripts/contrib/bbvars.py | |||
| @@ -0,0 +1,186 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | |||
| 3 | # This program is free software; you can redistribute it and/or modify | ||
| 4 | # it under the terms of the GNU General Public License as published by | ||
| 5 | # the Free Software Foundation; either version 2 of the License, or | ||
| 6 | # (at your option) any later version. | ||
| 7 | # | ||
| 8 | # This program is distributed in the hope that it will be useful, | ||
| 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 11 | # GNU General Public License for more details. | ||
| 12 | # | ||
| 13 | # You should have received a copy of the GNU General Public License | ||
| 14 | # along with this program; if not, write to the Free Software | ||
| 15 | # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. | ||
| 16 | # | ||
| 17 | # Copyright (C) Darren Hart <dvhart@linux.intel.com>, 2010 | ||
| 18 | |||
| 19 | |||
| 20 | import sys | ||
| 21 | import getopt | ||
| 22 | import os | ||
| 23 | import os.path | ||
| 24 | import re | ||
| 25 | |||
def usage():
    """Print command line usage to stdout."""
    # Parenthesized print calls work under both Python 2 and Python 3
    # (the original print statements were Python 2 only).
    # Also fixes the "MATADIR" typo in the synopsis.
    print('Usage: %s -d FILENAME [-d FILENAME]* -m METADIR [-m METADIR]*' % os.path.basename(sys.argv[0]))
    print(' -d FILENAME documentation file to search')
    print(' -h, --help display this help and exit')
    print(' -m METADIR meta directory to search for recipes')
    print(' -t FILENAME documentation config file (for doc tags)')
    print(' -T Only display variables with doc tags (requires -t)')
| 33 | |||
def recipe_bbvars(recipe):
    """Return {name: 1} for every bb-variable-looking token in *recipe*.

    A "bbvar" is approximated as any run of uppercase letters and
    underscores.  On I/O error a warning is printed and an empty dict is
    returned (the original crashed with a NameError on the unbound file
    object in that case).
    """
    prog = re.compile("[A-Z_]+")
    vset = set()
    try:
        r = open(recipe)
    except IOError as err:
        print('WARNING: Failed to open recipe %s' % recipe)
        print(str(err))
        return {}

    for line in r:
        # Strip everything from the first '#' on.  The original used
        # rsplit('#'), which split at the LAST '#' and so left earlier
        # comment text in place on lines containing several '#'.
        line = line.split('#', 1)[0]
        vset = vset.union(set(prog.findall(line)))
    r.close()

    # Seed each variable with a count of 1; callers accumulate totals.
    return dict.fromkeys(vset, 1)
| 55 | |||
def collect_bbvars(metadir):
    """Walk *metadir* recursively and aggregate per-variable counts from
    every recipe found.  Returns {varname: occurrence_count}.

    Note: the ".bb" substring test intentionally also matches .bbappend
    and .bbclass files (anything whose name contains ".bb").
    """
    bbvars = {}
    for root, dirs, files in os.walk(metadir):
        for name in files:
            if name.find(".bb") >= 0:
                # Plain iteration and dict.get() replace the Python 2-only
                # iterkeys()/has_key() calls.
                for key in recipe_bbvars(os.path.join(root, name)):
                    bbvars[key] = bbvars.get(key, 0) + 1
    return bbvars
| 68 | |||
def bbvar_is_documented(var, docfiles):
    """Return True if *var* appears as a standalone uppercase token in any
    of the files listed in *docfiles*.

    Fixes: an unopenable doc file is now skipped with a warning instead of
    crashing on the unbound file object, and the file handle is closed even
    when a match is found mid-file (the original returned without closing).
    """
    prog = re.compile(".*($|[^A-Z_])%s([^A-Z_]|$)" % (var))
    for doc in docfiles:
        try:
            f = open(doc)
        except IOError as err:
            print('WARNING: Failed to open doc %s' % doc)
            print(str(err))
            continue
        with f:
            for line in f:
                if prog.match(line):
                    return True
    return False
| 82 | |||
def bbvar_doctag(var, docconf):
    """Look up the VAR[doc] = "..." tag for *var* in the file *docconf*.

    Returns the tag text, "?" when no docconf was given, "" when the
    variable has no tag, or the I/O error text if docconf can't be opened.
    """
    # Raw string: \[ and \] are regex escapes, not string escapes (the
    # original non-raw literal relied on deprecated invalid-escape leniency).
    prog = re.compile(r'^%s\[doc\] *= *"(.*)"' % (var))
    if docconf == "":
        return "?"

    try:
        f = open(docconf)
    except IOError as err:
        return str(err.strerror)

    # 'with' guarantees the handle is closed; the original leaked it
    # whenever a tag was found (close() sat after the return).
    with f:
        for line in f:
            m = prog.search(line)
            if m:
                return m.group(1)
    return ""
| 100 | |||
def main():
    """CLI entry point: report bb variables used by recipes under the
    given metadirs (-m) that are not mentioned in the given doc files (-d),
    with usage counts and, when -t is given, their doc tags.
    """
    docfiles = []
    metadirs = []
    bbvars = {}
    undocumented = []
    docconf = ""
    onlydoctags = False

    # Collect and validate input.  "except ... as" replaces the Python
    # 2-only comma form; parenthesized print replaces print statements.
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)

    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % (a))
                sys.exit(3)
        elif o == '-m':
            if os.path.isdir(a):
                metadirs.append(a)
            else:
                print('ERROR: meta directory %s is not a directory' % (a))
                sys.exit(4)
        elif o == "-t":
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"

    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)

    if len(metadirs) == 0:
        print('ERROR: no metadir specified')
        usage()
        sys.exit(6)

    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)

    # Collect all the variable names from the recipes in the metadirs
    # (items()/get() replace iteritems()/has_key()).
    for m in metadirs:
        for key, cnt in collect_bbvars(m).items():
            bbvars[key] = bbvars.get(key, 0) + cnt

    # Check each var for documentation, tracking the longest name so the
    # report columns line up.
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, docfiles):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1

    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s%s' % (str("VARIABLE").ljust(varlen), str("COUNT").ljust(6), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        if not onlydoctags or not doctag == "":
            print('%s%s%s' % (v.ljust(varlen), str(bbvars[v]).ljust(6), doctag))
| 183 | |||
| 184 | |||
| 185 | if __name__ == "__main__": | ||
| 186 | main() | ||
diff --git a/scripts/contrib/build-perf-test.sh b/scripts/contrib/build-perf-test.sh new file mode 100755 index 0000000000..be3b648046 --- /dev/null +++ b/scripts/contrib/build-perf-test.sh | |||
| @@ -0,0 +1,369 @@ | |||
| 1 | #!/bin/bash | ||
| 2 | # | ||
| 3 | # This script runs a series of tests (with and without sstate) and reports build time (and tmp/ size) | ||
| 4 | # | ||
| 5 | # Build performance test script | ||
| 6 | # | ||
| 7 | # Copyright 2013 Intel Corporation | ||
| 8 | # | ||
| 9 | # This program is free software; you can redistribute it and/or modify | ||
| 10 | # it under the terms of the GNU General Public License as published by | ||
| 11 | # the Free Software Foundation; either version 2 of the License, or | ||
| 12 | # (at your option) any later version. | ||
| 13 | # | ||
| 14 | # This program is distributed in the hope that it will be useful, | ||
| 15 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 17 | # GNU General Public License for more details. | ||
| 18 | # | ||
| 19 | # You should have received a copy of the GNU General Public License | ||
| 20 | # along with this program; if not, write to the Free Software | ||
| 21 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | ||
| 22 | # | ||
| 23 | # | ||
| 24 | # AUTHORS: | ||
| 25 | # Stefan Stanacar <stefanx.stanacar@intel.com> | ||
| 26 | |||
| 27 | |||
# Script name for the usage text.  Quote "$0" so a script path containing
# spaces doesn't word-split.
ME=$(basename "$0")

#
# usage and setup
#

# Print usage/help text to stdout.
usage () {
	cat << EOT
Usage: $ME [-h]
       $ME [-c <commit>] [-v] [-m <val>] [-j <val>] [-t <val>] [-i <image-name>] [-d <path>]
Options:
        -h
                Display this help and exit.
        -c <commit>
                git checkout <commit> before anything else
        -v
                Show bitbake output, don't redirect it to a log.
        -m <machine>
                Value for MACHINE. Default is qemux86.
        -j <val>
                Value for PARALLEL_MAKE. Default is 8.
        -t <val>
                Value for BB_NUMBER_THREADS. Default is 8.
        -i <image-name>
                Instead of timing against core-image-sato, use <image-name>
        -d <path>
                Use <path> as DL_DIR
        -p <githash>
                Cherry pick githash onto the commit

Note: current working directory must be inside a poky git clone.

EOT
}
| 62 | |||
| 63 | |||
# Everything below assumes we run from the top of a poky clone; bail out
# early if git can't find one.  "$clonedir" is quoted — unquoted, a clone
# path containing spaces would word-split and break the cd.
if clonedir=$(git rev-parse --show-toplevel); then
	cd "$clonedir"
else
	echo "The current working dir doesn't seem to be a poky git clone. Please cd there before running $ME"
	exit 1
fi
| 70 | |||
| 71 | IMAGE="core-image-sato" | ||
| 72 | verbose=0 | ||
| 73 | dldir= | ||
| 74 | commit= | ||
| 75 | pmake= | ||
| 76 | cherrypicks= | ||
| 77 | while getopts "hvc:m:j:t:i:d:p:" opt; do | ||
| 78 | case $opt in | ||
| 79 | h) usage | ||
| 80 | exit 0 | ||
| 81 | ;; | ||
| 82 | v) verbose=1 | ||
| 83 | ;; | ||
| 84 | c) commit=$OPTARG | ||
| 85 | ;; | ||
| 86 | m) export MACHINE=$OPTARG | ||
| 87 | ;; | ||
| 88 | j) pmake=$OPTARG | ||
| 89 | ;; | ||
| 90 | t) export BB_NUMBER_THREADS=$OPTARG | ||
| 91 | ;; | ||
| 92 | i) IMAGE=$OPTARG | ||
| 93 | ;; | ||
| 94 | d) dldir=$OPTARG | ||
| 95 | ;; | ||
| 96 | p) cherrypicks="$cherrypicks $OPTARG" | ||
| 97 | ;; | ||
| 98 | *) usage | ||
| 99 | exit 1 | ||
| 100 | ;; | ||
| 101 | esac | ||
| 102 | done | ||
| 103 | |||
| 104 | |||
#drop cached credentials and test for sudo access without a password
# (-k invalidates any cached credentials, -n forbids prompting; reqpass is
# non-zero when a password will be needed later by do_sync()).
sudo -k -n ls > /dev/null 2>&1
reqpass=$?
if [ $reqpass -ne 0 ]; then
	echo "The script requires sudo access to drop caches between builds (echo 3 > /proc/sys/vm/drop_caches)"
	# NOTE(review): the password is held in a shell variable for the
	# later "sudo -S" calls in do_sync(); it stays within this process.
	read -s -p "Please enter your sudo password: " pass
	echo
fi
| 113 | |||
| 114 | if [ -n "$commit" ]; then | ||
| 115 | echo "git checkout -f $commit" | ||
| 116 | git pull > /dev/null 2>&1 | ||
| 117 | git checkout -f $commit || exit 1 | ||
| 118 | git pull > /dev/null 2>&1 | ||
| 119 | fi | ||
| 120 | |||
| 121 | if [ -n "$cherrypicks" ]; then | ||
| 122 | for c in $cherrypicks; do | ||
| 123 | git cherry-pick $c | ||
| 124 | done | ||
| 125 | fi | ||
| 126 | |||
# Per-run output tree under the clone: build-perf-test/results-<rev>-<ts>/,
# plus one global results file shared by every run.  $(...) replaces the
# deprecated backticks and "$OUTDIR" is quoted for mkdir.
rev=$(git rev-parse --short HEAD) || exit 1
OUTDIR="$clonedir/build-perf-test/results-$rev-$(date "+%Y%m%d%H%M%S")"
BUILDDIR="$OUTDIR/build"
resultsfile="$OUTDIR/results.log"
bboutput="$OUTDIR/bitbake.log"
myoutput="$OUTDIR/output.log"
globalres="$clonedir/build-perf-test/globalres.log"

mkdir -p "$OUTDIR" || exit 1
| 136 | |||
# Timestamped logger: write to stdout and append to the run's output log.
log () {
	local message="$1"
	# $myoutput is deliberately left unquoted as in the rest of the
	# script; when unset, tee simply passes stdout through.
	printf '%s: %s\n' "$(date)" "$message" | tee -a $myoutput
}
| 141 | |||
| 142 | |||
#
# Config stuff
#

# Current branch name ("* " marker stripped) and full commit hash for the
# logs and the global results record.  $(...) replaces deprecated backticks.
branch=$(git branch 2>&1 | grep "^* " | tr -d "* ")
gitcommit=$(git rev-parse HEAD) || exit 1
log "Running on $branch:$gitcommit"

# oe-init-build-env must be sourced (it exports into this shell) and a
# failure to set up the build dir is fatal.
source ./oe-init-build-env "$OUTDIR/build" >/dev/null || exit 1
cd "$OUTDIR/build"

# Defaults for anything not supplied via -m/-t/-j.
[ -n "$MACHINE" ] || export MACHINE="qemux86"
[ -n "$BB_NUMBER_THREADS" ] || export BB_NUMBER_THREADS="8"

if [ -n "$pmake" ]; then
	export PARALLEL_MAKE="-j $pmake"
else
	export PARALLEL_MAKE="-j 8"
fi

# DL_DIR defaults to a location shared across runs so fetch time doesn't
# skew the build timings.
if [ -n "$dldir" ]; then
	echo "DL_DIR = \"$dldir\"" >> conf/local.conf
else
	echo "DL_DIR = \"$clonedir/build-perf-test/downloads\"" >> conf/local.conf
fi

# Sometimes I've noticed big differences in timings for the same commit, on the same machine
# Disabling the network sanity check helps a bit (because of my crappy network connection and/or proxy)
echo "CONNECTIVITY_CHECK_URIS =\"\"" >> conf/local.conf
| 172 | |||
| 173 | |||
| 174 | # | ||
| 175 | # Functions | ||
| 176 | # | ||
| 177 | |||
# Parallel result stores filled in by the tests below: wall-clock times
# and tmp/ sizes; the *_count variables index the next free slot.
declare -a TIMES
time_count=0
declare -a SIZES
size_count=0
| 182 | |||
# Run "bitbake <args>" under /usr/bin/time -v, record the elapsed wall
# clock in TIMES[] ("0" on failure), and archive the full time(1) stats.
# Globals read: verbose, resultsfile, bboutput, OUTDIR
# Globals written: TIMES, time_count
bbtime () {
	# "$*" joins the arguments with spaces — the same value the original
	# scalar assignment from "$@" produced, spelled unambiguously.
	local arg="$*"
	log " Timing: bitbake ${arg}"

	# ${arg} is intentionally unquoted: it must word-split back into
	# separate bitbake arguments.
	if [ $verbose -eq 0 ]; then
		/usr/bin/time -v -o "$resultsfile" bitbake ${arg} >> "$bboutput"
	else
		/usr/bin/time -v -o "$resultsfile" bitbake ${arg}
	fi
	ret=$?
	if [ $ret -eq 0 ]; then
		# Extract the value after "... (h:mm:ss or m:ss): " on the
		# wall-clock line of time -v output
		t=$(grep wall "$resultsfile" | sed 's/.*m:ss): //')
		log " TIME: $t"
		TIMES[(( time_count++ ))]="$t"
	else
		log "ERROR: exit status was non-zero, will report time as 0."
		TIMES[(( time_count++ ))]="0"
	fi

	#time by default overwrites the output file and we want to keep the results
	#it has an append option but I don't want to clobber the results in the same file
	i=$(ls "$OUTDIR"/results.log* | wc -l)
	mv "$resultsfile" "${resultsfile}.${i}"
	log "More stats can be found in ${resultsfile}.${i}"
}
| 208 | |||
#we don't time bitbake here
# Run "bitbake <args>" untimed; any failure aborts the whole script since
# later timed steps would be meaningless.
bbnotime () {
	local arg="$*"
	log " Running: bitbake ${arg}"
	# ${arg} intentionally unquoted so it word-splits into arguments
	if [ $verbose -eq 0 ]; then
		bitbake ${arg} >> "$bboutput"
	else
		bitbake ${arg}
	fi
	ret=$?
	if [ $ret -eq 0 ]; then
		log " Finished bitbake ${arg}"
	else
		log "ERROR: exit status was non-zero. Exit.."
		exit $ret
	fi

}
| 227 | |||
# Remove the build's transient state so the next run starts from cold.
do_rmtmp() {
	log " Removing tmp"
	rm -rf -- bitbake.lock pseudodone conf/sanity_info cache tmp
}
# Remove the shared-state cache so nothing can be restored from sstate.
do_rmsstate () {
	log " Removing sstate-cache"
	rm -rf -- sstate-cache
}
# Flush dirty pages and drop the kernel page/dentry/inode caches so every
# timed build starts from a cold-cache state.  Uses the sudo access probed
# at startup: run directly when passwordless ($reqpass == 0), otherwise
# feed the captured password via "sudo -S".
do_sync () {
	log " Syncing and dropping caches"
	sync; sync
	if [ $reqpass -eq 0 ]; then
		sudo sh -c "echo 3 > /proc/sys/vm/drop_caches"
	else
		echo "$pass" | sudo -S sh -c "echo 3 > /proc/sys/vm/drop_caches"
		echo
	fi
	# Let the system settle before the next timed run
	sleep 3
}
| 247 | |||
# Append one comma-separated record to the global results file:
#   host,branch:commit,git-describe,<times...>,<sizes...>
# The trailing comma left by the loops is stripped afterwards with sed.
write_results() {
	local entry
	printf '%s,%s:%s,%s,' "$(uname -n)" "$branch" "$gitcommit" "$(git describe)" >> $globalres
	for entry in "${TIMES[@]}"; do
		printf '%s,' "$entry" >> $globalres
	done
	for entry in "${SIZES[@]}"; do
		printf '%s,' "$entry" >> $globalres
	done
	echo >> $globalres
	sed -i '$ s/,$//' $globalres
}
| 259 | |||
| 260 | #### | ||
| 261 | |||
| 262 | # | ||
| 263 | # Test 1 | ||
| 264 | # Measure: Wall clock of "bitbake core-image-sato" and size of tmp/dir (w/o rm_work and w/ rm_work) | ||
| 265 | # Pre: Downloaded sources, no sstate | ||
| 266 | # Steps: | ||
| 267 | # Part1: | ||
| 268 | # - fetchall | ||
| 269 | # - clean build dir | ||
| 270 | # - time bitbake core-image-sato | ||
| 271 | # - collect data | ||
| 272 | # Part2: | ||
| 273 | # - bitbake virtual/kernel -c cleansstate | ||
| 274 | # - time bitbake virtual/kernel | ||
| 275 | # Part3: | ||
| 276 | # - add INHERIT to local.conf | ||
| 277 | # - clean build dir | ||
| 278 | # - build | ||
| 279 | # - report size, remove INHERIT | ||
| 280 | |||
# Test 1 part 1: full cold build of $IMAGE.  Sources are pre-fetched first
# so only build time is measured; tmp/ size and buildstats are archived.
test1_p1 () {
	log "Running Test 1, part 1/3: Measure wall clock of bitbake $IMAGE and size of tmp/ dir"
	bbnotime $IMAGE -c fetchall
	do_rmtmp
	do_rmsstate
	do_sync
	bbtime $IMAGE
	# du prints "<size>\ttmp"; strip the name and any trailing whitespace
	s=$(du -s tmp | sed 's/tmp//' | sed 's/[ \t]*$//')
	SIZES[(( size_count++ ))]="$s"
	log "SIZE of tmp dir is: $s"
	log "Buildstats are saved in $OUTDIR/buildstats-test1"
	mv tmp/buildstats "$OUTDIR/buildstats-test1"
}
| 294 | |||
| 295 | |||
# Test 1 part 2: wipe the kernel's sstate only, then time rebuilding just
# virtual/kernel on top of the existing tmp/ (single-target rebuild cost).
test1_p2 () {
	log "Running Test 1, part 2/3: bitbake virtual/kernel -c cleansstate and time bitbake virtual/kernel"
	bbnotime virtual/kernel -c cleansstate
	do_sync
	bbtime virtual/kernel
}
| 302 | |||
# Test 1 part 3: cold build again with rm_work enabled to measure the tmp/
# size saving; the INHERIT line is removed from local.conf afterwards.
test1_p3 () {
	log "Running Test 1, part 3/3: Build $IMAGE w/o sstate and report size of tmp/dir with rm_work enabled"
	echo "INHERIT += \"rm_work\"" >> conf/local.conf
	do_rmtmp
	do_rmsstate
	do_sync
	bbtime $IMAGE
	# Delete the whole line: the original substitution ('s/...//') left
	# an empty line behind in local.conf on every run.
	sed -i '/INHERIT += "rm_work"/d' conf/local.conf
	s=$(du -s tmp | sed 's/tmp//' | sed 's/[ \t]*$//')
	SIZES[(( size_count++ ))]="$s"
	log "SIZE of tmp dir is: $s"
	log "Buildstats are saved in $OUTDIR/buildstats-test13"
	mv tmp/buildstats "$OUTDIR/buildstats-test13"
}
| 317 | |||
| 318 | |||
| 319 | # | ||
| 320 | # Test 2 | ||
| 321 | # Measure: Wall clock of "bitbake core-image-sato" and size of tmp/dir | ||
| 322 | # Pre: populated sstate cache | ||
| 323 | |||
# Test 2: remove tmp/ but keep the sstate cache populated by test 1, then
# time building the image rootfs — measures sstate restore speed.
test2 () {
	# Assuming test 1 has run
	log "Running Test 2: Measure wall clock of bitbake $IMAGE -c rootfs with sstate"
	do_rmtmp
	do_sync
	bbtime $IMAGE -c rootfs
}
| 331 | |||
| 332 | |||
| 333 | # Test 3 | ||
| 334 | # parsing time metrics | ||
| 335 | # | ||
| 336 | # Start with | ||
| 337 | # i) "rm -rf tmp/cache; time bitbake -p" | ||
| 338 | # ii) "rm -rf tmp/cache/default-eglibc/; time bitbake -p" | ||
| 339 | # iii) "time bitbake -p" | ||
| 340 | |||
| 341 | |||
# Test 3: recipe parsing time (bitbake -p) with progressively warmer
# caches: (i) both caches removed, (ii) only the per-configuration cache
# under tmp/cache removed, (iii) everything still cached.
test3 () {
	log "Running Test 3: Parsing time metrics (bitbake -p)"
	log " Removing tmp/cache && cache"
	rm -rf tmp/cache cache
	bbtime -p
	log " Removing tmp/cache/default-eglibc/"
	rm -rf tmp/cache/default-eglibc/
	bbtime -p
	bbtime -p
}
| 352 | |||
| 353 | |||
| 354 | |||
# RUN!
# Order matters: test2 relies on the sstate cache populated by the test 1
# builds (see the comment inside test2).  Any bitbake failure inside
# bbnotime aborts the script before write_results.

test1_p1
test1_p2
test1_p3
test2
test3

# if we got til here write to global results
write_results

log "All done, cleaning up..."

do_rmtmp
do_rmsstate
diff --git a/scripts/contrib/ddimage b/scripts/contrib/ddimage new file mode 100755 index 0000000000..93ebeafc31 --- /dev/null +++ b/scripts/contrib/ddimage | |||
| @@ -0,0 +1,89 @@ | |||
| 1 | #!/bin/sh | ||
| 2 | |||
#BLACKLIST_DEVICES="/dev/sda /dev/sdb /dev/sdc /dev/sdd /dev/sde"
# Devices this script refuses to write to (typically the system disk).
BLACKLIST_DEVICES="/dev/sda"

# 1MB blocksize
BLOCKSIZE=1048576
| 8 | |||
# Print the expected invocation to stdout.
usage() {
	# Quote "$0": unquoted it would word-split on paths with spaces.
	echo "Usage: $(basename "$0") IMAGE DEVICE"
}
| 12 | |||
# Print name, size, mtime and file(1) type of the image file given as $1.
# stat/file -L follow symlinks so the details describe the real file.
image_details() {
	IMG=$1
	echo "Image details"
	echo "============="
	# "$IMG" is quoted throughout: an image path containing spaces would
	# otherwise word-split into multiple stat/file arguments.
	echo " image: $(stat --printf '%N\n' "$IMG")"
	echo " size: $(stat -L --printf '%s bytes\n' "$IMG")"
	echo " modified: $(stat -L --printf '%y\n' "$IMG")"
	echo " type: $(file -L -b "$IMG")"
	echo ""
}
| 23 | |||
# Print vendor/model/size details for the block device NAME given as $1
# (a bare name like "sdb", not a /dev path), read from /sys/class/block.
# Note: the " device:" line prints the caller's full $DEVICE path, not $1.
device_details() {
	DEV=$1
	BLOCK_SIZE=512

	echo "Device details"
	echo "=============="
	echo " device: $DEVICE"
	if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
		echo " vendor: $(cat /sys/class/block/$DEV/device/vendor)"
	else
		# fixed output typo: was "UNKOWN"
		echo " vendor: UNKNOWN"
	fi
	if [ -f "/sys/class/block/$DEV/device/model" ]; then
		echo " model: $(cat /sys/class/block/$DEV/device/model)"
	else
		echo " model: UNKNOWN"
	fi
	if [ -f "/sys/class/block/$DEV/size" ]; then
		# sysfs size is in 512-byte sectors; convert to bytes
		echo " size: $(($(cat /sys/class/block/$DEV/size) * $BLOCK_SIZE)) bytes"
	else
		echo " size: UNKNOWN"
	fi
	echo ""
}
| 48 | |||
#
# Validate command-line arguments: exactly IMAGE and DEVICE are required.
#
if [ $# -ne 2 ]; then
	usage
	exit 1
fi

IMAGE=$1
DEVICE=$2

if [ ! -e "$IMAGE" ]; then
	echo "ERROR: Image $IMAGE does not exist"
	usage
	exit 1
fi


# Refuse to write to blacklisted devices (typically the system disk).
for i in ${BLACKLIST_DEVICES}; do
	if [ "$i" = "$DEVICE" ]; then
		echo "ERROR: Device $DEVICE is blacklisted"
		exit 1
	fi
done

# Writability check doubles as an existence/privilege check.
if [ ! -w "$DEVICE" ]; then
	echo "ERROR: Device $DEVICE does not exist or is not writable"
	usage
	exit 1
fi

# Show what is about to happen and require explicit confirmation before
# overwriting the device.
image_details $IMAGE
device_details $(basename $DEVICE)

printf "Write $IMAGE to $DEVICE [y/N]? "
read RESPONSE
if [ "$RESPONSE" != "y" ]; then
	echo "Write aborted"
	exit 0
fi

# DESTRUCTIVE: raw copy of the image over the entire device, then flush
# kernel buffers so it is safe to remove the media.
echo "Writing image..."
dd if="$IMAGE" of="$DEVICE" bs="$BLOCKSIZE"
sync
diff --git a/scripts/contrib/documentation-audit.sh b/scripts/contrib/documentation-audit.sh new file mode 100755 index 0000000000..2144aac936 --- /dev/null +++ b/scripts/contrib/documentation-audit.sh | |||
| @@ -0,0 +1,94 @@ | |||
| 1 | #!/bin/bash | ||
| 2 | # | ||
| 3 | # Perform an audit of which packages provide documentation and which | ||
| 4 | # are missing -doc packages. | ||
| 5 | # | ||
| 6 | # Setup requirements: be sure to be building for MACHINE=qemux86. Run | ||
| 7 | # this script after source'ing the build environment script, so you're | ||
| 8 | # running it from build/ directory. | ||
| 9 | # | ||
| 10 | # Maintainer: Scott Garman <scott.a.garman@intel.com> | ||
| 11 | |||
# Report files produced by this audit run.
REPORT_DOC_SIMPLE="documentation_exists.txt"
REPORT_DOC_DETAIL="documentation_exists_detail.txt"
REPORT_MISSING_SIMPLE="documentation_missing.txt"
REPORT_MISSING_DETAIL="documentation_missing_detail.txt"
REPORT_BUILD_ERRORS="build_errors.txt"

# Remove reports from any previous run.
# NOTE(review): $REPORT_BUILD_ERRORS is not removed here -- confirm whether
# build errors are meant to accumulate across runs.
rm -rf $REPORT_DOC_SIMPLE $REPORT_DOC_DETAIL $REPORT_MISSING_SIMPLE $REPORT_MISSING_DETAIL

# Bail out early unless the build environment (which puts bitbake on PATH)
# has been sourced.
BITBAKE=`which bitbake`
if [ -z "$BITBAKE" ]; then
	echo "Error: bitbake command not found."
	echo "Did you forget to source the build environment script?"
	exit 1
fi

echo "REMINDER: you need to build for MACHINE=qemux86 or you won't get useful results"
echo "REMINDER: you need to set LICENSE_FLAGS_WHITELIST appropriately in local.conf or "
echo "  you'll get false positives. For example, LICENSE_FLAGS_WHITELIST = \"Commercial\""

# Iterate over the first word of every 'bitbake -s' output line; recipe names
# appear there, interleaved with status text that is filtered out below.
for pkg in `bitbake -s | awk '{ print \$1 }'`; do
	if [[ "$pkg" == "Loading" || "$pkg" == "Loaded" ||
		"$pkg" == "Recipe" ||
		"$pkg" == "Parsing" || "$pkg" == "Package" ||
		"$pkg" == "NOTE:" || "$pkg" == "WARNING:" ||
		"$pkg" == "done." || "$pkg" == "===========" ]]
	then
		# Skip initial bitbake output
		continue
	fi
	if [[ "$pkg" =~ -native$ || "$pkg" =~ -nativesdk$ ||
		"$pkg" =~ -cross-canadian ]]; then
		# Skip native/nativesdk/cross-canadian recipes
		continue
	fi
	if [[ "$pkg" =~ ^meta- || "$pkg" =~ ^packagegroup- || "$pkg" =~ -image ]]; then
		# Skip meta, task and image recipes
		continue
	fi
	if [[ "$pkg" =~ ^glibc- || "$pkg" =~ ^libiconv$ ||
		"$pkg" =~ -toolchain$ || "$pkg" =~ ^package-index$ ||
		"$pkg" =~ ^linux- || "$pkg" =~ ^adt-installer$ ||
		"$pkg" =~ ^eds-tools$ || "$pkg" =~ ^external-python-tarball$ ||
		"$pkg" =~ ^qt4-embedded$ || "$pkg" =~ ^qt-mobility ]]; then
		# Skip glibc, libiconv, -toolchain, and other recipes known
		# to cause build conflicts or trigger false positives.
		continue
	fi

	echo "Building package $pkg..."
	bitbake $pkg > /dev/null
	if [ $? -ne 0 ]; then
		echo "There was an error building package $pkg" >> "$REPORT_MISSING_DETAIL"
		echo "$pkg" >> $REPORT_BUILD_ERRORS

		# Do not skip the remaining tests, as sometimes the
		# exit status is 1 due to QA errors, and we can still
		# perform the -doc checks.
	fi

	echo "$pkg built successfully, checking for a documentation package..."
	# Extract WORKDIR from 'bitbake -e' output (line format: WORKDIR="/path").
	WORKDIR=`bitbake -e $pkg | grep ^WORKDIR | awk -F '=' '{ print \$2 }' | awk -F '"' '{ print \$2 }'`
	# A generated -doc package shows up as a packages-split/<name>-doc dir.
	FIND_DOC_PKG=`find $WORKDIR/packages-split/*-doc -maxdepth 0 -type d`
	if [ -z "$FIND_DOC_PKG" ]; then
		# No -doc package was generated:
		echo "No -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
		echo "$pkg" >> $REPORT_MISSING_SIMPLE
		continue
	fi

	FIND_DOC_FILES=`find $FIND_DOC_PKG -type f`
	if [ -z "$FIND_DOC_FILES" ]; then
		# No files shipped with the -doc package:
		echo "No files shipped with the -doc package: $pkg" >> "$REPORT_MISSING_DETAIL"
		echo "$pkg" >> $REPORT_MISSING_SIMPLE
		continue
	fi

	echo "Documentation shipped with $pkg:" >> "$REPORT_DOC_DETAIL"
	echo "$FIND_DOC_FILES" >> "$REPORT_DOC_DETAIL"
	echo "" >> "$REPORT_DOC_DETAIL"

	echo "$pkg" >> "$REPORT_DOC_SIMPLE"
done
diff --git a/scripts/contrib/graph-tool b/scripts/contrib/graph-tool new file mode 100755 index 0000000000..6dc7d337f8 --- /dev/null +++ b/scripts/contrib/graph-tool | |||
| @@ -0,0 +1,92 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | |||
| 3 | # Simple graph query utility | ||
| 4 | # useful for getting answers from .dot files produced by bitbake -g | ||
| 5 | # | ||
| 6 | # Written by: Paul Eggleton <paul.eggleton@linux.intel.com> | ||
| 7 | # | ||
| 8 | # Copyright 2013 Intel Corporation | ||
| 9 | # | ||
| 10 | # This program is free software; you can redistribute it and/or modify | ||
| 11 | # it under the terms of the GNU General Public License version 2 as | ||
| 12 | # published by the Free Software Foundation. | ||
| 13 | # | ||
| 14 | # This program is distributed in the hope that it will be useful, | ||
| 15 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 16 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 17 | # GNU General Public License for more details. | ||
| 18 | # | ||
| 19 | # You should have received a copy of the GNU General Public License along | ||
| 20 | # with this program; if not, write to the Free Software Foundation, Inc., | ||
| 21 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. | ||
| 22 | # | ||
| 23 | |||
| 24 | import sys | ||
| 25 | |||
def get_path_networkx(dotfile, fromnode, tonode):
    """Return an iterator over all simple paths from fromnode to tonode.

    dotfile is a graphviz .dot file (e.g. produced by 'bitbake -g').
    Exits the program if networkx is not installed or if either node is
    absent from the graph.
    """
    try:
        import networkx
    except ImportError:
        print('ERROR: Please install the networkx python module')
        sys.exit(1)

    graph = networkx.DiGraph(networkx.read_dot(dotfile))

    def node_missing(node):
        # Suggest close matches for a probably-mistyped node name, then exit.
        import difflib
        close_matches = difflib.get_close_matches(node, graph.nodes(), cutoff=0.7)
        if close_matches:
            print('ERROR: no node "%s" in graph. Close matches:\n  %s' % (node, '\n  '.join(close_matches)))
        sys.exit(1)

    if not fromnode in graph:
        node_missing(fromnode)
    if not tonode in graph:
        node_missing(tonode)
    return networkx.all_simple_paths(graph, source=fromnode, target=tonode)
| 47 | |||
| 48 | |||
def find_paths(args, usage):
    """Print every simple path between two nodes of a dot graph.

    args:  [dotfile, fromnode, tonode]
    usage: callable that prints help text (used on bad arguments).
    Exits non-zero on missing arguments or when no path exists.
    """
    if len(args) < 3:
        usage()
        sys.exit(1)

    fromnode = args[1]
    tonode = args[2]
    paths = list(get_path_networkx(args[0], fromnode, tonode))
    if paths:
        for path in paths:
            # Parenthesized print is valid under both Python 2 and 3; the
            # original bare "print x" statement is a syntax error on Python 3.
            print(' -> '.join(path))
    else:
        print("ERROR: no path from %s to %s in graph" % (fromnode, tonode))
        sys.exit(1)
| 63 | |||
def main():
    """Parse the command line and dispatch to the requested subcommand."""
    import optparse
    parser = optparse.OptionParser(
        usage = '''%prog [options] <command> <arguments>

Available commands:
  find-paths <dotfile> <from> <to>
    Find all of the paths between two nodes in a dot graph''')

    options, args = parser.parse_args(sys.argv)
    args = args[1:]

    # No command supplied at all: show help and fail.
    if not args:
        parser.print_help()
        sys.exit(1)

    command = args[0]
    if command == "find-paths":
        find_paths(args[1:], parser.print_help)
    else:
        # Unknown command.
        parser.print_help()
        sys.exit(1)
| 89 | |||
| 90 | |||
# Script entry point.
if __name__ == "__main__":
    main()
diff --git a/scripts/contrib/list-packageconfig-flags.py b/scripts/contrib/list-packageconfig-flags.py new file mode 100755 index 0000000000..371033a3d8 --- /dev/null +++ b/scripts/contrib/list-packageconfig-flags.py | |||
| @@ -0,0 +1,209 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | |||
| 3 | # This program is free software; you can redistribute it and/or modify | ||
| 4 | # it under the terms of the GNU General Public License as published by | ||
| 5 | # the Free Software Foundation; either version 2 of the License, or | ||
| 6 | # (at your option) any later version. | ||
| 7 | # | ||
| 8 | # This program is distributed in the hope that it will be useful, | ||
| 9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 11 | # GNU General Public License for more details. | ||
| 12 | # | ||
| 13 | # You should have received a copy of the GNU General Public License | ||
| 14 | # along with this program; if not, write to the Free Software Foundation. | ||
| 15 | # | ||
| 16 | # Copyright (C) 2013 Wind River Systems, Inc. | ||
| 17 | # | ||
| 18 | # - list available pkgs which have PACKAGECONFIG flags | ||
| 19 | # - list available PACKAGECONFIG flags and all affected pkgs | ||
| 20 | # - list all pkgs and PACKAGECONFIG information | ||
| 21 | |||
| 22 | import sys | ||
| 23 | import getopt | ||
| 24 | import os | ||
| 25 | |||
def search_bitbakepath():
    """Locate bitbake's 'lib' directory so the bb modules can be imported.

    First tries ../../bitbake/lib relative to this script, then each PATH
    entry's sibling ../lib.  Returns the absolute path on success; writes an
    error to stderr and exits with status 1 if bitbake cannot be found.
    """
    script_dir = os.path.dirname(sys.argv[0])
    candidate = os.path.join(script_dir, '../../bitbake/lib')
    if os.path.exists(os.path.join(candidate, 'bb')):
        return os.path.abspath(candidate)

    # Fall back to locating a bitbake/bin directory on PATH.
    for entry in os.environ['PATH'].split(':'):
        libdir = os.path.join(entry, '../lib')
        if os.path.exists(os.path.join(libdir, 'bb')):
            return os.path.abspath(libdir)

    sys.stderr.write("Unable to find bitbake by searching parent directory of this script or PATH\n")
    sys.exit(1)
| 43 | |||
# For importing the following modules
# The bitbake lib dir must be on sys.path before the bb.* imports below.
sys.path.insert(0, search_bitbakepath())
import bb.cache
import bb.cooker
import bb.providers
import bb.tinfoil
| 50 | |||
# Help text printed by usage(); its first line continues the "Usage:" summary
# line that usage() prints before it.
usage_body = ''' list available pkgs which have PACKAGECONFIG flags

OPTION:
  -h, --help    display this help and exit
  -f, --flag    list available PACKAGECONFIG flags and all affected pkgs
  -a, --all     list all pkgs and PACKAGECONFIG information
  -p, --prefer  list pkgs with preferred version

EXAMPLE:
list-packageconfig-flags.py
list-packageconfig-flags.py -f
list-packageconfig-flags.py -a
list-packageconfig-flags.py -p
list-packageconfig-flags.py -f -p
list-packageconfig-flags.py -a -p
'''
| 67 | |||
def usage():
    """Print the command synopsis and option help to stdout."""
    # Parenthesized print is valid on both Python 2 and 3; the original bare
    # print statements are syntax errors on Python 3.
    print('Usage: %s [-f|-a] [-p]' % os.path.basename(sys.argv[0]))
    print(usage_body)
| 71 | |||
def get_fnlist(bbhandler, pkg_pn, preferred):
    """Return recipe file names for every PN in pkg_pn, sorted by PN.

    With preferred=True only the preferred provider's file is returned for
    each recipe; otherwise every provider file is included.
    """
    if preferred:
        _, preferred_versions = bb.providers.findProviders(
            bbhandler.config_data, bbhandler.cooker.recipecache, pkg_pn)
        return [preferred_versions[pn][1] for pn in sorted(pkg_pn)]

    filenames = []
    for pn in sorted(pkg_pn):
        filenames.extend(pkg_pn[pn])
    return filenames
| 85 | |||
def get_recipesdata(bbhandler, preferred):
    ''' Get data of all available recipes which have PACKAGECONFIG flags '''
    # All known recipe provider files, keyed by PN.
    pkg_pn = bbhandler.cooker.recipecache.pkg_pn

    # Maps recipe filename -> fully parsed datastore, but only for recipes
    # that define at least one PACKAGECONFIG var flag.
    data_dict = {}
    for fn in get_fnlist(bbhandler, pkg_pn, preferred):
        data = bb.cache.Cache.loadDataFull(fn, bbhandler.cooker.collection.get_file_appends(fn), bbhandler.config_data)
        if data.getVarFlags("PACKAGECONFIG"):
            data_dict[fn] = data

    return data_dict
| 97 | |||
def collect_pkgs(data_dict):
    """Map each package name (its P value) to its sorted PACKAGECONFIG flags.

    Result shape: {'pkg1': ['flag1', 'flag2', ...], ...}
    """
    pkg_dict = {}
    for recipe_data in data_dict.values():
        flags = recipe_data.getVarFlags("PACKAGECONFIG")
        pkg_dict[recipe_data.getVar("P", True)] = sorted(flags.keys())
    return pkg_dict
| 108 | |||
def collect_flags(pkg_dict):
    """Invert collect_pkgs() output: map each flag to the pkgs that use it.

    Result shape: {'flag': ['pkg1', 'pkg2', ...], ...}
    The internal "defaultval" var-flag is metadata, not a real PACKAGECONFIG
    option, so it is skipped.
    """
    flag_dict = {}
    # items() works on both Python 2 and 3; the original iteritems() is
    # Python-2-only and raises AttributeError on Python 3.
    for pkgname, flaglist in pkg_dict.items():
        for flag in flaglist:
            if flag == "defaultval":
                continue
            flag_dict.setdefault(flag, []).append(pkgname)
    return flag_dict
| 124 | |||
def display_pkgs(pkg_dict):
    """Print a two-column table: package name, then its PACKAGECONFIG flags.

    The name column is sized to the longest package name (at minimum the
    header width) plus one space of padding.
    """
    pkgname_len = len("PACKAGE NAME") + 1
    for pkgname in pkg_dict:
        if pkgname_len < len(pkgname):
            pkgname_len = len(pkgname)
    pkgname_len += 1

    # print() calls are valid on both Python 2 and 3; the original bare
    # print statements are syntax errors on Python 3.
    header = '%-*s%s' % (pkgname_len, str("PACKAGE NAME"), str("PACKAGECONFIG FLAGS"))
    print(header)
    print(str("").ljust(len(header), '='))
    for pkgname in sorted(pkg_dict):
        print('%-*s%s' % (pkgname_len, pkgname, ' '.join(pkg_dict[pkgname])))
| 138 | |||
| 139 | |||
def display_flags(flag_dict):
    """Print a two-column table: PACKAGECONFIG flag, then the affected pkgs.

    Package names within a row are printed in sorted order.
    """
    flag_len = len("PACKAGECONFIG FLAG") + 5

    # print() calls are valid on both Python 2 and 3; the original bare
    # print statements are syntax errors on Python 3.
    header = '%-*s%s' % (flag_len, str("PACKAGECONFIG FLAG"), str("PACKAGE NAMES"))
    print(header)
    print(str("").ljust(len(header), '='))

    for flag in sorted(flag_dict):
        print('%-*s%s' % (flag_len, flag, ' '.join(sorted(flag_dict[flag]))))
| 150 | |||
def display_all(data_dict):
    """Dump full PACKAGECONFIG information for every recipe in data_dict.

    For each recipe prints: package name (P), recipe filename, the
    PACKAGECONFIG value ('None' if empty), and every PACKAGECONFIG[flag]
    definition except the internal "defaultval" metadata flag.
    """
    # print()/items() are valid on both Python 2 and 3; the original used
    # py2-only print statements and iteritems().
    print(str("").ljust(50, '='))
    for fn in data_dict:
        print('%s' % data_dict[fn].getVar("P", True))
        print(fn)
        packageconfig = data_dict[fn].getVar("PACKAGECONFIG", True) or ''
        if packageconfig.strip() == '':
            packageconfig = 'None'
        print('PACKAGECONFIG %s' % packageconfig)

        for flag, flag_val in data_dict[fn].getVarFlags("PACKAGECONFIG").items():
            if flag == "defaultval":
                continue
            print('PACKAGECONFIG[%s] %s' % (flag, flag_val))
        print('')
| 167 | |||
def main():
    """Entry point: parse options, load recipe data, run the listing mode."""
    listtype = 'pkgs'
    preferred = False

    # Collect and validate input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hfap", ["help", "flag", "all", "prefer"])
    except getopt.GetoptError as err:
        # "except X as e" is valid on Python 2.6+ and 3; the original
        # comma form ("except X, e") is a syntax error on Python 3.
        sys.stderr.write('%s\n' % str(err))
        usage()
        sys.exit(2)
    for opt, value in opts:
        if opt in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif opt in ('-f', '--flag'):
            listtype = 'flags'
        elif opt in ('-a', '--all'):
            listtype = 'all'
        elif opt in ('-p', '--prefer'):
            preferred = True
        else:
            assert False, "unhandled option"

    # Fire up a minimal bitbake server to parse the recipe metadata.
    bbhandler = bb.tinfoil.Tinfoil()
    bbhandler.prepare()
    data_dict = get_recipesdata(bbhandler, preferred)

    if listtype == 'flags':
        display_flags(collect_flags(collect_pkgs(data_dict)))
    elif listtype == 'pkgs':
        display_pkgs(collect_pkgs(data_dict))
    elif listtype == 'all':
        display_all(data_dict)
| 207 | |||
# Script entry point.
if __name__ == "__main__":
    main()
diff --git a/scripts/contrib/mkefidisk.sh b/scripts/contrib/mkefidisk.sh new file mode 100755 index 0000000000..c86849d395 --- /dev/null +++ b/scripts/contrib/mkefidisk.sh | |||
| @@ -0,0 +1,286 @@ | |||
| 1 | #!/bin/sh | ||
| 2 | # | ||
| 3 | # Copyright (c) 2012, Intel Corporation. | ||
| 4 | # All rights reserved. | ||
| 5 | # | ||
| 6 | # This program is free software; you can redistribute it and/or modify | ||
| 7 | # it under the terms of the GNU General Public License as published by | ||
| 8 | # the Free Software Foundation; either version 2 of the License, or | ||
| 9 | # (at your option) any later version. | ||
| 10 | # | ||
| 11 | # This program is distributed in the hope that it will be useful, | ||
| 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See | ||
| 14 | # the GNU General Public License for more details. | ||
| 15 | # | ||
| 16 | # You should have received a copy of the GNU General Public License | ||
| 17 | # along with this program; if not, write to the Free Software | ||
| 18 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | ||
| 19 | # | ||
| 20 | |||
# Force the C locale so the parsed output of external tools (parted, etc.)
# is predictable.
LANG=C

#
# Defaults
#
# 20 Mb for the boot partition
BOOT_SIZE=20
# 5% for swap
SWAP_RATIO=5
| 30 | |||
# Print command-line help for this script to stdout.
usage() {
	cat <<EOF
Usage: $(basename "$0") DEVICE HDDIMG TARGET_DEVICE
       DEVICE: The device to write the image to, e.g. /dev/sdh
       HDDIMG: The hddimg file to generate the efi disk from
       TARGET_DEVICE: The device the target will boot from, e.g. /dev/mmcblk0
EOF
}
| 37 | |||
# Print identifying details of an image file: quoted name, size, mtime and
# file(1) type.  Symlinks are followed (-L) for everything but the name.
# $1 - path to the image file
# Fix: all $IMG expansions are now quoted so paths containing whitespace work.
image_details() {
	IMG=$1
	echo "Image details"
	echo "============="
	echo "    image: $(stat --printf '%N\n' "$IMG")"
	echo "     size: $(stat -L --printf '%s bytes\n' "$IMG")"
	echo " modified: $(stat -L --printf '%y\n' "$IMG")"
	echo "     type: $(file -L -b "$IMG")"
	echo ""
}
| 48 | |||
# Print vendor/model/size details of a block device from sysfs.
# $1 - device name without the /dev/ prefix (e.g. "sdb"), used for sysfs
#      lookups.  The "device:" line intentionally prints the global $DEVICE
#      (the full path the user supplied), not $1.
# Prints UNKNOWN for any attribute sysfs does not expose.
# Fix: the vendor fallback said "UNKOWN" (typo) while the model fallback said
# "UNKNOWN"; both now consistently print "UNKNOWN".
device_details() {
	DEV=$1
	BLOCK_SIZE=512

	echo "Device details"
	echo "=============="
	echo "  device: $DEVICE"
	if [ -f "/sys/class/block/$DEV/device/vendor" ]; then
		echo "  vendor: $(cat "/sys/class/block/$DEV/device/vendor")"
	else
		echo "  vendor: UNKNOWN"
	fi
	if [ -f "/sys/class/block/$DEV/device/model" ]; then
		echo "   model: $(cat "/sys/class/block/$DEV/device/model")"
	else
		echo "   model: UNKNOWN"
	fi
	if [ -f "/sys/class/block/$DEV/size" ]; then
		# The sysfs size file holds a count of 512-byte sectors.
		echo "    size: $(($(cat "/sys/class/block/$DEV/size") * $BLOCK_SIZE)) bytes"
	else
		echo "    size: UNKNOWN"
	fi
	echo ""
}
| 73 | |||
# Unmount every mounted partition of the global $DEVICE.
# Exits the whole script if any partition is still mounted after the attempt,
# so we never repartition or format a device that is in use.
unmount_device() {
	grep -q $DEVICE /proc/mounts
	if [ $? -eq 0 ]; then
		echo -n "$DEVICE listed in /proc/mounts, attempting to unmount..."
		# The glob expands to every partition node, e.g. /dev/sdb1 /dev/sdb2.
		umount $DEVICE* 2>/dev/null
		grep -q $DEVICE /proc/mounts
		if [ $? -eq 0 ]; then
			echo "FAILED"
			exit 1
		fi
		echo "OK"
	fi
}
| 87 | |||
| 88 | |||
| 89 | # | ||
| 90 | # Parse and validate arguments | ||
| 91 | # | ||
| 92 | if [ $# -ne 3 ]; then | ||
| 93 | usage | ||
| 94 | exit 1 | ||
| 95 | fi | ||
| 96 | |||
| 97 | DEVICE=$1 | ||
| 98 | HDDIMG=$2 | ||
| 99 | TARGET_DEVICE=$3 | ||
| 100 | |||
| 101 | if [ ! -w "$DEVICE" ]; then | ||
| 102 | echo "ERROR: Device $DEVICE does not exist or is not writable" | ||
| 103 | usage | ||
| 104 | exit 1 | ||
| 105 | fi | ||
| 106 | |||
| 107 | if [ ! -e "$HDDIMG" ]; then | ||
| 108 | echo "ERROR: HDDIMG $HDDIMG does not exist" | ||
| 109 | usage | ||
| 110 | exit 1 | ||
| 111 | fi | ||
| 112 | |||
| 113 | |||
| 114 | # | ||
| 115 | # Check if any $DEVICE partitions are mounted | ||
| 116 | # | ||
| 117 | unmount_device | ||
| 118 | |||
| 119 | |||
| 120 | # | ||
| 121 | # Confirm device with user | ||
| 122 | # | ||
| 123 | image_details $HDDIMG | ||
| 124 | device_details $(basename $DEVICE) | ||
| 125 | echo -n "Prepare EFI image on $DEVICE [y/N]? " | ||
| 126 | read RESPONSE | ||
| 127 | if [ "$RESPONSE" != "y" ]; then | ||
| 128 | echo "Image creation aborted" | ||
| 129 | exit 0 | ||
| 130 | fi | ||
| 131 | |||
| 132 | |||
| 133 | # | ||
| 134 | # Partition $DEVICE | ||
| 135 | # | ||
| 136 | DEVICE_SIZE=$(parted $DEVICE unit mb print | grep ^Disk | cut -d" " -f 3 | sed -e "s/MB//") | ||
| 137 | # If the device size is not reported there may not be a valid label | ||
| 138 | if [ "$DEVICE_SIZE" = "" ] ; then | ||
| 139 | parted $DEVICE mklabel msdos | ||
| 140 | DEVICE_SIZE=$(parted $DEVICE unit mb print | grep ^Disk | cut -d" " -f 3 | sed -e "s/MB//") | ||
| 141 | fi | ||
| 142 | SWAP_SIZE=$((DEVICE_SIZE*SWAP_RATIO/100)) | ||
| 143 | ROOTFS_SIZE=$((DEVICE_SIZE-BOOT_SIZE-SWAP_SIZE)) | ||
| 144 | ROOTFS_START=$((BOOT_SIZE)) | ||
| 145 | ROOTFS_END=$((ROOTFS_START+ROOTFS_SIZE)) | ||
| 146 | SWAP_START=$((ROOTFS_END)) | ||
| 147 | |||
| 148 | # MMC devices use a partition prefix character 'p' | ||
| 149 | PART_PREFIX="" | ||
| 150 | if [ ! "${DEVICE#/dev/mmcblk}" = "${DEVICE}" ] || [ ! "${DEVICE#/dev/loop}" = "${DEVICE}" ]; then | ||
| 151 | PART_PREFIX="p" | ||
| 152 | fi | ||
| 153 | BOOTFS=$DEVICE${PART_PREFIX}1 | ||
| 154 | ROOTFS=$DEVICE${PART_PREFIX}2 | ||
| 155 | SWAP=$DEVICE${PART_PREFIX}3 | ||
| 156 | |||
| 157 | TARGET_PART_PREFIX="" | ||
| 158 | if [ ! "${TARGET_DEVICE#/dev/mmcblk}" = "${TARGET_DEVICE}" ]; then | ||
| 159 | TARGET_PART_PREFIX="p" | ||
| 160 | fi | ||
| 161 | TARGET_ROOTFS=$TARGET_DEVICE${TARGET_PART_PREFIX}2 | ||
| 162 | TARGET_SWAP=$TARGET_DEVICE${TARGET_PART_PREFIX}3 | ||
| 163 | |||
| 164 | echo "*****************" | ||
| 165 | echo "Boot partition size: $BOOT_SIZE MB ($BOOTFS)" | ||
| 166 | echo "ROOTFS partition size: $ROOTFS_SIZE MB ($ROOTFS)" | ||
| 167 | echo "Swap partition size: $SWAP_SIZE MB ($SWAP)" | ||
| 168 | echo "*****************" | ||
| 169 | |||
| 170 | echo "Deleting partition table on $DEVICE ..." | ||
| 171 | dd if=/dev/zero of=$DEVICE bs=512 count=2 | ||
| 172 | |||
| 173 | # Use MSDOS by default as GPT cannot be reliably distributed in disk image form | ||
| 174 | # as it requires the backup table to be on the last block of the device, which | ||
| 175 | # of course varies from device to device. | ||
| 176 | echo "Creating new partition table (MSDOS) on $DEVICE ..." | ||
| 177 | parted $DEVICE mklabel msdos | ||
| 178 | |||
| 179 | echo "Creating boot partition on $BOOTFS" | ||
| 180 | parted $DEVICE mkpart primary 0% $BOOT_SIZE | ||
| 181 | |||
| 182 | echo "Enabling boot flag on $BOOTFS" | ||
| 183 | parted $DEVICE set 1 boot on | ||
| 184 | |||
| 185 | echo "Creating ROOTFS partition on $ROOTFS" | ||
| 186 | parted $DEVICE mkpart primary $ROOTFS_START $ROOTFS_END | ||
| 187 | |||
| 188 | echo "Creating swap partition on $SWAP" | ||
| 189 | parted $DEVICE mkpart primary $SWAP_START 100% | ||
| 190 | |||
| 191 | parted $DEVICE print | ||
| 192 | |||
| 193 | |||
| 194 | # | ||
| 195 | # Check if any $DEVICE partitions are mounted after partitioning | ||
| 196 | # | ||
| 197 | unmount_device | ||
| 198 | |||
| 199 | |||
| 200 | # | ||
| 201 | # Format $DEVICE partitions | ||
| 202 | # | ||
| 203 | echo "" | ||
| 204 | echo "Formatting $BOOTFS as vfat..." | ||
| 205 | if [ ! "${DEVICE#/dev/loop}" = "${DEVICE}" ]; then | ||
| 206 | mkfs.vfat -I $BOOTFS -n "efi" | ||
| 207 | else | ||
| 208 | mkfs.vfat $BOOTFS -n "efi" | ||
| 209 | |||
| 210 | fi | ||
| 211 | |||
| 212 | echo "Formatting $ROOTFS as ext3..." | ||
| 213 | mkfs.ext3 $ROOTFS -L "root" | ||
| 214 | |||
| 215 | echo "Formatting swap partition...($SWAP)" | ||
| 216 | mkswap $SWAP | ||
| 217 | |||
| 218 | |||
| 219 | # | ||
| 220 | # Installing to $DEVICE | ||
| 221 | # | ||
| 222 | echo "" | ||
| 223 | echo "Mounting images and device in preparation for installation..." | ||
| 224 | TMPDIR=$(mktemp -d mkefidisk-XXX) | ||
| 225 | if [ $? -ne 0 ]; then | ||
| 226 | echo "ERROR: Failed to create temporary mounting directory." | ||
| 227 | exit 1 | ||
| 228 | fi | ||
| 229 | HDDIMG_MNT=$TMPDIR/hddimg | ||
| 230 | HDDIMG_ROOTFS_MNT=$TMPDIR/hddimg_rootfs | ||
| 231 | ROOTFS_MNT=$TMPDIR/rootfs | ||
| 232 | BOOTFS_MNT=$TMPDIR/bootfs | ||
| 233 | mkdir $HDDIMG_MNT | ||
| 234 | mkdir $HDDIMG_ROOTFS_MNT | ||
| 235 | mkdir $ROOTFS_MNT | ||
| 236 | mkdir $BOOTFS_MNT | ||
| 237 | |||
| 238 | mount -o loop $HDDIMG $HDDIMG_MNT | ||
| 239 | mount -o loop $HDDIMG_MNT/rootfs.img $HDDIMG_ROOTFS_MNT | ||
| 240 | mount $ROOTFS $ROOTFS_MNT | ||
| 241 | mount $BOOTFS $BOOTFS_MNT | ||
| 242 | |||
| 243 | echo "Copying ROOTFS files..." | ||
| 244 | cp -a $HDDIMG_ROOTFS_MNT/* $ROOTFS_MNT | ||
| 245 | |||
| 246 | echo "$TARGET_SWAP swap swap defaults 0 0" >> $ROOTFS_MNT/etc/fstab | ||
| 247 | |||
| 248 | # We dont want udev to mount our root device while we're booting... | ||
| 249 | if [ -d $ROOTFS_MNT/etc/udev/ ] ; then | ||
| 250 | echo "$TARGET_DEVICE" >> $ROOTFS_MNT/etc/udev/mount.blacklist | ||
| 251 | fi | ||
| 252 | |||
| 253 | umount $ROOTFS_MNT | ||
| 254 | umount $HDDIMG_ROOTFS_MNT | ||
| 255 | |||
| 256 | echo "Preparing boot partition..." | ||
| 257 | EFIDIR="$BOOTFS_MNT/EFI/BOOT" | ||
| 258 | mkdir -p $EFIDIR | ||
| 259 | GRUBCFG="$EFIDIR/grub.cfg" | ||
| 260 | |||
| 261 | cp $HDDIMG_MNT/vmlinuz $BOOTFS_MNT | ||
| 262 | # Copy the efi loader and config (booti*.efi and grub.cfg) | ||
| 263 | cp $HDDIMG_MNT/EFI/BOOT/* $EFIDIR | ||
| 264 | |||
| 265 | # Update grub config for the installed image | ||
| 266 | # Delete the install entry | ||
| 267 | sed -i "/menuentry 'install'/,/^}/d" $GRUBCFG | ||
| 268 | # Delete the initrd lines | ||
| 269 | sed -i "/initrd /d" $GRUBCFG | ||
| 270 | # Delete any LABEL= strings | ||
| 271 | sed -i "s/ LABEL=[^ ]*/ /" $GRUBCFG | ||
| 272 | # Remove any existing root= kernel parameters and: | ||
| 273 | # o Add a root= parameter with the target rootfs | ||
| 274 | # o Specify ro so fsck can be run during boot | ||
| 275 | # o Specify rootwait in case the target media is an asyncronous block device | ||
| 276 | # such as MMC or USB disks | ||
| 277 | # o Specify "quiet" to minimize boot time when using slow serial consoles | ||
| 278 | sed -i "s@ root=[^ ]*@ @" $GRUBCFG | ||
| 279 | sed -i "s@vmlinuz @vmlinuz root=$TARGET_ROOTFS ro rootwait quiet @" $GRUBCFG | ||
| 280 | |||
| 281 | umount $BOOTFS_MNT | ||
| 282 | umount $HDDIMG_MNT | ||
| 283 | rm -rf $TMPDIR | ||
| 284 | sync | ||
| 285 | |||
| 286 | echo "Installation complete." | ||
diff --git a/scripts/contrib/python/generate-manifest-2.7.py b/scripts/contrib/python/generate-manifest-2.7.py new file mode 100755 index 0000000000..4356ad0ddd --- /dev/null +++ b/scripts/contrib/python/generate-manifest-2.7.py | |||
| @@ -0,0 +1,388 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | |||
| 3 | # generate Python Manifest for the OpenEmbedded build system | ||
| 4 | # (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de> | ||
| 5 | # (C) 2007 Jeremy Laine | ||
| 6 | # licensed under MIT, see COPYING.MIT | ||
| 7 | # | ||
| 8 | # June 22, 2011 -- Mark Hatle <mark.hatle@windriver.com> | ||
| 9 | # * Updated to no longer generate special -dbg package, instead use the | ||
| 10 | # single system -dbg | ||
| 11 | # * Update version with ".1" to indicate this change | ||
| 12 | |||
| 13 | import os | ||
| 14 | import sys | ||
| 15 | import time | ||
| 16 | |||
| 17 | VERSION = "2.7.2" | ||
| 18 | |||
| 19 | __author__ = "Michael 'Mickey' Lauer <mlauer@vanille-media.de>" | ||
| 20 | __version__ = "20110222.2" | ||
| 21 | |||
| 22 | class MakefileMaker: | ||
| 23 | |||
    def __init__( self, outfile ):
        """initialize"""
        # Maps package name -> (description, dependencies, file list);
        # filled in by addPackage() and consumed later when generating output.
        self.packages = {}
        # Default install prefix for module files, e.g. ${libdir}/python2.7/
        self.targetPrefix = "${libdir}/python%s/" % VERSION[:3]
        self.output = outfile
        # Emit the auto-generated-file warning banner immediately.
        self.out( """
# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file.
# Generator: '%s' Version %s (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy
""" % ( sys.argv[0], __version__ ) )
| 34 | |||
| 35 | # | ||
| 36 | # helper functions | ||
| 37 | # | ||
| 38 | |||
| 39 | def out( self, data ): | ||
| 40 | """print a line to the output file""" | ||
| 41 | self.output.write( "%s\n" % data ) | ||
| 42 | |||
| 43 | def setPrefix( self, targetPrefix ): | ||
| 44 | """set a file prefix for addPackage files""" | ||
| 45 | self.targetPrefix = targetPrefix | ||
| 46 | |||
| 47 | def doProlog( self ): | ||
| 48 | self.out( """ """ ) | ||
| 49 | self.out( "" ) | ||
| 50 | |||
| 51 | def addPackage( self, name, description, dependencies, filenames ): | ||
| 52 | """add a package to the Makefile""" | ||
| 53 | if type( filenames ) == type( "" ): | ||
| 54 | filenames = filenames.split() | ||
| 55 | fullFilenames = [] | ||
| 56 | for filename in filenames: | ||
| 57 | if filename[0] != "$": | ||
| 58 | fullFilenames.append( "%s%s" % ( self.targetPrefix, filename ) ) | ||
| 59 | else: | ||
| 60 | fullFilenames.append( filename ) | ||
| 61 | self.packages[name] = description, dependencies, fullFilenames | ||
| 62 | |||
| 63 | def doBody( self ): | ||
| 64 | """generate body of Makefile""" | ||
| 65 | |||
| 66 | global VERSION | ||
| 67 | |||
| 68 | # | ||
| 69 | # generate provides line | ||
| 70 | # | ||
| 71 | |||
| 72 | provideLine = 'PROVIDES+="' | ||
| 73 | for name in sorted(self.packages): | ||
| 74 | provideLine += "%s " % name | ||
| 75 | provideLine += '"' | ||
| 76 | |||
| 77 | self.out( provideLine ) | ||
| 78 | self.out( "" ) | ||
| 79 | |||
| 80 | # | ||
| 81 | # generate package line | ||
| 82 | # | ||
| 83 | |||
| 84 | packageLine = 'PACKAGES="${PN}-dbg ' | ||
| 85 | for name in sorted(self.packages): | ||
| 86 | if name.startswith("${PN}-distutils"): | ||
| 87 | if name == "${PN}-distutils": | ||
| 88 | packageLine += "%s-staticdev %s " % (name, name) | ||
| 89 | elif name != '${PN}-dbg': | ||
| 90 | packageLine += "%s " % name | ||
| 91 | packageLine += '${PN}-modules"' | ||
| 92 | |||
| 93 | self.out( packageLine ) | ||
| 94 | self.out( "" ) | ||
| 95 | |||
| 96 | # | ||
| 97 | # generate package variables | ||
| 98 | # | ||
| 99 | |||
| 100 | for name, data in sorted(self.packages.iteritems()): | ||
| 101 | desc, deps, files = data | ||
| 102 | |||
| 103 | # | ||
| 104 | # write out the description, revision and dependencies | ||
| 105 | # | ||
| 106 | self.out( 'SUMMARY_%s="%s"' % ( name, desc ) ) | ||
| 107 | self.out( 'RDEPENDS_%s="%s"' % ( name, deps ) ) | ||
| 108 | |||
| 109 | line = 'FILES_%s="' % name | ||
| 110 | |||
| 111 | # | ||
| 112 | # check which directories to make in the temporary directory | ||
| 113 | # | ||
| 114 | |||
| 115 | dirset = {} # if python had a set-datatype this would be sufficient. for now, we're using a dict instead. | ||
| 116 | for target in files: | ||
| 117 | dirset[os.path.dirname( target )] = True | ||
| 118 | |||
| 119 | # | ||
| 120 | # generate which files to copy for the target (-dfR because whole directories are also allowed) | ||
| 121 | # | ||
| 122 | |||
| 123 | for target in files: | ||
| 124 | line += "%s " % target | ||
| 125 | |||
| 126 | line += '"' | ||
| 127 | self.out( line ) | ||
| 128 | self.out( "" ) | ||
| 129 | |||
| 130 | self.out( 'SUMMARY_${PN}-modules="All Python modules"' ) | ||
| 131 | line = 'RDEPENDS_${PN}-modules="' | ||
| 132 | |||
| 133 | for name, data in sorted(self.packages.iteritems()): | ||
| 134 | if name not in ['${PN}-dev', '${PN}-distutils-staticdev']: | ||
| 135 | line += "%s " % name | ||
| 136 | |||
| 137 | self.out( "%s \"" % line ) | ||
| 138 | self.out( 'ALLOW_EMPTY_${PN}-modules = "1"' ) | ||
| 139 | |||
| 140 | def doEpilog( self ): | ||
| 141 | self.out( """""" ) | ||
| 142 | self.out( "" ) | ||
| 143 | |||
| 144 | def make( self ): | ||
| 145 | self.doProlog() | ||
| 146 | self.doBody() | ||
| 147 | self.doEpilog() | ||
| 148 | |||
if __name__ == "__main__":

    # Write to the file named on the command line, or stdout if none given.
    if len( sys.argv ) > 1:
        # Remove any stale output first.  open(..., "w") would truncate the
        # file anyway, but unlinking also replaces a read-only file.
        try:
            os.unlink(sys.argv[1])
        except OSError:
            # File did not exist (or could not be removed); open() below
            # will surface any real error.
            pass
        # open() instead of the Python-2-only file() builtin.
        outfile = open( sys.argv[1], "w" )
    else:
        outfile = sys.stdout

    m = MakefileMaker( outfile )
| 161 | |||
| 162 | # Add packages here. Only specify dlopen-style library dependencies here, no ldd-style dependencies! | ||
| 163 | # Parameters: revision, name, description, dependencies, filenames | ||
| 164 | # | ||
| 165 | |||
| 166 | m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re", | ||
| 167 | "__future__.* _abcoll.* abc.* copy.* copy_reg.* ConfigParser.* " + | ||
| 168 | "genericpath.* getopt.* linecache.* new.* " + | ||
| 169 | "os.* posixpath.* struct.* " + | ||
| 170 | "warnings.* site.* stat.* " + | ||
| 171 | "UserDict.* UserList.* UserString.* " + | ||
| 172 | "lib-dynload/binascii.so lib-dynload/_struct.so lib-dynload/time.so " + | ||
| 173 | "lib-dynload/xreadlines.so types.* platform.* ${bindir}/python* " + | ||
| 174 | "_weakrefset.* sysconfig.* config/Makefile " + | ||
| 175 | "${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h " + | ||
| 176 | "${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py ") | ||
| 177 | |||
| 178 | m.addPackage( "${PN}-dev", "Python development package", "${PN}-core", | ||
| 179 | "${includedir} " + | ||
| 180 | "${libdir}/lib*${SOLIBSDEV} " + | ||
| 181 | "${libdir}/*.la " + | ||
| 182 | "${libdir}/*.a " + | ||
| 183 | "${libdir}/*.o " + | ||
| 184 | "${libdir}/pkgconfig " + | ||
| 185 | "${base_libdir}/*.a " + | ||
| 186 | "${base_libdir}/*.o " + | ||
| 187 | "${datadir}/aclocal " + | ||
| 188 | "${datadir}/pkgconfig " ) | ||
| 189 | |||
| 190 | m.addPackage( "${PN}-2to3", "Python automated Python 2 to 3 code translator", "${PN}-core", | ||
| 191 | "${bindir}/2to3 lib2to3" ) # package | ||
| 192 | |||
| 193 | m.addPackage( "${PN}-idle", "Python Integrated Development Environment", "${PN}-core ${PN}-tkinter", | ||
| 194 | "${bindir}/idle idlelib" ) # package | ||
| 195 | |||
| 196 | m.addPackage( "${PN}-pydoc", "Python interactive help support", "${PN}-core ${PN}-lang ${PN}-stringold ${PN}-re", | ||
| 197 | "${bindir}/pydoc pydoc.* pydoc_data" ) | ||
| 198 | |||
| 199 | m.addPackage( "${PN}-smtpd", "Python Simple Mail Transport Daemon", "${PN}-core ${PN}-netserver ${PN}-email ${PN}-mime", | ||
| 200 | "${bindir}/smtpd.* smtpd.*" ) | ||
| 201 | |||
| 202 | m.addPackage( "${PN}-audio", "Python Audio Handling", "${PN}-core", | ||
| 203 | "wave.* chunk.* sndhdr.* lib-dynload/ossaudiodev.so lib-dynload/audioop.so audiodev.* sunaudio.* sunau.* toaiff.*" ) | ||
| 204 | |||
| 205 | m.addPackage( "${PN}-bsddb", "Python bindings for the Berkeley Database", "${PN}-core", | ||
| 206 | "bsddb lib-dynload/_bsddb.so" ) # package | ||
| 207 | |||
| 208 | m.addPackage( "${PN}-codecs", "Python codecs, encodings & i18n support", "${PN}-core ${PN}-lang", | ||
| 209 | "codecs.* encodings gettext.* locale.* lib-dynload/_locale.so lib-dynload/_codecs* lib-dynload/_multibytecodec.so lib-dynload/unicodedata.so stringprep.* xdrlib.*" ) | ||
| 210 | |||
| 211 | m.addPackage( "${PN}-compile", "Python bytecode compilation support", "${PN}-core", | ||
| 212 | "py_compile.* compileall.*" ) | ||
| 213 | |||
| 214 | m.addPackage( "${PN}-compiler", "Python compiler support", "${PN}-core", | ||
| 215 | "compiler" ) # package | ||
| 216 | |||
| 217 | m.addPackage( "${PN}-compression", "Python high-level compression support", "${PN}-core ${PN}-zlib", | ||
| 218 | "gzip.* zipfile.* tarfile.* lib-dynload/bz2.so" ) | ||
| 219 | |||
| 220 | m.addPackage( "${PN}-crypt", "Python basic cryptographic and hashing support", "${PN}-core", | ||
| 221 | "hashlib.* md5.* sha.* lib-dynload/crypt.so lib-dynload/_hashlib.so lib-dynload/_sha256.so lib-dynload/_sha512.so" ) | ||
| 222 | |||
| 223 | m.addPackage( "${PN}-textutils", "Python option parsing, text wrapping and CSV support", "${PN}-core ${PN}-io ${PN}-re ${PN}-stringold", | ||
| 224 | "lib-dynload/_csv.so csv.* optparse.* textwrap.*" ) | ||
| 225 | |||
| 226 | m.addPackage( "${PN}-curses", "Python curses support", "${PN}-core", | ||
| 227 | "curses lib-dynload/_curses.so lib-dynload/_curses_panel.so" ) # directory + low level module | ||
| 228 | |||
| 229 | m.addPackage( "${PN}-ctypes", "Python C types support", "${PN}-core", | ||
| 230 | "ctypes lib-dynload/_ctypes.so lib-dynload/_ctypes_test.so" ) # directory + low level module | ||
| 231 | |||
| 232 | m.addPackage( "${PN}-datetime", "Python calendar and time support", "${PN}-core ${PN}-codecs", | ||
| 233 | "_strptime.* calendar.* lib-dynload/datetime.so" ) | ||
| 234 | |||
| 235 | m.addPackage( "${PN}-db", "Python file-based database support", "${PN}-core", | ||
| 236 | "anydbm.* dumbdbm.* whichdb.* " ) | ||
| 237 | |||
| 238 | m.addPackage( "${PN}-debugger", "Python debugger", "${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint", | ||
| 239 | "bdb.* pdb.*" ) | ||
| 240 | |||
| 241 | m.addPackage( "${PN}-difflib", "Python helpers for computing deltas between objects", "${PN}-lang ${PN}-re", | ||
| 242 | "difflib.*" ) | ||
| 243 | |||
| 244 | m.addPackage( "${PN}-distutils-staticdev", "Python distribution utilities (static libraries)", "${PN}-distutils", | ||
| 245 | "config/lib*.a" ) # package | ||
| 246 | |||
| 247 | m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core", | ||
| 248 | "config distutils" ) # package | ||
| 249 | |||
| 250 | m.addPackage( "${PN}-doctest", "Python framework for running examples in docstrings", "${PN}-core ${PN}-lang ${PN}-io ${PN}-re ${PN}-unittest ${PN}-debugger ${PN}-difflib", | ||
| 251 | "doctest.*" ) | ||
| 252 | |||
| 253 | # FIXME consider adding to some higher level package | ||
| 254 | m.addPackage( "${PN}-elementtree", "Python elementree", "${PN}-core", | ||
| 255 | "lib-dynload/_elementtree.so" ) | ||
| 256 | |||
| 257 | m.addPackage( "${PN}-email", "Python email support", "${PN}-core ${PN}-io ${PN}-re ${PN}-mime ${PN}-audio ${PN}-image ${PN}-netclient", | ||
| 258 | "imaplib.* email" ) # package | ||
| 259 | |||
| 260 | m.addPackage( "${PN}-fcntl", "Python's fcntl interface", "${PN}-core", | ||
| 261 | "lib-dynload/fcntl.so" ) | ||
| 262 | |||
| 263 | m.addPackage( "${PN}-hotshot", "Python hotshot performance profiler", "${PN}-core", | ||
| 264 | "hotshot lib-dynload/_hotshot.so" ) | ||
| 265 | |||
| 266 | m.addPackage( "${PN}-html", "Python HTML processing support", "${PN}-core", | ||
| 267 | "formatter.* htmlentitydefs.* htmllib.* markupbase.* sgmllib.* HTMLParser.* " ) | ||
| 268 | |||
| 269 | m.addPackage( "${PN}-gdbm", "Python GNU database support", "${PN}-core", | ||
| 270 | "lib-dynload/gdbm.so" ) | ||
| 271 | |||
| 272 | m.addPackage( "${PN}-image", "Python graphical image handling", "${PN}-core", | ||
| 273 | "colorsys.* imghdr.* lib-dynload/imageop.so lib-dynload/rgbimg.so" ) | ||
| 274 | |||
| 275 | m.addPackage( "${PN}-io", "Python low-level I/O", "${PN}-core ${PN}-math ${PN}-textutils", | ||
| 276 | "lib-dynload/_socket.so lib-dynload/_io.so lib-dynload/_ssl.so lib-dynload/select.so lib-dynload/termios.so lib-dynload/cStringIO.so " + | ||
| 277 | "pipes.* socket.* ssl.* tempfile.* StringIO.* io.* _pyio.*" ) | ||
| 278 | |||
| 279 | m.addPackage( "${PN}-json", "Python JSON support", "${PN}-core ${PN}-math ${PN}-re", | ||
| 280 | "json lib-dynload/_json.so" ) # package | ||
| 281 | |||
| 282 | m.addPackage( "${PN}-lang", "Python low-level language support", "${PN}-core", | ||
| 283 | "lib-dynload/_bisect.so lib-dynload/_collections.so lib-dynload/_heapq.so lib-dynload/_weakref.so lib-dynload/_functools.so " + | ||
| 284 | "lib-dynload/array.so lib-dynload/itertools.so lib-dynload/operator.so lib-dynload/parser.so " + | ||
| 285 | "atexit.* bisect.* code.* codeop.* collections.* dis.* functools.* heapq.* inspect.* keyword.* opcode.* symbol.* repr.* token.* " + | ||
| 286 | "tokenize.* traceback.* weakref.*" ) | ||
| 287 | |||
| 288 | m.addPackage( "${PN}-logging", "Python logging support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-stringold", | ||
| 289 | "logging" ) # package | ||
| 290 | |||
| 291 | m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime", | ||
| 292 | "mailbox.*" ) | ||
| 293 | |||
| 294 | m.addPackage( "${PN}-math", "Python math support", "${PN}-core ${PN}-crypt", | ||
| 295 | "lib-dynload/cmath.so lib-dynload/math.so lib-dynload/_random.so random.* sets.*" ) | ||
| 296 | |||
| 297 | m.addPackage( "${PN}-mime", "Python MIME handling APIs", "${PN}-core ${PN}-io", | ||
| 298 | "mimetools.* uu.* quopri.* rfc822.* MimeWriter.*" ) | ||
| 299 | |||
| 300 | m.addPackage( "${PN}-mmap", "Python memory-mapped file support", "${PN}-core ${PN}-io", | ||
| 301 | "lib-dynload/mmap.so " ) | ||
| 302 | |||
| 303 | m.addPackage( "${PN}-multiprocessing", "Python multiprocessing support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-threading ${PN}-ctypes ${PN}-mmap", | ||
| 304 | "lib-dynload/_multiprocessing.so multiprocessing" ) # package | ||
| 305 | |||
| 306 | m.addPackage( "${PN}-netclient", "Python Internet Protocol clients", "${PN}-core ${PN}-crypt ${PN}-datetime ${PN}-io ${PN}-lang ${PN}-logging ${PN}-mime", | ||
| 307 | "*Cookie*.* " + | ||
| 308 | "base64.* cookielib.* ftplib.* gopherlib.* hmac.* httplib.* mimetypes.* nntplib.* poplib.* smtplib.* telnetlib.* urllib.* urllib2.* urlparse.* uuid.* rfc822.* mimetools.*" ) | ||
| 309 | |||
| 310 | m.addPackage( "${PN}-netserver", "Python Internet Protocol servers", "${PN}-core ${PN}-netclient", | ||
| 311 | "cgi.* *HTTPServer.* SocketServer.*" ) | ||
| 312 | |||
| 313 | m.addPackage( "${PN}-numbers", "Python number APIs", "${PN}-core ${PN}-lang ${PN}-re", | ||
| 314 | "decimal.* numbers.*" ) | ||
| 315 | |||
| 316 | m.addPackage( "${PN}-pickle", "Python serialisation/persistence support", "${PN}-core ${PN}-codecs ${PN}-io ${PN}-re", | ||
| 317 | "pickle.* shelve.* lib-dynload/cPickle.so pickletools.*" ) | ||
| 318 | |||
| 319 | m.addPackage( "${PN}-pkgutil", "Python package extension utility support", "${PN}-core", | ||
| 320 | "pkgutil.*") | ||
| 321 | |||
| 322 | m.addPackage( "${PN}-pprint", "Python pretty-print support", "${PN}-core ${PN}-io", | ||
| 323 | "pprint.*" ) | ||
| 324 | |||
| 325 | m.addPackage( "${PN}-profile", "Python basic performance profiling support", "${PN}-core ${PN}-textutils", | ||
| 326 | "profile.* pstats.* cProfile.* lib-dynload/_lsprof.so" ) | ||
| 327 | |||
| 328 | m.addPackage( "${PN}-re", "Python Regular Expression APIs", "${PN}-core", | ||
| 329 | "re.* sre.* sre_compile.* sre_constants* sre_parse.*" ) # _sre is builtin | ||
| 330 | |||
| 331 | m.addPackage( "${PN}-readline", "Python readline support", "${PN}-core", | ||
| 332 | "lib-dynload/readline.so rlcompleter.*" ) | ||
| 333 | |||
| 334 | m.addPackage( "${PN}-resource", "Python resource control interface", "${PN}-core", | ||
| 335 | "lib-dynload/resource.so" ) | ||
| 336 | |||
| 337 | m.addPackage( "${PN}-shell", "Python shell-like functionality", "${PN}-core ${PN}-re", | ||
| 338 | "cmd.* commands.* dircache.* fnmatch.* glob.* popen2.* shlex.* shutil.*" ) | ||
| 339 | |||
| 340 | m.addPackage( "${PN}-robotparser", "Python robots.txt parser", "${PN}-core ${PN}-netclient", | ||
| 341 | "robotparser.*") | ||
| 342 | |||
| 343 | m.addPackage( "${PN}-subprocess", "Python subprocess support", "${PN}-core ${PN}-io ${PN}-re ${PN}-fcntl ${PN}-pickle", | ||
| 344 | "subprocess.*" ) | ||
| 345 | |||
| 346 | m.addPackage( "${PN}-sqlite3", "Python Sqlite3 database support", "${PN}-core ${PN}-datetime ${PN}-lang ${PN}-crypt ${PN}-io ${PN}-threading ${PN}-zlib", | ||
| 347 | "lib-dynload/_sqlite3.so sqlite3/dbapi2.* sqlite3/__init__.* sqlite3/dump.*" ) | ||
| 348 | |||
| 349 | m.addPackage( "${PN}-sqlite3-tests", "Python Sqlite3 database support tests", "${PN}-core ${PN}-sqlite3", | ||
| 350 | "sqlite3/test" ) | ||
| 351 | |||
| 352 | m.addPackage( "${PN}-stringold", "Python string APIs [deprecated]", "${PN}-core ${PN}-re", | ||
| 353 | "lib-dynload/strop.so string.* stringold.*" ) | ||
| 354 | |||
| 355 | m.addPackage( "${PN}-syslog", "Python syslog interface", "${PN}-core", | ||
| 356 | "lib-dynload/syslog.so" ) | ||
| 357 | |||
| 358 | m.addPackage( "${PN}-terminal", "Python terminal controlling support", "${PN}-core ${PN}-io", | ||
| 359 | "pty.* tty.*" ) | ||
| 360 | |||
| 361 | m.addPackage( "${PN}-tests", "Python tests", "${PN}-core", | ||
| 362 | "test" ) # package | ||
| 363 | |||
| 364 | m.addPackage( "${PN}-threading", "Python threading & synchronization support", "${PN}-core ${PN}-lang", | ||
| 365 | "_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* Queue.*" ) | ||
| 366 | |||
| 367 | m.addPackage( "${PN}-tkinter", "Python Tcl/Tk bindings", "${PN}-core", | ||
| 368 | "lib-dynload/_tkinter.so lib-tk" ) # package | ||
| 369 | |||
| 370 | m.addPackage( "${PN}-unittest", "Python unit testing framework", "${PN}-core ${PN}-stringold ${PN}-lang ${PN}-io ${PN}-difflib ${PN}-pprint ${PN}-shell", | ||
| 371 | "unittest/" ) | ||
| 372 | |||
| 373 | m.addPackage( "${PN}-unixadmin", "Python Unix administration support", "${PN}-core", | ||
| 374 | "lib-dynload/nis.so lib-dynload/grp.so lib-dynload/pwd.so getpass.*" ) | ||
| 375 | |||
| 376 | m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-elementtree ${PN}-re", | ||
| 377 | "lib-dynload/pyexpat.so xml xmllib.*" ) # package | ||
| 378 | |||
| 379 | m.addPackage( "${PN}-xmlrpc", "Python XML-RPC support", "${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang", | ||
| 380 | "xmlrpclib.* SimpleXMLRPCServer.* DocXMLRPCServer.*" ) | ||
| 381 | |||
| 382 | m.addPackage( "${PN}-zlib", "Python zlib compression support", "${PN}-core", | ||
| 383 | "lib-dynload/zlib.so" ) | ||
| 384 | |||
| 385 | m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime", | ||
| 386 | "mailbox.*" ) | ||
| 387 | |||
| 388 | m.make() | ||
diff --git a/scripts/contrib/python/generate-manifest-3.3.py b/scripts/contrib/python/generate-manifest-3.3.py new file mode 100755 index 0000000000..1586c46868 --- /dev/null +++ b/scripts/contrib/python/generate-manifest-3.3.py | |||
| @@ -0,0 +1,380 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | |||
| 3 | # generate Python Manifest for the OpenEmbedded build system | ||
| 4 | # (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de> | ||
| 5 | # (C) 2007 Jeremy Laine | ||
| 6 | # licensed under MIT, see COPYING.MIT | ||
| 7 | # | ||
| 8 | # June 22, 2011 -- Mark Hatle <mark.hatle@windriver.com> | ||
| 9 | # * Updated to no longer generate special -dbg package, instead use the | ||
| 10 | # single system -dbg | ||
| 11 | # * Update version with ".1" to indicate this change | ||
| 12 | # | ||
| 13 | # 2014 Khem Raj <raj.khem@gmail.com> | ||
| 14 | # Added python3 support | ||
| 15 | # | ||
| 16 | import os | ||
| 17 | import sys | ||
| 18 | import time | ||
| 19 | |||
# CPython version being packaged; VERSION[:3] ("3.3") forms the
# ${libdir}/python3.3/ target prefix used by MakefileMaker below.
VERSION = "3.3.3"

__author__ = "Michael 'Mickey' Lauer <mlauer@vanille-media.de>"
__version__ = "20140131"  # generator version, stamped into the generated manifest header
| 24 | |||
class MakefileMaker:
    """Collects package definitions and emits a BitBake manifest
    (PROVIDES, PACKAGES, SUMMARY_*, RDEPENDS_*, FILES_* variables)
    to the given output file object."""

    def __init__( self, outfile ):
        """initialize with an open, writable file-like object"""
        self.packages = {}
        self.targetPrefix = "${libdir}/python%s/" % VERSION[:3]
        self.output = outfile
        self.out( """
# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file.
# Generator: '%s' Version %s (C) 2002-2010 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy
""" % ( sys.argv[0], __version__ ) )

    #
    # helper functions
    #

    def out( self, data ):
        """print a line to the output file"""
        self.output.write( "%s\n" % data )

    def setPrefix( self, targetPrefix ):
        """set a file prefix for addPackage files"""
        self.targetPrefix = targetPrefix

    def doProlog( self ):
        # Historical hook: emits a single placeholder line plus a blank line.
        self.out( """ """ )
        self.out( "" )

    def addPackage( self, name, description, dependencies, filenames ):
        """add a package to the Makefile

        filenames may be a whitespace-separated string or a list; entries
        not starting with '$' get self.targetPrefix prepended, BitBake
        variable references (e.g. ${bindir}/...) are kept as-is."""
        # isinstance() instead of type() comparison; equivalent here and idiomatic.
        if isinstance( filenames, str ):
            filenames = filenames.split()
        fullFilenames = []
        for filename in filenames:
            if filename[0] != "$":
                fullFilenames.append( "%s%s" % ( self.targetPrefix, filename ) )
            else:
                fullFilenames.append( filename )
        self.packages[name] = description, dependencies, fullFilenames

    def doBody( self ):
        """generate body of Makefile"""

        #
        # generate provides line
        #

        provideLine = 'PROVIDES+="'
        for name in sorted(self.packages):
            provideLine += "%s " % name
        provideLine += '"'

        self.out( provideLine )
        self.out( "" )

        #
        # generate package line; ${PN}-distutils-staticdev is deliberately
        # emitted right after ${PN}-distutils (and skipped otherwise) so the
        # -staticdev package stays next to its parent in PACKAGES order
        #

        packageLine = 'PACKAGES="${PN}-dbg '
        for name in sorted(self.packages):
            if name.startswith("${PN}-distutils"):
                if name == "${PN}-distutils":
                    packageLine += "%s-staticdev %s " % (name, name)
            elif name != '${PN}-dbg':
                packageLine += "%s " % name
        packageLine += '${PN}-modules"'

        self.out( packageLine )
        self.out( "" )

        #
        # generate package variables
        # (sorted(...items()) works on Python 2 and 3; the previous
        # iteritems() was Python-2-only.  The dead 'dirset' computation,
        # whose result was never read, has been dropped.)
        #

        for name, data in sorted(self.packages.items()):
            desc, deps, files = data

            #
            # write out the description, revision and dependencies
            #
            self.out( 'SUMMARY_%s="%s"' % ( name, desc ) )
            self.out( 'RDEPENDS_%s="%s"' % ( name, deps ) )

            line = 'FILES_%s="' % name
            for target in files:
                line += "%s " % target
            line += '"'
            self.out( line )
            self.out( "" )

        self.out( 'SUMMARY_${PN}-modules="All Python modules"' )
        line = 'RDEPENDS_${PN}-modules="'

        # -dev and -distutils-staticdev are excluded from the catch-all
        # ${PN}-modules runtime dependency list.
        for name, data in sorted(self.packages.items()):
            if name not in ['${PN}-dev', '${PN}-distutils-staticdev']:
                line += "%s " % name

        self.out( "%s \"" % line )
        self.out( 'ALLOW_EMPTY_${PN}-modules = "1"' )

    def doEpilog( self ):
        # Historical hook: emits an empty string line plus a blank line.
        self.out( """""" )
        self.out( "" )

    def make( self ):
        """emit the complete manifest: prolog, body, epilog"""
        self.doProlog()
        self.doBody()
        self.doEpilog()
| 151 | |||
| 152 | if __name__ == "__main__": | ||
| 153 | |||
| 154 | if len( sys.argv ) > 1: | ||
| 155 | try: | ||
| 156 | os.unlink(sys.argv[1]) | ||
| 157 | except Exception: | ||
| 158 | sys.exc_clear() | ||
| 159 | outfile = file( sys.argv[1], "w" ) | ||
| 160 | else: | ||
| 161 | outfile = sys.stdout | ||
| 162 | |||
| 163 | m = MakefileMaker( outfile ) | ||
| 164 | |||
| 165 | # Add packages here. Only specify dlopen-style library dependencies here, no ldd-style dependencies! | ||
| 166 | # Parameters: revision, name, description, dependencies, filenames | ||
| 167 | # | ||
| 168 | |||
| 169 | m.addPackage( "${PN}-core", "Python interpreter and core modules", "${PN}-lang ${PN}-re", | ||
| 170 | "__future__.* _abcoll.* abc.* copy.* copy_reg.* ConfigParser.* " + | ||
| 171 | "genericpath.* getopt.* linecache.* new.* " + | ||
| 172 | "os.* posixpath.* struct.* " + | ||
| 173 | "warnings.* site.* stat.* " + | ||
| 174 | "UserDict.* UserList.* UserString.* " + | ||
| 175 | "lib-dynload/binascii.*.so lib-dynload/_struct.*.so lib-dynload/time.*.so " + | ||
| 176 | "lib-dynload/xreadlines.*.so types.* platform.* ${bindir}/python* " + | ||
| 177 | "_weakrefset.* sysconfig.* config/Makefile " + | ||
| 178 | "${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h " + | ||
| 179 | "${libdir}/python${PYTHON_MAJMIN}/collections " + | ||
| 180 | "${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py ") | ||
| 181 | |||
| 182 | m.addPackage( "${PN}-dev", "Python development package", "${PN}-core", | ||
| 183 | "${includedir} " + | ||
| 184 | "${libdir}/lib*${SOLIBSDEV} " + | ||
| 185 | "${libdir}/*.la " + | ||
| 186 | "${libdir}/*.a " + | ||
| 187 | "${libdir}/*.o " + | ||
| 188 | "${libdir}/pkgconfig " + | ||
| 189 | "${base_libdir}/*.a " + | ||
| 190 | "${base_libdir}/*.o " + | ||
| 191 | "${datadir}/aclocal " + | ||
| 192 | "${datadir}/pkgconfig " ) | ||
| 193 | |||
| 194 | m.addPackage( "${PN}-2to3", "Python automated Python 2 to 3 code translator", "${PN}-core", | ||
| 195 | "${bindir}/2to3 lib2to3" ) # package | ||
| 196 | |||
| 197 | m.addPackage( "${PN}-idle", "Python Integrated Development Environment", "${PN}-core ${PN}-tkinter", | ||
| 198 | "${bindir}/idle idlelib" ) # package | ||
| 199 | |||
| 200 | m.addPackage( "${PN}-pydoc", "Python interactive help support", "${PN}-core ${PN}-lang ${PN}-stringold ${PN}-re", | ||
| 201 | "${bindir}/pydoc pydoc.* pydoc_data" ) | ||
| 202 | |||
| 203 | m.addPackage( "${PN}-smtpd", "Python Simple Mail Transport Daemon", "${PN}-core ${PN}-netserver ${PN}-email ${PN}-mime", | ||
| 204 | "${bindir}/smtpd.* smtpd.*" ) | ||
| 205 | |||
| 206 | m.addPackage( "${PN}-audio", "Python Audio Handling", "${PN}-core", | ||
| 207 | "wave.* chunk.* sndhdr.* lib-dynload/ossaudiodev.*.so lib-dynload/audioop.*.so audiodev.* sunaudio.* sunau.* toaiff.*" ) | ||
| 208 | |||
| 209 | m.addPackage( "${PN}-codecs", "Python codecs, encodings & i18n support", "${PN}-core ${PN}-lang", | ||
| 210 | "codecs.* encodings gettext.* locale.* lib-dynload/_locale.*.so lib-dynload/_codecs* lib-dynload/_multibytecodec.*.so lib-dynload/unicodedata.*.so stringprep.* xdrlib.*" ) | ||
| 211 | |||
| 212 | m.addPackage( "${PN}-compile", "Python bytecode compilation support", "${PN}-core", | ||
| 213 | "py_compile.* compileall.*" ) | ||
| 214 | |||
| 215 | m.addPackage( "${PN}-compression", "Python high-level compression support", "${PN}-core ${PN}-codecs", | ||
| 216 | "gzip.* zipfile.* tarfile.* lib-dynload/bz2.*.so" ) | ||
| 217 | |||
| 218 | m.addPackage( "${PN}-crypt", "Python basic cryptographic and hashing support", "${PN}-core", | ||
| 219 | "hashlib.* md5.* sha.* lib-dynload/crypt.*.so lib-dynload/_hashlib.*.so lib-dynload/_sha256.*.so lib-dynload/_sha512.*.so" ) | ||
| 220 | |||
| 221 | m.addPackage( "${PN}-textutils", "Python option parsing, text wrapping and CSV support", "${PN}-core ${PN}-io ${PN}-re ${PN}-stringold", | ||
| 222 | "lib-dynload/_csv.*.so csv.* optparse.* textwrap.*" ) | ||
| 223 | |||
| 224 | m.addPackage( "${PN}-curses", "Python curses support", "${PN}-core", | ||
| 225 | "curses lib-dynload/_curses.*.so lib-dynload/_curses_panel.*.so" ) # directory + low level module | ||
| 226 | |||
| 227 | m.addPackage( "${PN}-ctypes", "Python C types support", "${PN}-core", | ||
| 228 | "ctypes lib-dynload/_ctypes.*.so lib-dynload/_ctypes_test.*.so" ) # directory + low level module | ||
| 229 | |||
| 230 | m.addPackage( "${PN}-datetime", "Python calendar and time support", "${PN}-core ${PN}-codecs", | ||
| 231 | "_strptime.* calendar.* lib-dynload/datetime.*.so" ) | ||
| 232 | |||
| 233 | m.addPackage( "${PN}-db", "Python file-based database support", "${PN}-core", | ||
| 234 | "anydbm.* dumbdbm.* whichdb.* dbm lib-dynload/_dbm.*.so" ) | ||
| 235 | |||
| 236 | m.addPackage( "${PN}-debugger", "Python debugger", "${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint", | ||
| 237 | "bdb.* pdb.*" ) | ||
| 238 | |||
| 239 | m.addPackage( "${PN}-difflib", "Python helpers for computing deltas between objects", "${PN}-lang ${PN}-re", | ||
| 240 | "difflib.*" ) | ||
| 241 | |||
| 242 | m.addPackage( "${PN}-distutils-staticdev", "Python distribution utilities (static libraries)", "${PN}-distutils", | ||
| 243 | "config/lib*.a" ) # package | ||
| 244 | |||
| 245 | m.addPackage( "${PN}-distutils", "Python Distribution Utilities", "${PN}-core", | ||
| 246 | "config distutils" ) # package | ||
| 247 | |||
| 248 | m.addPackage( "${PN}-doctest", "Python framework for running examples in docstrings", "${PN}-core ${PN}-lang ${PN}-io ${PN}-re ${PN}-unittest ${PN}-debugger ${PN}-difflib", | ||
| 249 | "doctest.*" ) | ||
| 250 | |||
| 251 | # FIXME consider adding to some higher level package | ||
| 252 | m.addPackage( "${PN}-elementtree", "Python elementree", "${PN}-core", | ||
| 253 | "lib-dynload/_elementtree.*.so" ) | ||
| 254 | |||
| 255 | m.addPackage( "${PN}-email", "Python email support", "${PN}-core ${PN}-io ${PN}-re ${PN}-mime ${PN}-audio ${PN}-image ${PN}-netclient", | ||
| 256 | "imaplib.* email" ) # package | ||
| 257 | |||
| 258 | m.addPackage( "${PN}-fcntl", "Python's fcntl interface", "${PN}-core", | ||
| 259 | "lib-dynload/fcntl.*.so" ) | ||
| 260 | |||
| 261 | m.addPackage( "${PN}-html", "Python HTML processing support", "${PN}-core", | ||
| 262 | "formatter.* htmlentitydefs.* htmllib.* markupbase.* sgmllib.* HTMLParser.* " ) | ||
| 263 | |||
| 264 | m.addPackage( "${PN}-gdbm", "Python GNU database support", "${PN}-core", | ||
| 265 | "lib-dynload/_gdbm.*.so" ) | ||
| 266 | |||
| 267 | m.addPackage( "${PN}-image", "Python graphical image handling", "${PN}-core", | ||
| 268 | "colorsys.* imghdr.* lib-dynload/imageop.*.so lib-dynload/rgbimg.*.so" ) | ||
| 269 | |||
| 270 | m.addPackage( "${PN}-io", "Python low-level I/O", "${PN}-core ${PN}-math", | ||
| 271 | "lib-dynload/_socket.*.so lib-dynload/_io.*.so lib-dynload/_ssl.*.so lib-dynload/select.*.so lib-dynload/termios.*.so lib-dynload/cStringIO.*.so " + | ||
| 272 | "pipes.* socket.* ssl.* tempfile.* StringIO.* io.* _pyio.*" ) | ||
| 273 | |||
| 274 | m.addPackage( "${PN}-json", "Python JSON support", "${PN}-core ${PN}-math ${PN}-re", | ||
| 275 | "json lib-dynload/_json.*.so" ) # package | ||
| 276 | |||
| 277 | m.addPackage( "${PN}-lang", "Python low-level language support", "${PN}-core", | ||
| 278 | "lib-dynload/_bisect.*.so lib-dynload/_collections.*.so lib-dynload/_heapq.*.so lib-dynload/_weakref.*.so lib-dynload/_functools.*.so " + | ||
| 279 | "lib-dynload/array.*.so lib-dynload/itertools.*.so lib-dynload/operator.*.so lib-dynload/parser.*.so " + | ||
| 280 | "atexit.* bisect.* code.* codeop.* collections.* dis.* functools.* heapq.* inspect.* keyword.* opcode.* symbol.* repr.* token.* " + | ||
| 281 | "tokenize.* traceback.* weakref.*" ) | ||
| 282 | |||
| 283 | m.addPackage( "${PN}-logging", "Python logging support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-stringold", | ||
| 284 | "logging" ) # package | ||
| 285 | |||
| 286 | m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime", | ||
| 287 | "mailbox.*" ) | ||
| 288 | |||
| 289 | m.addPackage( "${PN}-math", "Python math support", "${PN}-core", | ||
| 290 | "lib-dynload/cmath.*.so lib-dynload/math.*.so lib-dynload/_random.*.so random.* sets.*" ) | ||
| 291 | |||
| 292 | m.addPackage( "${PN}-mime", "Python MIME handling APIs", "${PN}-core ${PN}-io", | ||
| 293 | "mimetools.* uu.* quopri.* rfc822.* MimeWriter.*" ) | ||
| 294 | |||
| 295 | m.addPackage( "${PN}-mmap", "Python memory-mapped file support", "${PN}-core ${PN}-io", | ||
| 296 | "lib-dynload/mmap.*.so " ) | ||
| 297 | |||
| 298 | m.addPackage( "${PN}-multiprocessing", "Python multiprocessing support", "${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-threading ${PN}-ctypes ${PN}-mmap", | ||
| 299 | "lib-dynload/_multiprocessing.*.so multiprocessing" ) # package | ||
| 300 | |||
| 301 | m.addPackage( "${PN}-netclient", "Python Internet Protocol clients", "${PN}-core ${PN}-crypt ${PN}-datetime ${PN}-io ${PN}-lang ${PN}-logging ${PN}-mime", | ||
| 302 | "*Cookie*.* " + | ||
| 303 | "base64.* cookielib.* ftplib.* gopherlib.* hmac.* httplib.* mimetypes.* nntplib.* poplib.* smtplib.* telnetlib.* urllib.* urllib2.* urlparse.* uuid.* rfc822.* mimetools.*" ) | ||
| 304 | |||
| 305 | m.addPackage( "${PN}-netserver", "Python Internet Protocol servers", "${PN}-core ${PN}-netclient", | ||
| 306 | "cgi.* *HTTPServer.* SocketServer.*" ) | ||
| 307 | |||
| 308 | m.addPackage( "${PN}-numbers", "Python number APIs", "${PN}-core ${PN}-lang ${PN}-re", | ||
| 309 | "decimal.* numbers.*" ) | ||
| 310 | |||
| 311 | m.addPackage( "${PN}-pickle", "Python serialisation/persistence support", "${PN}-core ${PN}-codecs ${PN}-io ${PN}-re", | ||
| 312 | "pickle.* shelve.* lib-dynload/cPickle.*.so pickletools.*" ) | ||
| 313 | |||
| 314 | m.addPackage( "${PN}-pkgutil", "Python package extension utility support", "${PN}-core", | ||
| 315 | "pkgutil.*") | ||
| 316 | |||
| 317 | m.addPackage( "${PN}-pprint", "Python pretty-print support", "${PN}-core ${PN}-io", | ||
| 318 | "pprint.*" ) | ||
| 319 | |||
| 320 | m.addPackage( "${PN}-profile", "Python basic performance profiling support", "${PN}-core ${PN}-textutils", | ||
| 321 | "profile.* pstats.* cProfile.* lib-dynload/_lsprof.*.so" ) | ||
| 322 | |||
| 323 | m.addPackage( "${PN}-re", "Python Regular Expression APIs", "${PN}-core", | ||
| 324 | "re.* sre.* sre_compile.* sre_constants* sre_parse.*" ) # _sre is builtin | ||
| 325 | |||
| 326 | m.addPackage( "${PN}-readline", "Python readline support", "${PN}-core", | ||
| 327 | "lib-dynload/readline.*.so rlcompleter.*" ) | ||
| 328 | |||
| 329 | m.addPackage( "${PN}-resource", "Python resource control interface", "${PN}-core", | ||
| 330 | "lib-dynload/resource.*.so" ) | ||
| 331 | |||
| 332 | m.addPackage( "${PN}-shell", "Python shell-like functionality", "${PN}-core ${PN}-re", | ||
| 333 | "cmd.* commands.* dircache.* fnmatch.* glob.* popen2.* shlex.* shutil.*" ) | ||
| 334 | |||
| 335 | m.addPackage( "${PN}-robotparser", "Python robots.txt parser", "${PN}-core ${PN}-netclient", | ||
| 336 | "urllib/robotparser.*") | ||
| 337 | |||
| 338 | m.addPackage( "${PN}-subprocess", "Python subprocess support", "${PN}-core ${PN}-io ${PN}-re ${PN}-fcntl ${PN}-pickle", | ||
| 339 | "subprocess.*" ) | ||
| 340 | |||
| 341 | m.addPackage( "${PN}-sqlite3", "Python Sqlite3 database support", "${PN}-core ${PN}-datetime ${PN}-lang ${PN}-crypt ${PN}-io ${PN}-threading", | ||
| 342 | "lib-dynload/_sqlite3.*.so sqlite3/dbapi2.* sqlite3/__init__.* sqlite3/dump.*" ) | ||
| 343 | |||
| 344 | m.addPackage( "${PN}-sqlite3-tests", "Python Sqlite3 database support tests", "${PN}-core ${PN}-sqlite3", | ||
| 345 | "sqlite3/test" ) | ||
| 346 | |||
| 347 | m.addPackage( "${PN}-stringold", "Python string APIs [deprecated]", "${PN}-core ${PN}-re", | ||
| 348 | "lib-dynload/strop.*.so string.* stringold.*" ) | ||
| 349 | |||
| 350 | m.addPackage( "${PN}-syslog", "Python syslog interface", "${PN}-core", | ||
| 351 | "lib-dynload/syslog.*.so" ) | ||
| 352 | |||
| 353 | m.addPackage( "${PN}-terminal", "Python terminal controlling support", "${PN}-core ${PN}-io", | ||
| 354 | "pty.* tty.*" ) | ||
| 355 | |||
| 356 | m.addPackage( "${PN}-tests", "Python tests", "${PN}-core", | ||
| 357 | "test" ) # package | ||
| 358 | |||
| 359 | m.addPackage( "${PN}-threading", "Python threading & synchronization support", "${PN}-core ${PN}-lang", | ||
| 360 | "_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* Queue.*" ) | ||
| 361 | |||
| 362 | m.addPackage( "${PN}-tkinter", "Python Tcl/Tk bindings", "${PN}-core", | ||
| 363 | "lib-dynload/_tkinter.*.so lib-tk tkinter" ) # package | ||
| 364 | |||
| 365 | m.addPackage( "${PN}-unittest", "Python unit testing framework", "${PN}-core ${PN}-stringold ${PN}-lang ${PN}-io ${PN}-difflib ${PN}-pprint ${PN}-shell", | ||
| 366 | "unittest/" ) | ||
| 367 | |||
| 368 | m.addPackage( "${PN}-unixadmin", "Python Unix administration support", "${PN}-core", | ||
| 369 | "lib-dynload/nis.*.so lib-dynload/grp.*.so lib-dynload/pwd.*.so getpass.*" ) | ||
| 370 | |||
| 371 | m.addPackage( "${PN}-xml", "Python basic XML support", "${PN}-core ${PN}-elementtree ${PN}-re", | ||
| 372 | "lib-dynload/pyexpat.*.so xml xmllib.*" ) # package | ||
| 373 | |||
| 374 | m.addPackage( "${PN}-xmlrpc", "Python XML-RPC support", "${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang", | ||
| 375 | "xmlrpclib.* SimpleXMLRPCServer.* DocXMLRPCServer.* xmlrpc" ) | ||
| 376 | |||
| 377 | m.addPackage( "${PN}-mailbox", "Python mailbox format support", "${PN}-core ${PN}-mime", | ||
| 378 | "mailbox.*" ) | ||
| 379 | |||
| 380 | m.make() | ||
diff --git a/scripts/contrib/test_build_time.sh b/scripts/contrib/test_build_time.sh new file mode 100755 index 0000000000..9e5725ae54 --- /dev/null +++ b/scripts/contrib/test_build_time.sh | |||
| @@ -0,0 +1,237 @@ | |||
| 1 | #!/bin/bash | ||
| 2 | |||
| 3 | # Build performance regression test script | ||
| 4 | # | ||
| 5 | # Copyright 2011 Intel Corporation | ||
| 6 | # All rights reserved. | ||
| 7 | # | ||
| 8 | # This program is free software; you can redistribute it and/or modify | ||
| 9 | # it under the terms of the GNU General Public License as published by | ||
| 10 | # the Free Software Foundation; either version 2 of the License, or | ||
| 11 | # (at your option) any later version. | ||
| 12 | # | ||
| 13 | # This program is distributed in the hope that it will be useful, | ||
| 14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| 16 | # GNU General Public License for more details. | ||
| 17 | # | ||
| 18 | # You should have received a copy of the GNU General Public License | ||
| 19 | # along with this program; if not, write to the Free Software | ||
| 20 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | ||
| 21 | # | ||
| 22 | # | ||
| 23 | # DESCRIPTION | ||
| 24 | # This script is intended to be used in conjunction with "git bisect run" | ||
| 25 | # in order to find regressions in build time, however it can also be used | ||
| 26 | # independently. It cleans out the build output directories, runs a | ||
| 27 | # specified worker script (an example is test_build_time_worker.sh) under | ||
| 28 | # TIME(1), logs the results to TEST_LOGDIR (default /tmp) and returns a | ||
| 29 | # value telling "git bisect run" whether the build time is good (under | ||
| 30 | # the specified threshold) or bad (over it). There is also a tolerance | ||
| 31 | # option but it is not particularly useful as it only subtracts the | ||
| 32 | # tolerance from the given threshold and uses it as the actual threshold. | ||
| 33 | # | ||
| 34 | # It is also capable of taking a file listing git revision hashes to be | ||
| 35 | # test-applied to the repository in order to get past build failures that | ||
| 36 | # would otherwise cause certain revisions to have to be skipped; if a | ||
| 37 | # revision does not apply cleanly then the script assumes it does not | ||
| 38 | # need to be applied and ignores it. | ||
| 39 | # | ||
| 40 | # Please see the help output (syntax below) for some important setup | ||
| 41 | # instructions. | ||
| 42 | # | ||
| 43 | # AUTHORS | ||
| 44 | # Paul Eggleton <paul.eggleton@linux.intel.com> | ||
| 45 | |||
| 46 | |||
| 47 | syntax() { | ||
| 48 | echo "syntax: $0 <script> <time> <tolerance> [patchrevlist]" | ||
| 49 | echo "" | ||
| 50 | echo " script - worker script file (if in current dir, prefix with ./)" | ||
| 51 | echo " time - time threshold (in seconds, suffix m for minutes)" | ||
| 52 | echo " tolerance - tolerance (in seconds, suffix m for minutes or % for" | ||
| 53 | echo " percentage, can be 0)" | ||
| 54 | echo " patchrevlist - optional file listing revisions to apply as patches on top" | ||
| 55 | echo "" | ||
| 56 | echo "You must set TEST_BUILDDIR to point to a previously created build directory," | ||
| 57 | echo "however please note that this script will wipe out the TMPDIR defined in" | ||
| 58 | echo "TEST_BUILDDIR/conf/local.conf as part of its initial setup (as well as your" | ||
| 59 | echo "~/.ccache)" | ||
| 60 | echo "" | ||
| 61 | echo "To get rid of the sudo prompt, please add the following line to /etc/sudoers" | ||
| 62 | echo "(use 'visudo' to edit this; also it is assumed that the user you are running" | ||
| 63 | echo "as is a member of the 'wheel' group):" | ||
| 64 | echo "" | ||
| 65 | echo "%wheel ALL=(ALL) NOPASSWD: /sbin/sysctl -w vm.drop_caches=[1-3]" | ||
| 66 | echo "" | ||
| 67 | echo "Note: it is recommended that you disable crond and any other process that" | ||
| 68 | echo "may cause significant CPU or I/O usage during build performance tests." | ||
| 69 | } | ||
| 70 | |||
| 71 | # Note - we exit with 250 here because that will tell git bisect run that | ||
| 72 | # something bad happened and stop | ||
| 73 | if [ "$1" = "" ] ; then | ||
| 74 | syntax | ||
| 75 | exit 250 | ||
| 76 | fi | ||
| 77 | |||
| 78 | if [ "$2" = "" ] ; then | ||
| 79 | syntax | ||
| 80 | exit 250 | ||
| 81 | fi | ||
| 82 | |||
| 83 | if [ "$3" = "" ] ; then | ||
| 84 | syntax | ||
| 85 | exit 250 | ||
| 86 | fi | ||
| 87 | |||
| 88 | if ! [[ "$2" =~ ^[0-9][0-9m.]*$ ]] ; then | ||
| 89 | echo "'$2' is not a valid number for threshold" | ||
| 90 | exit 250 | ||
| 91 | fi | ||
| 92 | |||
| 93 | if ! [[ "$3" =~ ^[0-9][0-9m.%]*$ ]] ; then | ||
| 94 | echo "'$3' is not a valid number for tolerance" | ||
| 95 | exit 250 | ||
| 96 | fi | ||
| 97 | |||
| 98 | if [ "$TEST_BUILDDIR" = "" ] ; then | ||
| 99 | echo "Please set TEST_BUILDDIR to a previously created build directory" | ||
| 100 | exit 250 | ||
| 101 | fi | ||
| 102 | |||
| 103 | if [ ! -d "$TEST_BUILDDIR" ] ; then | ||
| 104 | echo "TEST_BUILDDIR $TEST_BUILDDIR not found" | ||
| 105 | exit 250 | ||
| 106 | fi | ||
| 107 | |||
| 108 | git diff --quiet | ||
| 109 | if [ $? != 0 ] ; then | ||
| 110 | echo "Working tree is dirty, cannot proceed" | ||
| 111 | exit 251 | ||
| 112 | fi | ||
| 113 | |||
| 114 | if [ "$BB_ENV_EXTRAWHITE" != "" ] ; then | ||
| 115 | echo "WARNING: you are running after sourcing the build environment script, this is not recommended" | ||
| 116 | fi | ||
| 117 | |||
| 118 | runscript=$1 | ||
| 119 | timethreshold=$2 | ||
| 120 | tolerance=$3 | ||
| 121 | |||
| 122 | if [ "$4" != "" ] ; then | ||
| 123 | patchrevlist=`cat $4` | ||
| 124 | else | ||
| 125 | patchrevlist="" | ||
| 126 | fi | ||
| 127 | |||
| 128 | if [[ timethreshold == *m* ]] ; then | ||
| 129 | timethreshold=`echo $timethreshold | sed s/m/*60/ | bc` | ||
| 130 | fi | ||
| 131 | |||
| 132 | if [[ $tolerance == *m* ]] ; then | ||
| 133 | tolerance=`echo $tolerance | sed s/m/*60/ | bc` | ||
| 134 | elif [[ $tolerance == *%* ]] ; then | ||
| 135 | tolerance=`echo $tolerance | sed s/%//` | ||
| 136 | tolerance=`echo "scale = 2; (($tolerance * $timethreshold) / 100)" | bc` | ||
| 137 | fi | ||
| 138 | |||
| 139 | tmpdir=`grep "^TMPDIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/TMPDIR[ \t]*=[ \t\?]*"//' -e 's/"//'` | ||
| 140 | if [ "x$tmpdir" = "x" ]; then | ||
| 141 | echo "Unable to determine TMPDIR from $TEST_BUILDDIR/conf/local.conf, bailing out" | ||
| 142 | exit 250 | ||
| 143 | fi | ||
| 144 | sstatedir=`grep "^SSTATE_DIR" $TEST_BUILDDIR/conf/local.conf | sed -e 's/SSTATE_DIR[ \t\?]*=[ \t]*"//' -e 's/"//'` | ||
| 145 | if [ "x$sstatedir" = "x" ]; then | ||
| 146 | echo "Unable to determine SSTATE_DIR from $TEST_BUILDDIR/conf/local.conf, bailing out" | ||
| 147 | exit 250 | ||
| 148 | fi | ||
| 149 | |||
| 150 | if [ `expr length $tmpdir` -lt 4 ] ; then | ||
| 151 | echo "TMPDIR $tmpdir is less than 4 characters, bailing out" | ||
| 152 | exit 250 | ||
| 153 | fi | ||
| 154 | |||
| 155 | if [ `expr length $sstatedir` -lt 4 ] ; then | ||
| 156 | echo "SSTATE_DIR $sstatedir is less than 4 characters, bailing out" | ||
| 157 | exit 250 | ||
| 158 | fi | ||
| 159 | |||
| 160 | echo -n "About to wipe out TMPDIR $tmpdir, press Ctrl+C to break out... " | ||
| 161 | for i in 9 8 7 6 5 4 3 2 1 | ||
| 162 | do | ||
| 163 | echo -ne "\x08$i" | ||
| 164 | sleep 1 | ||
| 165 | done | ||
| 166 | echo | ||
| 167 | |||
| 168 | pushd . > /dev/null | ||
| 169 | |||
| 170 | rm -f pseudodone | ||
| 171 | echo "Removing TMPDIR $tmpdir..." | ||
| 172 | rm -rf $tmpdir | ||
| 173 | echo "Removing TMPDIR $tmpdir-*libc..." | ||
| 174 | rm -rf $tmpdir-*libc | ||
| 175 | echo "Removing SSTATE_DIR $sstatedir..." | ||
| 176 | rm -rf $sstatedir | ||
| 177 | echo "Removing ~/.ccache..." | ||
| 178 | rm -rf ~/.ccache | ||
| 179 | |||
| 180 | echo "Syncing..." | ||
| 181 | sync | ||
| 182 | sync | ||
| 183 | echo "Dropping VM cache..." | ||
| 184 | #echo 3 > /proc/sys/vm/drop_caches | ||
| 185 | sudo /sbin/sysctl -w vm.drop_caches=3 > /dev/null | ||
| 186 | |||
| 187 | if [ "$TEST_LOGDIR" = "" ] ; then | ||
| 188 | logdir="/tmp" | ||
| 189 | else | ||
| 190 | logdir="$TEST_LOGDIR" | ||
| 191 | fi | ||
| 192 | rev=`git rev-parse HEAD` | ||
| 193 | logfile="$logdir/timelog_$rev.log" | ||
| 194 | echo -n > $logfile | ||
| 195 | |||
| 196 | gitroot=`git rev-parse --show-toplevel` | ||
| 197 | cd $gitroot | ||
| 198 | for patchrev in $patchrevlist ; do | ||
| 199 | echo "Applying $patchrev" | ||
| 200 | patchfile=`mktemp` | ||
| 201 | git show $patchrev > $patchfile | ||
| 202 | git apply --check $patchfile &> /dev/null | ||
| 203 | if [ $? != 0 ] ; then | ||
| 204 | echo " ... patch does not apply without errors, ignoring" | ||
| 205 | else | ||
| 206 | echo "Applied $patchrev" >> $logfile | ||
| 207 | git apply $patchfile &> /dev/null | ||
| 208 | fi | ||
| 209 | rm $patchfile | ||
| 210 | done | ||
| 211 | |||
| 212 | sync | ||
| 213 | echo "Quiescing for 5s..." | ||
| 214 | sleep 5 | ||
| 215 | |||
| 216 | echo "Running $runscript at $rev..." | ||
| 217 | timeoutfile=`mktemp` | ||
| 218 | /usr/bin/time -o $timeoutfile -f "%e\nreal\t%E\nuser\t%Us\nsys\t%Ss\nmaxm\t%Mk" $runscript 2>&1 | tee -a $logfile | ||
| 219 | exitstatus=$PIPESTATUS | ||
| 220 | |||
| 221 | git reset --hard HEAD > /dev/null | ||
| 222 | popd > /dev/null | ||
| 223 | |||
| 224 | timeresult=`head -n1 $timeoutfile` | ||
| 225 | cat $timeoutfile | tee -a $logfile | ||
| 226 | rm $timeoutfile | ||
| 227 | |||
| 228 | if [ $exitstatus != 0 ] ; then | ||
| 229 | # Build failed, exit with 125 to tell git bisect run to skip this rev | ||
| 230 | echo "*** Build failed (exit code $exitstatus), skipping..." | tee -a $logfile | ||
| 231 | exit 125 | ||
| 232 | fi | ||
| 233 | |||
| 234 | ret=`echo "scale = 2; $timeresult > $timethreshold - $tolerance" | bc` | ||
| 235 | echo "Returning $ret" | tee -a $logfile | ||
| 236 | exit $ret | ||
| 237 | |||
diff --git a/scripts/contrib/test_build_time_worker.sh b/scripts/contrib/test_build_time_worker.sh new file mode 100755 index 0000000000..8e20a9ea7d --- /dev/null +++ b/scripts/contrib/test_build_time_worker.sh | |||
| @@ -0,0 +1,37 @@ | |||
| 1 | #!/bin/bash | ||
| 2 | |||
| 3 | # This is an example script to be used in conjunction with test_build_time.sh | ||
| 4 | |||
| 5 | if [ "$TEST_BUILDDIR" = "" ] ; then | ||
| 6 | echo "TEST_BUILDDIR is not set" | ||
| 7 | exit 1 | ||
| 8 | fi | ||
| 9 | |||
| 10 | buildsubdir=`basename $TEST_BUILDDIR` | ||
| 11 | if [ ! -d $buildsubdir ] ; then | ||
| 12 | echo "Unable to find build subdir $buildsubdir in current directory" | ||
| 13 | exit 1 | ||
| 14 | fi | ||
| 15 | |||
| 16 | if [ -f oe-init-build-env ] ; then | ||
| 17 | . ./oe-init-build-env $buildsubdir | ||
| 18 | elif [ -f poky-init-build-env ] ; then | ||
| 19 | . ./poky-init-build-env $buildsubdir | ||
| 20 | else | ||
| 21 | echo "Unable to find build environment setup script" | ||
| 22 | exit 1 | ||
| 23 | fi | ||
| 24 | |||
| 25 | if [ -f ../meta/recipes-sato/images/core-image-sato.bb ] ; then | ||
| 26 | target="core-image-sato" | ||
| 27 | else | ||
| 28 | target="poky-image-sato" | ||
| 29 | fi | ||
| 30 | |||
| 31 | echo "Build started at `date "+%Y-%m-%d %H:%M:%S"`" | ||
| 32 | echo "bitbake $target" | ||
| 33 | bitbake $target | ||
| 34 | ret=$? | ||
| 35 | echo "Build finished at `date "+%Y-%m-%d %H:%M:%S"`" | ||
| 36 | exit $ret | ||
| 37 | |||
