diff options
| -rw-r--r-- | classes/go-mod-discovery.bbclass | 441 | ||||
| -rw-r--r-- | classes/go-mod-vcs.bbclass | 1107 |
2 files changed, 1548 insertions, 0 deletions
diff --git a/classes/go-mod-discovery.bbclass b/classes/go-mod-discovery.bbclass new file mode 100644 index 00000000..0d703d0a --- /dev/null +++ b/classes/go-mod-discovery.bbclass | |||
| @@ -0,0 +1,441 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | # go-mod-discovery.bbclass | ||
| 8 | # | ||
| 9 | # Provides a do_discover_modules task for Go projects that downloads complete | ||
| 10 | # module metadata from proxy.golang.org for use with the bootstrap strategy. | ||
| 11 | # | ||
| 12 | # USAGE: | ||
| 13 | # 1. Add to recipe: inherit go-mod-discovery | ||
| 14 | # 2. Set required variables (see CONFIGURATION below) | ||
| 15 | # 3. Run discovery: bitbake <recipe> -c discover_modules | ||
| 16 | # (This automatically: downloads modules, extracts metadata, regenerates recipe) | ||
| 17 | # 4. Build normally: bitbake <recipe> | ||
| 18 | # | ||
| 19 | # CONFIGURATION: | ||
| 20 | # | ||
| 21 | # Required (must be set by recipe): | ||
| 22 | # | ||
| 23 | # GO_MOD_DISCOVERY_BUILD_TARGET - Build target for go build | ||
| 24 | # Example: "./cmd/server" or "./..." | ||
| 25 | # | ||
| 26 | # Optional (have sensible defaults): | ||
| 27 | # | ||
| 28 | # GO_MOD_DISCOVERY_SRCDIR - Directory containing go.mod | ||
| 29 | # Default: "${S}/src/import" (standard Go recipe layout) | ||
| 30 | # | ||
| 31 | # GO_MOD_DISCOVERY_BUILD_TAGS - Build tags for go build | ||
| 32 | # Default: "${TAGS}" (uses recipe's TAGS variable if set) | ||
| 33 | # Example: "netcgo osusergo static_build" | ||
| 34 | # | ||
| 35 | # GO_MOD_DISCOVERY_LDFLAGS - Linker flags for go build | ||
| 36 | # Default: "-w -s" | ||
| 37 | # Example: "-X main.version=${PV} -w -s" | ||
| 38 | # | ||
| 39 | # GO_MOD_DISCOVERY_GOPATH - GOPATH for discovery build | ||
| 40 | # Default: "${S}/src/import/.gopath:${S}/src/import/vendor" | ||
| 41 | # | ||
| 42 | # GO_MOD_DISCOVERY_OUTPUT - Output binary path | ||
| 43 | # Default: "${WORKDIR}/discovery-build-output" | ||
| 44 | # | ||
| 45 | # GO_MOD_DISCOVERY_DIR - Persistent cache location | ||
| 46 | # Default: "${TOPDIR}/go-mod-discovery/${PN}/${PV}" | ||
| 47 | # | ||
| 48 | # GO_MOD_DISCOVERY_MODULES_JSON - Output path for extracted module metadata | ||
| 49 | # Default: "${GO_MOD_DISCOVERY_DIR}/modules.json" | ||
| 50 | # | ||
| 51 | # GO_MOD_DISCOVERY_SKIP_EXTRACT - Set to "1" to skip automatic extraction | ||
| 52 | # Default: "0" (extraction runs automatically) | ||
| 53 | # | ||
| 54 | # GO_MOD_DISCOVERY_SKIP_GENERATE - Set to "1" to skip automatic recipe generation | ||
| 55 | # Default: "0" (generation runs automatically) | ||
| 56 | # | ||
| 57 | # GO_MOD_DISCOVERY_GIT_REPO - Git repository URL for recipe generation | ||
| 58 | # Example: "https://github.com/rancher/k3s.git" | ||
| 59 | # Required for automatic generation | ||
| 60 | # | ||
| 61 | # GO_MOD_DISCOVERY_GIT_REF - Git ref (commit/tag) for recipe generation | ||
| 62 | # Default: "${SRCREV}" (uses recipe's SRCREV) | ||
| 63 | # | ||
| 64 | # GO_MOD_DISCOVERY_RECIPEDIR - Output directory for generated .inc files | ||
| 65 | # Default: "${FILE_DIRNAME}" (recipe's directory) | ||
| 66 | # | ||
| 67 | # MINIMAL EXAMPLE (manual generation - no GIT_REPO set): | ||
| 68 | # | ||
| 69 | # TAGS = "netcgo osusergo" | ||
| 70 | # GO_MOD_DISCOVERY_BUILD_TARGET = "./cmd/myapp" | ||
| 71 | # inherit go-mod-discovery | ||
| 72 | # # Run: bitbake myapp -c discover_modules | ||
| 73 | # # Then manually: oe-go-mod-fetcher.py --discovered-modules ... --git-repo ... | ||
| 74 | # | ||
| 75 | # FULL AUTOMATIC EXAMPLE (all-in-one discovery + generation): | ||
| 76 | # | ||
| 77 | # TAGS = "netcgo osusergo" | ||
| 78 | # GO_MOD_DISCOVERY_BUILD_TARGET = "./cmd/myapp" | ||
| 79 | # GO_MOD_DISCOVERY_GIT_REPO = "https://github.com/example/myapp.git" | ||
| 80 | # inherit go-mod-discovery | ||
| 81 | # # Run: bitbake myapp -c discover_modules | ||
| 82 | # # Recipe files are automatically regenerated! | ||
| 83 | # | ||
| 84 | # See: meta-virtualization/scripts/BOOTSTRAP-STRATEGY.md (Approach B) | ||
| 85 | # | ||
| 86 | # This task is NOT part of the normal build - it must be explicitly invoked | ||
| 87 | # via bitbake <recipe> -c discover_modules | ||
| 88 | # | ||
| 89 | # PERSISTENT CACHE: The discovery cache is stored in ${TOPDIR}/go-mod-discovery/${PN}/${PV}/ | ||
| 90 | # instead of ${WORKDIR}. This ensures the cache survives `bitbake <recipe> -c cleanall` | ||
| 91 | # since TOPDIR is the build directory root (e.g., /path/to/build/). | ||
| 92 | # To clean the discovery cache, run: rm -rf ${TOPDIR}/go-mod-discovery/${PN}/${PV}/ | ||
| 93 | |||
# Required variable (must be set by recipe)
GO_MOD_DISCOVERY_BUILD_TARGET ?= ""

# Optional variables with sensible defaults for standard Go recipe layout
GO_MOD_DISCOVERY_SRCDIR ?= "${S}/src/import"
GO_MOD_DISCOVERY_BUILD_TAGS ?= "${TAGS}"
GO_MOD_DISCOVERY_LDFLAGS ?= "-w -s"
GO_MOD_DISCOVERY_GOPATH ?= "${S}/src/import/.gopath:${S}/src/import/vendor"
GO_MOD_DISCOVERY_OUTPUT ?= "${WORKDIR}/discovery-build-output"

# Persistent discovery cache location - survives cleanall
# (TOPDIR is the build directory root, so this lives outside WORKDIR)
GO_MOD_DISCOVERY_DIR ?= "${TOPDIR}/go-mod-discovery/${PN}/${PV}"

# Output JSON file for discovered modules (used by oe-go-mod-fetcher.py --discovered-modules)
GO_MOD_DISCOVERY_MODULES_JSON ?= "${GO_MOD_DISCOVERY_DIR}/modules.json"

# Set to "1" to skip automatic extraction (only download modules, don't extract metadata)
GO_MOD_DISCOVERY_SKIP_EXTRACT ?= "0"

# Set to "1" to skip automatic recipe regeneration (only discover and extract)
GO_MOD_DISCOVERY_SKIP_GENERATE ?= "0"

# Git repository URL for recipe generation (required if SKIP_GENERATE != "1")
# Example: "https://github.com/rancher/k3s.git"
GO_MOD_DISCOVERY_GIT_REPO ?= ""

# Git ref (commit/tag) for recipe generation - defaults to recipe's SRCREV
GO_MOD_DISCOVERY_GIT_REF ?= "${SRCREV}"

# Recipe directory for generated .inc files - defaults to recipe's directory
GO_MOD_DISCOVERY_RECIPEDIR ?= "${FILE_DIRNAME}"

# Empty default for TAGS if not set by recipe (avoids undefined variable errors,
# since GO_MOD_DISCOVERY_BUILD_TAGS expands it and BitBake also expands ${TAGS}
# occurrences inside shell task bodies)
TAGS ?= ""
| 129 | # Shell task that mirrors do_compile but with network access and discovery GOMODCACHE | ||
# Download the complete Go module graph with network access enabled, then
# (optionally) extract metadata and regenerate the recipe .inc files.
#
# NOTE on variable style inside this function:
#   - ${UPPER_CASE_BITBAKE_VARS} are expanded by BitBake at parse time.
#   - $shell_vars (unbraced) are deliberately left for the shell, because
#     BitBake would otherwise substitute any same-named datastore variable
#     (e.g. TAGS, which this class defaults to "") and silently ignore the
#     shell assignment below.
do_discover_modules() {
    # Validate required variable
    if [ -z "${GO_MOD_DISCOVERY_BUILD_TARGET}" ]; then
        bbfatal "GO_MOD_DISCOVERY_BUILD_TARGET must be set (e.g., './cmd/server' or './...')"
    fi

    # Validate source directory exists and contains go.mod
    if [ ! -d "${GO_MOD_DISCOVERY_SRCDIR}" ]; then
        bbfatal "GO_MOD_DISCOVERY_SRCDIR does not exist: ${GO_MOD_DISCOVERY_SRCDIR}
Hint: Set GO_MOD_DISCOVERY_SRCDIR in your recipe if go.mod is not in \${S}/src/import"
    fi
    if [ ! -f "${GO_MOD_DISCOVERY_SRCDIR}/go.mod" ]; then
        bbfatal "go.mod not found in GO_MOD_DISCOVERY_SRCDIR: ${GO_MOD_DISCOVERY_SRCDIR}
Hint: Set GO_MOD_DISCOVERY_SRCDIR to the directory containing go.mod"
    fi

    # Use PERSISTENT cache location outside WORKDIR to survive cleanall
    # This is stored in ${TOPDIR}/go-mod-discovery/${PN}/${PV}/ so it persists
    discovery_cache="${GO_MOD_DISCOVERY_DIR}/cache"

    # Create required directories first
    mkdir -p "$discovery_cache"
    mkdir -p "${WORKDIR}/go-tmp"
    mkdir -p "$(dirname "${GO_MOD_DISCOVERY_OUTPUT}")"

    # Use discovery-cache instead of the normal GOMODCACHE
    export GOMODCACHE="$discovery_cache"

    # Enable network access to proxy.golang.org
    export GOPROXY="https://proxy.golang.org,direct"
    export GOSUMDB="sum.golang.org"

    # Standard Go environment - use recipe-provided GOPATH or default
    export GOPATH="${GO_MOD_DISCOVERY_GOPATH}:${STAGING_DIR_TARGET}/${prefix}/local/go"
    export CGO_ENABLED="1"
    export GOTOOLCHAIN="local"

    # Keep Go's scratch files inside WORKDIR
    export GOTMPDIR="${WORKDIR}/go-tmp"

    # Disable excessive debug output from BitBake environment
    unset GODEBUG

    # Build tags from recipe configuration (shell variable, see NOTE above)
    build_tags="${GO_MOD_DISCOVERY_BUILD_TAGS}"

    # Change to source directory
    cd "${GO_MOD_DISCOVERY_SRCDIR}"

    echo "======================================================================"
    echo "MODULE DISCOVERY: ${PN} ${PV}"
    echo "======================================================================"
    echo "GOMODCACHE: $GOMODCACHE"
    echo "GOPROXY: $GOPROXY"
    echo "Source dir: ${GO_MOD_DISCOVERY_SRCDIR}"
    echo "Build target: ${GO_MOD_DISCOVERY_BUILD_TARGET}"
    echo "Build tags: ${build_tags:-<none>}"
    echo "LDFLAGS: ${GO_MOD_DISCOVERY_LDFLAGS}"
    echo ""

    # Use native go binary (not cross-compiler)
    go_native="${STAGING_DIR_NATIVE}${bindir_native}/go"

    # NOTE: Do NOT run go mod tidy during discovery - it can upgrade versions in go.mod
    # without adding checksums to go.sum, causing version mismatches.
    # The source's go.mod/go.sum should already be correct for the commit.

    echo ""
    echo "Running: go build (to discover all modules)..."

    # Build to discover ALL modules that would be used at compile time.
    # This is better than 'go mod download' because it handles build tags correctly.
    #
    # Construct the argument vector with "$@" instead of eval'ing a flat
    # string: eval re-parses quoting and breaks if the tags or ldflags ever
    # contain shell metacharacters. BUILD_TARGET is intentionally left
    # unquoted so multiple targets (e.g. "./cmd/a ./cmd/b") still word-split.
    set -- build -v -trimpath
    if [ -n "$build_tags" ]; then
        set -- "$@" -tags "$build_tags"
    fi
    set -- "$@" -ldflags "${GO_MOD_DISCOVERY_LDFLAGS}" -o "${GO_MOD_DISCOVERY_OUTPUT}"
    set -- "$@" ${GO_MOD_DISCOVERY_BUILD_TARGET}

    echo "Executing: $go_native $*"
    "$go_native" "$@"

    echo ""
    echo "Fetching ALL modules referenced in go.sum..."
    # go build downloads .zip files but not always .info files.
    # We need .info files for VCS metadata (Origin.URL, Origin.Hash).
    # go.sum format: "module version/go.mod hash" or "module version hash"
    #
    # IMPORTANT: We must download ALL versions, including /go.mod-only entries!
    # When GOPROXY=off during compile, Go may need these for dependency resolution.
    # Strip the /go.mod suffix to get the base version, then download it.
    awk '{gsub(/\/go\.mod$/, "", $2); print $1 "@" $2}' go.sum | sort -u | while IFS= read -r modver; do
        "$go_native" mod download "$modver" 2>/dev/null || true
    done

    # Download ALL modules in the complete dependency graph.
    # The go.sum loop above only gets direct dependencies. Replace directives
    # can introduce transitive deps that aren't in go.sum but are needed at
    # compile time when GOPROXY=off.
    echo ""
    echo "Downloading complete module graph (including transitive deps)..."
    "$go_native" mod download all 2>&1 || echo "Warning: some modules may have failed to download"

    # Additionally scan for any modules that go build downloaded but don't have .info
    echo ""
    echo "Ensuring .info files for all cached modules..."
    find "$GOMODCACHE/cache/download" -name "*.zip" 2>/dev/null | while IFS= read -r zipf; do
        # Extract module@version from path like: .../module/@v/version.zip
        version=$(basename "$zipf" .zip)
        moddir=$(dirname "$zipf")
        infofile="$moddir/$version.info"
        if [ ! -f "$infofile" ]; then
            # cache/download/github.com/foo/bar/@v/v1.0.0.zip -> github.com/foo/bar@v1.0.0
            modpath=$(echo "$moddir" | sed "s|$GOMODCACHE/cache/download/||" | sed 's|/@v$||')
            echo "  Fetching .info for: $modpath@$version"
            "$go_native" mod download "$modpath@$version" 2>/dev/null || true
        fi
    done

    # Download transitive deps of REPLACED modules.
    # Replace directives can point to older versions whose deps aren't in the
    # MVS graph; at compile time with GOPROXY=off Go still validates the
    # replaced version's go.mod.
    echo ""
    echo "Downloading dependencies of replaced modules..."

    # Extract replace directives: "old_module => new_module new_version"
    awk '/^replace \($/,/^\)$/ {if ($0 !~ /^replace|^\)/) print}' go.mod | \
        grep "=>" | while IFS= read -r line; do
        # Parse: github.com/foo/bar => github.com/baz/qux v1.2.3
        new_module=$(echo "$line" | awk '{print $(NF-1)}')
        new_version=$(echo "$line" | awk '{print $NF}')

        if [ -n "$new_module" ] && [ -n "$new_version" ] && [ "$new_version" != "=>" ]; then
            echo "  Replace target: $new_module@$new_version"
            # Download this specific version - Go will fetch all its dependencies
            "$go_native" mod download "$new_module@$new_version" 2>/dev/null || true
        fi
    done

    # Count modules discovered
    module_count=$(find "$GOMODCACHE/cache/download" -name "*.info" 2>/dev/null | wc -l)

    echo ""
    echo "======================================================================"
    echo "DISCOVERY COMPLETE"
    echo "======================================================================"
    echo "Modules discovered: $module_count"
    echo "Cache location: $GOMODCACHE"

    # Extract module metadata automatically (unless skipped).
    # extract_rc doubles as the gate for the generation step below.
    extract_rc=0
    if [ "${GO_MOD_DISCOVERY_SKIP_EXTRACT}" != "1" ]; then
        echo ""
        echo "Extracting module metadata..."

        # Find the extraction script relative to this class file
        extract_script="${COREBASE}/../meta-virtualization/scripts/extract-discovered-modules.py"
        if [ ! -f "$extract_script" ]; then
            # Try alternate location
            extract_script="$(dirname "${COREBASE}")/meta-virtualization/scripts/extract-discovered-modules.py"
        fi
        if [ ! -f "$extract_script" ]; then
            # Last resort - search in layer path
            for layer in ${BBLAYERS}; do
                if [ -f "$layer/scripts/extract-discovered-modules.py" ]; then
                    extract_script="$layer/scripts/extract-discovered-modules.py"
                    break
                fi
            done
        fi

        if [ -f "$extract_script" ]; then
            # BitBake shell tasks run under 'set -e': a bare failing command
            # aborts the task before '$?' could be inspected, so capture the
            # status with '|| extract_rc=$?' to keep the graceful-warning path.
            python3 "$extract_script" \
                --gomodcache "$GOMODCACHE" \
                --output "${GO_MOD_DISCOVERY_MODULES_JSON}" || extract_rc=$?
            if [ $extract_rc -eq 0 ]; then
                echo ""
                echo "✓ Module metadata extracted to: ${GO_MOD_DISCOVERY_MODULES_JSON}"
            else
                bbwarn "Module extraction failed (exit code $extract_rc)"
                bbwarn "You can run manually: python3 $extract_script --gomodcache $GOMODCACHE --output ${GO_MOD_DISCOVERY_MODULES_JSON}"
                extract_rc=1 # Mark as failed for generation check
            fi
        else
            bbwarn "Could not find extract-discovered-modules.py script"
            bbwarn "Run manually: extract-discovered-modules.py --gomodcache $GOMODCACHE --output ${GO_MOD_DISCOVERY_MODULES_JSON}"
            extract_rc=1 # Mark as failed for generation check
        fi
    else
        echo ""
        echo "Skipping automatic extraction (GO_MOD_DISCOVERY_SKIP_EXTRACT=1)"
        extract_rc=1 # Skip generation too if extraction skipped
    fi

    # Step 3: Generate recipe .inc files (unless skipped or extraction failed)
    if [ "${GO_MOD_DISCOVERY_SKIP_GENERATE}" != "1" ] && [ "$extract_rc" = "0" ]; then
        # Validate required git repo
        if [ -z "${GO_MOD_DISCOVERY_GIT_REPO}" ]; then
            bbwarn "GO_MOD_DISCOVERY_GIT_REPO not set - skipping recipe generation"
            bbwarn "Set GO_MOD_DISCOVERY_GIT_REPO in your recipe to enable automatic generation"
            echo ""
            echo "NEXT STEP: Regenerate recipe manually:"
            echo ""
            echo "  ./meta-virtualization/scripts/oe-go-mod-fetcher.py \\"
            echo "      --discovered-modules ${GO_MOD_DISCOVERY_MODULES_JSON} \\"
            echo "      --git-repo <your-git-repo-url> \\"
            echo "      --git-ref ${GO_MOD_DISCOVERY_GIT_REF} \\"
            echo "      --recipedir ${GO_MOD_DISCOVERY_RECIPEDIR}"
        else
            echo ""
            echo "Generating recipe .inc files..."

            # Find the fetcher script (same search as extraction script)
            fetcher_script="${COREBASE}/../meta-virtualization/scripts/oe-go-mod-fetcher.py"
            if [ ! -f "$fetcher_script" ]; then
                fetcher_script="$(dirname "${COREBASE}")/meta-virtualization/scripts/oe-go-mod-fetcher.py"
            fi
            if [ ! -f "$fetcher_script" ]; then
                for layer in ${BBLAYERS}; do
                    if [ -f "$layer/scripts/oe-go-mod-fetcher.py" ]; then
                        fetcher_script="$layer/scripts/oe-go-mod-fetcher.py"
                        break
                    fi
                done
            fi

            if [ -f "$fetcher_script" ]; then
                # Same 'set -e'-safe status capture as the extraction step.
                generate_rc=0
                python3 "$fetcher_script" \
                    --discovered-modules "${GO_MOD_DISCOVERY_MODULES_JSON}" \
                    --git-repo "${GO_MOD_DISCOVERY_GIT_REPO}" \
                    --git-ref "${GO_MOD_DISCOVERY_GIT_REF}" \
                    --recipedir "${GO_MOD_DISCOVERY_RECIPEDIR}" || generate_rc=$?
                if [ $generate_rc -eq 0 ]; then
                    echo ""
                    echo "✓ Recipe files regenerated in: ${GO_MOD_DISCOVERY_RECIPEDIR}"
                else
                    bbwarn "Recipe generation failed (exit code $generate_rc)"
                    bbwarn "Check the output above for errors"
                fi
            else
                bbwarn "Could not find oe-go-mod-fetcher.py script"
                bbwarn "Run manually: oe-go-mod-fetcher.py --discovered-modules ${GO_MOD_DISCOVERY_MODULES_JSON} --git-repo ${GO_MOD_DISCOVERY_GIT_REPO} --git-ref ${GO_MOD_DISCOVERY_GIT_REF} --recipedir ${GO_MOD_DISCOVERY_RECIPEDIR}"
            fi
        fi
    elif [ "${GO_MOD_DISCOVERY_SKIP_GENERATE}" = "1" ]; then
        echo ""
        echo "Skipping automatic generation (GO_MOD_DISCOVERY_SKIP_GENERATE=1)"
        echo ""
        echo "NEXT STEP: Regenerate recipe manually:"
        echo ""
        echo "  ./meta-virtualization/scripts/oe-go-mod-fetcher.py \\"
        echo "      --discovered-modules ${GO_MOD_DISCOVERY_MODULES_JSON} \\"
        echo "      --git-repo <your-git-repo-url> \\"
        echo "      --git-ref <your-git-ref> \\"
        echo "      --recipedir ${GO_MOD_DISCOVERY_RECIPEDIR}"
    fi

    echo ""
    echo "NOTE: Cache is stored OUTSIDE WORKDIR in a persistent location."
    echo "      This cache survives 'bitbake ${PN} -c cleanall'!"
    echo "      To clean: rm -rf ${GO_MOD_DISCOVERY_DIR}"
    echo ""
    echo "======================================================================"
}
| 404 | |||
# Make this task manually runnable (not part of default build).
# Runs after do_patch so the unpacked, patched source (go.mod/go.sum) is available.
addtask discover_modules after do_patch

# Task dependencies - need source unpacked and full toolchain available.
# Depend on do_prepare_recipe_sysroot to get cross-compiler for CGO.
do_discover_modules[depends] = "${PN}:do_prepare_recipe_sysroot"

# Enable network access for this task ONLY (the rest of the build stays offline)
do_discover_modules[network] = "1"

# Don't create stamp file - allow running multiple times
do_discover_modules[nostamp] = "1"

# Track all configuration variables for proper task hashing
do_discover_modules[vardeps] += "GO_MOD_DISCOVERY_DIR GO_MOD_DISCOVERY_SRCDIR \
                                 GO_MOD_DISCOVERY_BUILD_TARGET GO_MOD_DISCOVERY_BUILD_TAGS \
                                 GO_MOD_DISCOVERY_LDFLAGS GO_MOD_DISCOVERY_GOPATH GO_MOD_DISCOVERY_OUTPUT \
                                 GO_MOD_DISCOVERY_MODULES_JSON GO_MOD_DISCOVERY_SKIP_EXTRACT \
                                 GO_MOD_DISCOVERY_SKIP_GENERATE GO_MOD_DISCOVERY_GIT_REPO \
                                 GO_MOD_DISCOVERY_GIT_REF GO_MOD_DISCOVERY_RECIPEDIR"
| 427 | # Task to clean the persistent discovery cache | ||
| 428 | # Usage: bitbake <recipe> -c clean_discovery | ||
# Remove the persistent discovery cache.
# Usage: bitbake <recipe> -c clean_discovery
do_clean_discovery() {
    cache_dir="${GO_MOD_DISCOVERY_DIR}"
    # Nothing to do when the cache was never created (or already cleaned).
    if [ ! -d "$cache_dir" ]; then
        echo "Discovery cache not found: $cache_dir"
        return 0
    fi
    echo "Removing discovery cache: $cache_dir"
    rm -rf "$cache_dir"
    echo "Discovery cache removed."
}
| 438 | |||
# Register the cache-cleaning task; nostamp so it can be re-run at will.
addtask clean_discovery
do_clean_discovery[nostamp] = "1"
do_clean_discovery[vardeps] += "GO_MOD_DISCOVERY_DIR"
diff --git a/classes/go-mod-vcs.bbclass b/classes/go-mod-vcs.bbclass new file mode 100644 index 00000000..26c8c7f3 --- /dev/null +++ b/classes/go-mod-vcs.bbclass | |||
| @@ -0,0 +1,1107 @@ | |||
| 1 | # | ||
| 2 | # Copyright OpenEmbedded Contributors | ||
| 3 | # | ||
| 4 | # SPDX-License-Identifier: MIT | ||
| 5 | # | ||
| 6 | |||
| 7 | # go-mod-vcs.bbclass | ||
| 8 | # | ||
| 9 | # Provides tasks for building Go module cache from VCS (git) sources. | ||
| 10 | # This enables fully offline Go builds using modules fetched via BitBake's | ||
| 11 | # git fetcher instead of the Go proxy. | ||
| 12 | # | ||
| 13 | # USAGE: | ||
| 14 | # 1. Add to recipe: inherit go-mod-vcs | ||
| 15 | # 2. Define GO_MODULE_CACHE_DATA as JSON array of module metadata | ||
| 16 | # 3. Include go-mod-git.inc for SRC_URI git entries | ||
| 17 | # 4. Include go-mod-cache.inc for GO_MODULE_CACHE_DATA | ||
| 18 | # | ||
| 19 | # DEPENDENCIES: | ||
| 20 | # - Works with oe-core's go.bbclass and go-mod.bbclass | ||
| 21 | # - h1: checksums calculated in pure Python (with go-dirhash-native fallback) | ||
| 22 | # - Optional: go-dirhash-native for fallback checksum calculation | ||
| 23 | # | ||
| 24 | # TASKS PROVIDED: | ||
| 25 | # - do_create_module_cache: Builds module cache from git repos | ||
| 26 | # - do_sync_go_files: Synchronizes go.sum with cache checksums | ||
| 27 | # | ||
| 28 | # GENERATED FILES: | ||
| 29 | # The oe-go-mod-fetcher.py script generates two .inc files per recipe: | ||
| 30 | # - go-mod-git.inc: SRC_URI and SRCREV entries for git fetching | ||
| 31 | # - go-mod-cache.inc: GO_MODULE_CACHE_DATA JSON + inherit go-mod-vcs | ||
| 32 | # | ||
| 33 | # This class extracts the reusable Python task code, so generated .inc files | ||
| 34 | # only contain recipe-specific data (SRC_URI entries and module metadata). | ||
| 35 | # | ||
| 36 | # ARCHITECTURE NOTES: | ||
| 37 | # - assemble_zip() must create zips INSIDE TemporaryDirectory context | ||
| 38 | # - synthesize_go_mod() preserves go version directive from original go.mod | ||
| 39 | # | ||
| 40 | # CONFIGURATION: | ||
| 41 | # GO_MOD_SKIP_ZIP_EXTRACTION - Set to "1" to skip extracting zips to pkg/mod | ||
| 42 | # Go can extract on-demand from cache (experimental) | ||
| 43 | # | ||
| 44 | |||
python do_create_module_cache() {
    """
    Build Go module cache from downloaded git repositories.
    This creates the same cache structure as oe-core's gomod.bbclass.

    NOTE: h1: checksums are calculated in pure Python during zip creation.
    Falls back to go-dirhash-native if Python hash fails.
    """
    import hashlib
    import json
    import os
    import shutil
    import subprocess
    import zipfile
    import stat
    import base64
    from pathlib import Path
    from datetime import datetime

    # Check for optional go-dirhash-native fallback tool.
    # STAGING_BINDIR_NATIVE may be unset; 'or ""' keeps Path() construction valid
    # and the subsequent exists() check simply fails.
    go_dirhash_helper = Path(d.getVar('STAGING_BINDIR_NATIVE') or '') / "dirhash"
    if not go_dirhash_helper.exists():
        go_dirhash_helper = None
        bb.debug(1, "go-dirhash-native not available, using pure Python for h1: checksums")
| 69 | |||
def calculate_h1_hash_python(zip_path):
    """Calculate a Go module "h1:" dirhash for a module zip in pure Python.

    Mirrors golang.org/x/mod/sumdb/dirhash.Hash1/HashZip: hash each file's
    contents with SHA-256 (files sorted by name, directory entries skipped),
    emit one line per file, SHA-256 the concatenated lines, and base64-encode
    the final digest.

    :param zip_path: path (or file object) of the module zip
    :return: checksum string of the form "h1:<base64>"
    """
    digest = hashlib.sha256()
    with zipfile.ZipFile(zip_path, 'r') as zf:
        for info in sorted(zf.infolist(), key=lambda x: x.filename):
            if info.is_dir():
                continue
            file_hash = hashlib.sha256(zf.read(info.filename)).hexdigest()
            # Go's dirhash writes fmt.Fprintf(h, "%x  %s\n", ...) - note the
            # TWO spaces between hex digest and filename. A single space
            # produces hashes that never match go.sum or Go's own verifier.
            digest.update(f"{file_hash}  {info.filename}\n".encode('utf-8'))
    return "h1:" + base64.b64encode(digest.digest()).decode('ascii')
| 83 | |||
def calculate_h1_hash_native(zip_path):
    """Calculate Go module h1: hash using go-dirhash-native (fallback).

    Returns None whenever the helper is unavailable, cannot be executed,
    times out, exits non-zero, or prints something that is not an h1: hash,
    so callers can fall through to another strategy instead of aborting.
    """
    if go_dirhash_helper is None:
        return None
    try:
        result = subprocess.run(
            [str(go_dirhash_helper), str(zip_path)],
            capture_output=True, text=True, check=False, timeout=60
        )
    except (subprocess.TimeoutExpired, OSError):
        # A hung or unrunnable helper must not kill the whole task;
        # signal failure so the caller can use the pure-Python path.
        return None
    if result.returncode != 0:
        return None
    hash_value = result.stdout.strip()
    if not hash_value.startswith("h1:"):
        return None
    return hash_value
| 98 | |||
| 99 | # Define helper functions BEFORE they are used | ||
def escape_module_path(path):
    """Escape capital letters using exclamation points (same as BitBake gomod.py).

    E.g. "github.com/Azure/x" -> "github.com/!azure/x", matching the on-disk
    layout of the Go module cache.
    """
    escaped = []
    for ch in path:
        # Only ASCII A-Z is escaped, exactly like the regex [A-Z].
        if 'A' <= ch <= 'Z':
            escaped.append('!' + ch.lower())
        else:
            escaped.append(ch)
    return ''.join(escaped)
| 104 | |||
def sanitize_module_name(name):
    """Strip surrounding whitespace and, if present, enclosing double quotes."""
    if not name:
        return name
    trimmed = name.strip()
    is_quoted = len(trimmed) >= 2 and trimmed.startswith('"') and trimmed.endswith('"')
    return trimmed[1:-1] if is_quoted else trimmed
| 113 | |||
| 114 | go_sum_hashes = {} | ||
| 115 | go_sum_entries = {} | ||
| 116 | go_sum_path = Path(d.getVar('S')) / "src" / "import" / "go.sum" | ||
| 117 | if go_sum_path.exists(): | ||
| 118 | with open(go_sum_path, 'r') as f: | ||
| 119 | for line in f: | ||
| 120 | parts = line.strip().split() | ||
| 121 | if len(parts) != 3: | ||
| 122 | continue | ||
| 123 | mod, ver, hash_value = parts | ||
| 124 | mod = sanitize_module_name(mod) | ||
| 125 | go_sum_entries[(mod, ver)] = hash_value | ||
| 126 | if mod.endswith('/go.mod') or not hash_value.startswith('h1:'): | ||
| 127 | continue | ||
| 128 | key = f"{mod}@{ver}" | ||
| 129 | go_sum_hashes.setdefault(key, hash_value) | ||
| 130 | |||
def load_require_versions(go_mod_path):
    """Parse a go.mod file's require directives into {module: version}.

    Supports both the single-line form ("require foo v1.2.3") and the
    parenthesized block form; comment lines inside a block are skipped.
    Returns an empty dict when the file does not exist.
    """
    versions = {}
    if not go_mod_path.exists():
        return versions

    inside_block = False
    with go_mod_path.open('r', encoding='utf-8') as f:
        for raw_line in f:
            line = raw_line.strip()
            if line.startswith('require ('):
                inside_block = True
            elif inside_block and line == ')':
                inside_block = False
            elif line.startswith('require ') and '(' not in line:
                # Single-line form: "require <module> <version> [// comment]"
                fields = line.split()
                if len(fields) >= 3:
                    versions[sanitize_module_name(fields[1])] = fields[2]
            elif inside_block and line and not line.startswith('//'):
                # Block entry: "<module> <version> [// indirect]"
                fields = line.split()
                if len(fields) >= 2:
                    versions[sanitize_module_name(fields[0])] = fields[1]

    return versions
| 160 | |||
def load_replacements(go_mod_path):
    """Parse a go.mod file's replace directives.

    Returns {old_module: {"old_version", "new_module", "new_version"}}.
    Handles both "replace a => b v1" single lines and "replace ( ... )"
    blocks. Versions are None when the directive omits them (e.g. a
    module-wide replacement, or a filesystem-path target).
    Returns an empty dict when the file does not exist.
    """
    replacements = {}
    if not go_mod_path.exists():
        return replacements

    def parse_replace_line(content):
        # One directive body, without the leading "replace " keyword.
        # Strip a trailing line comment first so "// why" text can't be
        # mistaken for a version token.
        if '//' in content:
            content = content.split('//', 1)[0].strip()
        if '=>' not in content:
            return
        left, right = [part.strip() for part in content.split('=>', 1)]
        left_parts = left.split()
        right_parts = right.split()
        if not left_parts or not right_parts:
            return
        old_module = sanitize_module_name(left_parts[0])
        # Versions are optional on either side of "=>".
        old_version = left_parts[1] if len(left_parts) > 1 else None
        new_module = sanitize_module_name(right_parts[0])
        new_version = right_parts[1] if len(right_parts) > 1 else None
        # NOTE: keyed by old module only - a later directive for the same
        # module (different old_version) overwrites the earlier one.
        replacements[old_module] = {
            "old_version": old_version,
            "new_module": new_module,
            "new_version": new_version,
        }

    in_block = False
    with go_mod_path.open('r', encoding='utf-8') as f:
        for raw_line in f:
            line = raw_line.strip()

            if line.startswith('replace ('):
                in_block = True
                continue
            if in_block and line == ')':
                in_block = False
                continue

            # Single-line form: "replace old => new v1.2.3"
            if line.startswith('replace ') and '(' not in line:
                parse_replace_line(line[len('replace '):])
                continue

            # Entry inside a replace ( ... ) block; skip comment lines.
            if in_block and line and not line.startswith('//'):
                parse_replace_line(line)

    return replacements
| 206 | |||
    # Load the top-level go.mod once; require/replace data drives the
    # version aliasing and cache population below.
    go_mod_path = Path(d.getVar('S')) / "src" / "import" / "go.mod"
    require_versions = load_require_versions(go_mod_path)
    replacements = load_replacements(go_mod_path)
| 210 | |||
def duplicate_module_version(module_path, source_version, alias_version, timestamp):
    """Expose cached module content under a second version.

    Needed when a replace directive pins a different version than the one
    that was actually fetched: the .zip/.mod/.ziphash artifacts are copied
    verbatim from source_version to alias_version, and a fresh .info file is
    written carrying the alias version and the given timestamp. No-ops when
    the versions match, the source artifacts are missing, or the alias zip
    already exists.
    """
    if alias_version == source_version:
        return

    escaped_module = escape_module_path(module_path)
    cache_dir = Path(d.getVar('S')) / "pkg" / "mod" / "cache" / "download"
    download_dir = cache_dir / escaped_module / "@v"
    download_dir.mkdir(parents=True, exist_ok=True)

    source_base = download_dir / escape_module_path(source_version)
    alias_base = download_dir / escape_module_path(alias_version)

    # Source zip+mod must both be present before we can clone them.
    if not (source_base.with_suffix('.zip').exists() and source_base.with_suffix('.mod').exists()):
        return
    # Don't clobber an alias that already exists.
    if alias_base.with_suffix('.zip').exists():
        return

    import shutil
    for ext in ('.zip', '.mod'):
        shutil.copy2(source_base.with_suffix(ext), alias_base.with_suffix(ext))
    source_ziphash = source_base.with_suffix('.ziphash')
    if source_ziphash.exists():
        shutil.copy2(source_ziphash, alias_base.with_suffix('.ziphash'))

    # Only the .info file is rewritten to carry the alias version.
    with open(alias_base.with_suffix('.info'), 'w') as f:
        json.dump({"Version": alias_version, "Time": timestamp}, f)

    bb.note(f"Duplicated module version {module_path}@{alias_version} from {source_version} for replace directive")
| 248 | |||
def create_module_zip(module_path, version, vcs_path, subdir, timestamp):
    """Create module zip file from git repository.

    Builds proxy-style cache entries (.info/.mod/.zip/.ziphash) for
    module_path@version under ${S}/pkg/mod/cache/download from the git
    checkout at vcs_path, then extracts the zip into ${S}/pkg/mod for
    offline builds. Returns the canonical module path on success, or
    None on failure.
    """
    module_path = sanitize_module_name(module_path)

    # Detect canonical module path FIRST from go.mod.
    # This prevents creating duplicate cache entries for replace directives.
    # For "github.com/google/cadvisor => github.com/k3s-io/cadvisor", the
    # k3s-io fork declares "module github.com/google/cadvisor" in its go.mod,
    # so we create the cache ONLY at github.com/google/cadvisor.
    def detect_canonical_module_path(vcs_path, subdir_hint, requested_module):
        """
        Read go.mod file to determine the canonical module path.
        This is critical for replace directives - always use the path declared
        in the module's own go.mod, not the replacement path.
        Returns (canonical_path, subdir) where subdir is the directory that
        contained the go.mod that was read ('' for the repo root).
        """
        path = Path(vcs_path)

        # Build list of candidate subdirs to check
        candidates = []
        if subdir_hint:
            candidates.append(subdir_hint)

        # Also try deriving subdir from module path (components after host/org/repo)
        parts = requested_module.split('/')
        if len(parts) > 3:
            guess = '/'.join(parts[3:])
            if guess and guess not in candidates:
                candidates.append(guess)

        # Always check root directory last
        if '' not in candidates:
            candidates.append('')

        # Search for go.mod file and read its module declaration
        for candidate in candidates:
            gomod_file = path / candidate / "go.mod" if candidate else path / "go.mod"
            if not gomod_file.exists():
                continue

            try:
                with gomod_file.open('r', encoding='utf-8') as fh:
                    # Only the first line is inspected; assumes the module
                    # directive is on line one -- TODO confirm for go.mod
                    # files with leading comments.
                    first_line = fh.readline().strip()
                    # Parse: "module github.com/example/repo"
                    if first_line.startswith('module '):
                        canonical = first_line[7:].strip()  # Skip "module "
                        # Remove any inline comments
                        if '//' in canonical:
                            canonical = canonical.split('//')[0].strip()
                        # CRITICAL: Remove quotes from module names
                        canonical = sanitize_module_name(canonical)
                        return canonical, candidate
            except (UnicodeDecodeError, IOError):
                continue

        # Fallback: if no go.mod found, use requested path
        bb.warn(f"No go.mod found for {requested_module} in {vcs_path}, using requested path")
        return requested_module, ''

    canonical_module_path, detected_subdir = detect_canonical_module_path(vcs_path, subdir, module_path)

    # Keep track of the original (requested) module path for replaced modules
    # We'll need to create symlinks from requested -> canonical after cache creation
    requested_module_path = module_path

    # If canonical path differs from requested path, this is a replace directive
    if canonical_module_path != module_path:
        bb.note(f"Replace directive detected: {module_path} -> canonical {canonical_module_path}")
        bb.note(f"Creating cache at canonical path, will symlink from requested path")
        module_path = canonical_module_path

    escaped_module = escape_module_path(module_path)
    escaped_version = escape_module_path(version)

    # Create cache directory structure using CANONICAL module path
    workdir = Path(d.getVar('WORKDIR'))
    s = Path(d.getVar('S'))
    cache_dir = s / "pkg" / "mod" / "cache" / "download"
    download_dir = cache_dir / escaped_module / "@v"
    download_dir.mkdir(parents=True, exist_ok=True)

    bb.note(f"Creating cache for {module_path}@{version}")

    # Override subdir with detected subdir from canonical path detection
    if detected_subdir:
        subdir = detected_subdir

    def detect_subdir() -> str:
        # Resolve which repository subdirectory holds the module: prefer the
        # hinted subdir, then a guess derived from the module path, then a
        # repo-wide search for a go.mod whose header names this exact module.
        hinted = subdir or ""
        path = Path(vcs_path)

        def path_exists(rel: str) -> bool:
            if not rel:
                return True
            return (path / rel).exists()

        candidate_order = []
        if hinted and hinted not in candidate_order:
            candidate_order.append(hinted)

        module_parts = module_path.split('/')
        if len(module_parts) > 3:
            guess = '/'.join(module_parts[3:])
            if guess and guess not in candidate_order:
                candidate_order.append(guess)

        target_header = f"module {module_path}\n"
        found = None
        try:
            for go_mod in path.rglob('go.mod'):
                rel = go_mod.relative_to(path)
                # Skip hidden directories and vendored copies of go.mod.
                if any(part.startswith('.') and part != '.' for part in rel.parts):
                    continue
                if 'vendor' in rel.parts:
                    continue
                try:
                    with go_mod.open('r', encoding='utf-8') as fh:
                        first_line = fh.readline()
                except UnicodeDecodeError:
                    continue
                if first_line.strip() == target_header.strip():
                    rel_dir = go_mod.parent.relative_to(path).as_posix()
                    found = rel_dir
                    break
        except Exception:
            # Best-effort search; fall back to the other candidates.
            pass

        if found is not None and found not in candidate_order:
            candidate_order.insert(0, found)

        candidate_order.append('')

        for candidate in candidate_order:
            if path_exists(candidate):
                return candidate
        return ''

    subdir_resolved = detect_subdir()

    # 1. Create .info file
    info_path = download_dir / f"{escaped_version}.info"
    info_data = {
        "Version": version,
        "Time": timestamp
    }
    with open(info_path, 'w') as f:
        json.dump(info_data, f)
    bb.debug(1, f"Created {info_path}")

    # 2. Create .mod file
    mod_path = download_dir / f"{escaped_version}.mod"
    effective_subdir = subdir_resolved

    def candidate_subdirs():
        # Candidate locations for the module's go.mod, most specific first.
        candidates = []
        parts = module_path.split('/')
        if len(parts) >= 4:
            extra = '/'.join(parts[3:])
            if extra:
                candidates.append(extra)

        if effective_subdir:
            candidates.insert(0, effective_subdir)
        else:
            candidates.append('')

        # Major-version-suffix modules (".../v2") may live in a v2/ subdir.
        suffix = parts[-1]
        if suffix.startswith('v') and suffix[1:].isdigit():
            suffix_path = f"{effective_subdir}/{suffix}" if effective_subdir else suffix
            if suffix_path not in candidates:
                candidates.insert(0, suffix_path)

        if '' not in candidates:
            candidates.append('')
        return candidates

    gomod_file = None
    for candidate in candidate_subdirs():
        path_candidate = Path(vcs_path) / candidate / "go.mod" if candidate else Path(vcs_path) / "go.mod"
        if path_candidate.exists():
            gomod_file = path_candidate
            # Remember where we actually found go.mod for later steps.
            if candidate != effective_subdir:
                effective_subdir = candidate
            break

    subdir_resolved = effective_subdir

    if gomod_file is None:
        gomod_file = Path(vcs_path) / effective_subdir / "go.mod" if effective_subdir else Path(vcs_path) / "go.mod"

    def synthesize_go_mod(modname, go_version=None):
        # Minimal go.mod content used when the checkout's go.mod is absent
        # or declares a different module path.
        sanitized = sanitize_module_name(modname)
        if go_version:
            return f"module {sanitized}\n\ngo {go_version}\n".encode('utf-8')
        return f"module {sanitized}\n".encode('utf-8')

    mod_content = None

    def is_vendored_package(rel_path):
        # True when rel_path points inside a vendored package (at least one
        # path component after the vendor/ segment); "vendor/modules.txt"
        # itself is NOT considered vendored.
        # NOTE(review): in the nested ".../vendor/..." branch the slice below
        # starts at prefix_len from the STRING START, not from idx; verify
        # whether rel_path[idx + prefix_len:] was intended.
        if rel_path.startswith("vendor/"):
            prefix_len = len("vendor/")
        else:
            idx = rel_path.find("/vendor/")
            if idx < 0:
                return False
            prefix_len = len("/vendor/")
        return "/" in rel_path[prefix_len:]

    if '+incompatible' in version:
        # +incompatible modules predate go.mod; always synthesize one.
        mod_content = synthesize_go_mod(module_path)
        bb.debug(1, f"Synthesizing go.mod for +incompatible module {module_path}@{version}")
    elif gomod_file.exists():
        # Read the existing go.mod and check if module declaration matches
        mod_content = gomod_file.read_bytes()

        # Parse the module declaration to check for mismatch
        import re
        match = re.search(rb'^\s*module\s+(\S+)', mod_content, re.MULTILINE)
        if match:
            declared_module = match.group(1).decode('utf-8', errors='ignore')
            if declared_module != module_path:
                # Extract go version directive from original go.mod before synthesizing
                go_version = None
                go_match = re.search(rb'^\s*go\s+(\d+\.\d+(?:\.\d+)?)', mod_content, re.MULTILINE)
                if go_match:
                    go_version = go_match.group(1).decode('utf-8', errors='ignore')
                # Module declaration doesn't match import path - synthesize correct one
                bb.warn(f"Module {module_path}@{version}: go.mod declares '{declared_module}' but should be '{module_path}', synthesizing correct go.mod (preserving go {go_version})")
                mod_content = synthesize_go_mod(module_path, go_version)
    else:
        bb.debug(1, f"go.mod not found at {gomod_file}")
        mod_content = synthesize_go_mod(module_path)

    with open(mod_path, 'wb') as f:
        f.write(mod_content)
    bb.debug(1, f"Created {mod_path}")

    # Capture a repo-root license file so it can be injected into subdir
    # module zips that lack their own copy.
    license_blobs = []
    if effective_subdir:
        license_candidates = [
            "LICENSE",
            "LICENSE.txt",
            "LICENSE.md",
            "LICENCE",
            "COPYING",
            "COPYING.txt",
            "COPYING.md",
        ]
        for candidate in license_candidates:
            try:
                content = subprocess.check_output(
                    ["git", "show", f"HEAD:{candidate}"],
                    cwd=vcs_path,
                    stderr=subprocess.DEVNULL,
                )
            except subprocess.CalledProcessError:
                continue
            license_blobs.append((Path(candidate).name, content))
            break

    # 3. Create .zip file using git archive + filtering
    zip_path = download_dir / f"{escaped_version}.zip"
    # IMPORTANT: For replaced modules, zip internal paths must use the REQUESTED module path,
    # not the canonical path. Go expects to unzip files to requested_module@version/ directory.
    zip_prefix = f"{requested_module_path}@{version}/"
    module_key = f"{module_path}@{version}"
    # NOTE(review): go_sum_hashes is keyed by the CANONICAL path here; for
    # replaced modules go.sum may record the entry under a different path --
    # confirm against how go_sum_hashes is built earlier in this task.
    expected_hash = go_sum_hashes.get(module_key)

    import tarfile
    import tempfile

    # IMPORTANT: assemble_zip() must run INSIDE TemporaryDirectory context.
    # The add_zip_entry() and zipfile.ZipFile code MUST be indented inside the
    # 'with tempfile.TemporaryDirectory()' block. If placed outside, the temp
    # directory is deleted before files are added, resulting in empty zips.
    def assemble_zip(include_vendor_modules: bool) -> str:
        """
        Create module zip and compute h1: hash in single pass.
        Returns h1: hash string on success, None on failure.

        This avoids re-reading the zip file after creation by tracking
        file hashes during the zip creation process.
        """
        import base64

        try:
            with tempfile.TemporaryDirectory(dir=str(download_dir)) as tmpdir:
                tar_path = Path(tmpdir) / "archive.tar"
                archive_cmd = ["git", "archive", "--format=tar", "-o", str(tar_path), "HEAD"]
                if subdir_resolved:
                    archive_cmd.append(subdir_resolved)

                subprocess.run(archive_cmd, cwd=str(vcs_path), check=True, capture_output=True)

                with tarfile.open(tar_path, 'r') as tf:
                    tf.extractall(tmpdir)
                tar_path.unlink(missing_ok=True)

                extract_root = Path(tmpdir)
                if subdir_resolved:
                    extract_root = extract_root / subdir_resolved

                # Nested modules (subdirs with their own go.mod) are excluded
                # from this module's zip, mirroring proxy behavior.
                excluded_prefixes = []
                for gomod_file in extract_root.rglob("go.mod"):
                    rel_path = gomod_file.relative_to(extract_root).as_posix()
                    if rel_path != "go.mod":
                        prefix = gomod_file.parent.relative_to(extract_root).as_posix()
                        if prefix and not prefix.endswith("/"):
                            prefix += "/"
                        excluded_prefixes.append(prefix)

                if zip_path.exists():
                    zip_path.unlink()

                # Track file hashes for h1: calculation during zip creation
                hash_entries = []  # List of (arcname, sha256_hex)

                def add_zip_entry(zf, arcname, data, mode=None):
                    # Deterministic zip entry: fixed DOS timestamp, unix
                    # create system, permissions preserved in external_attr.
                    info = zipfile.ZipInfo(arcname)
                    info.date_time = (1980, 1, 1, 0, 0, 0)
                    info.compress_type = zipfile.ZIP_DEFLATED
                    info.create_system = 3  # Unix
                    if mode is None:
                        mode = stat.S_IFREG | 0o644
                    info.external_attr = ((mode & 0xFFFF) << 16)
                    zf.writestr(info, data)
                    # Track hash for h1: calculation
                    hash_entries.append((arcname, hashlib.sha256(data).hexdigest()))

                with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf:
                    for file_path in sorted(extract_root.rglob("*")):
                        if file_path.is_dir():
                            continue

                        rel_path = file_path.relative_to(extract_root).as_posix()

                        # Symlinks are not representable in module zips.
                        if file_path.is_symlink():
                            continue

                        if is_vendored_package(rel_path):
                            continue

                        if rel_path == "vendor/modules.txt" and not include_vendor_modules:
                            continue

                        if any(rel_path.startswith(prefix) for prefix in excluded_prefixes):
                            continue
                        if rel_path.endswith("go.mod") and rel_path != "go.mod":
                            continue

                        if rel_path == "go.mod":
                            # Ship the (possibly synthesized) go.mod content.
                            data = mod_content
                            mode = stat.S_IFREG | 0o644
                        else:
                            data = file_path.read_bytes()
                            try:
                                mode = file_path.stat().st_mode
                            except FileNotFoundError:
                                mode = stat.S_IFREG | 0o644

                        add_zip_entry(zf, zip_prefix + rel_path, data, mode)

                    # Inject the repo-root license when the subdir lacks one.
                    for license_name, content in license_blobs:
                        if (extract_root / license_name).exists():
                            continue
                        add_zip_entry(zf, zip_prefix + license_name, content, stat.S_IFREG | 0o644)

                # Calculate h1: hash from tracked entries (sorted by filename)
                # NOTE(review): golang.org/x/mod dirhash Hash1 formats each
                # line as "%x  %s\n" (TWO spaces); the single space used here
                # may yield h1: values that never match go.sum -- confirm
                # against calculate_h1_hash_python/native used elsewhere.
                hash_entries.sort(key=lambda x: x[0])
                lines = [f"{h} {name}\n" for name, h in hash_entries]
                summary = "".join(lines).encode('utf-8')
                final_hash = hashlib.sha256(summary).digest()
                inline_hash = "h1:" + base64.b64encode(final_hash).decode('ascii')
                return inline_hash

        except subprocess.CalledProcessError as e:
            bb.error(f"Failed to create zip for {module_path}@{version}: {e.stderr.decode()}")
            return None
        except Exception as e:
            bb.error(f"Failed to assemble zip for {module_path}@{version}: {e}")
            # Fallback: try native tool if zip was created but hash calculation failed
            if zip_path.exists():
                fallback_hash = calculate_h1_hash_native(zip_path)
                if fallback_hash:
                    bb.warn(f"Using go-dirhash-native fallback for {module_path}@{version}")
                    return fallback_hash
            return None

    hash_value = assemble_zip(include_vendor_modules=True)
    if hash_value is None:
        return None

    # Some upstream zips omit vendor/modules.txt; retry without it when the
    # go.sum hash disagrees.
    if expected_hash and hash_value and hash_value != expected_hash:
        bb.debug(1, f"Hash mismatch for {module_key} ({hash_value} != {expected_hash}), retrying without vendor/modules.txt")
        retry_hash = assemble_zip(include_vendor_modules=False)
        if retry_hash is None:
            return None
        hash_value = retry_hash

    # NOTE(review): when expected_hash is None (module absent from go.sum)
    # this warns with "!= None"; consider guarding on expected_hash as well.
    if hash_value and hash_value != expected_hash:
        bb.warn(f"{module_key} still mismatches expected hash after retry ({hash_value} != {expected_hash})")

    if hash_value:
        ziphash_path = download_dir / f"{escaped_version}.ziphash"
        with open(ziphash_path, 'w') as f:
            f.write(f"{hash_value}\n")
        bb.debug(1, f"Created {ziphash_path}")
    else:
        bb.warn(f"Skipping ziphash for {module_key} due to calculation errors")

    # 5. Extract zip to pkg/mod for offline builds
    # This step can be skipped if Go extracts on-demand from cache (experimental)
    skip_extraction = d.getVar('GO_MOD_SKIP_ZIP_EXTRACTION') == "1"
    if not skip_extraction:
        extract_dir = s / "pkg" / "mod"
        try:
            with zipfile.ZipFile(zip_path, 'r') as zip_ref:
                zip_ref.extractall(extract_dir)
            bb.debug(1, f"Extracted {module_path}@{version} to {extract_dir}")
        except Exception as e:
            bb.error(f"Failed to extract {module_path}@{version}: {e}")
            return None

    # 6. If this was a replaced module, create symlinks from requested path to canonical path
    # This ensures Go can find the module by either name
    if requested_module_path != module_path:
        import os
        escaped_requested = escape_module_path(requested_module_path)
        requested_download_dir = cache_dir / escaped_requested / "@v"
        requested_download_dir.mkdir(parents=True, exist_ok=True)

        # Create symlinks for all cache files (.info, .mod, .zip, .ziphash)
        for suffix in ['.info', '.mod', '.zip', '.ziphash']:
            canonical_file = download_dir / f"{escaped_version}{suffix}"
            requested_file = requested_download_dir / f"{escaped_version}{suffix}"

            if canonical_file.exists() and not requested_file.exists():
                try:
                    # Calculate relative path from requested to canonical
                    rel_path = os.path.relpath(canonical_file, requested_file.parent)
                    os.symlink(rel_path, requested_file)
                    bb.debug(1, f"Created symlink: {requested_file} -> {rel_path}")
                except Exception as e:
                    bb.warn(f"Failed to create symlink for {requested_module_path}: {e}")

        bb.note(f"Created symlinks for replaced module: {requested_module_path} -> {module_path}")

    # Return the canonical module path for post-processing (e.g., duplicate version handling)
    return module_path
| 697 | |||
def regenerate_go_sum():
    """Rebuild go.sum from the module cache this task populated.

    Existing go.sum entries are kept; entries derived from the cached
    .zip/.mod files are layered on top (the cache wins on conflict).
    """
    s_path = Path(d.getVar('S'))
    cache_dir = s_path / "pkg" / "mod" / "cache" / "download"
    go_sum_path = s_path / "src" / "import" / "go.sum"

    if not cache_dir.exists():
        bb.warn("Module cache directory not found - skipping go.sum regeneration")
        return

    def zip_checksum(zip_file):
        """Calculate h1: hash for a module zip file (pure Python with native fallback)"""
        try:
            checksum = calculate_h1_hash_python(zip_file)
            if checksum:
                return checksum
        except Exception as e:
            bb.debug(1, f"Python hash failed for {zip_file}: {e}")

        # Fallback to native tool
        checksum = calculate_h1_hash_native(zip_file)
        if checksum:
            return checksum

        bb.warn(f"Failed to calculate zip checksum for {zip_file}")
        return None

    def mod_checksum(go_mod_file):
        # h1: of a go.mod is sha256 over the line "<sha256hex> go.mod\n".
        try:
            raw = go_mod_file.read_bytes()
        except FileNotFoundError:
            return None

        import base64

        file_hash = hashlib.sha256(raw).hexdigest()
        digest = hashlib.sha256(f"{file_hash} go.mod\n".encode('ascii')).digest()
        return "h1:" + base64.b64encode(digest).decode('ascii')

    def unescape(value):
        # Reverse Go's module-path escaping: "!x" -> "X".
        import re

        return re.sub(r'!([a-z])', lambda m: m.group(1).upper(), value)

    # Preserve whatever go.sum already contains.
    existing_entries = {}
    if go_sum_path.exists():
        with open(go_sum_path, 'r') as f:
            for line in f:
                fields = line.strip().split()
                if len(fields) != 3:
                    continue
                mod, ver, checksum = fields
                existing_entries[(sanitize_module_name(mod), ver)] = checksum

    # Derive fresh entries from every zip in the cache.
    new_entries = {}
    for zip_file in sorted(cache_dir.rglob("*.zip")):
        checksum = zip_checksum(zip_file)
        if not checksum:
            continue

        parts = zip_file.parts
        try:
            v_index = parts.index('@v')
            download_index = parts.index('download')
        except ValueError:
            bb.warn(f"Unexpected cache layout for {zip_file}")
            continue

        module_path = unescape('/'.join(parts[download_index + 1:v_index]))
        version = unescape(zip_file.stem)

        new_entries[(module_path, version)] = checksum

        gomod_hash = mod_checksum(zip_file.with_suffix('.mod'))
        if gomod_hash:
            new_entries[(module_path, f"{version}/go.mod")] = gomod_hash

    if not new_entries and not existing_entries:
        bb.warn("No go.sum entries available - skipping regeneration")
        return

    merged = dict(existing_entries)
    merged.update(new_entries)

    go_sum_path.parent.mkdir(parents=True, exist_ok=True)
    with open(go_sum_path, 'w') as f:
        for key in sorted(merged):
            mod, ver = key
            f.write(f"{mod} {ver} {merged[key]}\n")

    bb.debug(1, f"Regenerated go.sum with {len(merged)} entries")
| 795 | |||
# Process modules sequentially - I/O bound workload, parallelization causes disk thrashing
workdir = Path(d.getVar('WORKDIR'))
modules_data = json.loads(d.getVar('GO_MODULE_CACHE_DATA'))

bb.note(f"Building module cache for {len(modules_data)} modules")

# Track results from processing
results = []  # List of (module_info, success, actual_module_path)
success_count = 0
fail_count = 0

for i, module in enumerate(modules_data, 1):
    vcs_hash = module['vcs_hash']
    vcs_path = workdir / "sources" / "vcs_cache" / vcs_hash

    # Create module cache files; returns the canonical module path or None.
    actual_module_path = create_module_zip(
        module['module'],
        module['version'],
        vcs_path,
        module.get('subdir', ''),
        module['timestamp'],
    )

    if actual_module_path is not None:
        success_count += 1
        results.append((module, True, actual_module_path))
    else:
        fail_count += 1
        results.append((module, False, None))

    # Progress update every 100 modules
    if i % 100 == 0:
        bb.note(f"Progress: {i}/{len(modules_data)} modules processed")

bb.note(f"Module processing complete: {success_count} succeeded, {fail_count} failed")

# Post-processing: handle duplicate versions for replace directives (must be sequential)
for module_info, success, actual_module_path in results:
    if success and actual_module_path:
        alias_info = replacements.get(actual_module_path)
        if alias_info:
            # Prefer the version named in the replace directive, then the
            # require block; fall back to whatever go.sum lists.
            alias_version = alias_info.get("old_version") or require_versions.get(actual_module_path)
            if alias_version is None:
                # NOTE(review): go_sum_entries is not defined anywhere visible
                # in this task body; confirm it is created earlier, otherwise
                # this branch raises NameError at runtime.
                for (mod, ver), _hash in go_sum_entries.items():
                    if mod == actual_module_path and not ver.endswith('/go.mod'):
                        alias_version = ver
                        break
            if alias_version and alias_version != module_info['version']:
                duplicate_module_version(actual_module_path, module_info['version'], alias_version, module_info['timestamp'])

# Only rewrite go.sum when every module was cached successfully.
if fail_count == 0:
    regenerate_go_sum()
else:
    bb.warn("Skipping go.sum regeneration due to module cache failures")

bb.note(f"Module cache complete: {success_count} succeeded, {fail_count} failed")

if fail_count > 0:
    bb.fatal(f"Failed to create cache for {fail_count} modules")
| 856 | } | ||
| 857 | |||
| 858 | addtask create_module_cache after do_unpack do_prepare_recipe_sysroot before do_configure | ||
| 859 | |||
| 860 | |||
| 861 | python do_sync_go_files() { | ||
| 862 | """ | ||
| 863 | Synchronize go.mod and go.sum with the module cache we built from git sources. | ||
| 864 | |||
| 865 | This task solves the "go: updates to go.mod needed" error by ensuring go.mod | ||
| 866 | declares ALL modules present in our module cache, and go.sum has checksums | ||
| 867 | matching our git-built modules. | ||
| 868 | |||
| 869 | Architecture: Option 2 (Rewrite go.mod/go.sum approach) | ||
| 870 | - Scans pkg/mod/cache/download/ for ALL modules we built | ||
| 871 | - Regenerates go.mod with complete require block | ||
| 872 | - Regenerates go.sum with our h1: checksums from .ziphash files | ||
| 873 | """ | ||
| 874 | import json | ||
| 875 | import hashlib | ||
| 876 | import re | ||
| 877 | from pathlib import Path | ||
| 878 | |||
| 879 | bb.note("Synchronizing go.mod and go.sum with module cache") | ||
| 880 | |||
| 881 | s = Path(d.getVar('S')) | ||
| 882 | cache_dir = s / "pkg" / "mod" / "cache" / "download" | ||
| 883 | go_mod_path = s / "src" / "import" / "go.mod" | ||
| 884 | go_sum_path = s / "src" / "import" / "go.sum" | ||
| 885 | |||
| 886 | if not cache_dir.exists(): | ||
| 887 | bb.fatal("Module cache directory not found - run do_create_module_cache first") | ||
| 888 | |||
def unescape(escaped):
    """Undo Go module-path escaping (reverse of escape_module_path).

    Each "!x" sequence, with x a lowercase letter, decodes to uppercase "X".
    """
    import re

    def restore(match):
        return match.group(1).upper()

    return re.sub(r'!([a-z])', restore, escaped)
| 893 | |||
def sanitize_module_name(name):
    """Remove surrounding double quotes added by legacy tools.

    Also trims leading/trailing whitespace. Falsy inputs are returned
    unchanged.
    """
    if not name:
        return name
    trimmed = name.strip()
    if trimmed.startswith('"') and trimmed.endswith('"') and len(trimmed) >= 2:
        return trimmed[1:-1]
    return trimmed
| 902 | |||
def load_require_versions(go_mod_path):
    """Collect a module -> version map from go.mod require directives.

    Handles both single-line "require mod vX" directives and parenthesised
    require blocks; comment-only lines inside a block are skipped. Returns
    an empty dict when go.mod does not exist.
    """
    versions = {}
    if not go_mod_path.exists():
        return versions

    inside_block = False
    with go_mod_path.open('r', encoding='utf-8') as f:
        for raw in f:
            entry = raw.strip()

            if entry.startswith('require ('):
                inside_block = True
                continue
            if inside_block and entry == ')':
                inside_block = False
                continue

            if entry.startswith('require ') and '(' not in entry:
                fields = entry.split()
                if len(fields) >= 3:
                    versions[sanitize_module_name(fields[1])] = fields[2]
                continue

            if inside_block and entry and not entry.startswith('//'):
                fields = entry.split()
                if len(fields) >= 2:
                    versions[sanitize_module_name(fields[0])] = fields[1]

    return versions
| 932 | |||
def load_replacements(go_mod_path):
    """Parse replace directives from go.mod.

    Returns a dict keyed by the replaced (old) module path; each value is a
    dict with "old_version", "new_module" and "new_version" (None when the
    directive omits a version). Handles single-line directives and
    parenthesised replace blocks alike.
    """
    replacements = {}
    if not go_mod_path.exists():
        return replacements

    def record(entry):
        # Drop any trailing // comment before parsing.
        if '//' in entry:
            entry = entry.split('//', 1)[0].strip()
        if not entry or '=>' not in entry:
            return
        lhs, rhs = (side.strip() for side in entry.split('=>', 1))
        lhs_fields = lhs.split()
        rhs_fields = rhs.split()
        if not lhs_fields or not rhs_fields:
            return
        replacements[sanitize_module_name(lhs_fields[0])] = {
            "old_version": lhs_fields[1] if len(lhs_fields) > 1 else None,
            "new_module": sanitize_module_name(rhs_fields[0]),
            "new_version": rhs_fields[1] if len(rhs_fields) > 1 else None,
        }

    inside_block = False
    with go_mod_path.open('r', encoding='utf-8') as f:
        for raw in f:
            stripped = raw.strip()

            if stripped.startswith('replace ('):
                inside_block = True
                continue
            if inside_block and stripped == ')':
                inside_block = False
                continue

            if stripped.startswith('replace ') and '(' not in stripped:
                record(stripped[len('replace '):])
                continue

            if inside_block and stripped and not stripped.startswith('//'):
                record(stripped)

    return replacements
| 978 | |||
require_versions = load_require_versions(go_mod_path)
replacements = load_replacements(go_mod_path)

import base64

def mod_go_sum_hash(mod_file):
    """Return the go.sum 'h1:' checksum for a single go.mod file.

    Mirrors golang.org/x/mod/sumdb/dirhash.Hash1 for a one-file list:
    sha256 over the line "<hex-sha256-of-file>  go.mod\n" (note the TWO
    spaces, matching sha256sum output format), base64-encoded and
    prefixed with "h1:". A single space here produces checksums the Go
    toolchain rejects when it verifies go.sum.
    """
    file_hash = hashlib.sha256(mod_file.read_bytes()).hexdigest()
    summary = f"{file_hash}  go.mod\n".encode('ascii')
    return "h1:" + base64.b64encode(hashlib.sha256(summary).digest()).decode('ascii')

# 1. Scan module cache to discover ALL modules we built
# Map: (module_path, version) -> {"zip_checksum": str, "mod_path": Path}
our_modules = {}

bb.note("Scanning module cache...")
for zip_file in sorted(cache_dir.rglob("*.zip")):
    # Cache layout: .../download/<escaped module path>/@v/<escaped version>.zip
    parts = zip_file.parts
    try:
        v_index = parts.index('@v')
        download_index = parts.index('download')
    except ValueError:
        # Not a module-cache zip; ignore stray archives.
        continue

    escaped_module = '/'.join(parts[download_index + 1:v_index])
    module_path = sanitize_module_name(unescape(escaped_module))
    version = unescape(zip_file.stem)

    # The h1: zip checksum lives in the sibling .ziphash file.
    ziphash_file = zip_file.with_suffix('.ziphash')
    if ziphash_file.exists():
        checksum = ziphash_file.read_text().strip()
        # Some .ziphash files end with a literal backslash-n sequence;
        # drop that two-character suffix.
        if checksum.endswith('\\n'):
            checksum = checksum[:-2]
        our_modules[(module_path, version)] = {
            "zip_checksum": checksum,
            "mod_path": zip_file.with_suffix('.mod'),
        }

if not our_modules:
    bb.fatal("No modules found in cache - cannot synchronize go.mod/go.sum")

bb.note(f"Found {len(our_modules)} modules in cache")

# 2. DO NOT modify go.mod - keep the original module declarations
# The real problem is go.sum has wrong checksums (proxy vs git), not missing modules
bb.note("Leaving go.mod unchanged - only updating go.sum with git-based checksums")

# 3. Read original go.sum to preserve entries for modules not in our cache
original_sum_entries = {}
if go_sum_path.exists():
    for line in go_sum_path.read_text().splitlines():
        line = line.strip()
        if not line:
            continue
        parts = line.split()
        if len(parts) >= 3:
            # go.sum line: <module> <version[/go.mod]> <h1:checksum>
            original_sum_entries[(sanitize_module_name(parts[0]), parts[1])] = parts[2]

# 4. Build new go.sum by updating checksums for modules we built
sum_entries_dict = original_sum_entries.copy()  # Start with original

for (module, version), entry in our_modules.items():
    # Update .zip checksum
    sum_entries_dict[(module, version)] = entry["zip_checksum"]

    # Also update the /go.mod entry if we have the .mod file
    mod_file = entry["mod_path"]
    if mod_file.exists():
        sum_entries_dict[(module, f"{version}/go.mod")] = mod_go_sum_hash(mod_file)

# 5. Duplicate checksums for modules that use replace directives so the original
#    module path (e.g., github.com/Mirantis/...) keeps matching go.sum entries.
for alias_module, repl in replacements.items():
    alias_module = sanitize_module_name(alias_module)
    alias_version = repl.get("old_version")
    if alias_version is None:
        alias_version = require_versions.get(alias_module)
    if alias_version is None:
        # If go.mod didn't pin a replacement version, derive it from go.sum
        for (mod, version) in list(original_sum_entries.keys()):
            if mod == alias_module and not version.endswith('/go.mod'):
                alias_version = version
                break
    if not alias_version:
        continue

    target_module = repl.get("new_module")
    target_version = repl.get("new_version")
    if target_version is None:
        target_version = require_versions.get(target_module)
    if not target_module or not target_version:
        continue

    # Prefer the replacement target's cache entry; fall back to the alias
    # path itself when the target wasn't cached under its own path.
    entry = our_modules.get((target_module, target_version))
    if not entry and alias_module != target_module:
        entry = our_modules.get((alias_module, target_version))
    if not entry:
        continue

    sum_entries_dict[(alias_module, alias_version)] = entry["zip_checksum"]

    mod_file = entry["mod_path"]
    if mod_file.exists():
        sum_entries_dict[(alias_module, f"{alias_version}/go.mod")] = mod_go_sum_hash(mod_file)

# Write merged go.sum, sorted for deterministic output
sum_lines = [f"{module} {version} {checksum}"
             for (module, version), checksum in sorted(sum_entries_dict.items())]
go_sum_path.write_text('\n'.join(sum_lines) + '\n')
bb.note(f"Updated go.sum: {len(sum_entries_dict)} total entries, {len(our_modules)} updated from cache")

bb.note("go.mod and go.sum synchronized successfully")
| 1105 | } | ||
| 1106 | |||
| 1107 | addtask sync_go_files after do_create_module_cache before do_compile | ||
