author     Alberto Pianon <alberto@pianon.eu>   2023-03-22 21:45:44 +0100
committer  Alberto Pianon <alberto@pianon.eu>   2023-03-22 21:45:44 +0100
commit     6f00e3654069df5575b8fb628becf6b1b61b6b21 (patch)
tree       52b8fad4fd5614a00d891d450485561a8d667c1d
parent     54251b9bb27241eab9368c9facc150f709ccc3c5 (diff)
download   bitbake-contrib-alpianon/srctrace.tar.gz

add upstream source data collection for unpack (alpianon/srctrace)
This patch subclasses TraceUnpackBase in order to implement full upstream
metadata collection and processing for do_unpack. The final output is a
compressed json file, stored in WORKDIR/temp for each recipe.

Data format is described in the help text of bb.trace.unpack module, while
some real-world examples can be found in lib/bb/tests/trace-testdata

Signed-off-by: Alberto Pianon <alberto@pianon.eu>
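For illustration only, a minimal sketch of how the generated file can be read back
(the recipe name "foo", its version and the WORKDIR path below are hypothetical;
bb.compress.zstd is used the same way as in the tests added by this patch):

    import json
    import bb.compress.zstd

    # hypothetical path: <WORKDIR>/temp/<PN>-<PV>.unpack.trace.json.zst
    path = "tmp/work/core2-64-poky-linux/foo/1.0-r0/temp/foo-1.0.unpack.trace.json.zst"

    with bb.compress.zstd.open(path, "rt", encoding="utf-8", num_threads=1) as f:
        trace_data = json.load(f)

    # entries are indexed by SPDX-style download location
    for dl_loc, entry in trace_data.items():
        print(dl_loc, entry.get("src_uri"), len(entry.get("files", {})))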
-rwxr-xr-x  bin/bitbake-selftest                                                             1
-rw-r--r--  lib/bb/tests/trace-testdata/bzip2-1.0.8.unpack.trace.json.zst                    bin 0 -> 5885 bytes
-rw-r--r--  lib/bb/tests/trace-testdata/gettext-minimal-native-0.21.unpack.trace.json.zst    bin 0 -> 891 bytes
-rw-r--r--  lib/bb/tests/trace-testdata/gosu-1.14.unpack.trace.json.zst                      bin 0 -> 45305 bytes
-rw-r--r--  lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json                             309
-rw-r--r--  lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json.zst                         bin 0 -> 4251 bytes
-rw-r--r--  lib/bb/tests/trace-testdata/python3-cryptography-37.0.4.unpack.trace.json.zst    bin 0 -> 180902 bytes
-rw-r--r--  lib/bb/tests/trace-testdata/snappy-1.1.9.unpack.trace.json.zst                   bin 0 -> 17504 bytes
-rw-r--r--  lib/bb/tests/trace-testdata/systemd-251.8.unpack.trace.json.zst                  bin 0 -> 179603 bytes
-rw-r--r--  lib/bb/tests/trace-testdata/test_npm-1.0.0.unpack.trace.json.zst                 bin 0 -> 23286 bytes
-rw-r--r--  lib/bb/tests/trace.py                                                            588
-rw-r--r--  lib/bb/trace/unpack.py                                                           613
12 files changed, 1511 insertions, 0 deletions
diff --git a/bin/bitbake-selftest b/bin/bitbake-selftest
index 7be354f9e..4114bda3f 100755
--- a/bin/bitbake-selftest
+++ b/bin/bitbake-selftest
@@ -27,6 +27,7 @@ tests = ["bb.tests.codeparser",
"bb.tests.event",
"bb.tests.fetch",
"bb.tests.trace_base",
+ "bb.tests.trace",
"bb.tests.parse",
"bb.tests.persist_data",
"bb.tests.runqueue",
diff --git a/lib/bb/tests/trace-testdata/bzip2-1.0.8.unpack.trace.json.zst b/lib/bb/tests/trace-testdata/bzip2-1.0.8.unpack.trace.json.zst
new file mode 100644
index 000000000..1d7f975d3
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/bzip2-1.0.8.unpack.trace.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace-testdata/gettext-minimal-native-0.21.unpack.trace.json.zst b/lib/bb/tests/trace-testdata/gettext-minimal-native-0.21.unpack.trace.json.zst
new file mode 100644
index 000000000..95d9d6cc8
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/gettext-minimal-native-0.21.unpack.trace.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace-testdata/gosu-1.14.unpack.trace.json.zst b/lib/bb/tests/trace-testdata/gosu-1.14.unpack.trace.json.zst
new file mode 100644
index 000000000..cbc167c43
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/gosu-1.14.unpack.trace.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json b/lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json
new file mode 100644
index 000000000..ab3ae283c
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json
@@ -0,0 +1,309 @@
+{
+ "name": "test_npm",
+ "version": "1.0.0",
+ "lockfileVersion": 1,
+ "requires": true,
+ "dependencies": {
+ "file-renamer": {
+ "version": "0.1.0",
+ "resolved": "https://registry.npmjs.org/file-renamer/-/file-renamer-0.1.0.tgz",
+ "integrity": "sha512-0X5vOIV03HCVcgDvxdKuh83AW6jhkhrh5QH6jMXA++r3Fo4B4gAiOJ39Fk/1nJ5DLLBzEglDMAxewo8xOhON4Q==",
+ "requires": {
+ "fs": "^0.0.1-security",
+ "glob": "^9.3.1",
+ "optimist": "^0.6.1",
+ "path": "^0.12.7"
+ },
+ "dependencies": {
+ "fs": {
+ "version": "0.0.1-security",
+ "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz",
+ "integrity": "sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w=="
+ },
+ "glob": {
+ "version": "9.3.1",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-9.3.1.tgz",
+ "integrity": "sha512-qERvJb7IGsnkx6YYmaaGvDpf77c951hICMdWaFXyH3PlVob8sbPJJyJX0kWkiCWyXUzoy9UOTNjGg0RbD8bYIw==",
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "minimatch": "^7.4.1",
+ "minipass": "^4.2.4",
+ "path-scurry": "^1.6.1"
+ },
+ "dependencies": {
+ "fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
+ },
+ "minimatch": {
+ "version": "7.4.2",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-7.4.2.tgz",
+ "integrity": "sha512-xy4q7wou3vUoC9k1xGTXc+awNdGaGVHtFUaey8tiX4H1QRc04DZ/rmDFwNm2EBsuYEhAZ6SgMmYf3InGY6OauA==",
+ "requires": {
+ "brace-expansion": "^2.0.1"
+ },
+ "dependencies": {
+ "brace-expansion": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+ "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+ "requires": {
+ "balanced-match": "^1.0.0"
+ },
+ "dependencies": {
+ "balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
+ }
+ }
+ }
+ }
+ },
+ "minipass": {
+ "version": "4.2.5",
+ "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.2.5.tgz",
+ "integrity": "sha512-+yQl7SX3bIT83Lhb4BVorMAHVuqsskxRdlmO9kTpyukp8vsm2Sn/fUOV9xlnG8/a5JsypJzap21lz/y3FBMJ8Q=="
+ },
+ "path-scurry": {
+ "version": "1.6.1",
+ "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.6.1.tgz",
+ "integrity": "sha512-OW+5s+7cw6253Q4E+8qQ/u1fVvcJQCJo/VFD8pje+dbJCF1n5ZRMV2AEHbGp+5Q7jxQIYJxkHopnj6nzdGeZLA==",
+ "requires": {
+ "lru-cache": "^7.14.1",
+ "minipass": "^4.0.2"
+ },
+ "dependencies": {
+ "lru-cache": {
+ "version": "7.18.3",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
+ "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA=="
+ }
+ }
+ }
+ }
+ },
+ "optimist": {
+ "version": "0.6.1",
+ "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz",
+ "integrity": "sha512-snN4O4TkigujZphWLN0E//nQmm7790RYaE53DdL7ZYwee2D8DDo9/EyYiKUfN3rneWUjhJnueija3G9I2i0h3g==",
+ "requires": {
+ "minimist": "~0.0.1",
+ "wordwrap": "~0.0.2"
+ },
+ "dependencies": {
+ "minimist": {
+ "version": "0.0.10",
+ "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.10.tgz",
+ "integrity": "sha512-iotkTvxc+TwOm5Ieim8VnSNvCDjCK9S8G3scJ50ZthspSxa7jx50jkhYduuAtAjvfDUwSgOwf8+If99AlOEhyw=="
+ },
+ "wordwrap": {
+ "version": "0.0.3",
+ "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz",
+ "integrity": "sha512-1tMA907+V4QmxV7dbRvb4/8MaRALK6q9Abid3ndMYnbyo8piisCmeONVqVSXqQA3KaP4SLt5b7ud6E2sqP8TFw=="
+ }
+ }
+ },
+ "path": {
+ "version": "0.12.7",
+ "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz",
+ "integrity": "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q==",
+ "requires": {
+ "process": "^0.11.1",
+ "util": "^0.10.3"
+ },
+ "dependencies": {
+ "process": {
+ "version": "0.11.10",
+ "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz",
+ "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="
+ },
+ "util": {
+ "version": "0.10.4",
+ "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz",
+ "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==",
+ "requires": {
+ "inherits": "2.0.3"
+ },
+ "dependencies": {
+ "inherits": {
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
+ "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw=="
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ "shelljs": {
+ "version": "0.8.5",
+ "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz",
+ "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==",
+ "requires": {
+ "glob": "^7.0.0",
+ "interpret": "^1.0.0",
+ "rechoir": "^0.6.2"
+ },
+ "dependencies": {
+ "glob": {
+ "version": "7.2.3",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+ "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+ "requires": {
+ "fs.realpath": "^1.0.0",
+ "inflight": "^1.0.4",
+ "inherits": "2",
+ "minimatch": "^3.1.1",
+ "once": "^1.3.0",
+ "path-is-absolute": "^1.0.0"
+ },
+ "dependencies": {
+ "fs.realpath": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="
+ },
+ "inflight": {
+ "version": "1.0.6",
+ "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+ "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+ "requires": {
+ "once": "^1.3.0",
+ "wrappy": "1"
+ },
+ "dependencies": {
+ "wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
+ }
+ }
+ },
+ "inherits": {
+ "version": "2.0.4",
+ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
+ },
+ "minimatch": {
+ "version": "3.1.2",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+ "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+ "requires": {
+ "brace-expansion": "^1.1.7"
+ },
+ "dependencies": {
+ "brace-expansion": {
+ "version": "1.1.11",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+ "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+ "requires": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ },
+ "dependencies": {
+ "balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
+ },
+ "concat-map": {
+ "version": "0.0.1",
+ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
+ }
+ }
+ }
+ }
+ },
+ "once": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+ "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
+ "requires": {
+ "wrappy": "1"
+ },
+ "dependencies": {
+ "wrappy": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
+ }
+ }
+ },
+ "path-is-absolute": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+ "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="
+ }
+ }
+ },
+ "interpret": {
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz",
+ "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA=="
+ },
+ "rechoir": {
+ "version": "0.6.2",
+ "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz",
+ "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==",
+ "requires": {
+ "resolve": "^1.1.6"
+ },
+ "dependencies": {
+ "resolve": {
+ "version": "1.22.1",
+ "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
+ "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==",
+ "requires": {
+ "is-core-module": "^2.9.0",
+ "path-parse": "^1.0.7",
+ "supports-preserve-symlinks-flag": "^1.0.0"
+ },
+ "dependencies": {
+ "is-core-module": {
+ "version": "2.11.0",
+ "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz",
+ "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==",
+ "requires": {
+ "has": "^1.0.3"
+ },
+ "dependencies": {
+ "has": {
+ "version": "1.0.3",
+ "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
+ "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
+ "requires": {
+ "function-bind": "^1.1.1"
+ },
+ "dependencies": {
+ "function-bind": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
+ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
+ }
+ }
+ }
+ }
+ },
+ "path-parse": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="
+ },
+ "supports-preserve-symlinks-flag": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
+ "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
\ No newline at end of file
diff --git a/lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json.zst b/lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json.zst
new file mode 100644
index 000000000..25b66223c
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/npm-shrinkwrap-test.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace-testdata/python3-cryptography-37.0.4.unpack.trace.json.zst b/lib/bb/tests/trace-testdata/python3-cryptography-37.0.4.unpack.trace.json.zst
new file mode 100644
index 000000000..a9b62a930
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/python3-cryptography-37.0.4.unpack.trace.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace-testdata/snappy-1.1.9.unpack.trace.json.zst b/lib/bb/tests/trace-testdata/snappy-1.1.9.unpack.trace.json.zst
new file mode 100644
index 000000000..55731e960
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/snappy-1.1.9.unpack.trace.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace-testdata/systemd-251.8.unpack.trace.json.zst b/lib/bb/tests/trace-testdata/systemd-251.8.unpack.trace.json.zst
new file mode 100644
index 000000000..3a65a5a4e
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/systemd-251.8.unpack.trace.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace-testdata/test_npm-1.0.0.unpack.trace.json.zst b/lib/bb/tests/trace-testdata/test_npm-1.0.0.unpack.trace.json.zst
new file mode 100644
index 000000000..7cbcc5ef7
--- /dev/null
+++ b/lib/bb/tests/trace-testdata/test_npm-1.0.0.unpack.trace.json.zst
Binary files differ
diff --git a/lib/bb/tests/trace.py b/lib/bb/tests/trace.py
new file mode 100644
index 000000000..4ec2b1b9d
--- /dev/null
+++ b/lib/bb/tests/trace.py
@@ -0,0 +1,588 @@
+
+# Copyright (C) 2023 Alberto Pianon <pianon@array.eu>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import re
+import json
+import shutil
+import unittest
+import tempfile
+from pathlib import Path
+import subprocess
+
+import bb
+
+from bb.tests.trace_base import create_file
+
+def skipIfNoNetwork():
+ if os.environ.get("BB_SKIP_NETTESTS") == "yes":
+ return unittest.skip("network test")
+ return lambda f: f
+
+class SplitVarValueTest(unittest.TestCase):
+
+ def test_split_var_value_with_items_without_spaces(self):
+ items_without_spaces = [
+ "git://github.com/systemd/systemd-stable.git;protocol=https;branch=${SRCBRANCH}",
+ "${SRC_URI_MUSL}",
+ "file://0001-Adjust-for-musl-headers.patch"
+ ]
+ var_value = " ".join(items_without_spaces)
+ self.assertEqual(
+ bb.trace.unpack.split_var_value(var_value, False), items_without_spaces)
+
+ def test_split_var_value_with_items_with_spaces(self):
+ items_with_spaces = [
+ "https://github.com/shadow-maint/shadow/releases/download/v${PV}/${BP}.tar.gz",
+ "${@bb.utils.contains('PACKAGECONFIG', 'pam', '${PAM_SRC_URI}', '', d)}",
+ "file://shadow-relaxed-usernames.patch",
+ ]
+ var_value = " ".join(items_with_spaces)
+ self.assertEqual(
+ bb.trace.unpack.split_var_value(var_value, False), items_with_spaces)
+
+
+class GetUnexpSrcUriTest(unittest.TestCase):
+
+ def test_get_unexp_src_uri(self):
+ d = bb.data.init()
+ d.setVar("SRCBRANCH", "main")
+ d.setVar("SRC_URI", """
+ git://github.com/systemd/systemd-stable.git;protocol=https;branch=${SRCBRANCH}
+ file://0001-Adjust-for-musl-headers.patch
+ """)
+ src_uri = "git://github.com/systemd/systemd-stable.git;protocol=https;branch=main"
+ unexp_src_uri = "git://github.com/systemd/systemd-stable.git;protocol=https;branch=${SRCBRANCH}"
+ self.assertEqual(
+ bb.trace.unpack.get_unexp_src_uri(src_uri, d), unexp_src_uri)
+
+ def test_get_unexp_src_uri_that_expands_to_multiple_items(self):
+ d = bb.data.init()
+ d.setVar("SRC_URI_MUSL", """
+ file://0003-missing_type.h-add-comparison_fn_t.patch
+ file://0004-add-fallback-parse_printf_format-implementation.patch
+ file://0005-src-basic-missing.h-check-for-missing-strndupa.patch
+ """)
+ d.setVar("SRC_URI", """
+ git://github.com/systemd/systemd-stable.git;protocol=https;branch=main
+ ${SRC_URI_MUSL}
+ file://0001-Adjust-for-musl-headers.patch
+ """)
+ src_uris = [
+ "file://0003-missing_type.h-add-comparison_fn_t.patch",
+ "file://0004-add-fallback-parse_printf_format-implementation.patch",
+ "file://0005-src-basic-missing.h-check-for-missing-strndupa.patch",
+ ]
+ unexp_src_uri = "${SRC_URI_MUSL}"
+ for src_uri in src_uris:
+ self.assertEqual(
+ bb.trace.unpack.get_unexp_src_uri(src_uri, d), unexp_src_uri)
+
+
+class GetCleanSrcUriTest(unittest.TestCase):
+
+ def test_get_clean_src_uri_from_src_uri_with_abs_path_in_param(self):
+ src_uris = {
+ "git://git.example.com/foo/foo-plugin1.git;destsuffix=/home/user/poky/build/tmp/work/core2-64-poky-linux/foo/0.0.1/foo-0.0.1/plugins/1;name=plugin1;protocol=https" :
+ "git://git.example.com/foo/foo-plugin1.git;destsuffix=<local-path>;name=plugin1;protocol=https",
+ "git://git.example.com/foo/foo-plugin1.git;name=plugin1;protocol=https;destsuffix=/home/user/poky/build/tmp/work/core2-64-poky-linux/foo/0.0.1/foo-0.0.1/plugins/1" :
+ "git://git.example.com/foo/foo-plugin1.git;name=plugin1;protocol=https;destsuffix=<local-path>"
+ }
+ for src_uri, clean_src_uri in src_uris.items():
+ self.assertEqual(
+ bb.trace.unpack.get_clean_src_uri(src_uri), clean_src_uri)
+
+ def test_get_clean_src_uri_from_src_uri_with_abs_path_in_url_path(self):
+ src_uris = {
+ "file:///home/user/meta-foo/foo/foo_fix.patch;subdir=foo":
+ "file://<local-path>;subdir=foo",
+ "npmsw:///home/user/meta-example/npm-shrinkwrap.json":
+ "npmsw://<local-path>"
+ }
+ for src_uri, clean_src_uri in src_uris.items():
+ self.assertEqual(
+ bb.trace.unpack.get_clean_src_uri(src_uri), clean_src_uri)
+
+
+class BlameRecipeFileTest(unittest.TestCase):
+ # NOTE function bb.trace.unpack.blame_recipe_file() is not being used for now
+ # testing it anyway
+
+ class MockDataStore:
+
+ class MockVarHistory:
+ def __init__(self):
+ self.SRC_URI_varhistory = [{'variable': 'SRC_URI',
+ 'file': '/build/test/oe-core/meta/conf/bitbake.conf',
+ 'line': 721,
+ 'op': 'append',
+ 'detail': ' APACHE_MIRROR CPAN_MIRROR DEBIAN_MIRROR GENTOO_MIRROR GNOME_GIT GNOME_MIRROR GNU_MIRROR GNUPG_MIRROR GPE_MIRROR KERNELORG_MIRROR SAMBA_MIRROR SAVANNAH_GNU_MIRROR SAVANNAH_NONGNU_MIRROR SOURCEFORGE_MIRROR XLIBS_MIRROR XORG_MIRROR ',
+ 'flag': 'vardepsexclude'},
+ {'parsing': True,
+ 'variable': 'SRC_URI',
+ 'file': '/build/test/oe-core/meta/conf/bitbake.conf',
+ 'line': 735,
+ 'op': 'set',
+ 'detail': ''},
+ {'variable': 'SRC_URI',
+ 'file': '/build/test/oe-core/meta/conf/documentation.conf',
+ 'line': 393,
+ 'op': 'set',
+ 'detail': 'The list of source files - local or remote. This variable tells the OpenEmbedded build system what bits to pull in for the build and how to pull them in.',
+ 'flag': 'doc'},
+ {'parsing': True,
+ 'variable': 'SRC_URI',
+ 'file': '/build/test/oe-core/../meta-arm/meta-arm/recipes-security/optee/optee-client.inc',
+ 'line': 14,
+ 'op': 'set',
+ 'detail': ' git://github.com/OP-TEE/optee_client.git;branch=master;protocol=https file://tee-supplicant.service file://tee-supplicant.sh '},
+ {'parsing': True,
+ 'variable': 'SRC_URI',
+ 'file': '/build/test/oe-core/../meta-ledge-secure/meta-ledge-secure/recipes-security/optee/optee-client_3.16.0.bbappend',
+ 'line': 12,
+ 'op': ':append',
+ 'detail': ' file://0001-libckteec-add-support-for-ECDH-derive.patch \tfile://0002-tee-supplicant-introduce-struct-tee_supplicant_param.patch \tfile://0003-tee-supplicant-refactor-argument-parsing-in-main.patch \tfile://0004-tee-supplicant-rpmb-introduce-readn-wrapper-to-the-r.patch \tfile://0005-tee-supplicant-rpmb-read-CID-in-one-go.patch \tfile://0006-tee-supplicant-add-rpmb-cid-command-line-option.patch \tfile://create-tee-supplicant-env file://optee-udev.rules \t'}]
+
+ def variable(self, var):
+ if var == "SRC_URI":
+ return self.SRC_URI_varhistory
+
+ def __init__(self):
+ self.SRC_URI = ' git://github.com/OP-TEE/optee_client.git;branch=master;protocol=https file://tee-supplicant.service file://tee-supplicant.sh file://0001-libckteec-add-support-for-ECDH-derive.patch \tfile://0002-tee-supplicant-introduce-struct-tee_supplicant_param.patch \tfile://0003-tee-supplicant-refactor-argument-parsing-in-main.patch \tfile://0004-tee-supplicant-rpmb-introduce-readn-wrapper-to-the-r.patch \tfile://0005-tee-supplicant-rpmb-read-CID-in-one-go.patch \tfile://0006-tee-supplicant-add-rpmb-cid-command-line-option.patch \tfile://create-tee-supplicant-env file://optee-udev.rules \t'
+ self.varhistory = self.MockVarHistory()
+
+ def getVar(self, var):
+ if var == "SRC_URI":
+ return self.SRC_URI
+
+ def test_get_src_uri_recipe_file_bbappend(self):
+ d = self.MockDataStore()
+ recipe_file = bb.trace.unpack.blame_recipe_file("file://0001-libckteec-add-support-for-ECDH-derive.patch", d)
+ self.assertEqual(recipe_file, "/build/test/oe-core/../meta-ledge-secure/meta-ledge-secure/recipes-security/optee/optee-client_3.16.0.bbappend")
+
+ def test_get_src_uri_recipe_file_set_in_inc(self):
+ d = self.MockDataStore()
+ recipe_file = bb.trace.unpack.blame_recipe_file("file://tee-supplicant.sh", d)
+ self.assertEqual(recipe_file, "/build/test/oe-core/../meta-arm/meta-arm/recipes-security/optee/optee-client.inc")
+
+
+class GetDownloadLocationAndRelpathTest(unittest.TestCase):
+
+ # TODO add test with a git remote pointing to repo tool manifest
+
+ def test_get_dl_loc_for_dir_in_git_repo(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ tmpdir = Path(tmpdir)
+
+ create_file(tmpdir / "repo/README", "hello")
+ create_file(tmpdir / "repo/doc/help.txt", "help")
+ git_dir = tmpdir/"repo"
+ subprocess.check_output(["git", "init"], cwd=git_dir)
+ subprocess.check_output(["git", "add", "-A"], cwd=git_dir)
+ subprocess.check_output(["git", "commit", "-m", "'initial commit'"], cwd=git_dir)
+ head = subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=git_dir).decode().strip("\n")
+ download_location, relpath = bb.trace.unpack.get_dl_loc(tmpdir/"repo/doc")
+ self.assertEqual((download_location, relpath), (None, None)) # no origin
+
+ os.rename(tmpdir/"repo/.git", tmpdir/"repo.git")
+ subprocess.check_output(["rm", "-Rf", "repo"], cwd=tmpdir)
+ subprocess.check_output(["git", "clone", "repo.git"], cwd=tmpdir, stderr=subprocess.DEVNULL)
+ download_location, relpath = bb.trace.unpack.get_dl_loc(tmpdir/"repo/doc")
+ self.assertEqual(download_location, "git+%s@%s" % (tmpdir/"repo.git", head))
+ self.assertEqual(relpath, "doc")
+
+ download_location, relpath = bb.trace.unpack.get_dl_loc(git_dir)
+ self.assertEqual(download_location, "git+%s@%s" % (tmpdir/"repo.git", head))
+ self.assertEqual(relpath, "")
+
+ create_file(tmpdir/"repo/LICENSE", "CC-0")
+ subprocess.check_output(["git", "add", "LICENSE"], cwd=git_dir)
+ subprocess.check_output(["git", "commit", "-m", "'add license'"], cwd=git_dir)
+ download_location, relpath = bb.trace.unpack.get_dl_loc(git_dir)
+ self.assertEqual((download_location, relpath), (None, None))
+
+ def test_get_dl_loc_on_file_with_no_git_repo(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ tmpdir = Path(tmpdir)
+ create_file(tmpdir/"README", "hello")
+ download_location, relpath = bb.trace.unpack.get_dl_loc(tmpdir)
+ self.assertEqual((download_location, relpath), (None, None))
+
+
+class IsInCurrentBranchTest(unittest.TestCase):
+
+ def test_get_untracked_new_and_modified_files(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ tmpdir = Path(tmpdir)
+ create_file(tmpdir / "repo/README", "hello")
+ create_file(tmpdir / "repo/doc/help.txt", "help")
+ git_dir = tmpdir/"repo"
+ subprocess.check_output(["git", "init"], cwd=git_dir)
+ subprocess.check_output(["git", "add", "-A"], cwd=git_dir)
+ subprocess.check_output(["git", "commit", "-m", "'initial commit'"], cwd=git_dir)
+
+ # modified
+ create_file(tmpdir / "repo/README", "hello there")
+ # untracked
+ create_file(tmpdir / "repo/test/test.txt", "test")
+ # staged, uncommitted
+ create_file(tmpdir / "repo/test/test2.txt", "test2")
+ subprocess.check_output(["git", "add", "test/test2.txt"], cwd=git_dir)
+
+ untracked_new_and_modified_files = bb.trace.unpack.get_untracked_new_and_modified_files(git_dir)
+
+ self.assertFalse("doc/help.txt" in untracked_new_and_modified_files)
+ self.assertTrue("README" in untracked_new_and_modified_files)
+ self.assertTrue("test/test.txt" in untracked_new_and_modified_files)
+ self.assertTrue("test/test2.txt" in untracked_new_and_modified_files)
+
+
+class TraceUnpackIntegrationTest(unittest.TestCase):
+
+ meta_repos = [(
+ "git://git.yoctoproject.org/poky",
+ "langdale",
+ "yocto-4.1.3",
+ "2023-03-05"
+ ),(
+ "git://git.openembedded.org/meta-openembedded",
+ "langdale",
+ "b5b732876da1885ecbab2aa45f80d7a3086c5262",
+ ""
+ )]
+
+ @classmethod
+ @skipIfNoNetwork()
+ def setUpClass(cls):
+ cls.meta_tempdir = tempfile.mkdtemp(prefix="meta-")
+ for repo, branch, commit, shallow_since in cls.meta_repos:
+ cmd = "git clone"
+ if shallow_since:
+ cmd += " --shallow-since %s" % shallow_since
+ cmd += " --branch %s --single-branch %s" % (branch, repo)
+ bb.process.run(cmd, cwd=cls.meta_tempdir)
+ basename = re.sub(r"\.git$", "", os.path.basename(repo))
+ git_dir = os.path.join(cls.meta_tempdir, basename)
+ bb.process.run("git checkout %s" % commit, cwd=git_dir)
+ cls.tempdir = tempfile.mkdtemp(prefix="bitbake-trace-")
+ cls.dldir = os.path.join(cls.tempdir, "download")
+ os.mkdir(cls.dldir)
+
+ @classmethod
+ @skipIfNoNetwork()
+ def tearDownClass(cls):
+ if os.environ.get("BB_TMPDIR_NOCLEAN") == "yes":
+ print("Not cleaning up %s. Please remove manually." % cls.meta_tempdir)
+ print("Not cleaning up %s. Please remove manually." % cls.tempdir)
+ else:
+ bb.process.run('chmod u+rw -R %s' % cls.meta_tempdir)
+ bb.utils.prunedir(cls.meta_tempdir)
+ bb.process.run('chmod u+rw -R %s' % cls.tempdir)
+ bb.utils.prunedir(cls.tempdir)
+
+ def run_do_unpack(self, var, var_flags, is_go=False):
+ self.d = bb.data.init()
+ self.d.setVar("DL_DIR", self.dldir)
+ for var_name, value in var.items():
+ self.d.setVar(var_name, value)
+ for var_name, flags in var_flags.items():
+ for flag_name, flag_value in flags.items():
+ self.d.setVarFlag(var_name, flag_name, flag_value)
+ bb.utils.mkdirhier(self.d.getVar("S"))
+ bb.utils.mkdirhier(self.d.getVar("WORKDIR") + "/temp")
+ fetcher = bb.fetch2.Fetch(None, self.d)
+ fetcher.download()
+ if is_go: # simulate go_do_unpack
+ for url in fetcher.urls:
+ if fetcher.ud[url].type == 'git':
+ if fetcher.ud[url].parm.get('destsuffix') is None:
+ s_dirname = os.path.basename(self.d.getVar('S'))
+ fetcher.ud[url].parm['destsuffix'] = os.path.join(
+ s_dirname, 'src', self.d.getVar('GO_IMPORT')) + '/'
+ fetcher.unpack(self.d.getVar("WORKDIR"))
+
+ def get_trace_data_and_expected_trace_data(self):
+ json_file = "%s-%s.unpack.trace.json.zst" % (self.d.getVar("PN"), self.d.getVar("PV"))
+ path = os.path.join(self.d.getVar("WORKDIR"), "temp", json_file)
+ with bb.compress.zstd.open(path, "rt", encoding="utf-8", num_threads=1) as f:
+ td = json.load(f)
+ this_dir = os.path.dirname(os.path.abspath(__file__))
+ testdata_path = os.path.join(this_dir, "trace-testdata", json_file)
+ with bb.compress.zstd.open(testdata_path, "rt", encoding="utf-8", num_threads=1) as f:
+ expected_td = json.load(f)
+ return td, expected_td
+
+ @skipIfNoNetwork()
+ def test_bzip2_case(self):
+ """ 1) check if https, git and file src uris are correctly traced
+ 2) local files configure.ac and Makefile.am from poky/meta layer are
+ added to bzip2 source dir (${WORKDIR}/bzip2-1.0.8/) through
+ file:// src uris with subdir param: check if their real upstream
+ source is correctly identified
+ 3) SRC_URI contains variables to be expanded: check if the
+ unexpanded src uris are correctly identified
+ """
+ var = {
+ "PN": "bzip2",
+ "BPN": "bzip2",
+ "PV": "1.0.8",
+ "BP": "${BPN}-${PV}",
+ "SRC_URI": """https://sourceware.org/pub/${BPN}/${BPN}-${PV}.tar.gz
+ git://sourceware.org/git/bzip2-tests.git;name=bzip2-tests;branch=master
+ file://configure.ac;subdir=${BP}
+ file://Makefile.am;subdir=${BP}
+ file://run-ptest
+ """,
+ "SRCREV_bzip2-tests": "f9061c030a25de5b6829e1abf373057309c734c0",
+ "FILE": self.meta_tempdir+"/poky/meta/recipes-extended/bzip2/bzip2_1.0.8.bb",
+ "FILE_DIRNAME": "${@os.path.dirname(d.getVar('FILE', False))}",
+ "FILESPATH": '${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files',
+ "WORKDIR": self.tempdir+"/work/core2-64-poky-linux/${PN}/${PV}-r0",
+ "S": "${WORKDIR}/${BP}",
+ "BBLAYERS": self.meta_tempdir+"/poky/meta",
+ }
+ var_flags = {
+ "SRC_URI": {
+ "md5sum": "67e051268d0c475ea773822f7500d0e5",
+ "sha256sum": "ab5a03176ee106d3f0fa90e381da478ddae405918153cca248e682cd0c4a2269"
+ }
+ }
+ self.run_do_unpack(var, var_flags)
+ td, expected_td = self.get_trace_data_and_expected_trace_data()
+ self.assertEqual(td, expected_td)
+
+ @skipIfNoNetwork()
+ def test_gettext_minimal_native_case(self):
+ """ check if file src uri pointing to a directory (aclocal/) is
+ correctly handled"""
+ var = {
+ "PN": "gettext-minimal-native",
+ "PV": "0.21",
+ "BPN": "gettext-minimal",
+ "BP": "${BPN}-${PV}",
+ "SRC_URI": """file://aclocal/
+ file://config.rpath
+ file://Makefile.in.in
+ file://remove-potcdate.sin
+ file://COPYING
+ """,
+ "FILE": self.meta_tempdir+"/poky/meta/recipes-core/gettext/gettext-minimal-native_0.21.1.bb",
+ "FILE_DIRNAME": "${@os.path.dirname(d.getVar('FILE', False))}",
+ "FILESPATH": '${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files',
+ "WORKDIR": self.tempdir+"/work/x86_64-linux/${PN}/${PV}-r0",
+ "S": "${WORKDIR}",
+ "BBLAYERS": self.meta_tempdir+"/poky/meta",
+ }
+ var_flags = {}
+ self.run_do_unpack(var, var_flags)
+ td, expected_td = self.get_trace_data_and_expected_trace_data()
+ self.assertEqual(td, expected_td)
+
+ @skipIfNoNetwork()
+ def test_python_cryptography_case(self):
+ """ 1) check if crate:// src_uris are handled correctly (download
+ location should be the corresponding https download url)
+ 2) check if package checksum data is handled correctly (we have
+ multiple SRC_URI entries supporting checksums here, but the
+ checksum var flag set in the recipe refers only to the first
+ found entry)
+ """
+ var = {
+ "PN": "python3-cryptography",
+ "PV": "37.0.4",
+ "BPN": "python3-cryptography",
+ "BP": "${BPN}-${PV}",
+ "PYPI_SRC_URI": "https://files.pythonhosted.org/packages/source/c/cryptography/cryptography-37.0.4.tar.gz",
+ "SRC_URI": """
+ ${PYPI_SRC_URI}
+ file://run-ptest
+ file://check-memfree.py
+ file://0001-Cargo.toml-specify-pem-version.patch
+ file://0002-Cargo.toml-edition-2018-2021.patch
+ file://0001-pyproject.toml-remove-benchmark-disable-option.patch
+ crate://crates.io/Inflector/0.11.4
+ crate://crates.io/aliasable/0.1.3
+ crate://crates.io/asn1/0.8.7
+ crate://crates.io/asn1_derive/0.8.7
+ crate://crates.io/autocfg/1.1.0
+ crate://crates.io/base64/0.13.0
+ crate://crates.io/bitflags/1.3.2
+ crate://crates.io/cfg-if/1.0.0
+ crate://crates.io/chrono/0.4.19
+ crate://crates.io/indoc-impl/0.3.6
+ crate://crates.io/indoc/0.3.6
+ crate://crates.io/instant/0.1.12
+ crate://crates.io/lazy_static/1.4.0
+ crate://crates.io/libc/0.2.124
+ crate://crates.io/lock_api/0.4.7
+ crate://crates.io/num-integer/0.1.44
+ crate://crates.io/num-traits/0.2.14
+ crate://crates.io/once_cell/1.10.0
+ crate://crates.io/ouroboros/0.15.0
+ crate://crates.io/ouroboros_macro/0.15.0
+ crate://crates.io/parking_lot/0.11.2
+ crate://crates.io/parking_lot_core/0.8.5
+ crate://crates.io/paste-impl/0.1.18
+ crate://crates.io/paste/0.1.18
+ crate://crates.io/pem/1.0.2
+ crate://crates.io/proc-macro-error-attr/1.0.4
+ crate://crates.io/proc-macro-error/1.0.4
+ crate://crates.io/proc-macro-hack/0.5.19
+ crate://crates.io/proc-macro2/1.0.37
+ crate://crates.io/pyo3-build-config/0.15.2
+ crate://crates.io/pyo3-macros-backend/0.15.2
+ crate://crates.io/pyo3-macros/0.15.2
+ crate://crates.io/pyo3/0.15.2
+ crate://crates.io/quote/1.0.18
+ crate://crates.io/redox_syscall/0.2.13
+ crate://crates.io/scopeguard/1.1.0
+ crate://crates.io/smallvec/1.8.0
+ crate://crates.io/stable_deref_trait/1.2.0
+ crate://crates.io/syn/1.0.91
+ crate://crates.io/unicode-xid/0.2.2
+ crate://crates.io/unindent/0.1.8
+ crate://crates.io/version_check/0.9.4
+ crate://crates.io/winapi-i686-pc-windows-gnu/0.4.0
+ crate://crates.io/winapi-x86_64-pc-windows-gnu/0.4.0
+ crate://crates.io/winapi/0.3.9
+ """,
+ "FILE": self.meta_tempdir+"/poky/meta/recipes-devtools/python/python3-cryptography_37.0.4.bb",
+ "FILE_DIRNAME": "${@os.path.dirname(d.getVar('FILE', False))}",
+ "FILESPATH": '${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files',
+ "WORKDIR": self.tempdir+"/work/core2-64-poky-linux/${PN}/${PV}-r0",
+ "S": "${WORKDIR}/${BP}",
+ "BBLAYERS": self.meta_tempdir+"/poky/meta",
+ }
+ var_flags = {
+ "SRC_URI": {
+ "sha256sum": "63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82",
+ }
+ }
+ self.run_do_unpack(var, var_flags)
+ td, expected_td = self.get_trace_data_and_expected_trace_data()
+ self.assertEqual(td, expected_td)
+
+ @skipIfNoNetwork()
+ def test_snappy_case(self):
+ """check if gitsm src uri is handled correctly"""
+ var = {
+ "PN": "snappy",
+ "PV": "1.1.9",
+ "BPN": "snappy",
+ "BP": "${BPN}-${PV}",
+ "SRC_URI": """
+ gitsm://github.com/google/snappy.git;protocol=https;branch=main
+ file://0001-Add-inline-with-SNAPPY_ATTRIBUTE_ALWAYS_INLINE.patch
+ """,
+ "SRCREV": "2b63814b15a2aaae54b7943f0cd935892fae628f",
+ "FILE": self.meta_tempdir+"/meta-openembedded/meta-oe/recipes-extended/snappy/snappy_1.1.9.bb",
+ "FILE_DIRNAME": "${@os.path.dirname(d.getVar('FILE', False))}",
+ "FILESPATH": '${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files',
+ "WORKDIR": self.tempdir+"/work/core2-64-poky-linux/${PN}/${PV}-r0",
+ "S": "${WORKDIR}/git",
+ "BBLAYERS": self.meta_tempdir+"/meta-openembedded/meta-oe",
+ }
+ var_flags = {}
+ self.run_do_unpack(var, var_flags)
+ td, expected_td = self.get_trace_data_and_expected_trace_data()
+ self.assertEqual(td, expected_td)
+
+ @skipIfNoNetwork()
+ def test_gosu_case(self):
+ """ 1) test if src uris pointing to go code are handled correctly
+ (mocking go_do_unpack)
+ 2) test if SRC_URI entries with local absolute path destsuffix param
+ are handled correctly
+ 3) test if symlinks in sources are handled correctly
+ """
+ var = {
+ "PN": "gosu",
+ "PV": "1.14",
+ "BPN": "gosu",
+ "BP": "${BPN}-${PV}",
+ "FILE": self.meta_tempdir+"/meta-openembedded/meta-oe/recipes-support/gosu/gosu_1.14.bb",
+ "WORKDIR": self.tempdir+"/work/core2-64-poky-linux/${PN}/${PV}-r0",
+ "S": "${WORKDIR}/${BP}",
+ "GO_IMPORT": "github.com/tianon/gosu",
+ "SRC_URI": """
+ git://${GO_IMPORT}.git;branch=master;protocol=https
+ git://github.com/opencontainers/runc;name=runc;destsuffix=${S}/src/github.com/opencontainers/runc;branch=main;protocol=https
+ """,
+ "SRCREV": "9f7cd138a1ebc0684d43ef6046bf723978e8741f",
+ "SRCREV_runc": "d7f7b22a85a2387557bdcda125710c2506f8d5c5",
+ "BBLAYERS": self.meta_tempdir+"/meta-openembedded/meta-oe",
+ }
+ var_flags = {}
+ self.run_do_unpack(var, var_flags, is_go=True)
+ td, expected_td = self.get_trace_data_and_expected_trace_data()
+ self.assertEqual(td, expected_td)
+
+ @skipIfNoNetwork()
+ def test_systemd_case(self):
+ """check if SRC_URI containing expressions are handled correctly"""
+ var = {
+ "PN": "systemd",
+ "PV": "251.8",
+ "BPN": "systemd",
+ "BP": "${BPN}-${PV}",
+ "SRCBRANCH": "v251-stable",
+ "SRCREV": "ae8b249af4acb055f920134f2ac584c4cbc86e3b",
+ "SRC_URI": """
+ git://github.com/systemd/systemd-stable.git;protocol=https;branch=${SRCBRANCH}
+ file://touchscreen.rules
+ file://00-create-volatile.conf
+ ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'file://org.freedesktop.hostname1_no_polkit.conf', '', d)}
+ ${@bb.utils.contains('PACKAGECONFIG', 'polkit_hostnamed_fallback', 'file://00-hostnamed-network-user.conf', '', d)}
+ file://init
+ file://99-default.preset
+ file://systemd-pager.sh
+ file://0001-binfmt-Don-t-install-dependency-links-at-install-tim.patch
+ file://0003-implment-systemd-sysv-install-for-OE.patch
+ file://0001-Move-sysusers.d-sysctl.d-binfmt.d-modules-load.d-to-.patch
+ """,
+ "FILE": self.meta_tempdir+"/poky/meta/recipes-core/systemd/systemd_251.8.bb",
+ "FILE_DIRNAME": "${@os.path.dirname(d.getVar('FILE', False))}",
+ "FILESPATH": '${FILE_DIRNAME}/${BP}:${FILE_DIRNAME}/${BPN}:${FILE_DIRNAME}/files',
+ "WORKDIR": self.tempdir+"/work/core2-64-poky-linux/${PN}/${PV}-r0",
+ "S": "${WORKDIR}/git",
+ "BBLAYERS": self.meta_tempdir+"/poky/meta",
+ }
+ var_flags = {}
+ self.run_do_unpack(var, var_flags)
+ td, expected_td = self.get_trace_data_and_expected_trace_data()
+ self.assertEqual(td, expected_td)
+
+ @skipIfNoNetwork()
+ def test_npmsw(self):
+ """ 1) test tracing with npmsw fetcher using a small made-up
+ npm-shrinkwrap.json file; check if nested non-dedup dependencies
+ are handled correctly (some upstream files are replicated in
+ multiple paths in workdir)
+ 2) test if files added to existing layer local repos are handled
+ correctly (finding local provenance and not upstream provenance)
+ """
+ this_dir = os.path.dirname(os.path.abspath(__file__))
+ npmsw_file = this_dir+"/trace-testdata/npm-shrinkwrap-test.json"
+ shutil.copy2(npmsw_file, self.meta_tempdir+"/poky/meta")
+ var = {
+ "PN": "test_npm",
+ "PV": "1.0.0",
+ "BPN": "test_npm",
+ "BP": "${BPN}-${PV}",
+ "NPMSW_PATH": self.meta_tempdir+"/poky/meta",
+ "SRC_URI": "npmsw://${NPMSW_PATH}/npm-shrinkwrap-test.json",
+ "WORKDIR": self.tempdir+"/work/all-poky-linux/${PN}/${PV}-r0",
+ "S": "${WORKDIR}/${BP}",
+ "BBLAYERS": self.meta_tempdir+"/poky/meta",
+ }
+ var_flags = {}
+ self.run_do_unpack(var, var_flags)
+ td, expected_td = self.get_trace_data_and_expected_trace_data()
+ self.assertEqual(td, expected_td)
+
+
+if __name__ == '__main__':
+ unittest.main()
\ No newline at end of file
diff --git a/lib/bb/trace/unpack.py b/lib/bb/trace/unpack.py
new file mode 100644
index 000000000..2819eda1e
--- /dev/null
+++ b/lib/bb/trace/unpack.py
@@ -0,0 +1,613 @@
+"""
+Module implementing upstream source tracing process for do_unpack.
+
+For the general process design, see .unpack_base module help texts.
+
+The final output is a compressed json file, stored in WORKDIR/temp for
+each recipe, with the following scheme:
+
+{
+ "<download location>": {
+ "download_location": "<download location>",
+ "src_uri": "<src_uri>",
+ "unexpanded_src_uri": "<unexpanded src uri>",
+ "checksums": {
+ "md5": "<package md5 checksum>",
+ "sha256": "<package sha256 checksum>"
+ },
+ "files": {
+ "<file/relpath/in/upstream>": {
+ "sha1": "<file sha1 checksum>",
+ "paths_in_workdir": [
+ "<file/relpath//in/workdir>",
+ "<other/file/relpath/in/workdir>"
+ ]
+ }
+ }
+ }
+}
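+
+A purely illustrative example entry (recipe name, url, checksum placeholders and
+paths below are made up, not taken from real trace data) could look like:
+
+{
+ "https://www.example.org/pub/foo/foo-1.0.tar.gz": {
+ "download_location": "https://www.example.org/pub/foo/foo-1.0.tar.gz",
+ "src_uri": "https://www.example.org/pub/foo/foo-1.0.tar.gz",
+ "unexpanded_src_uri": "https://www.example.org/pub/${BPN}/${BP}.tar.gz",
+ "checksums": {
+ "sha256": "<package sha256 checksum>"
+ },
+ "files": {
+ "src/main.c": {
+ "sha1": "<file sha1 checksum>",
+ "paths_in_workdir": [
+ "foo-1.0/src/main.c"
+ ]
+ }
+ }
+ }
+}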
+
+NOTE: "download location" is used as the main key/index and follows SPDX specs, e.g.:
+https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz
+git+git://sourceware.org/git/bzip2-tests.git@f9061c030a25de5b6829e1abf373057309c734c0
+
+Special cases:
+
+- npmsw and gitsm fetchers generate and unpack multiple uris (one for each
+ (sub)module) from a single SRC_URI entry; each of these uris is represented by
+ a separate download location in the json file, while they will all share the
+ same SRC_URI entry
+
+- the npmsw fetcher also collects npm deptree data; such data are collectively
+ stored within the download location entry for the npm-shrinkwrap.json file
+ (which corresponds to the npmsw:// SRC_URI entry)
+
+- gitsm submodule dependencies are instead individually stored in the download
+ location entry for each submodule (
+ "submodule_of": "<main git repo's download location>")
+
+- file:// SRC_URI entries are each mapped to a single download location,
+ and the file's path in upstream sources is put directly in the download
+ location, in this way:
+ git+git://git.yoctoproject.org/poky@91d0157d6daf4ea61d6b4e090c0b682d3f3ca60f#meta/recipes-extended/bzip2/bzip2/Makefile.am
+ In such a case, the "<file/relpath/in/upstream>" key will be an empty string "".
+ This does not hold for file:// SRC_URI entries pointing to a directory or to an
+ archive; in such cases, "<file/relpath/in/upstream>" will be relative to the
+ directory or to the archive
+
+- if no download location is found for a file:// SRC_URI entry, a warning is
+ logged and an "invalid" (but unique) local download location is used, mapping it
+ at least to an existing local bblayer, if any
+
+- local absolute paths found in SRC_URI entries are replaced by a placeholder
+ ("<local-path>"), to allow reproducibility of json results, while the
+ corresponding unexpanded SRC_URI entry is also stored to allow tracing it
+ back to the corresponding recipe
+
+For more details and handled corner cases, see help texts in
+bb.tests.trace.TraceUnpackIntegrationTest and real-world data examples in
+lib/bb/tests/trace-testdata.
+"""
+
+# Copyright (C) 2023 Alberto Pianon <pianon@array.eu>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+import os
+import re
+import logging
+
+import bb.fetch2
+import bb.utils
+import bb.process
+
+from ..trace import TraceException
+from .unpack_base import TraceUnpackBase
+
+logger = logging.getLogger("BitBake.Fetcher")
+
+# function copied from https://git.openembedded.org/openembedded-core/plain/meta/lib/oe/recipeutils.py?id=ad3736d9ca14cac14a7da22c1cfdeda219665e6f
+# Copyright (C) 2013-2017 Intel Corporation
+def split_var_value(value, assignment=True):
+ """
+ Split a space-separated variable's value into a list of items,
+ taking into account that some of the items might be made up of
+ expressions containing spaces that should not be split.
+ Parameters:
+ value:
+ The string value to split
+ assignment:
+ True to assume that the value represents an assignment
+ statement, False otherwise. If True, and an assignment
+ statement is passed in the first item in
+ the returned list will be the part of the assignment
+ statement up to and including the opening quote character,
+ and the last item will be the closing quote.
+ """
+ inexpr = 0
+ lastchar = None
+ out = []
+ buf = ''
+ for char in value:
+ if char == '{':
+ if lastchar == '$':
+ inexpr += 1
+ elif char == '}':
+ inexpr -= 1
+ elif assignment and char in '"\'' and inexpr == 0:
+ if buf:
+ out.append(buf)
+ out.append(char)
+ char = ''
+ buf = ''
+ elif char.isspace() and inexpr == 0:
+ char = ''
+ if buf:
+ out.append(buf)
+ buf = ''
+ buf += char
+ lastchar = char
+ if buf:
+ out.append(buf)
+
+ # Join together assignment statement and opening quote
+ outlist = out
+ if assignment:
+ assigfound = False
+ for idx, item in enumerate(out):
+ if '=' in item:
+ assigfound = True
+ if assigfound:
+ if '"' in item or "'" in item:
+ outlist = [' '.join(out[:idx+1])]
+ outlist.extend(out[idx+1:])
+ break
+ return outlist
+
+def get_unexp_src_uri(src_uri, d):
+ """get unexpanded src_uri"""
+ src_uris = d.getVar("SRC_URI").split() if d.getVar("SRC_URI") else []
+ if src_uri not in src_uris:
+ raise TraceException("%s does not exist in d.getVar('SRC_URI')" % src_uri)
+ unexp_src_uris = split_var_value(
+ d.getVar("SRC_URI", expand=False), assignment=False)
+ for unexp_src_uri in unexp_src_uris:
+ if src_uri in d.expand(unexp_src_uri).split():
+ # some unexpanded src_uri with expressions may expand to multiple
+ # lines/src_uris
+ return unexp_src_uri
+ return src_uri
+
+find_abs_path_regex = [
+ r"(?<=://)/[^;]+$", # url path (as in file:/// or npmsw:///)
+ r"(?<=://)/[^;]+(?=;)", # url path followed by param
+ r"(?<==)/[^;]+$", # path in param
+ r"(?<==)/[^;]+(?=;)", # path in param followed by another param
+]
+find_abs_path_regex = [ re.compile(r) for r in find_abs_path_regex ]
+
+def get_clean_src_uri(src_uri):
+ """clean expanded src_uri from possible local absolute paths"""
+ for r in find_abs_path_regex:
+ src_uri = r.sub("<local-path>", src_uri)
+ return src_uri
+
+def blame_recipe_file(unexp_src_uri, d):
+ """return the .bb|.inc|.bbappend file(s) which set or appended the given
+ unexpanded src_uri element. Var history tracking must be enabled for this
+ to work."""
+ # NOTE this function is not being used for now
+ haystack = []
+ for el in d.varhistory.variable("SRC_URI"):
+ if not el.get("parsing"):
+ continue
+ if el["op"] == "set":
+ haystack = [ el ]
+ elif "append" in el["op"] or "prepend" in el["op"]:
+ haystack.append(el)
+ recipe_file = [
+ el["file"] for el in haystack if unexp_src_uri in el["detail"].split()
+ ]
+ return recipe_file[-1] if recipe_file else None
+
+def get_dl_loc(local_dir):
+ """get git upstream download location and relpath in git repo for local_dir"""
+ # copied and adapted from https://git.yoctoproject.org/poky-contrib/commit/?h=jpew/spdx-downloads&id=68c80f53e8c4f5fd2548773b450716a8027d1822
+ # download location cache is implemented in TraceUnpack class
+
+ local_dir = os.path.realpath(local_dir)
+ try:
+ stdout, _ = bb.process.run(
+ ["git", "branch", "-qr", "--format=%(refname)", "--contains", "HEAD"],
+ cwd=local_dir
+ )
+ branches = stdout.splitlines()
+ branches.sort()
+ for b in branches:
+ if b.startswith("refs/remotes") and not b.startswith("refs/remotes/m/"):
+ # refs/remotes/m/ -> repo manifest remote, it's not a real
+ # remote (see https://stackoverflow.com/a/63483426)
+ remote = b.split("/")[2]
+ break
+ else:
+ return None, None
+
+ stdout, _ = bb.process.run(
+ ["git", "remote", "get-url", remote], cwd=local_dir
+ )
+ dl_loc = "git+" + stdout.strip()
+
+ stdout, _ = bb.process.run(["git", "rev-parse", "HEAD"], cwd=local_dir)
+ dl_loc = dl_loc + "@" + stdout.strip()
+
+ stdout, _ = bb.process.run(
+ ["git", "rev-parse", "--show-prefix"], cwd=local_dir)
+ relpath = os.path.join(stdout.strip().rstrip("/"))
+
+ return dl_loc, relpath
+
+ except bb.process.ExecutionError:
+ return None, None
+
+def get_untracked_new_and_modified_files(git_dir):
+ """get list of untracked or uncommitted new or modified files in git_dir"""
+ try:
+ bb.process.run(
+ ["git", "rev-parse", "--is-inside-work-tree"], cwd=git_dir)
+ except bb.process.ExecutionError:
+ raise TraceException("%s is not a git repo" % git_dir)
+ stdout, _ = bb.process.run(["git", "status", "--porcelain"], cwd=git_dir)
+ return [ line[3:] for line in stdout.rstrip().split("\n") ]
+
+def get_path_in_upstream(f, u, ud, destdir):
+ """get relative path in upstream package, relative to download location"""
+ relpath = os.path.relpath(f, destdir)
+ if ud.type == "file":
+ is_unpacked_archive = getattr(ud, "is_unpacked_archive", False)
+ if os.path.isdir(ud.localpath) or is_unpacked_archive:
+ return os.path.relpath(relpath, ud.path)
+ else:
+ # it's a file, its path is already in download location, like
+ # in git+https://git.example.com/foo#example/foo.c so there is
+ # no relative path to download location
+ return ""
+ elif ud.type == "npmsw" and ud.url == u:
+ # npm shrinkwrap file
+ return ""
+ else:
+ return relpath
+
+class TraceUnpack(TraceUnpackBase):
+ """implement a process for upstream source tracing in do_unpack
+
+ Subclass of TraceUnpackBase, implementing _collect_data() and
+ _process_data() methods
+
+ See bb.trace.unpack_base module help for more details on the process.
+
+ See bb.tests.trace.TraceUnpackIntegrationTest and data examples in
+ lib/bb/tests/trace-testdata for details on the output json data format.
+
+ Method call order:
+ - __init__()
+ - commit()
+ - move2root()
+ - write_data()
+ - close()
+ """
+
+ def __init__(self, root, d):
+ """create temporary directory in root, and initialize cache"""
+ super(TraceUnpack, self).__init__(root, d)
+
+ self.local_path_cache = {}
+ self.src_uri_cache = {}
+ self.shrinkwrap_cache = {}
+ self.upstr_data_cache = {}
+ self.package_checksums_cache = {}
+ self.git_dir_cache = {}
+ if not self.is_fetcher_test:
+ self.layers = {
+ os.path.basename(l): os.path.realpath(l)
+ for l in d.getVar('BBLAYERS').split()
+ }
+ else:
+ self.layers = {}
+
+ def _collect_data(self, u, ud, files, links, destdir, gitsm_revision):
+ """collect data for the "committed" src uri entry (u)
+
+ data are saved using path_in_workdir as index; for each path_in_workdir,
+ sha1 checksum and upstream data are collected (from cache, if available,
+ because self._get_upstr_data_wrapper() uses a cache)
+
+ sha1 and upstream data are appended to a list for each path_in_workdir,
+ because it may happen that a file unpacked from a src uri gets
+ overwritten by a subsequent src uri, from which a file with the same
+ name/path is unpacked; the overwrite would be captured in the list.
+
+ At the end, all data will be processed and grouped by download location
+ by self._process_data(), that will keep only the last item of
+ sha1+upstream data list for each path_in_workdir
+ """
+ upstr_data = self._get_upstr_data_wrapper(u, ud, destdir, gitsm_revision)
+ for f in files:
+ sha1 = bb.utils.sha1_file(f)
+ path_in_workdir = os.path.relpath(f, self.tmpdir)
+ path_in_upstream = get_path_in_upstream(f, u, ud, destdir)
+ data = self.td.setdefault(path_in_workdir, [])
+ data.append({
+ "sha1": sha1,
+ "path_in_upstream": path_in_upstream,
+ "upstream": upstr_data,
+ })
+ for l in links:
+ link_target = os.readlink(l)
+ path_in_workdir = os.path.relpath(l, self.tmpdir)
+ path_in_upstream = get_path_in_upstream(l, u, ud, destdir)
+ data = self.td.setdefault(path_in_workdir, [])
+ data.append({
+ "symlink_to": link_target,
+ "path_in_upstream": path_in_upstream,
+ "upstream": upstr_data,
+ })
+
+ def _process_data(self):
+ """group data by download location"""
+ # it reduces json file size and allows faster processing by create-spdx
+ pd = self.upstr_data_cache
+ for workdir_path, data in self.td.items():
+ data = data[-1] # pick last overwrite of the file, if any
+ dl_loc = data["upstream"]["download_location"]
+ files = pd[dl_loc].setdefault("files", {})
+ path = data["path_in_upstream"]
+ if path in files:
+ files[path]["paths_in_workdir"].append(workdir_path)
+ # the same source file may be found in different locations in
+ # workdir, e.g. with the npmsw fetcher, where the same npm module
+ # may be unpacked multiple times in different paths
+ else:
+ path_data = files[path] = {}
+ if data.get("sha1"):
+ path_data.update({ "sha1": data["sha1"] })
+ elif data.get("symlink_to"):
+ path_data.update({ "symlink_to": data["symlink_to"] })
+ path_data.update({ "paths_in_workdir": [workdir_path] } )
+ self.td = pd
+
+ def close(self):
+ super(TraceUnpack, self).close()
+ del self.local_path_cache
+ del self.src_uri_cache
+ del self.shrinkwrap_cache
+ del self.upstr_data_cache
+ del self.package_checksums_cache
+ del self.layers
+
+ def _get_layer(self, local_path):
+ """get bb layer for local_path (must be a realpath)"""
+ for layer, layer_path in self.layers.items():
+ if local_path.startswith(layer_path):
+ return layer
+ return None
+
+ def _is_in_current_branch(self, file_relpath, git_dir):
+ """wrapper for get_untracked_new_and_modified_files(), using cache
+ for already processed git dirs"""
+ if git_dir not in self.git_dir_cache:
+ self.git_dir_cache[git_dir] = get_untracked_new_and_modified_files(git_dir)
+ untracked_new_and_modified_files = self.git_dir_cache[git_dir]
+ return file_relpath not in untracked_new_and_modified_files
+
+ def _get_dl_loc_and_layer(self, local_path):
+ """get download location, upstream relative path and layer for local_path
+
+ Wrapper for get_dl_loc() and TraceUnpack._get_layer(), using cache for
+ already processed local paths, and also handling local file paths,
+ not only dirs.
+ """
+ local_path = os.path.realpath(local_path)
+ if local_path not in self.local_path_cache:
+ if os.path.isdir(local_path):
+ dl_loc, relpath = get_dl_loc(local_path)
+ layer = self._get_layer(local_path)
+ self.local_path_cache[local_path] = (dl_loc, relpath, layer)
+ else:
+ local_dir, basename = os.path.split(local_path)
+ dl_loc, dir_relpath, layer = self._get_dl_loc_and_layer(local_dir)
+ file_relpath = os.path.join(dir_relpath, basename) if dir_relpath else None
+ if file_relpath:
+ if local_path.endswith(file_relpath):
+ git_dir = local_path[:-(len(file_relpath))].rstrip("/")
+ else:
+ raise TraceException(
+ "relative path %s is not in %s" %
+ (file_relpath, local_path)
+ )
+ if not self._is_in_current_branch(file_relpath, git_dir):
+ dl_loc = file_relpath = None # is untracked|new|modified
+ self.local_path_cache[local_path] = (dl_loc, file_relpath, layer)
+ return self.local_path_cache[local_path]
+
+ def _get_unexp_and_clean_src_uri(self, src_uri):
+ """get unexpanded and clean (i.e. w/o local paths) expanded src uri
+
+ Wrapper for get_unexp_src_uri() and get_clean_src_uri(), using cache for
+ already processed src uris
+ """
+ if src_uri not in self.src_uri_cache:
+ try:
+ unexp_src_uri = get_unexp_src_uri(src_uri, self.d)
+ except TraceException:
+ unexp_src_uri = src_uri
+ clean_src_uri = get_clean_src_uri(src_uri)
+ self.src_uri_cache[src_uri] = (unexp_src_uri, clean_src_uri)
+ return self.src_uri_cache[src_uri]
+
+ def _get_package_checksums(self, ud):
+ """get package checksums for ud.url
+
+ Checksums, if available, apparently refer only to the first SRC_URI
+ entry that supports checksums, so if there are multiple SRC_URI entries
+ supporting checksums, we pick the checksums only for the first one.
+ For this reason the self.package_checksums_cache dict needs to contain
+ only one element
+ """
+ if not self.package_checksums_cache:
+ checksums = {}
+ if ud.method.supports_checksum(ud):
+ for checksum_id in bb.fetch2.CHECKSUM_LIST:
+ expected_checksum = getattr(ud, "%s_expected" % checksum_id)
+ if expected_checksum is None:
+ continue
+ checksums.update({checksum_id: expected_checksum})
+ self.package_checksums_cache[ud.url] = checksums
+ return self.package_checksums_cache.get(ud.url, {})
+
+ def _get_upstr_data(self, src_uri, ud=None, local_path=None, gitsm_revision=None):
+ """get upstream data for src_uri
+
+ ud is required for non-file src_uris, while local_path is required for
+ file src_uris; gitsm_revision is required for git submodule src_uris
+ """
+ if local_path:
+ # file src_uri
+ dl_loc, relpath, layer = self._get_dl_loc_and_layer(local_path)
+ if dl_loc:
+ dl_loc += "#" + relpath
+ else:
+ # we didn't find any download location so we set a fake (but
+ # unique) one because we need to use it as key in the final json
+ # output
+ if layer:
+ relpath_in_layer = os.path.relpath(
+ os.path.realpath(local_path), self.layers[layer])
+ dl_loc = "file://<local-path>/" + layer + "/" + relpath_in_layer
+ else:
+ dl_loc = "file://" + local_path
+ relpath = ""
+ logger.warning(
+ "Can't find upstream source for %s, using %s as download location" %
+ (local_path, dl_loc)
+ )
+ get_checksums = False
+ else:
+ # copied and adapted from https://git.yoctoproject.org/poky/plain/meta/classes/create-spdx-2.2.bbclass
+ if ud and src_uri == ud.url:
+ this_ud = ud
+ else:
+ this_ud = bb.fetch2.FetchData(src_uri, self.d)
+ if this_ud.type == "crate":
+ # crate fetcher converts crate:// urls to https://
+ this_ud = bb.fetch2.FetchData(this_ud.url, self.d)
+ dl_loc = this_ud.type
+ if dl_loc == "gitsm":
+ dl_loc = "git"
+ proto = getattr(this_ud, "proto", None)
+ if proto is not None:
+ dl_loc = dl_loc + "+" + proto
+ dl_loc = dl_loc + "://" + this_ud.host + this_ud.path
+ if gitsm_revision:
+ dl_loc = dl_loc + "@" + gitsm_revision
+ elif this_ud.method.supports_srcrev():
+ dl_loc = dl_loc + "@" + this_ud.revisions[this_ud.names[0]]
+ layer = None
+ get_checksums = True
+ if dl_loc not in self.upstr_data_cache:
+ self.upstr_data_cache[dl_loc] = {
+ "download_location": dl_loc,
+ }
+ uri = src_uri if not gitsm_revision else ud.url
+ unexp_src_uri, clean_src_uri = self._get_unexp_and_clean_src_uri(uri)
+ self.upstr_data_cache[dl_loc].update({
+ "src_uri": clean_src_uri
+ })
+ if unexp_src_uri != clean_src_uri:
+ self.upstr_data_cache[dl_loc].update({
+ "unexpanded_src_uri": unexp_src_uri
+ })
+ if get_checksums:
+ checksums = self._get_package_checksums(this_ud)
+ if checksums:
+ self.upstr_data_cache[dl_loc].update({
+ "checksums": checksums
+ })
+ if layer:
+ self.upstr_data_cache[dl_loc].update({
+ "layer": layer
+ })
+ return self.upstr_data_cache[dl_loc]
+
+ def _get_npm_shrinkwrap_deptree(self, ud):
+ """Process dep data from ud.deps (npm dependency data collected by npmsw
+ fetcher) and return a deptree in the following format:
+
+ "foo": {
+ "version": "0.0.1",
+ "download_location": "https://npm.example.com/foo/0.0.1",
+ "deps": {
+ "dummy": {
+ "version": "0.1.0",
+ "download_location": "https://npm.example.com/dummy/0.1.0",
+ }
+ }
+ }
+ """
+
+ def clean(deptree_node):
+ for module_name, data in deptree_node.items():
+ if data["deps"] != {}:
+ deptree_node = data["deps"]
+ clean(deptree_node)
+ else:
+ data.pop("deps")
+
+ indexed_deptree_data = {tuple(dep["deptree"]): dep for dep in ud.deps}
+ # ud.deps elements are npm module paths within deptree, in list
+ # format, such as [ 'foo', ], [ 'foo', 'dummy' ] etc. Converting
+ # them to tuples to use them as index
+ deptree = {}
+ for dep_path in sorted(indexed_deptree_data):
+ for i in range(1, len(dep_path)+1):
+ deptree_node = deptree
+ _dep_path = dep_path[:i]
+ data = indexed_deptree_data[_dep_path]
+ for module_name in _dep_path:
+ module = deptree_node.get(module_name)
+ if not module:
+ upstr_data = self._get_upstr_data(data["url"])
+ module = {
+ "version": data["version"],
+ "download_location": upstr_data["download_location"],
+ }
+ if upstr_data.get("checksums"):
+ module.update({"checksums": upstr_data["checksums"]})
+ module.update({"deps": {}})
+ deptree_node[module_name] = module
+ deptree_node = module["deps"]
+ clean(deptree)
+ return deptree
+
+ def _get_upstr_data_wrapper(self, u, ud, destdir, gitsm_revision=None):
+ """wrapper for self._get_upstr_data(), handling npmsw and gitsm fetchers
+ (that require some recursion)"""
+ if ud.type == "npmsw":
+ if ud.url == u:
+ # npm-shrinkwrap.json file
+ upstr_data = self._get_upstr_data(
+ ud.url, local_path=ud.shrinkwrap_file)
+ upstr_data.update({
+ "deptree": self._get_npm_shrinkwrap_deptree(ud)
+ })
+ self.shrinkwrap_cache[ud.url] = upstr_data["download_location"]
+ else:
+ # ud.url is the "main" src_uri of the shrinkwrap file,
+ # while u is the fetcher-generated src_uri for the module
+ module_ud = bb.fetch2.FetchData(u, self.d)
+ upstr_data = self._get_upstr_data_wrapper(u, module_ud, destdir)
+ # recursively call this method
+ upstr_data.update({
+ "described_by": self.shrinkwrap_cache[ud.url]
+ })
+ elif ud.type == "gitsm":
+ upstr_data = self._get_upstr_data(
+ u, ud=ud, gitsm_revision=gitsm_revision)
+ if ud.url != u:
+ # git submodule
+ parent_upstr_data = self._get_upstr_data(ud.url, ud)
+ upstr_data.update({
+ "submodule_of": parent_upstr_data["download_location"]
+ })
+ # known limitation: this logic cannot correctly handle
+ # nested git submodules, because it assumes only one level of
+ # submodule dependencies.
+ # TODO Some more logic would need to be added to gitsm fetcher,
+ # to add support for nested git submodule dependency mapping
+ elif ud.type == "file":
+ upstr_data = self._get_upstr_data(u, local_path=ud.localpath)
+ else:
+ upstr_data = self._get_upstr_data(u, ud)
+ return upstr_data
+
+