From 737a4630f66ef4ce52e336e8ed823393112f3748 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 09:28:05 -0500 Subject: [PATCH 01/20] Bump express from 4.18.2 to 4.21.1 in /website (#1389) Bumps [express](https://github.com/expressjs/express) from 4.18.2 to 4.21.1. - [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/4.21.1/History.md) - [Commits](https://github.com/expressjs/express/compare/4.18.2...4.21.1) --- updated-dependencies: - dependency-name: express dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jessica Way --- website/yarn.lock | 209 +++++++++++++++++++++++++++++++++------------- 1 file changed, 153 insertions(+), 56 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 4651f07771..1c1e9a37ae 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -3881,10 +3881,10 @@ binary-extensions@^2.0.0: resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== -body-parser@1.20.2: - version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== +body-parser@1.20.3: + version "1.20.3" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.3.tgz#1953431221c6fb5cd63c4b36d53fab0928e548c6" + integrity sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g== dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -3894,7 +3894,7 @@ body-parser@1.20.2: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.11.0" + qs "6.13.0" raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -4021,6 +4021,17 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9" + integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + set-function-length "^1.2.1" + callsites@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" @@ -4390,10 +4401,10 @@ cookie-signature@1.0.6: resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== -cookie@0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" - integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== +cookie@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" + integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== 
copy-text-to-clipboard@^3.0.1: version "3.0.1" @@ -4726,6 +4737,15 @@ defer-to-connect@^1.0.1: resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== +define-data-property@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e" + integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A== + dependencies: + es-define-property "^1.0.0" + es-errors "^1.3.0" + gopd "^1.0.1" + define-lazy-prop@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" @@ -4977,6 +4997,11 @@ encodeurl@~1.0.2: resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== +encodeurl@~2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-2.0.0.tgz#7b8ea898077d7e409d3ac45474ea38eaf0857a58" + integrity sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg== + end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" @@ -5031,6 +5056,18 @@ es-abstract@^1.17.2, es-abstract@^1.18.0-next.2: string.prototype.trimstart "^1.0.4" unbox-primitive "^1.0.0" +es-define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845" + integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ== + dependencies: + get-intrinsic "^1.2.4" + +es-errors@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f" + integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw== + es-module-lexer@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.3.0.tgz#6be9c9e0b4543a60cd166ff6f8b4e9dae0b0c16f" @@ -5149,36 +5186,36 @@ execa@^5.0.0: strip-final-newline "^2.0.0" express@^4.17.3: - version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" - integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== + version "4.21.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.1.tgz#9dae5dda832f16b4eec941a4e44aa89ec481b281" + integrity sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.2" + body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" - cookie "0.6.0" + cookie "0.7.1" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "1.2.0" + finalhandler "1.3.1" fresh "0.5.2" http-errors "2.0.0" - merge-descriptors "1.0.1" + merge-descriptors "1.0.3" methods "~1.1.2" on-finished "2.4.1" parseurl "~1.3.3" - path-to-regexp "0.1.7" + path-to-regexp 
"0.1.10" proxy-addr "~2.0.7" - qs "6.11.0" + qs "6.13.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.18.0" - serve-static "1.15.0" + send "0.19.0" + serve-static "1.16.2" setprototypeof "1.2.0" statuses "2.0.1" type-is "~1.6.18" @@ -5310,13 +5347,13 @@ fill-range@^7.1.1: dependencies: to-regex-range "^5.0.1" -finalhandler@1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" - integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== +finalhandler@1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.3.1.tgz#0c575f1d1d324ddd1da35ad7ece3df7d19088019" + integrity sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ== dependencies: debug "2.6.9" - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" on-finished "2.4.1" parseurl "~1.3.3" @@ -5441,6 +5478,11 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: version "1.0.0-beta.2" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" @@ -5464,6 +5506,17 @@ get-intrinsic@^1.1.1: has "^1.0.3" has-symbols "^1.0.1" +get-intrinsic@^1.1.3, get-intrinsic@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd" + integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ== + dependencies: + es-errors "^1.3.0" + function-bind "^1.1.2" + has-proto "^1.0.1" + has-symbols "^1.0.3" + hasown "^2.0.0" + get-own-enumerable-property-symbols@^3.0.0: version "3.0.2" resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664" @@ -5611,6 +5664,13 @@ globby@^13.1.1: merge2 "^1.4.1" slash "^4.0.0" +gopd@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" + integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + dependencies: + get-intrinsic "^1.1.3" + got@^9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -5680,6 +5740,18 @@ has-flag@^4.0.0: resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== +has-property-descriptors@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854" + integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg== + dependencies: + es-define-property "^1.0.0" + +has-proto@^1.0.1: 
+ version "1.0.3" + resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd" + integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q== + has-symbols@^1.0.1, has-symbols@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.2.tgz#165d3070c00309752a1236a479331e3ac56f1423" @@ -5702,6 +5774,13 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +hasown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + hast-to-hyperscript@^9.0.0: version "9.0.1" resolved "https://registry.yarnpkg.com/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz#9b67fd188e4c81e8ad66f803855334173920218d" @@ -6666,10 +6745,10 @@ memfs@^3.1.2, memfs@^3.4.3: dependencies: fs-monkey "^1.0.4" -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== +merge-descriptors@1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.3.tgz#d80319a65f3c7935351e5cfdac8f9318504dbed5" + integrity sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ== merge-stream@^2.0.0: version "2.0.0" @@ -6921,6 +7000,11 @@ object-assign@^4.1.0, object-assign@^4.1.1: resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= +object-inspect@^1.13.1: + version "1.13.3" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.3.tgz#f14c183de51130243d6d18ae149375ff50ea488a" + integrity sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA== + object-inspect@^1.9.0: version "1.12.2" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" @@ -7180,10 +7264,10 @@ path-parse@^1.0.6, path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== +path-to-regexp@0.1.10: + version "0.1.10" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" + integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== path-to-regexp@2.2.1: version "2.2.1" @@ -7641,12 +7725,12 @@ q@^1.1.2: resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= -qs@6.11.0: - version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity 
sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== +qs@6.13.0: + version "6.13.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.13.0.tgz#6ca3bd58439f7e245655798997787b0d88a51906" + integrity sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== dependencies: - side-channel "^1.0.4" + side-channel "^1.0.6" queue-microtask@^1.2.2: version "1.2.3" @@ -8306,10 +8390,10 @@ semver@^7.3.2, semver@^7.3.4, semver@^7.3.7, semver@^7.3.8: dependencies: lru-cache "^6.0.0" -send@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" - integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== +send@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.19.0.tgz#bbc5a388c8ea6c048967049dbeac0e4a3f09d7f8" + integrity sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw== dependencies: debug "2.6.9" depd "2.0.0" @@ -8366,15 +8450,27 @@ serve-index@^1.9.1: mime-types "~2.1.17" parseurl "~1.3.2" -serve-static@1.15.0: - version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" - integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== +serve-static@1.16.2: + version "1.16.2" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.16.2.tgz#b6a5343da47f6bdd2673848bf45754941e803296" + integrity sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw== dependencies: - encodeurl "~1.0.2" + encodeurl "~2.0.0" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.18.0" + send "0.19.0" + +set-function-length@^1.2.1: + version "1.2.2" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" + integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg== + dependencies: + define-data-property "^1.1.4" + es-errors "^1.3.0" + function-bind "^1.1.2" + get-intrinsic "^1.2.4" + gopd "^1.0.1" + has-property-descriptors "^1.0.2" setimmediate@^1.0.5: version "1.0.5" @@ -8429,14 +8525,15 @@ shelljs@^0.8.5: interpret "^1.0.0" rechoir "^0.6.2" -side-channel@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" - integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== +side-channel@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2" + integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA== dependencies: - call-bind "^1.0.0" - get-intrinsic "^1.0.2" - object-inspect "^1.9.0" + call-bind "^1.0.7" + es-errors "^1.3.0" + get-intrinsic "^1.2.4" + object-inspect "^1.13.1" signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.3" From 2c883672935b3e7954dceb4cab6ae1e705a185a0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 09:43:12 -0500 Subject: [PATCH 02/20] Bump webpack from 5.88.1 to 5.96.1 in /website (#1424) Bumps [webpack](https://github.com/webpack/webpack) from 5.88.1 to 5.96.1. 
- [Release notes](https://github.com/webpack/webpack/releases) - [Commits](https://github.com/webpack/webpack/compare/v5.88.1...v5.96.1) --- updated-dependencies: - dependency-name: webpack dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Nikelle Petrillo <38223776+nikellepetrillo@users.noreply.github.com> --- website/yarn.lock | 366 ++++++++++++++++++++++++++++------------------ 1 file changed, 221 insertions(+), 145 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index 1c1e9a37ae..867d63c7a3 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2724,6 +2724,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + "@jridgewell/set-array@^1.0.1": version "1.1.2" resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" @@ -2747,6 +2752,11 @@ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.9": version "0.3.18" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz#25783b2086daf6ff1dcb53c9249ae480e4dd4cd6" @@ -2755,6 +2765,14 @@ "@jridgewell/resolve-uri" "3.1.0" "@jridgewell/sourcemap-codec" "1.4.14" +"@jridgewell/trace-mapping@^0.3.20": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + "@leichtgewicht/ip-codec@^2.0.1": version "2.0.4" resolved "https://registry.yarnpkg.com/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz#b2ac626d6cb9c8718ab459166d4bb405b8ffa78b" @@ -3108,10 +3126,10 @@ dependencies: "@types/node" "*" -"@types/eslint-scope@^3.7.3": - version "3.7.4" - resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.4.tgz#37fc1223f0786c39627068a12e94d6e6fc61de16" - integrity sha512-9K4zoImiZc3HlIp6AVUDE4CWYx22a+lhSZMYNpbjW04+YF0KWj4pJXnEMjdnFTiQibFFmElcsasJXDbdI/EPhA== +"@types/eslint-scope@^3.7.7": + version "3.7.7" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5" + integrity sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg== dependencies: "@types/eslint" "*" "@types/estree" "*" 
@@ -3129,10 +3147,10 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.47.tgz#d7a51db20f0650efec24cd04994f523d93172ed4" integrity sha512-c5ciR06jK8u9BstrmJyO97m+klJrrhCf9u3rLu3DEAJBirxRqSCvDQoYKmxuYwQI5SZChAWu+tq9oVlGRuzPAg== -"@types/estree@^1.0.0": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.1.tgz#aa22750962f3bf0e79d753d3cc067f010c95f194" - integrity sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA== +"@types/estree@^1.0.6": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50" + integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw== "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.33": version "4.17.35" @@ -3381,125 +3399,125 @@ dependencies: "@types/yargs-parser" "*" -"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.11.6.tgz#db046555d3c413f8966ca50a95176a0e2c642e24" - integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== +"@webassemblyjs/ast@1.14.1", "@webassemblyjs/ast@^1.12.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.14.1.tgz#a9f6a07f2b03c95c8d38c4536a1fdfb521ff55b6" + integrity sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ== dependencies: - "@webassemblyjs/helper-numbers" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" + "@webassemblyjs/helper-numbers" "1.13.2" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" -"@webassemblyjs/floating-point-hex-parser@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.6.tgz#dacbcb95aff135c8260f77fa3b4c5fea600a6431" - integrity sha512-ejAj9hfRJ2XMsNHk/v6Fu2dGS+i4UaXBXGemOfQ/JfQ6mdQg/WXtwleQRLLS4OvfDhv8rYnVwH27YJLMyYsxhw== +"@webassemblyjs/floating-point-hex-parser@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.13.2.tgz#fcca1eeddb1cc4e7b6eed4fc7956d6813b21b9fb" + integrity sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA== -"@webassemblyjs/helper-api-error@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.6.tgz#6132f68c4acd59dcd141c44b18cbebbd9f2fa768" - integrity sha512-o0YkoP4pVu4rN8aTJgAyj9hC2Sv5UlkzCHhxqWj8butaLvnpdc2jOwh4ewE6CX0txSfLn/UYaV/pheS2Txg//Q== +"@webassemblyjs/helper-api-error@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-api-error/-/helper-api-error-1.13.2.tgz#e0a16152248bc38daee76dd7e21f15c5ef3ab1e7" + integrity sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ== -"@webassemblyjs/helper-buffer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.6.tgz#b66d73c43e296fd5e88006f18524feb0f2c7c093" - integrity sha512-z3nFzdcp1mb8nEOFFk8DrYLpHvhKC3grJD2ardfKOzmbmJvEf/tPIqCY+sNcwZIY8ZD7IkB2l7/pqhUhqm7hLA== +"@webassemblyjs/helper-buffer@1.14.1": + version "1.14.1" + resolved 
"https://registry.yarnpkg.com/@webassemblyjs/helper-buffer/-/helper-buffer-1.14.1.tgz#822a9bc603166531f7d5df84e67b5bf99b72b96b" + integrity sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA== -"@webassemblyjs/helper-numbers@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.6.tgz#cbce5e7e0c1bd32cf4905ae444ef64cea919f1b5" - integrity sha512-vUIhZ8LZoIWHBohiEObxVm6hwP034jwmc9kuq5GdHZH0wiLVLIPcMCdpJzG4C11cHoQ25TFIQj9kaVADVX7N3g== +"@webassemblyjs/helper-numbers@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-numbers/-/helper-numbers-1.13.2.tgz#dbd932548e7119f4b8a7877fd5a8d20e63490b2d" + integrity sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA== dependencies: - "@webassemblyjs/floating-point-hex-parser" "1.11.6" - "@webassemblyjs/helper-api-error" "1.11.6" + "@webassemblyjs/floating-point-hex-parser" "1.13.2" + "@webassemblyjs/helper-api-error" "1.13.2" "@xtuc/long" "4.2.2" -"@webassemblyjs/helper-wasm-bytecode@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.6.tgz#bb2ebdb3b83aa26d9baad4c46d4315283acd51e9" - integrity sha512-sFFHKwcmBprO9e7Icf0+gddyWYDViL8bpPjJJl0WHxCdETktXdmtWLGVzoHbqUcY4Be1LkNfwTmXOJUFZYSJdA== +"@webassemblyjs/helper-wasm-bytecode@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.13.2.tgz#e556108758f448aae84c850e593ce18a0eb31e0b" + integrity sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA== -"@webassemblyjs/helper-wasm-section@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.6.tgz#ff97f3863c55ee7f580fd5c41a381e9def4aa577" - integrity sha512-LPpZbSOwTpEC2cgn4hTydySy1Ke+XEu+ETXuoyvuyezHO3Kjdu90KK95Sh9xTbmjrCsUwvWwCOQQNta37VrS9g== +"@webassemblyjs/helper-wasm-section@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.14.1.tgz#9629dda9c4430eab54b591053d6dc6f3ba050348" + integrity sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw== dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-buffer" "1.14.1" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/wasm-gen" "1.14.1" -"@webassemblyjs/ieee754@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.11.6.tgz#bb665c91d0b14fffceb0e38298c329af043c6e3a" - integrity sha512-LM4p2csPNvbij6U1f19v6WR56QZ8JcHg3QIJTlSwzFcmx6WSORicYj6I63f9yU1kEUtrpG+kjkiIAkevHpDXrg== +"@webassemblyjs/ieee754@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/ieee754/-/ieee754-1.13.2.tgz#1c5eaace1d606ada2c7fd7045ea9356c59ee0dba" + integrity sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw== dependencies: "@xtuc/ieee754" "^1.2.0" -"@webassemblyjs/leb128@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.11.6.tgz#70e60e5e82f9ac81118bc25381a0b283893240d7" - integrity 
sha512-m7a0FhE67DQXgouf1tbN5XQcdWoNgaAuoULHIfGFIEVKA6tu/edls6XnIlkmS6FrXAquJRPni3ZZKjw6FSPjPQ== +"@webassemblyjs/leb128@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/leb128/-/leb128-1.13.2.tgz#57c5c3deb0105d02ce25fa3fd74f4ebc9fd0bbb0" + integrity sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw== dependencies: "@xtuc/long" "4.2.2" -"@webassemblyjs/utf8@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.11.6.tgz#90f8bc34c561595fe156603be7253cdbcd0fab5a" - integrity sha512-vtXf2wTQ3+up9Zsg8sa2yWiQpzSsMyXj0qViVP6xKGCUT8p8YJ6HqI7l5eCnWx1T/FYdsv07HQs2wTFbbof/RA== - -"@webassemblyjs/wasm-edit@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.6.tgz#c72fa8220524c9b416249f3d94c2958dfe70ceab" - integrity sha512-Ybn2I6fnfIGuCR+Faaz7YcvtBKxvoLV3Lebn1tM4o/IAJzmi9AWYIPWpyBfU8cC+JxAO57bk4+zdsTjJR+VTOw== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/helper-wasm-section" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-opt" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - "@webassemblyjs/wast-printer" "1.11.6" - -"@webassemblyjs/wasm-gen@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.6.tgz#fb5283e0e8b4551cc4e9c3c0d7184a65faf7c268" - integrity sha512-3XOqkZP/y6B4F0PBAXvI1/bky7GryoogUtfwExeP/v7Nzwo1QLcq5oQmpKlftZLbT+ERUOAZVQjuNVak6UXjPA== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/ieee754" "1.11.6" - "@webassemblyjs/leb128" "1.11.6" - "@webassemblyjs/utf8" "1.11.6" - -"@webassemblyjs/wasm-opt@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.6.tgz#d9a22d651248422ca498b09aa3232a81041487c2" - integrity sha512-cOrKuLRE7PCe6AsOVl7WasYf3wbSo4CeOk6PkrjS7g57MFfVUF9u6ysQBBODX0LdgSvQqRiGz3CXvIDKcPNy4g== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-buffer" "1.11.6" - "@webassemblyjs/wasm-gen" "1.11.6" - "@webassemblyjs/wasm-parser" "1.11.6" - -"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz#bb85378c527df824004812bbdb784eea539174a1" - integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== - dependencies: - "@webassemblyjs/ast" "1.11.6" - "@webassemblyjs/helper-api-error" "1.11.6" - "@webassemblyjs/helper-wasm-bytecode" "1.11.6" - "@webassemblyjs/ieee754" "1.11.6" - "@webassemblyjs/leb128" "1.11.6" - "@webassemblyjs/utf8" "1.11.6" - -"@webassemblyjs/wast-printer@1.11.6": - version "1.11.6" - resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.11.6.tgz#a7bf8dd7e362aeb1668ff43f35cb849f188eff20" - integrity sha512-JM7AhRcE+yW2GWYaKeHL5vt4xqee5N2WcezptmgyhNS+ScggqcT1OtXykhAb13Sn5Yas0j2uv9tHgrjwvzAP4A== - dependencies: - "@webassemblyjs/ast" "1.11.6" +"@webassemblyjs/utf8@1.13.2": + version "1.13.2" + resolved "https://registry.yarnpkg.com/@webassemblyjs/utf8/-/utf8-1.13.2.tgz#917a20e93f71ad5602966c2d685ae0c6c21f60f1" + integrity sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ== + +"@webassemblyjs/wasm-edit@^1.12.1": + version 
"1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-edit/-/wasm-edit-1.14.1.tgz#ac6689f502219b59198ddec42dcd496b1004d597" + integrity sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-buffer" "1.14.1" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/helper-wasm-section" "1.14.1" + "@webassemblyjs/wasm-gen" "1.14.1" + "@webassemblyjs/wasm-opt" "1.14.1" + "@webassemblyjs/wasm-parser" "1.14.1" + "@webassemblyjs/wast-printer" "1.14.1" + +"@webassemblyjs/wasm-gen@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-gen/-/wasm-gen-1.14.1.tgz#991e7f0c090cb0bb62bbac882076e3d219da9570" + integrity sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/ieee754" "1.13.2" + "@webassemblyjs/leb128" "1.13.2" + "@webassemblyjs/utf8" "1.13.2" + +"@webassemblyjs/wasm-opt@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-opt/-/wasm-opt-1.14.1.tgz#e6f71ed7ccae46781c206017d3c14c50efa8106b" + integrity sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-buffer" "1.14.1" + "@webassemblyjs/wasm-gen" "1.14.1" + "@webassemblyjs/wasm-parser" "1.14.1" + +"@webassemblyjs/wasm-parser@1.14.1", "@webassemblyjs/wasm-parser@^1.12.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wasm-parser/-/wasm-parser-1.14.1.tgz#b3e13f1893605ca78b52c68e54cf6a865f90b9fb" + integrity sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ== + dependencies: + "@webassemblyjs/ast" "1.14.1" + "@webassemblyjs/helper-api-error" "1.13.2" + "@webassemblyjs/helper-wasm-bytecode" "1.13.2" + "@webassemblyjs/ieee754" "1.13.2" + "@webassemblyjs/leb128" "1.13.2" + "@webassemblyjs/utf8" "1.13.2" + +"@webassemblyjs/wast-printer@1.14.1": + version "1.14.1" + resolved "https://registry.yarnpkg.com/@webassemblyjs/wast-printer/-/wast-printer-1.14.1.tgz#3bb3e9638a8ae5fdaf9610e7a06b4d9f9aa6fe07" + integrity sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw== + dependencies: + "@webassemblyjs/ast" "1.14.1" "@xtuc/long" "4.2.2" "@xtuc/ieee754@^1.2.0": @@ -3520,11 +3538,6 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: mime-types "~2.1.34" negotiator "0.6.3" -acorn-import-assertions@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" - integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== - acorn-walk@^8.0.0: version "8.1.0" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.1.0.tgz#d3c6a9faf00987a5e2b9bdb506c2aa76cd707f83" @@ -3535,10 +3548,10 @@ acorn@^8.0.4: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.2.4.tgz#caba24b08185c3b56e3168e97d15ed17f4d31fd0" integrity sha512-Ibt84YwBDDA890eDiDCEqcbwvHlBvzzDkU2cGBBDDI1QWT12jTiXIOn2CIw5KK4i6N5Z2HUxwYjzriDyqaqqZg== -acorn@^8.7.1: - version "8.8.2" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a" - integrity 
sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== +acorn@^8.14.0: + version "8.14.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.0.tgz#063e2c70cac5fb4f6467f0b11152e04c682795b0" + integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== acorn@^8.8.2: version "8.9.0" @@ -3985,6 +3998,16 @@ browserslist@^4.18.1, browserslist@^4.21.3, browserslist@^4.21.4, browserslist@^ node-releases "^2.0.12" update-browserslist-db "^1.0.11" +browserslist@^4.24.0: + version "4.24.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.24.2.tgz#f5845bc91069dbd55ee89faf9822e1d885d16580" + integrity sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg== + dependencies: + caniuse-lite "^1.0.30001669" + electron-to-chromium "^1.5.41" + node-releases "^2.0.18" + update-browserslist-db "^1.1.1" + buffer-from@^1.0.0: version "1.1.2" resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" @@ -4075,6 +4098,11 @@ caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001503: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001509.tgz#2b7ad5265392d6d2de25cd8776d1ab3899570d14" integrity sha512-2uDDk+TRiTX5hMcUYT/7CSyzMZxjfGu0vAUjS2g0LSD8UoXOv0LtpH4LxGMemsiPq6LCVIUjNwVM0erkOkGCDA== +caniuse-lite@^1.0.30001669: + version "1.0.30001680" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001680.tgz#5380ede637a33b9f9f1fc6045ea99bd142f3da5e" + integrity sha512-rPQy70G6AGUMnbwS1z6Xg+RkHYPAi18ihs47GH0jcxIG7wArmPgY3XbS2sRdBbxJljp3thdT8BIqv9ccCypiPA== + ccount@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.1.0.tgz#246687debb6014735131be8abab2d93898f8d043" @@ -4967,6 +4995,11 @@ electron-to-chromium@^1.4.431: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.445.tgz#058d2c5f3a2981ab1a37440f5a5e42d15672aa6d" integrity sha512-++DB+9VK8SBJwC+X1zlMfJ1tMA3F0ipi39GdEp+x3cV2TyBihqAgad8cNMWtLDEkbH39nlDQP7PfGrDr3Dr7HA== +electron-to-chromium@^1.5.41: + version "1.5.57" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.57.tgz#cb43af8784166bca24565b3418bf5f775a6b1c86" + integrity sha512-xS65H/tqgOwUBa5UmOuNSLuslDo7zho0y/lgQw35pnrqiZh7UOWHCeL/Bt6noJATbA6tpQJGCifsFsIRZj1Fqg== + emoji-regex@^7.0.1: version "7.0.3" resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" @@ -5009,10 +5042,10 @@ end-of-stream@^1.1.0: dependencies: once "^1.4.0" -enhanced-resolve@^5.15.0: - version "5.15.0" - resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz#1af946c7d93603eb88e9896cee4904dc012e9c35" - integrity sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg== +enhanced-resolve@^5.17.1: + version "5.17.1" + resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.17.1.tgz#67bfbbcc2f81d511be77d686a90267ef7f898a15" + integrity sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg== dependencies: graceful-fs "^4.2.4" tapable "^2.2.0" @@ -5087,6 +5120,11 @@ escalade@^3.1.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== 
+escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + escape-goat@^2.0.0: version "2.1.1" resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675" @@ -5693,7 +5731,7 @@ graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== -graceful-fs@^4.2.6: +graceful-fs@^4.2.11, graceful-fs@^4.2.6: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -6942,6 +6980,11 @@ node-releases@^2.0.12: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.12.tgz#35627cc224a23bfb06fb3380f2b3afaaa7eb1039" integrity sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ== +node-releases@^2.0.18: + version "2.0.18" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.18.tgz#f010e8d35e2fe8d6b2944f03f70213ecedc4ca3f" + integrity sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g== + normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" @@ -7291,6 +7334,11 @@ picocolors@^1.0.0: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== +picocolors@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== + picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d" @@ -8871,7 +8919,18 @@ tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.0.tgz#5c373d281d9c672848213d0e037d1c4165ab426b" integrity sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw== -terser-webpack-plugin@^5.3.3, terser-webpack-plugin@^5.3.7: +terser-webpack-plugin@^5.3.10: + version "5.3.10" + resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" + integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.20" + jest-worker "^27.4.5" + schema-utils "^3.1.1" + serialize-javascript "^6.0.1" + terser "^5.26.0" + +terser-webpack-plugin@^5.3.3: version "5.3.9" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.9.tgz#832536999c51b46d468067f9e37662a3b96adfe1" integrity 
sha512-ZuXsqE07EcggTWQjXUj+Aot/OMcD0bMKGgF63f7UxYcu5/AJF53aIpK1YoP5xR9l6s/Hy2b+t1AM0bLNPRuhwA== @@ -8892,6 +8951,16 @@ terser@^5.10.0, terser@^5.16.8: commander "^2.20.0" source-map-support "~0.5.20" +terser@^5.26.0: + version "5.36.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.36.0.tgz#8b0dbed459ac40ff7b4c9fd5a3a2029de105180e" + integrity sha512-IYV9eNMuFAV4THUspIRXkLakHnV6XO7FEdtKjf/mDyrnqUg9LnlOn6/RwRvM9SZjR4GUq8Nk8zj67FzVARr74w== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + text-table@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" @@ -9180,6 +9249,14 @@ update-browserslist-db@^1.0.11: escalade "^3.1.1" picocolors "^1.0.0" +update-browserslist-db@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz#80846fba1d79e82547fb661f8d141e0945755fe5" + integrity sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A== + dependencies: + escalade "^3.2.0" + picocolors "^1.1.0" + update-notifier@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-5.1.0.tgz#4ab0d7c7f36a231dd7316cf7729313f0214d9ad9" @@ -9326,10 +9403,10 @@ wait-on@^6.0.1: minimist "^1.2.5" rxjs "^7.5.4" -watchpack@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.0.tgz#fa33032374962c78113f93c7f2fb4c54c9862a5d" - integrity sha512-Lcvm7MGST/4fup+ifyKi2hjyIAwcdI4HRgtvTpIUxBRhB+RFtUh8XtDOxUfctVCnhVi+QQj49i91OyvzkJl6cg== +watchpack@^2.4.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/watchpack/-/watchpack-2.4.2.tgz#2feeaed67412e7c33184e5a79ca738fbd38564da" + integrity sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw== dependencies: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" @@ -9428,33 +9505,32 @@ webpack-sources@^3.2.2, webpack-sources@^3.2.3: integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== webpack@^5.73.0: - version "5.88.1" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.88.1.tgz#21eba01e81bd5edff1968aea726e2fbfd557d3f8" - integrity sha512-FROX3TxQnC/ox4N+3xQoWZzvGXSuscxR32rbzjpXgEzWudJFEJBpdlkkob2ylrv5yzzufD1zph1OoFsLtm6stQ== - dependencies: - "@types/eslint-scope" "^3.7.3" - "@types/estree" "^1.0.0" - "@webassemblyjs/ast" "^1.11.5" - "@webassemblyjs/wasm-edit" "^1.11.5" - "@webassemblyjs/wasm-parser" "^1.11.5" - acorn "^8.7.1" - acorn-import-assertions "^1.9.0" - browserslist "^4.14.5" + version "5.96.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.96.1.tgz#3676d1626d8312b6b10d0c18cc049fba7ac01f0c" + integrity sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA== + dependencies: + "@types/eslint-scope" "^3.7.7" + "@types/estree" "^1.0.6" + "@webassemblyjs/ast" "^1.12.1" + "@webassemblyjs/wasm-edit" "^1.12.1" + "@webassemblyjs/wasm-parser" "^1.12.1" + acorn "^8.14.0" + browserslist "^4.24.0" chrome-trace-event "^1.0.2" - enhanced-resolve "^5.15.0" + enhanced-resolve "^5.17.1" es-module-lexer "^1.2.1" eslint-scope "5.1.1" events "^3.2.0" glob-to-regexp "^0.4.1" - graceful-fs "^4.2.9" + graceful-fs "^4.2.11" json-parse-even-better-errors "^2.3.1" loader-runner "^4.2.0" mime-types "^2.1.27" neo-async "^2.6.2" schema-utils "^3.2.0" tapable "^2.1.1" - terser-webpack-plugin 
"^5.3.7" - watchpack "^2.4.0" + terser-webpack-plugin "^5.3.10" + watchpack "^2.4.1" webpack-sources "^3.2.3" webpackbar@^5.0.2: From 9a87c5521b6ad8f182ad0eeb0ab809435a26eafd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Nov 2024 07:49:28 -0500 Subject: [PATCH 03/20] Bump jinja2 from 3.1.3 to 3.1.4 in /verification/test-wdls/scripts (#1426) Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.3 to 3.1.4. - [Release notes](https://github.com/pallets/jinja/releases) - [Changelog](https://github.com/pallets/jinja/blob/main/CHANGES.rst) - [Commits](https://github.com/pallets/jinja/compare/3.1.3...3.1.4) --- updated-dependencies: - dependency-name: jinja2 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Nikelle Petrillo <38223776+nikellepetrillo@users.noreply.github.com> --- verification/test-wdls/scripts/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/verification/test-wdls/scripts/requirements.txt b/verification/test-wdls/scripts/requirements.txt index de9e3e0771..fe8e60644f 100644 --- a/verification/test-wdls/scripts/requirements.txt +++ b/verification/test-wdls/scripts/requirements.txt @@ -1 +1 @@ -jinja2==3.1.3 \ No newline at end of file +jinja2==3.1.4 \ No newline at end of file From 158772d35e6e83ecbdd43450e6cd2d7f3519bfc7 Mon Sep 17 00:00:00 2001 From: Nikelle Petrillo <38223776+nikellepetrillo@users.noreply.github.com> Date: Mon, 18 Nov 2024 16:39:57 -0500 Subject: [PATCH 04/20] Np starsolo fails silently when it runs out of disk (#1428) * use a fake file * more checks * more checks * more checks * use quickcheck * use quickcheck * missing quote * cleaning up * remove fake file * changelogs * Updated pipeline_versions.txt with all pipeline version information --------- Co-authored-by: GitHub Action --- pipeline_versions.txt | 10 +++++----- pipelines/skylab/multiome/Multiome.changelog.md | 5 +++++ pipelines/skylab/multiome/Multiome.wdl | 2 +- pipelines/skylab/optimus/Optimus.changelog.md | 5 +++++ pipelines/skylab/optimus/Optimus.wdl | 5 +++-- pipelines/skylab/paired_tag/PairedTag.changelog.md | 5 +++++ pipelines/skylab/paired_tag/PairedTag.wdl | 2 +- pipelines/skylab/slideseq/SlideSeq.changelog.md | 5 +++++ pipelines/skylab/slideseq/SlideSeq.wdl | 2 +- .../MultiSampleSmartSeq2SingleNucleus.changelog.md | 5 +++++ .../MultiSampleSmartSeq2SingleNucleus.wdl | 2 +- tasks/skylab/StarAlign.wdl | 12 ++++++++---- 12 files changed, 45 insertions(+), 15 deletions(-) diff --git a/pipeline_versions.txt b/pipeline_versions.txt index 0b12b2df5a..9964940111 100644 --- a/pipeline_versions.txt +++ b/pipeline_versions.txt @@ -30,11 +30,11 @@ ExomeReprocessing 3.3.3 2024-11-04 BuildIndices 3.0.0 2023-12-06 scATAC 1.3.2 2023-08-03 snm3C 4.0.4 2024-08-06 -Multiome 5.9.1 2024-11-12 -PairedTag 1.8.2 2024-11-12 +Multiome 5.9.2 2024-11-15 +PairedTag 1.8.3 2024-11-15 MultiSampleSmartSeq2 2.2.22 2024-09-11 -MultiSampleSmartSeq2SingleNucleus 2.0.4 2024-11-12 -Optimus 7.8.2 2024-11-12 +MultiSampleSmartSeq2SingleNucleus 2.0.5 2024-11-15 +Optimus 7.8.3 2024-11-15 atac 2.5.2 2024-11-12 SmartSeq2SingleSample 5.1.21 2024-09-11 -SlideSeq 3.4.5 2024-11-12 +SlideSeq 3.4.6 2024-11-15 diff --git a/pipelines/skylab/multiome/Multiome.changelog.md b/pipelines/skylab/multiome/Multiome.changelog.md index e5b7ca398e..4a05f926be 100644 --- a/pipelines/skylab/multiome/Multiome.changelog.md +++ 
b/pipelines/skylab/multiome/Multiome.changelog.md @@ -1,3 +1,8 @@ +# 5.9.2 +2024-11-15 (Date of Last Commit) + +* Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline + # 5.9.1 2024-11-12 (Date of Last Commit) diff --git a/pipelines/skylab/multiome/Multiome.wdl b/pipelines/skylab/multiome/Multiome.wdl index 0d291633de..438c49d264 100644 --- a/pipelines/skylab/multiome/Multiome.wdl +++ b/pipelines/skylab/multiome/Multiome.wdl @@ -9,7 +9,7 @@ import "../../../tasks/broad/Utilities.wdl" as utils workflow Multiome { - String pipeline_version = "5.9.1" + String pipeline_version = "5.9.2" input { String cloud_provider diff --git a/pipelines/skylab/optimus/Optimus.changelog.md b/pipelines/skylab/optimus/Optimus.changelog.md index 916c4ad800..d05e53bee5 100644 --- a/pipelines/skylab/optimus/Optimus.changelog.md +++ b/pipelines/skylab/optimus/Optimus.changelog.md @@ -1,3 +1,8 @@ +# 7.8.3 +2024-11-15 (Date of Last Commit) + +* Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline + # 7.8.2 2024-11-12 (Date of Last Commit) diff --git a/pipelines/skylab/optimus/Optimus.wdl b/pipelines/skylab/optimus/Optimus.wdl index 58e65d42fc..41a71d3cb6 100644 --- a/pipelines/skylab/optimus/Optimus.wdl +++ b/pipelines/skylab/optimus/Optimus.wdl @@ -71,7 +71,7 @@ workflow Optimus { # version of this pipeline - String pipeline_version = "7.8.2" + String pipeline_version = "7.8.3" # this is used to scatter matched [r1_fastq, r2_fastq, i1_fastq] arrays @@ -93,6 +93,7 @@ workflow Optimus { String star_docker = "star:1.0.1-2.7.11a-1692706072" String warp_tools_docker_2_2_0 = "warp-tools:2.4.0" String star_merge_docker = "star-merge-npz:1.3.0" + String samtools_star = "samtools-star:1.0.0-1.11-2.7.11a-1731516196" #TODO how do we handle these? 
@@ -183,7 +184,7 @@ workflow Optimus { count_exons = count_exons, output_bam_basename = output_bam_basename + "_" + idx, soloMultiMappers = soloMultiMappers, - star_docker_path = docker_prefix + star_docker + samtools_star_docker_path = docker_prefix + samtools_star } } call Merge.MergeSortBamFiles as MergeBam { diff --git a/pipelines/skylab/paired_tag/PairedTag.changelog.md b/pipelines/skylab/paired_tag/PairedTag.changelog.md index f2f9a7fe0a..e411e0f679 100644 --- a/pipelines/skylab/paired_tag/PairedTag.changelog.md +++ b/pipelines/skylab/paired_tag/PairedTag.changelog.md @@ -1,3 +1,8 @@ +# 1.8.3 +2024-11-15 (Date of Last Commit) + +* Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline + # 1.8.2 2024-11-12 (Date of Last Commit) diff --git a/pipelines/skylab/paired_tag/PairedTag.wdl b/pipelines/skylab/paired_tag/PairedTag.wdl index 155254056a..233dafc685 100644 --- a/pipelines/skylab/paired_tag/PairedTag.wdl +++ b/pipelines/skylab/paired_tag/PairedTag.wdl @@ -8,7 +8,7 @@ import "../../../tasks/broad/Utilities.wdl" as utils workflow PairedTag { - String pipeline_version = "1.8.2" + String pipeline_version = "1.8.3" input { diff --git a/pipelines/skylab/slideseq/SlideSeq.changelog.md b/pipelines/skylab/slideseq/SlideSeq.changelog.md index c0f4a9f3dc..54c9fb6890 100644 --- a/pipelines/skylab/slideseq/SlideSeq.changelog.md +++ b/pipelines/skylab/slideseq/SlideSeq.changelog.md @@ -1,3 +1,8 @@ +# 3.4.6 +2024-11-15 (Date of Last Commit) + +* Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline + # 3.4.5 2024-11-12 (Date of Last Commit) diff --git a/pipelines/skylab/slideseq/SlideSeq.wdl b/pipelines/skylab/slideseq/SlideSeq.wdl index 5ec74e3e2e..7c69d79232 100644 --- a/pipelines/skylab/slideseq/SlideSeq.wdl +++ b/pipelines/skylab/slideseq/SlideSeq.wdl @@ -25,7 +25,7 @@ import "../../../tasks/broad/Utilities.wdl" as utils workflow SlideSeq { - String pipeline_version = "3.4.5" + String pipeline_version = "3.4.6" input { Array[File] r1_fastq diff --git a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md index 7f75d2c3bb..85dc657364 100644 --- a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md +++ b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md @@ -1,3 +1,8 @@ +# 2.0.5 +2024-11-15 (Date of Last Commit) + +* Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline + # 2.0.4 2024-11-12 (Date of Last Commit) diff --git a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl index e5702147d1..871090e2be 100644 --- a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl +++ b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl @@ -57,7 +57,7 @@ workflow MultiSampleSmartSeq2SingleNucleus { } # Version of this pipeline - String pipeline_version = "2.0.4" + String pipeline_version = "2.0.5" if (false) { String? 
none = "None" diff --git a/tasks/skylab/StarAlign.wdl b/tasks/skylab/StarAlign.wdl index ffceb7ce17..1d0e5936ff 100644 --- a/tasks/skylab/StarAlign.wdl +++ b/tasks/skylab/StarAlign.wdl @@ -226,7 +226,7 @@ task STARsoloFastq { String? soloMultiMappers # runtime values - String star_docker_path + String samtools_star_docker_path Int machine_mem_mb = 64000 Int cpu = 8 # multiply input size by 2.2 to account for output bam file + 20% overhead, add size of reference. @@ -244,7 +244,7 @@ task STARsoloFastq { r2_fastq: "array of forward read FASTQ files" tar_star_reference: "star reference tarball built against the species that the bam_input is derived from" star_strand_mode: "STAR mode for handling stranded reads. Options are 'Forward', 'Reverse, or 'Unstranded'" - star_docker_path: "(optional) the docker image containing the runtime environment for this task" + samtools_star_docker_path: "(optional) the docker image containing the runtime environment for this task" machine_mem_mb: "(optional) the amount of memory (MiB) to provision for this task" cpu: "(optional) the number of cpus to provision for this task" disk: "(optional) the amount of disk space (GiB) to provision for this task" @@ -329,7 +329,11 @@ task STARsoloFastq { ~{"--soloMultiMappers " + soloMultiMappers} \ --soloUMIfiltering MultiGeneUMI_CR \ --soloCellFilter EmptyDrops_CR - + + # validate the bam with samtools quickcheck + samtools quickcheck -v Aligned.sortedByCoord.out.bam + + echo "UMI LEN " $UMILen touch barcodes_sn_rna.tsv @@ -409,7 +413,7 @@ task STARsoloFastq { >>> runtime { - docker: star_docker_path + docker: samtools_star_docker_path memory: "~{machine_mem_mb} MiB" disks: "local-disk ~{disk} HDD" disk: disk + " GB" # TES From 39c6aac72bcead9578c8c0eaf03dec5bfc4a0f29 Mon Sep 17 00:00:00 2001 From: Elizabeth Kiernan <55763654+ekiernan@users.noreply.github.com> Date: Tue, 3 Dec 2024 09:08:51 -0500 Subject: [PATCH 05/20] PD-2803: Fixing Optimus h5ad generation to handle gene_ids more flexibly (#1431) Updated docker for h5ad creation in Optimus allowing for more flexible gene_id handling --- pipeline_versions.txt | 8 ++++---- pipelines/skylab/atac/atac.changelog.md | 5 +++++ pipelines/skylab/atac/atac.wdl | 4 ++-- pipelines/skylab/multiome/Multiome.changelog.md | 3 ++- pipelines/skylab/optimus/Optimus.changelog.md | 3 ++- pipelines/skylab/optimus/Optimus.wdl | 2 +- pipelines/skylab/paired_tag/PairedTag.changelog.md | 3 ++- pipelines/skylab/slideseq/SlideSeq.changelog.md | 1 + pipelines/skylab/slideseq/SlideSeq.wdl | 2 +- tasks/skylab/FastqProcessing.wdl | 2 +- 10 files changed, 21 insertions(+), 12 deletions(-) diff --git a/pipeline_versions.txt b/pipeline_versions.txt index 9964940111..d06921a88f 100644 --- a/pipeline_versions.txt +++ b/pipeline_versions.txt @@ -30,11 +30,11 @@ ExomeReprocessing 3.3.3 2024-11-04 BuildIndices 3.0.0 2023-12-06 scATAC 1.3.2 2023-08-03 snm3C 4.0.4 2024-08-06 -Multiome 5.9.2 2024-11-15 -PairedTag 1.8.3 2024-11-15 +Multiome 5.9.2 2024-11-22 +PairedTag 1.8.3 2024-11-22 MultiSampleSmartSeq2 2.2.22 2024-09-11 MultiSampleSmartSeq2SingleNucleus 2.0.5 2024-11-15 -Optimus 7.8.3 2024-11-15 -atac 2.5.2 2024-11-12 +Optimus 7.8.3 2024-11-22 +atac 2.5.3 2024-11-22 SmartSeq2SingleSample 5.1.21 2024-09-11 SlideSeq 3.4.6 2024-11-15 diff --git a/pipelines/skylab/atac/atac.changelog.md b/pipelines/skylab/atac/atac.changelog.md index 5c2c0aea4b..578088a0d6 100644 --- a/pipelines/skylab/atac/atac.changelog.md +++ b/pipelines/skylab/atac/atac.changelog.md @@ -1,3 +1,8 @@ +# 2.5.3 +2024-11-22 (Date of Last Commit) + 
+* Updated the warp-tools docker; this update changes the way gene_names are identified when creating gene expression h5ad files; it does not impact the ATAC workflow
+
 # 2.5.2
 2024-11-12 (Date of Last Commit)

diff --git a/pipelines/skylab/atac/atac.wdl b/pipelines/skylab/atac/atac.wdl
index 521cb09dfd..c0c748c042 100644
--- a/pipelines/skylab/atac/atac.wdl
+++ b/pipelines/skylab/atac/atac.wdl
@@ -49,7 +49,7 @@ workflow ATAC {
     String adapter_seq_read3 = "TCGTCGGCAGCGTCAGATGTGTATAAGAGACAG"
   }

-  String pipeline_version = "2.5.2"
+  String pipeline_version = "2.5.3"

   # Determine docker prefix based on cloud provider
   String gcr_docker_prefix = "us.gcr.io/broad-gotc-prod/"
@@ -57,7 +57,7 @@ workflow ATAC {
   String docker_prefix = if cloud_provider == "gcp" then gcr_docker_prefix else acr_docker_prefix

   # Docker image names
-  String warp_tools_2_2_0 = "warp-tools:2.2.0"
+  String warp_tools_2_2_0 = "warp-tools:2.5.0"
   String cutadapt_docker = "cutadapt:1.0.0-4.4-1686752919"
   String samtools_docker = "samtools-dist-bwa:3.0.0"
   String upstools_docker = "upstools:1.0.0-2023.03.03-1704300311"

diff --git a/pipelines/skylab/multiome/Multiome.changelog.md b/pipelines/skylab/multiome/Multiome.changelog.md
index 4a05f926be..b1577ed4be 100644
--- a/pipelines/skylab/multiome/Multiome.changelog.md
+++ b/pipelines/skylab/multiome/Multiome.changelog.md
@@ -1,7 +1,8 @@
 # 5.9.2
-2024-11-15 (Date of Last Commit)
+2024-11-22 (Date of Last Commit)

 * Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline
+* Updated the warp-tools docker; this update changes the way gene_names are identified when creating gene expression h5ad files

 # 5.9.1
 2024-11-12 (Date of Last Commit)

diff --git a/pipelines/skylab/optimus/Optimus.changelog.md b/pipelines/skylab/optimus/Optimus.changelog.md
index d05e53bee5..9e7e385c8a 100644
--- a/pipelines/skylab/optimus/Optimus.changelog.md
+++ b/pipelines/skylab/optimus/Optimus.changelog.md
@@ -1,7 +1,8 @@
 # 7.8.3
-2024-11-15 (Date of Last Commit)
+2024-11-22 (Date of Last Commit)

 * Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline
+* Updated the warp-tools docker; this update changes the way gene_names are identified when creating gene expression h5ad files

 # 7.8.2
 2024-11-12 (Date of Last Commit)

diff --git a/pipelines/skylab/optimus/Optimus.wdl b/pipelines/skylab/optimus/Optimus.wdl
index 41a71d3cb6..fc027f0ee0 100644
--- a/pipelines/skylab/optimus/Optimus.wdl
+++ b/pipelines/skylab/optimus/Optimus.wdl
@@ -91,7 +91,7 @@ workflow Optimus {
     String pytools_docker = "pytools:1.0.0-1661263730"
     String empty_drops_docker = "empty-drops:1.0.1-4.2"
     String star_docker = "star:1.0.1-2.7.11a-1692706072"
-    String warp_tools_docker_2_2_0 = "warp-tools:2.4.0"
+    String warp_tools_docker_2_2_0 = "warp-tools:2.5.0"
     String star_merge_docker = "star-merge-npz:1.3.0"
     String samtools_star = "samtools-star:1.0.0-1.11-2.7.11a-1731516196"

diff --git a/pipelines/skylab/paired_tag/PairedTag.changelog.md b/pipelines/skylab/paired_tag/PairedTag.changelog.md
index e411e0f679..3ada0c9188 100644
--- a/pipelines/skylab/paired_tag/PairedTag.changelog.md
+++ b/pipelines/skylab/paired_tag/PairedTag.changelog.md
@@ -1,7 +1,8 @@
 # 1.8.3
-2024-11-15 (Date of Last Commit)
+2024-11-22 (Date of Last Commit)

 * Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline
+* Updated the warp-tools docker; this update changes the way gene_names are identified when creating gene expression h5ad files

 # 1.8.2
 2024-11-12 (Date of
Last Commit) diff --git a/pipelines/skylab/slideseq/SlideSeq.changelog.md b/pipelines/skylab/slideseq/SlideSeq.changelog.md index 54c9fb6890..a40017660d 100644 --- a/pipelines/skylab/slideseq/SlideSeq.changelog.md +++ b/pipelines/skylab/slideseq/SlideSeq.changelog.md @@ -2,6 +2,7 @@ 2024-11-15 (Date of Last Commit) * Added bam validation in the StarSoloFastq task; this does not affect the outputs of the pipeline +* Updated the warp-tools docker; this update changes the way gene_names are identified when creating gene expression h5ad files # 3.4.5 2024-11-12 (Date of Last Commit) diff --git a/pipelines/skylab/slideseq/SlideSeq.wdl b/pipelines/skylab/slideseq/SlideSeq.wdl index 7c69d79232..e258436f8a 100644 --- a/pipelines/skylab/slideseq/SlideSeq.wdl +++ b/pipelines/skylab/slideseq/SlideSeq.wdl @@ -48,7 +48,7 @@ workflow SlideSeq { # docker images String pytools_docker = "pytools:1.0.0-1661263730" String picard_cloud_docker = "picard-cloud:2.26.10" - String warp_tools_docker_2_2_0 = "warp-tools:2.4.0" + String warp_tools_docker_2_2_0 = "warp-tools:2.5.0" String star_merge_docker = "star-merge-npz:1.3.0" String ubuntu_docker = "ubuntu_16_0_4@sha256:025124e2f1cf4d29149958f17270596bffe13fc6acca6252977c572dd5ba01bf" diff --git a/tasks/skylab/FastqProcessing.wdl b/tasks/skylab/FastqProcessing.wdl index ea7363b738..5263f53ef2 100644 --- a/tasks/skylab/FastqProcessing.wdl +++ b/tasks/skylab/FastqProcessing.wdl @@ -138,7 +138,7 @@ task FastqProcessingSlidSeq { # Runtime attributes - String docker = "us.gcr.io/broad-gotc-prod/warp-tools:2.3.0" + String docker = "us.gcr.io/broad-gotc-prod/warp-tools:2.5.0" Int cpu = 16 Int machine_mb = 40000 Int disk = ceil(size(r1_fastq, "GiB")*3 + size(r2_fastq, "GiB")*3) + 50 From 9add5e9d1458cd49007aa80805109136e2b00d2d Mon Sep 17 00:00:00 2001 From: Nikelle Petrillo <38223776+nikellepetrillo@users.noreply.github.com> Date: Wed, 4 Dec 2024 20:20:08 -0500 Subject: [PATCH 06/20] Np add metadata.txt file to build indices (#1435) * add pipeline metadata txt to BuildIndices.wdl * more disk space * more disk space * more disk space * more disk space * more disk space * more disk space * more disk space * more disk space * more disk space * more disk space * more disk space * more disk space * need all as files * need all as files * need all as files * need all as files * grab the full paths * changelogs * Updated pipeline_versions.txt with all pipeline version information --------- Co-authored-by: GitHub Action --- pipeline_versions.txt | 2 +- .../build_indices/BuildIndices.changelog.md | 5 ++ .../skylab/build_indices/BuildIndices.wdl | 73 ++++++++++++++++++- 3 files changed, 78 insertions(+), 2 deletions(-) diff --git a/pipeline_versions.txt b/pipeline_versions.txt index d06921a88f..c05653874a 100644 --- a/pipeline_versions.txt +++ b/pipeline_versions.txt @@ -27,7 +27,7 @@ ExternalExomeReprocessing 3.3.3 2024-11-04 CramToUnmappedBams 1.1.3 2024-08-02 WholeGenomeReprocessing 3.3.3 2024-11-04 ExomeReprocessing 3.3.3 2024-11-04 -BuildIndices 3.0.0 2023-12-06 +BuildIndices 3.1.0 2024-11-26 scATAC 1.3.2 2023-08-03 snm3C 4.0.4 2024-08-06 Multiome 5.9.2 2024-11-22 diff --git a/pipelines/skylab/build_indices/BuildIndices.changelog.md b/pipelines/skylab/build_indices/BuildIndices.changelog.md index 3d61089107..b66e89081b 100644 --- a/pipelines/skylab/build_indices/BuildIndices.changelog.md +++ b/pipelines/skylab/build_indices/BuildIndices.changelog.md @@ -1,3 +1,8 @@ +# 3.1.0 +2024-11-26 (Date of Last Commit) + +* Added metadata.txt file as an output to the pipeline + # 3.0.0 
2023-12-06 (Date of Last Commit)

diff --git a/pipelines/skylab/build_indices/BuildIndices.wdl b/pipelines/skylab/build_indices/BuildIndices.wdl
index 2dbb11ff27..58265fb176 100644
--- a/pipelines/skylab/build_indices/BuildIndices.wdl
+++ b/pipelines/skylab/build_indices/BuildIndices.wdl
@@ -16,7 +16,7 @@ workflow BuildIndices {
   }

   # version of this pipeline
-  String pipeline_version = "3.0.0"
+  String pipeline_version = "3.1.0"

   parameter_meta {
@@ -49,12 +49,25 @@
       organism = organism
   }

+  call RecordMetadata {
+    input:
+      pipeline_version = pipeline_version,
+      input_files = [annotations_gtf, genome_fa, biotypes],
+      output_files = [
+        BuildStarSingleNucleus.star_index,
+        BuildStarSingleNucleus.modified_annotation_gtf,
+        CalculateChromosomeSizes.chrom_sizes,
+        BuildBWAreference.reference_bundle
+      ]
+  }
+
   output {
     File snSS2_star_index = BuildStarSingleNucleus.star_index
     String pipeline_version_out = "BuildIndices_v~{pipeline_version}"
     File snSS2_annotation_gtf_modified = BuildStarSingleNucleus.modified_annotation_gtf
     File reference_bundle = BuildBWAreference.reference_bundle
     File chromosome_sizes = CalculateChromosomeSizes.chrom_sizes
+    File metadata = RecordMetadata.metadata_file
   }
 }
@@ -195,3 +208,61 @@
 String reference_name = "bwa-mem2-2.2.1-~{organism}-~{genome_source}-build-~{genome_build}"
   }
 }
+
+task RecordMetadata {
+  input {
+    String pipeline_version
+    Array[File] input_files
+    Array[File] output_files
+  }
+
+  command <<<
+    set -euo pipefail
+
+    # create metadata file
+    echo "Pipeline Version: ~{pipeline_version}" > metadata.txt
+    echo "Date of Workflow Run: $(date -u +%Y-%m-%dT%H:%M:%SZ)" >> metadata.txt
+    echo "" >> metadata.txt
+
+    # echo paths and md5sums for input files
+    echo "Input Files and their md5sums:" >> metadata.txt
+    for file in ~{sep=" " input_files}; do
+      echo "$file : $(md5sum "$file" | awk '{print $1}')" >> metadata.txt
+    done
+    echo "" >> metadata.txt
+
+    # echo paths and md5sums for output files
+    echo "Output Files and their md5sums:" >> metadata.txt
+    for file in ~{sep=" " output_files}; do
+      echo "$file : $(md5sum "$file" | awk '{print $1}')" >> metadata.txt
+    done
+    echo "" >> metadata.txt
+
+    # grab workspace bucket
+    file="~{output_files[0]}"
+    workspace_bucket=$(echo "$file" | awk -F'/' '{print $3}')
+    echo "Workspace Bucket: $workspace_bucket" >> metadata.txt
+
+    # grab submission ID
+    submission_id=$(echo "$file" | awk -F'/' '{print $5}')
+    echo "Submission ID: $submission_id" >> metadata.txt
+
+    # grab workflow ID
+    workflow_id=$(echo "$file" | awk -F'/' '{print $7}')
+    echo "Workflow ID: $workflow_id" >> metadata.txt
+
+    echo "" >> metadata.txt
+  >>>
+
+  output {
+    File metadata_file = "metadata.txt"
+  }
+
+  runtime {
+    docker: "ubuntu:20.04"
+    memory: "5 GiB"
+    disks: "local-disk 100 HDD"
+    cpu: "1"
+  }
+}
+

From c5637742e61dff12e837199cf64cb2f50d89d342 Mon Sep 17 00:00:00 2001
From: Nikelle Petrillo <38223776+nikellepetrillo@users.noreply.github.com>
Date: Thu, 5 Dec 2024 07:26:09 -0500
Subject: [PATCH 07/20] Np ek fix uniform matrix starsolofastq (#1442)

* debugging
* remove ls "$SoloDirectory"/*.
* clean up commented out code * changelogs * Updated pipeline_versions.txt with all pipeline version information * Updated pipeline_versions.txt with all pipeline version information * Update tasks/skylab/StarAlign.wdl --------- Co-authored-by: GitHub Action --- pipeline_versions.txt | 10 ++--- .../skylab/multiome/Multiome.changelog.md | 5 +++ pipelines/skylab/multiome/Multiome.wdl | 2 +- pipelines/skylab/optimus/Optimus.changelog.md | 5 +++ pipelines/skylab/optimus/Optimus.wdl | 2 +- .../skylab/paired_tag/PairedTag.changelog.md | 5 +++ pipelines/skylab/paired_tag/PairedTag.wdl | 2 +- .../skylab/slideseq/SlideSeq.changelog.md | 5 +++ pipelines/skylab/slideseq/SlideSeq.wdl | 2 +- ...iSampleSmartSeq2SingleNucleus.changelog.md | 5 +++ .../MultiSampleSmartSeq2SingleNucleus.wdl | 2 +- tasks/skylab/StarAlign.wdl | 44 ++++++++++--------- 12 files changed, 59 insertions(+), 30 deletions(-) diff --git a/pipeline_versions.txt b/pipeline_versions.txt index c05653874a..5ecffd5530 100644 --- a/pipeline_versions.txt +++ b/pipeline_versions.txt @@ -30,11 +30,11 @@ ExomeReprocessing 3.3.3 2024-11-04 BuildIndices 3.1.0 2024-11-26 scATAC 1.3.2 2023-08-03 snm3C 4.0.4 2024-08-06 -Multiome 5.9.2 2024-11-22 -PairedTag 1.8.3 2024-11-22 +Multiome 5.9.3 2024-12-3 +PairedTag 1.8.4 2024-12-3 MultiSampleSmartSeq2 2.2.22 2024-09-11 -MultiSampleSmartSeq2SingleNucleus 2.0.5 2024-11-15 -Optimus 7.8.3 2024-11-22 +MultiSampleSmartSeq2SingleNucleus 2.0.6 2024-11-15 +Optimus 7.8.4 2024-12-3 atac 2.5.3 2024-11-22 SmartSeq2SingleSample 5.1.21 2024-09-11 -SlideSeq 3.4.6 2024-11-15 +SlideSeq 3.4.7 2024-12-3 diff --git a/pipelines/skylab/multiome/Multiome.changelog.md b/pipelines/skylab/multiome/Multiome.changelog.md index b1577ed4be..e444ccd6fd 100644 --- a/pipelines/skylab/multiome/Multiome.changelog.md +++ b/pipelines/skylab/multiome/Multiome.changelog.md @@ -1,3 +1,8 @@ +# 5.9.3 +2024-12-3 (Date of Last Commit) + +* Fixed a bug in the StarSoloFastq task that caused the pipeline to not output a UniqueAndMult-Uniform.mtx when --soloMultiMappers Uniform was passed to STAR + # 5.9.2 2024-11-22 (Date of Last Commit) diff --git a/pipelines/skylab/multiome/Multiome.wdl b/pipelines/skylab/multiome/Multiome.wdl index 438c49d264..93b88997a2 100644 --- a/pipelines/skylab/multiome/Multiome.wdl +++ b/pipelines/skylab/multiome/Multiome.wdl @@ -9,7 +9,7 @@ import "../../../tasks/broad/Utilities.wdl" as utils workflow Multiome { - String pipeline_version = "5.9.2" + String pipeline_version = "5.9.3" input { String cloud_provider diff --git a/pipelines/skylab/optimus/Optimus.changelog.md b/pipelines/skylab/optimus/Optimus.changelog.md index 9e7e385c8a..5fa565ef4a 100644 --- a/pipelines/skylab/optimus/Optimus.changelog.md +++ b/pipelines/skylab/optimus/Optimus.changelog.md @@ -1,3 +1,8 @@ +# 7.8.4 +2024-12-3 (Date of Last Commit) + +* Fixed a bug in the StarSoloFastq task that caused the pipeline to not output a UniqueAndMult-Uniform.mtx when --soloMultiMappers Uniform was passed to STAR + # 7.8.3 2024-11-22 (Date of Last Commit) diff --git a/pipelines/skylab/optimus/Optimus.wdl b/pipelines/skylab/optimus/Optimus.wdl index fc027f0ee0..55be72f690 100644 --- a/pipelines/skylab/optimus/Optimus.wdl +++ b/pipelines/skylab/optimus/Optimus.wdl @@ -71,7 +71,7 @@ workflow Optimus { # version of this pipeline - String pipeline_version = "7.8.3" + String pipeline_version = "7.8.4" # this is used to scatter matched [r1_fastq, r2_fastq, i1_fastq] arrays diff --git a/pipelines/skylab/paired_tag/PairedTag.changelog.md 
b/pipelines/skylab/paired_tag/PairedTag.changelog.md index 3ada0c9188..9d37ccc547 100644 --- a/pipelines/skylab/paired_tag/PairedTag.changelog.md +++ b/pipelines/skylab/paired_tag/PairedTag.changelog.md @@ -1,3 +1,8 @@ +# 1.8.4 +2024-12-3 (Date of Last Commit) + +* Fixed a bug in the StarSoloFastq task that caused the pipeline to not output a UniqueAndMult-Uniform.mtx when --soloMultiMappers Uniform was passed to STAR + # 1.8.3 2024-11-22 (Date of Last Commit) diff --git a/pipelines/skylab/paired_tag/PairedTag.wdl b/pipelines/skylab/paired_tag/PairedTag.wdl index 233dafc685..a15b67a21f 100644 --- a/pipelines/skylab/paired_tag/PairedTag.wdl +++ b/pipelines/skylab/paired_tag/PairedTag.wdl @@ -8,7 +8,7 @@ import "../../../tasks/broad/Utilities.wdl" as utils workflow PairedTag { - String pipeline_version = "1.8.3" + String pipeline_version = "1.8.4" input { diff --git a/pipelines/skylab/slideseq/SlideSeq.changelog.md b/pipelines/skylab/slideseq/SlideSeq.changelog.md index a40017660d..1cb8bf8aa6 100644 --- a/pipelines/skylab/slideseq/SlideSeq.changelog.md +++ b/pipelines/skylab/slideseq/SlideSeq.changelog.md @@ -1,3 +1,8 @@ +# 3.4.7 +2024-12-3 (Date of Last Commit) + +* Fixed a bug in the StarSoloFastq task that caused the pipeline to not output a UniqueAndMult-Uniform.mtx when --soloMultiMappers Uniform was passed to STAR; this does not affect the outputs of the pipeline + # 3.4.6 2024-11-15 (Date of Last Commit) diff --git a/pipelines/skylab/slideseq/SlideSeq.wdl b/pipelines/skylab/slideseq/SlideSeq.wdl index e258436f8a..e43f07979c 100644 --- a/pipelines/skylab/slideseq/SlideSeq.wdl +++ b/pipelines/skylab/slideseq/SlideSeq.wdl @@ -25,7 +25,7 @@ import "../../../tasks/broad/Utilities.wdl" as utils workflow SlideSeq { - String pipeline_version = "3.4.6" + String pipeline_version = "3.4.7" input { Array[File] r1_fastq diff --git a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md index 85dc657364..d8f943e35c 100644 --- a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md +++ b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.changelog.md @@ -1,3 +1,8 @@ +# 2.0.6 +2024-11-15 (Date of Last Commit) + +* Fixed a bug in the StarSoloFastq task that caused the pipeline to not output a UniqueAndMult-Uniform.mtx when --soloMultiMappers Uniform was passed to STAR; this does not affect the outputs of the pipeline + # 2.0.5 2024-11-15 (Date of Last Commit) diff --git a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl index 871090e2be..5616b2546c 100644 --- a/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl +++ b/pipelines/skylab/smartseq2_single_nucleus_multisample/MultiSampleSmartSeq2SingleNucleus.wdl @@ -57,7 +57,7 @@ workflow MultiSampleSmartSeq2SingleNucleus { } # Version of this pipeline - String pipeline_version = "2.0.5" + String pipeline_version = "2.0.6" if (false) { String? 
none = "None" diff --git a/tasks/skylab/StarAlign.wdl b/tasks/skylab/StarAlign.wdl index 1d0e5936ff..122812d2ff 100644 --- a/tasks/skylab/StarAlign.wdl +++ b/tasks/skylab/StarAlign.wdl @@ -349,11 +349,12 @@ task STARsoloFastq { then SoloDirectory="Solo.out/Gene/raw" echo "SoloDirectory is $SoloDirectory" - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} echo mv {} /cromwell_root/ - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} mv {} /cromwell_root/ - echo "list matrix files in $SoloDirectory" - ls "$SoloDirectory"/*.mtx - mv $SoloDirectory/matrix.mtx matrix.mtx + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} echo mv {} /cromwell_root/ + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} mv {} /cromwell_root/ + + echo "Listing the files in the current directory:" + ls -l + mv "Solo.out/Gene/raw/barcodes.tsv" barcodes.tsv mv "Solo.out/Gene/raw/features.tsv" features.tsv mv "Solo.out/Gene/CellReads.stats" CellReads.stats @@ -366,11 +367,12 @@ task STARsoloFastq { then SoloDirectory="Solo.out/GeneFull_Ex50pAS/raw" echo "SoloDirectory is $SoloDirectory" - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} echo mv {} /cromwell_root/ - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} mv {} /cromwell_root/ - echo "list matrix files in $SoloDirectory" - ls "$SoloDirectory"/*.mtx - mv $SoloDirectory/matrix.mtx matrix.mtx + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} echo mv {} /cromwell_root/ + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} mv {} /cromwell_root/ + + echo "Listing the files in the current directory" + ls -l + mv "Solo.out/GeneFull_Ex50pAS/raw/barcodes.tsv" barcodes.tsv mv "Solo.out/GeneFull_Ex50pAS/raw/features.tsv" features.tsv mv "Solo.out/GeneFull_Ex50pAS/CellReads.stats" CellReads.stats @@ -380,18 +382,20 @@ task STARsoloFastq { else SoloDirectory="Solo.out/GeneFull_Ex50pAS/raw" echo "SoloDirectory is $SoloDirectory" - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} echo mv {} /cromwell_root/ - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} mv {} /cromwell_root/ - echo "list matrix files in $SoloDirectory" - ls "$SoloDirectory"/*.mtx - mv $SoloDirectory/matrix.mtx matrix.mtx + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} echo mv {} /cromwell_root/ + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} mv {} /cromwell_root/ + + echo "Listing the files in the current directory" + ls -l + SoloDirectory="Solo.out/Gene/raw" echo "SoloDirectory is $SoloDirectory" - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} sh -c 'new_name="$(basename {} .mtx)_sn_rna.mtx"; echo mv {} "/cromwell_root/$new_name"' - #find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} sh -c 'new_name="$(basename {} .mtx)_sn_rna.mtx"; mv {} "/cromwell_root/$new_name"' - echo "list matrix files in $SoloDirectory" - ls "$SoloDirectory"/*.mtx - mv $SoloDirectory/matrix.mtx matrix_sn_rna.mtx + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} sh -c 'new_name="$(basename {} .mtx)_sn_rna.mtx"; echo mv {} "/cromwell_root/$new_name"' + find "$SoloDirectory" -maxdepth 1 -type f -name "*.mtx" -print0 | xargs -0 -I{} sh -c 'new_name="$(basename {} 
.mtx)_sn_rna.mtx"; mv {} "/cromwell_root/$new_name"' + + echo "Listing the files in the current directory" + ls -l + mv "Solo.out/GeneFull_Ex50pAS/raw/barcodes.tsv" barcodes.tsv mv "Solo.out/GeneFull_Ex50pAS/raw/features.tsv" features.tsv mv "Solo.out/GeneFull_Ex50pAS/CellReads.stats" CellReads.stats From 145b6bf820ea4056e8cacb43150a31e4d089c75c Mon Sep 17 00:00:00 2001 From: Robert Sidney Cox Date: Thu, 5 Dec 2024 10:09:24 -0500 Subject: [PATCH 08/20] Pd 2804 rc (#1441) * update optimus methods * add atac methods * add snm3c methods * update snm3c methods * atac snm3c and docsite * rewrite snm3c * Update website/docs/Pipelines/ATAC/atac.methods.md Co-authored-by: Elizabeth Kiernan <55763654+ekiernan@users.noreply.github.com> * Update website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md Co-authored-by: Elizabeth Kiernan <55763654+ekiernan@users.noreply.github.com> * Update website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md Co-authored-by: Elizabeth Kiernan <55763654+ekiernan@users.noreply.github.com> * Update website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md Co-authored-by: Elizabeth Kiernan <55763654+ekiernan@users.noreply.github.com> * Update website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md Co-authored-by: Elizabeth Kiernan <55763654+ekiernan@users.noreply.github.com> --------- Co-authored-by: Elizabeth Kiernan <55763654+ekiernan@users.noreply.github.com> --- website/docs/Pipelines/ATAC/atac.methods.md | 7 +++++++ .../Optimus_Pipeline/optimus.methods.md | 19 +++++++---------- website/docs/Pipelines/snM3C/snm3c.methods.md | 7 +++++++ .../docsite_maintenance.md | 21 +++++++++---------- 4 files changed, 32 insertions(+), 22 deletions(-) create mode 100644 website/docs/Pipelines/ATAC/atac.methods.md create mode 100644 website/docs/Pipelines/snM3C/snm3c.methods.md diff --git a/website/docs/Pipelines/ATAC/atac.methods.md b/website/docs/Pipelines/ATAC/atac.methods.md new file mode 100644 index 0000000000..30ac72fe30 --- /dev/null +++ b/website/docs/Pipelines/ATAC/atac.methods.md @@ -0,0 +1,7 @@ +# ATAC v2.3.1 Methods + +# Methods + +Data preprocessing and analysis for 10x chromatin accessibility was performed using the ATAC workflow v2.3.1 (RRID:SCR_025042). Briefly, FASTQ files were processed with a custom tool fastqprocess which corrects cell barcodes against a reference whitelist and splits reads by barcode to enable processing parallelization. Adaptor sequences were then removed from reads using Cutadapt v4.4. Reads were then aligned to the reference genome using BWA-MEM2 v2.2.1 with default parameters, which outputs corrected barcodes to a BAM in the CB:Z tag. The resulting BAM was then processed with SnapATAC2 v2.7.0 to produce a fragment file, index, and h5ad containing fragments as well as per-barcode quality metrics. + +An overview of the pipeline is available in [WARP Documentation](https://broadinstitute.github.io/warp/docs/Pipelines/ATAC/README) and examples of genomic references, whitelists, and other inputs are available in the [WARP repository](https://github.com/broadinstitute/warp/tree/master/pipelines/skylab/multiome/test_inputs). 
\ No newline at end of file diff --git a/website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md b/website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md index c2962131b6..f99af1f52a 100644 --- a/website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md +++ b/website/docs/Pipelines/Optimus_Pipeline/optimus.methods.md @@ -2,31 +2,28 @@ sidebar_position: 2 --- -# Optimus v5.3.0 Methods +# Optimus v7.8.1 Methods Below we provide an example methods section for a publication, separated into single-cell or single-nucleus use cases. For the complete pipeline documentation, see the [Optimus Overview](./README.md). # Methods ## Single-cell (sc_rna mode) -Data preprocessing and count matrix construction were performed using the Optimus v5.3.0 pipeline (RRID:SCR_018908). Briefly, FASTQ files were partitioned by barcodes using sctools v0.3.13. The files were then trimmed, aligned, UMI-corrected against the 10x Genomics barcodes whitelist, and converted to a raw count matrix using STAR v2.7.9a. CB correction was performed using the `--soloCBmatchWLtype 1MM_multi_Nbase_pseudocounts` parameter which allowed for multiple matches in the whitelist with 1 mismatched base, used posterior probability calculation to choose one of the matches, added pseudocounts of 1 to all whitelist barcodes, and allowed multi-matching of CBs with N-bases to the whitelist. +Data preprocessing and count matrix construction were performed using the Optimus v7.8.1 pipeline (RRID:SCR_018908). Briefly, FASTQ files were partitioned by barcodes using fastqprocess. The files were then trimmed, aligned, UMI-corrected against the 10x Genomics barcodes whitelist, and converted to a raw count matrix using STARsolo v2.7.11a. CB correction was performed using the `--soloCBmatchWLtype 1MM_multi` parameter. -Reads were trimmed using the solo parameter `--clipAdapterType CellRanger4` and `--outFilterScoreMin 30` which matches read trimming performed by CellRanger4. Reads were then aligned to GENCODE mouse (M21) or human (V27) references in unstranded mode. Genes were annotated using the STAR "Gene" COUNTING_MODE and UMIs were corrected with the `--soloUMIdedup 1MM_Directional_UMItoolsdirectional` parameter, which uses a directional correction method. The resulting BAM was then used for cell and gene metric correction using the sctools v0.3.13 TagSortBam tool. The STAR TSV outputs for gene counts, features, and barcodes were converted to numpy arrays for downstream empty droplet detection using DropletUtils v1.2.1 emptyDrops with the parameters -```--fdr-cutoff 0.01 --emptydrops-niters 10000 --min-molecules 100 --emptydrops-lower 100```. +Reads were trimmed using the solo parameter `--clipAdapterType CellRanger4` and `--outFilterScoreMin 30` which matches read trimming performed by CellRanger4. Reads were then aligned to GENCODE mouse (M32) or human (V43) references in stranded mode. Genes were annotated using the STARsolo "Gene" COUNTING_MODE and UMIs were corrected with the `--soloUMIdedup 1MM_CR` parameter, which uses Cell Ranger's correction method. The resulting BAM was then used for cell and gene metric correction using the warp-tools TagSort tool. 
The STAR TSV outputs for gene counts, features, and barcodes were converted to numpy arrays for downstream empty droplet detection using DropletUtils v1.2.1 emptyDrops with the parameters `--fdr-cutoff 0.01 --emptydrops-niters 10000 --min-molecules 100 --emptydrops-lower 100`.

-All cell and gene metrics (alignment, mitochondrial, and other QC metrics), count matrices, and emptyDrops results were aggregated into a final Loom-formatted cell-by-gene matrix. The final outputs included the unfiltered Loom and unfiltered (but tagged) BAM file.
+All cell and gene metrics (alignment, mitochondrial, and other QC metrics), count matrices, and emptyDrops results were aggregated into a final h5ad-formatted cell-by-gene matrix. The final outputs included the unfiltered h5ad and unfiltered (but tagged) BAM file.

-An example of the pipeline and outputs is available on the Terra HCA Optimus Pipeline Featured Workspace (https://app.terra.bio/#workspaces/featured-workspaces-hca/HCA_Optimus_Pipeline), and an additional pipeline overview is available in [WARP documentation](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/README). Examples of genomic references, whitelists, and other inputs are available in the WARP repository (see the [example inputs](https://github.com/broadinstitute/warp/tree/master/pipelines/skylab/optimus/example_inputs)).
+An example of the pipeline and outputs is available on the [Terra HCA Optimus Pipeline Featured Workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/HCA_Optimus_Pipeline), and an additional pipeline overview is available in [WARP documentation](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/README). Examples of genomic references, whitelists, and other inputs are available in the WARP repository (see the [example inputs](https://github.com/broadinstitute/warp/tree/master/pipelines/skylab/optimus/example_inputs)).

 ## Single-nucleus (sn_rna mode)

-Data preprocessing and count matrix construction were performed using the Optimus v5.3.0 pipeline (RRID:SCR_018908). Briefly, FASTQ files were partitioned by barcodes using sctools v0.3.13. The files were then trimmed, aligned, UMI-corrected against the 10x Genomics barcodes whitelist, and converted to a raw count matrix using STAR v2.7.9a. CB correction was performed using the `--soloCBmatchWLtype 1MM_multi_Nbase_pseudocounts` parameter which allowed for multiple matches in the whitelist with 1 mismatched base, used posterior probability calculation to choose one of the matches, added pseudocounts of 1 to all whitelist barcodes, and allowed multi-matching of CBs with N-bases to the whitelist.
+Data preprocessing and count matrix construction were performed using the Optimus v7.8.1 pipeline (RRID:SCR_018908). Briefly, FASTQ files were partitioned by barcodes using fastqprocess. The files were then trimmed, aligned, UMI-corrected against the 10x Genomics barcodes whitelist, and converted to a raw count matrix using STARsolo v2.7.11a. CB correction was performed using the `--soloCBmatchWLtype 1MM_multi` parameter.

-Reads were trimmed using the solo parameter `--clipAdapterType CellRanger4` and `--outFilterScoreMin 30` which matches read trimming performed by CellRanger4. Reads were then aligned to GENCODE mouse (M21) or human (V27) references in unstranded mode. Genes were annotated using the STAR "GeneFull" COUNTING_MODE and UMIs were corrected with the `--soloUMIdedup 1MM_Directional_UMItoolsdirectional` parameter, which uses a directional correction method. The resulting BAM was then used for cell and gene metric correction using the sctools v0.3.13 TagSortBam tool. The STAR TSV outputs for gene counts, features, and barcodes were converted to numpy arrays for downstream Loom conversion.
+Reads were trimmed using the solo parameter `--clipAdapterType CellRanger4` and `--outFilterScoreMin 30` which matches read trimming performed by CellRanger4. Reads were then aligned to GENCODE mouse (M32) or human (V43) references in stranded mode. Genes were annotated using the STAR "GeneFull_Ex50pAS" COUNTING_MODE and UMIs were corrected with the `--soloUMIdedup 1MM_CR` parameter, which uses Cell Ranger's correction method. The resulting BAM was then used for cell and gene metric correction using the warp-tools TagSort tool. The STAR TSV outputs for gene counts, features, and barcodes were converted to numpy arrays for downstream h5ad conversion. All cell and gene metrics (alignment, mitochondrial, and other QC metrics) and count matrices were aggregated into a final h5ad-formatted cell-by-gene matrix. The final outputs included the unfiltered h5ad and unfiltered (but tagged) BAM file.

-All cell and gene metrics (alignment, mitochondrial, and other QC metrics) and count matrices were aggregated into a final Loom-formatted cell-by-gene matrix. The final outputs included the unfiltered Loom and unfiltered (but tagged) BAM file.
-
-An example of the pipeline and outputs is available on the [Terra HCA Optimus Pipeline Featured Workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/HCA_Optimus_Pipeline), and an additional pipeline overview is available in [WARP documentation](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/README). Examples of genomic references, whitelists, and other inputs are available in the WARP repository (see the [example inputs](https://github.com/broadinstitute/warp/tree/master/pipelines/skylab/optimus/example_inputs).
+An example of the pipeline and outputs is available on the [Terra HCA Optimus Pipeline Featured Workspace](https://app.terra.bio/#workspaces/featured-workspaces-hca/HCA_Optimus_Pipeline), and an additional pipeline overview is available in [WARP documentation](https://broadinstitute.github.io/warp/docs/Pipelines/Optimus_Pipeline/README). Examples of genomic references, whitelists, and other inputs are available in the WARP repository (see the [example inputs](https://github.com/broadinstitute/warp/tree/master/pipelines/skylab/optimus/example_inputs)).

diff --git a/website/docs/Pipelines/snM3C/snm3c.methods.md b/website/docs/Pipelines/snM3C/snm3c.methods.md
new file mode 100644
index 0000000000..41d4d00272
--- /dev/null
+++ b/website/docs/Pipelines/snM3C/snm3c.methods.md
@@ -0,0 +1,7 @@
+# snM3C v4.0.1 Methods
+
+# Methods
+
+Methylome and chromatin contact sequencing data was preprocessed for downstream analysis using the snm3C v4.0.1 pipeline (RRID:SCR_025041). Briefly, [Cutadapt software](https://cutadapt.readthedocs.io/en/stable/) was used to demultiplex paired-end sequencing reads from a single 384-well plate to cell-level FASTQ files based on a list of random primer indices, and then further used to sort, filter, and trim reads. Paired-end reads were then aligned to the human hg38 v43 reference genome using HISAT-3N. Custom python scripts from the [CEMBA GitHub repository](https://github.com/DingWB/cemba_data) were then called to separate unmapped reads, unique reads, and multi-mapped reads. The unmapped reads were saved to a FASTQ file and used for single-end alignment with HISAT-3N.
Overlapping reads were removed and all resulting aligned reads were merged into a single BAM. All mapped reads were deduplicated using samtools and Picard. The resulting BAM was used as input to a custom CEMBA python script for chromatin contact calling based on a 2,500 base pair threshold and as input to the [ALLCools software](https://lhqing.github.io/ALLCools/intro.html) for methylation site calling. Key summary statistics for read trimming, mapping, deduplication, and chromatin contacts were then calculated and exported to a summary metrics file.
+
+Further details regarding tools, parameters, and references used in the pipeline are available in the [YAP documentation](https://hq-1.gitbook.io/mc).

diff --git a/website/docs/contribution/contribute_to_warp_docs/docsite_maintenance.md b/website/docs/contribution/contribute_to_warp_docs/docsite_maintenance.md
index 8a3d3d7204..3975684efd 100644
--- a/website/docs/contribution/contribute_to_warp_docs/docsite_maintenance.md
+++ b/website/docs/contribution/contribute_to_warp_docs/docsite_maintenance.md
@@ -6,6 +6,8 @@ sidebar_position: 1

 This documentation site is built on top of the `React`-based framework [Docusaurus](https://docusaurus.io), so most of the recommended configuration can be found in the framework's documentation.

+## Versions and dependabot
+
 Due to the nature of the frontend framework, most of the plugins this docsite is using can be deprecated or updated over time. GitHub has the nice `dependabot` feature which periodically scans this code base and finds potential vulnerabilities. There are usually 2 types of updates:

 - Major version bump
@@ -15,17 +17,14 @@ The former one requires more human engagement and you should follow the official
 The latter one in most cases will be identified by `dependabot` and only requires a human review + approval, before commenting `@dependabot merge` on the PR. In rare cases, the PR opened by `dependabot` will have merge conflicts against the `develop` branch. Developers will need to `cherry-pick` the commit made by `dependabot`, resolve the conflicts, and open a separate PR for it (a sketch of this flow appears after this patch).

-When you want to test your changes to the docs, you can deploy the site locally.
-If doing this for the first time, install the dev version of the site using the root of the repo:
-```
-yarn --cwd=website install
-````
-Then deploy the dev version using:
-
-```
-yarn --cwd=website start
-```
+## Deploying a local site for testing and editing

-to spin up a local server to preview your changes.
+When you want to test your changes to the docs, you can deploy the site locally.
+* first you need yarn, which is installed with npm: [mac install](https://classic.yarnpkg.com/lang/en/docs/install/#mac-stable)
+* then you need to build the local website in the root of the docs directory `warp/website` on your desired branch
+  * `yarn --cwd=website install` # install the site
+  * `yarn --cwd=website start` # spin up a local server to preview your changes
+  * the local website should pop up automatically in your default browser
+  * the website should update automatically as you edit the markdown files

 For more details on deployment, check this [page](https://docusaurus.io/docs/deployment).
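For the dependabot merge-conflict case referenced in the patch above, the cherry-pick flow can be sketched as follows. This is an illustrative sketch, not part of the patch itself: the branch name and the `<dependabot-commit-sha>` placeholder are hypothetical, and the conflicting file is assumed to be `website/yarn.lock`, as in a typical dependency bump.

```
# Start from an up-to-date develop branch
git checkout develop && git pull

# Work on a separate branch for the conflicting update
git checkout -b fix/dependabot-conflict

# Cherry-pick the commit dependabot made on its own branch
git cherry-pick <dependabot-commit-sha>

# Resolve the conflicts (assumed here to be in the lockfile), then continue
git add website/yarn.lock
git cherry-pick --continue

# Push the branch and open a separate PR against develop
git push -u origin fix/dependabot-conflict
```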
From 2fffa8750e4c42832aaafe65df63d06a44c31f96 Mon Sep 17 00:00:00 2001 From: Nikelle Petrillo <38223776+nikellepetrillo@users.noreply.github.com> Date: Tue, 10 Dec 2024 21:14:12 -0500 Subject: [PATCH 09/20] add cellbender to optimus (#1446) * add cellbender to optimus * changelogs * Updated pipeline_versions.txt with all pipeline version information * changelogs * try cellbender is true * try cellbender is true * extra slash --------- Co-authored-by: GitHub Action --- pipeline_versions.txt | 6 +- .../skylab/multiome/Multiome.changelog.md | 5 ++ pipelines/skylab/multiome/Multiome.wdl | 57 ++++--------------- pipelines/skylab/optimus/Optimus.changelog.md | 5 ++ pipelines/skylab/optimus/Optimus.wdl | 53 ++++++++++++++++- .../skylab/paired_tag/PairedTag.changelog.md | 5 ++ pipelines/skylab/paired_tag/PairedTag.wdl | 16 +++++- .../Plumbing/10k_pbmc_downsampled.json | 3 +- .../Plumbing/BC011_BC015_downsampled.json | 3 +- .../Plumbing/BI015_downsampled.json | 3 +- .../test_inputs/Scientific/10k_pbmc.json | 3 +- .../test_inputs/Scientific/BC011_10kPBMC.json | 3 +- 12 files changed, 103 insertions(+), 59 deletions(-) diff --git a/pipeline_versions.txt b/pipeline_versions.txt index 5ecffd5530..19b6185405 100644 --- a/pipeline_versions.txt +++ b/pipeline_versions.txt @@ -30,11 +30,11 @@ ExomeReprocessing 3.3.3 2024-11-04 BuildIndices 3.1.0 2024-11-26 scATAC 1.3.2 2023-08-03 snm3C 4.0.4 2024-08-06 -Multiome 5.9.3 2024-12-3 -PairedTag 1.8.4 2024-12-3 +Multiome 5.9.4 2024-12-05 +PairedTag 1.9.0 2024-12-05 MultiSampleSmartSeq2 2.2.22 2024-09-11 MultiSampleSmartSeq2SingleNucleus 2.0.6 2024-11-15 -Optimus 7.8.4 2024-12-3 +Optimus 7.9.0 2024-12-05 atac 2.5.3 2024-11-22 SmartSeq2SingleSample 5.1.21 2024-09-11 SlideSeq 3.4.7 2024-12-3 diff --git a/pipelines/skylab/multiome/Multiome.changelog.md b/pipelines/skylab/multiome/Multiome.changelog.md index e444ccd6fd..7edb86afb6 100644 --- a/pipelines/skylab/multiome/Multiome.changelog.md +++ b/pipelines/skylab/multiome/Multiome.changelog.md @@ -1,3 +1,8 @@ +# 5.9.4 +2024-12-05 (Date of Last Commit) + +* Moved the optional CellBender task to the Optimus.wdl + # 5.9.3 2024-12-3 (Date of Last Commit) diff --git a/pipelines/skylab/multiome/Multiome.wdl b/pipelines/skylab/multiome/Multiome.wdl index 93b88997a2..ac615983cc 100644 --- a/pipelines/skylab/multiome/Multiome.wdl +++ b/pipelines/skylab/multiome/Multiome.wdl @@ -3,13 +3,11 @@ version 1.0 import "../../../pipelines/skylab/atac/atac.wdl" as atac import "../../../pipelines/skylab/optimus/Optimus.wdl" as optimus import "../../../tasks/skylab/H5adUtils.wdl" as H5adUtils -import "https://raw.githubusercontent.com/aawdeh/CellBender/aa-cbwithoutcuda/wdl/cellbender_remove_background_azure.wdl" as CellBender_no_cuda -import "https://raw.githubusercontent.com/broadinstitute/CellBender/v0.3.0/wdl/cellbender_remove_background.wdl" as CellBender import "../../../tasks/broad/Utilities.wdl" as utils workflow Multiome { - String pipeline_version = "5.9.3" + String pipeline_version = "5.9.4" input { String cloud_provider @@ -103,7 +101,8 @@ workflow Multiome { count_exons = count_exons, soloMultiMappers = soloMultiMappers, cloud_provider = cloud_provider, - gex_expected_cells = expected_cells + gex_expected_cells = expected_cells, + run_cellbender = run_cellbender } # Call the ATAC workflow @@ -134,39 +133,6 @@ workflow Multiome { atac_fragment = Atac.fragment_file } - # Call CellBender - if (run_cellbender) { - if (cloud_provider == "gcp") { - call CellBender.run_cellbender_remove_background_gpu as CellBender { - input: - 
sample_name = input_id, - input_file_unfiltered = Optimus.h5ad_output_file, - hardware_boot_disk_size_GB = 20, - hardware_cpu_count = 4, - hardware_disk_size_GB = 50, - hardware_gpu_type = "nvidia-tesla-t4", - hardware_memory_GB = 32, - hardware_preemptible_tries = 2, - hardware_zones = "us-central1-a us-central1-c", - nvidia_driver_version = "470.82.01" - } - } - if (cloud_provider == "azure") { - call CellBender_no_cuda.run_cellbender_remove_background_gpu as CellBender_no_cuda { - input: - sample_name = input_id, - input_file_unfiltered = Optimus.h5ad_output_file, - hardware_boot_disk_size_GB = 20, - hardware_cpu_count = 4, - hardware_disk_size_GB = 50, - hardware_gpu_type = "nvidia-tesla-t4", - hardware_memory_GB = 32, - hardware_preemptible_tries = 2, - hardware_zones = "us-central1-a us-central1-c", - nvidia_driver_version = "470.82.01" - } - } - } meta { allowNestedInputs: true @@ -201,15 +167,14 @@ workflow Multiome { File? gex_aligner_metrics = Optimus.aligner_metrics File? library_metrics = Optimus.library_metrics File? mtx_files = Optimus.mtx_files + File? cell_barcodes_csv = Optimus.cell_barcodes_csv + File? checkpoint_file = Optimus.checkpoint_file + Array[File]? h5_array = Optimus.h5_array + Array[File]? html_report_array = Optimus.html_report_array + File? log = Optimus.log + Array[File]? metrics_csv_array = Optimus.metrics_csv_array + String? output_directory = Optimus.output_directory + File? summary_pdf = Optimus.summary_pdf - # cellbender outputs - File? cell_barcodes_csv = CellBender.cell_csv - File? checkpoint_file = CellBender.ckpt_file - Array[File]? h5_array = CellBender.h5_array - Array[File]? html_report_array = CellBender.report_array - File? log = CellBender.log - Array[File]? metrics_csv_array = CellBender.metrics_array - String? output_directory = CellBender.output_dir - File? summary_pdf = CellBender.pdf } } diff --git a/pipelines/skylab/optimus/Optimus.changelog.md b/pipelines/skylab/optimus/Optimus.changelog.md index 5fa565ef4a..adc98a1ee7 100644 --- a/pipelines/skylab/optimus/Optimus.changelog.md +++ b/pipelines/skylab/optimus/Optimus.changelog.md @@ -1,3 +1,8 @@ +# 7.9.0 +2024-12-05 (Date of Last Commit) + +* Added an optional task to the Optimus.wdl that will run CellBender on the Optimus output h5ad file + # 7.8.4 2024-12-3 (Date of Last Commit) diff --git a/pipelines/skylab/optimus/Optimus.wdl b/pipelines/skylab/optimus/Optimus.wdl index 55be72f690..37d28381e2 100644 --- a/pipelines/skylab/optimus/Optimus.wdl +++ b/pipelines/skylab/optimus/Optimus.wdl @@ -8,6 +8,8 @@ import "../../../tasks/skylab/CheckInputs.wdl" as OptimusInputChecks import "../../../tasks/skylab/MergeSortBam.wdl" as Merge import "../../../tasks/skylab/H5adUtils.wdl" as H5adUtils import "../../../tasks/broad/Utilities.wdl" as utils +import "https://raw.githubusercontent.com/aawdeh/CellBender/aa-cbwithoutcuda/wdl/cellbender_remove_background_azure.wdl" as CellBender_no_cuda +import "https://raw.githubusercontent.com/broadinstitute/CellBender/v0.3.0/wdl/cellbender_remove_background.wdl" as CellBender workflow Optimus { meta { @@ -38,6 +40,9 @@ workflow Optimus { String? 
soloMultiMappers = "Uniform" Int gex_expected_cells = 3000 + # CellBender + Boolean run_cellbender = false + # Chemistry options include: 2 or 3 Int tenx_chemistry_version # Whitelist is selected based on the input tenx_chemistry_version @@ -69,9 +74,7 @@ workflow Optimus { } # version of this pipeline - - - String pipeline_version = "7.8.4" + String pipeline_version = "7.9.0" # this is used to scatter matched [r1_fastq, r2_fastq, i1_fastq] arrays @@ -301,6 +304,40 @@ workflow Optimus { } } + # Call CellBender + if (run_cellbender) { + if (cloud_provider == "gcp") { + call CellBender.run_cellbender_remove_background_gpu as CellBender { + input: + sample_name = input_id, + input_file_unfiltered = final_h5ad_output, + hardware_boot_disk_size_GB = 20, + hardware_cpu_count = 4, + hardware_disk_size_GB = 50, + hardware_gpu_type = "nvidia-tesla-t4", + hardware_memory_GB = 32, + hardware_preemptible_tries = 2, + hardware_zones = "us-central1-a us-central1-c", + nvidia_driver_version = "470.82.01" + } + } + if (cloud_provider == "azure") { + call CellBender_no_cuda.run_cellbender_remove_background_gpu as CellBender_no_cuda { + input: + sample_name = input_id, + input_file_unfiltered = final_h5ad_output, + hardware_boot_disk_size_GB = 20, + hardware_cpu_count = 4, + hardware_disk_size_GB = 50, + hardware_gpu_type = "nvidia-tesla-t4", + hardware_memory_GB = 32, + hardware_preemptible_tries = 2, + hardware_zones = "us-central1-a us-central1-c", + nvidia_driver_version = "470.82.01" + } + } + } + File final_h5ad_output = select_first([OptimusH5adGenerationWithExons.h5ad_output, OptimusH5adGeneration.h5ad_output]) File final_library_metrics = select_first([OptimusH5adGenerationWithExons.library_metrics, OptimusH5adGeneration.library_metrics]) @@ -327,5 +364,15 @@ workflow Optimus { # h5ad File h5ad_output_file = final_h5ad_output + + # cellbender outputs + File? cell_barcodes_csv = CellBender.cell_csv + File? checkpoint_file = CellBender.ckpt_file + Array[File]? h5_array = CellBender.h5_array + Array[File]? html_report_array = CellBender.report_array + File? log = CellBender.log + Array[File]? metrics_csv_array = CellBender.metrics_array + String? output_directory = CellBender.output_dir + File? 
summary_pdf = CellBender.pdf } } diff --git a/pipelines/skylab/paired_tag/PairedTag.changelog.md b/pipelines/skylab/paired_tag/PairedTag.changelog.md index 9d37ccc547..dcc91d6fe6 100644 --- a/pipelines/skylab/paired_tag/PairedTag.changelog.md +++ b/pipelines/skylab/paired_tag/PairedTag.changelog.md @@ -1,3 +1,8 @@ +# 1.9.0 +2024-12-05 (Date of Last Commit) + +* Added an optional task to the Optimus.wdl that will run CellBender on the Optimus output h5ad file + # 1.8.4 2024-12-3 (Date of Last Commit) diff --git a/pipelines/skylab/paired_tag/PairedTag.wdl b/pipelines/skylab/paired_tag/PairedTag.wdl index a15b67a21f..29759736e9 100644 --- a/pipelines/skylab/paired_tag/PairedTag.wdl +++ b/pipelines/skylab/paired_tag/PairedTag.wdl @@ -8,7 +8,7 @@ import "../../../tasks/broad/Utilities.wdl" as utils workflow PairedTag { - String pipeline_version = "1.8.4" + String pipeline_version = "1.9.0" input { @@ -56,6 +56,9 @@ workflow PairedTag { # Expected to be either 'gcp' or 'azure' String cloud_provider + + # If true, run cellbender + Boolean run_cellbender = false } # All docker images that are needed for tasks in this workflow @@ -98,7 +101,8 @@ workflow PairedTag { count_exons = count_exons, cloud_provider = cloud_provider, soloMultiMappers = soloMultiMappers, - gex_nhash_id = gex_nhash_id + gex_nhash_id = gex_nhash_id, + run_cellbender = run_cellbender } # Call the ATAC workflow @@ -176,5 +180,13 @@ workflow PairedTag { Array[File?] multimappers_Uniform_matrix = Optimus.multimappers_Uniform_matrix Array[File?] multimappers_Rescue_matrix = Optimus.multimappers_Rescue_matrix Array[File?] multimappers_PropUnique_matrix = Optimus.multimappers_PropUnique_matrix + File? cell_barcodes_csv = Optimus.cell_barcodes_csv + File? checkpoint_file = Optimus.checkpoint_file + Array[File]? h5_array = Optimus.h5_array + Array[File]? html_report_array = Optimus.html_report_array + File? log = Optimus.log + Array[File]? metrics_csv_array = Optimus.metrics_csv_array + String? output_directory = Optimus.output_directory + File? 
summary_pdf = Optimus.summary_pdf } } diff --git a/pipelines/skylab/paired_tag/test_inputs/Plumbing/10k_pbmc_downsampled.json b/pipelines/skylab/paired_tag/test_inputs/Plumbing/10k_pbmc_downsampled.json index a1df3f587c..a9a58c7955 100644 --- a/pipelines/skylab/paired_tag/test_inputs/Plumbing/10k_pbmc_downsampled.json +++ b/pipelines/skylab/paired_tag/test_inputs/Plumbing/10k_pbmc_downsampled.json @@ -26,5 +26,6 @@ "PairedTag.soloMultiMappers":"Uniform", "PairedTag.cloud_provider": "gcp", "PairedTag.gex_nhash_id":"example_1234", - "PairedTag.atac_nhash_id":"example_1234" + "PairedTag.atac_nhash_id":"example_1234", + "PairedTag.run_cellbender":"false" } diff --git a/pipelines/skylab/paired_tag/test_inputs/Plumbing/BC011_BC015_downsampled.json b/pipelines/skylab/paired_tag/test_inputs/Plumbing/BC011_BC015_downsampled.json index fd2ffd1510..418063d6eb 100644 --- a/pipelines/skylab/paired_tag/test_inputs/Plumbing/BC011_BC015_downsampled.json +++ b/pipelines/skylab/paired_tag/test_inputs/Plumbing/BC011_BC015_downsampled.json @@ -26,5 +26,6 @@ "PairedTag.soloMultiMappers":"Uniform", "PairedTag.cloud_provider": "gcp", "PairedTag.gex_nhash_id":"example_1234", - "PairedTag.atac_nhash_id":"example_1234" + "PairedTag.atac_nhash_id":"example_1234", + "PairedTag.run_cellbender":"false" } diff --git a/pipelines/skylab/paired_tag/test_inputs/Plumbing/BI015_downsampled.json b/pipelines/skylab/paired_tag/test_inputs/Plumbing/BI015_downsampled.json index 1b185c8d47..f682f59a1c 100644 --- a/pipelines/skylab/paired_tag/test_inputs/Plumbing/BI015_downsampled.json +++ b/pipelines/skylab/paired_tag/test_inputs/Plumbing/BI015_downsampled.json @@ -26,5 +26,6 @@ "PairedTag.soloMultiMappers":"Uniform", "PairedTag.cloud_provider": "gcp", "PairedTag.gex_nhash_id":"example_1234", - "PairedTag.atac_nhash_id":"example_1234" + "PairedTag.atac_nhash_id":"example_1234", + "PairedTag.run_cellbender":"false" } diff --git a/pipelines/skylab/paired_tag/test_inputs/Scientific/10k_pbmc.json b/pipelines/skylab/paired_tag/test_inputs/Scientific/10k_pbmc.json index 47c8ab54bc..d4692bf352 100644 --- a/pipelines/skylab/paired_tag/test_inputs/Scientific/10k_pbmc.json +++ b/pipelines/skylab/paired_tag/test_inputs/Scientific/10k_pbmc.json @@ -35,5 +35,6 @@ "PairedTag.soloMultiMappers":"Uniform", "PairedTag.gex_nhash_id":"example_1234", "PairedTag.atac_nhash_id":"example_1234", - "PairedTag.cloud_provider": "gcp" + "PairedTag.cloud_provider": "gcp", + "PairedTag.run_cellbender":"false" } \ No newline at end of file diff --git a/pipelines/skylab/paired_tag/test_inputs/Scientific/BC011_10kPBMC.json b/pipelines/skylab/paired_tag/test_inputs/Scientific/BC011_10kPBMC.json index b4ffd4d14c..80aa4ee457 100644 --- a/pipelines/skylab/paired_tag/test_inputs/Scientific/BC011_10kPBMC.json +++ b/pipelines/skylab/paired_tag/test_inputs/Scientific/BC011_10kPBMC.json @@ -32,5 +32,6 @@ "PairedTag.soloMultiMappers":"Uniform", "PairedTag.gex_nhash_id":"example_1234", "PairedTag.atac_nhash_id":"example_1234", - "PairedTag.cloud_provider": "gcp" + "PairedTag.cloud_provider": "gcp", + "PairedTag.run_cellbender":"false" } \ No newline at end of file From d37b6da646eb40ed82e312b881094060fcd84083 Mon Sep 17 00:00:00 2001 From: npetrill Date: Fri, 3 Jan 2025 11:20:44 -0500 Subject: [PATCH 10/20] add http imports to TestMultiome.wdl and Multiome.wdl --- pipelines/skylab/multiome/Multiome.wdl | 2 ++ verification/test-wdls/TestMultiome.wdl | 3 +++ 2 files changed, 5 insertions(+) diff --git a/pipelines/skylab/multiome/Multiome.wdl 
b/pipelines/skylab/multiome/Multiome.wdl index ac615983cc..07dd9d9486 100644 --- a/pipelines/skylab/multiome/Multiome.wdl +++ b/pipelines/skylab/multiome/Multiome.wdl @@ -4,6 +4,8 @@ import "../../../pipelines/skylab/atac/atac.wdl" as atac import "../../../pipelines/skylab/optimus/Optimus.wdl" as optimus import "../../../tasks/skylab/H5adUtils.wdl" as H5adUtils import "../../../tasks/broad/Utilities.wdl" as utils +import "https://raw.githubusercontent.com/aawdeh/CellBender/aa-cbwithoutcuda/wdl/cellbender_remove_background_azure.wdl" as CellBender_no_cuda +import "https://raw.githubusercontent.com/broadinstitute/CellBender/v0.3.0/wdl/cellbender_remove_background.wdl" as CellBender workflow Multiome { diff --git a/verification/test-wdls/TestMultiome.wdl b/verification/test-wdls/TestMultiome.wdl index d2fcd0eb9a..f3b22a1a52 100644 --- a/verification/test-wdls/TestMultiome.wdl +++ b/verification/test-wdls/TestMultiome.wdl @@ -5,6 +5,9 @@ import "../../pipelines/skylab/multiome/Multiome.wdl" as Multiome import "../../verification/VerifyMultiome.wdl" as VerifyMultiome import "../../tasks/broad/Utilities.wdl" as Utilities import "../../tasks/broad/CopyFilesFromCloudToCloud.wdl" as Copy +import "https://raw.githubusercontent.com/aawdeh/CellBender/aa-cbwithoutcuda/wdl/cellbender_remove_background_azure.wdl" as CellBender_no_cuda +import "https://raw.githubusercontent.com/broadinstitute/CellBender/v0.3.0/wdl/cellbender_remove_background.wdl" as CellBender + workflow TestMultiome { From e08b2c4e99873a69886a10236900c0e5755fb726 Mon Sep 17 00:00:00 2001 From: npetrill Date: Fri, 3 Jan 2025 13:14:03 -0500 Subject: [PATCH 11/20] add TestReblockGVCF.wdl to the new testing framework --- .dockstore.yml | 4 + .github/workflows/test_reblockGVCF.yml | 400 +++++++++++++++++++++ verification/test-wdls/TestReblockGVCF.wdl | 12 +- 3 files changed, 407 insertions(+), 9 deletions(-) create mode 100644 .github/workflows/test_reblockGVCF.yml diff --git a/.dockstore.yml b/.dockstore.yml index 1eb7c3af09..fe57295cc3 100644 --- a/.dockstore.yml +++ b/.dockstore.yml @@ -139,6 +139,10 @@ workflows: subclass: WDL primaryDescriptorPath: /verification/test-wdls/TestMultiome.wdl + - name: TestReblockGVCF + subclass: WDL + primaryDescriptorPath: /verification/test-wdls/TestReblockGVCF.wdl + - name: Testsnm3C subclass: WDL primaryDescriptorPath: /verification/test-wdls/Testsnm3C.wdl diff --git a/.github/workflows/test_reblockGVCF.yml b/.github/workflows/test_reblockGVCF.yml new file mode 100644 index 0000000000..7d430b46bf --- /dev/null +++ b/.github/workflows/test_reblockGVCF.yml @@ -0,0 +1,400 @@ +name: Test ReblockGVCF + +# Controls when the workflow will run +on: + #run on push to feature branch "kp_GHA_Terra_auth_PD-2682" - REMOVE WHEN DONE TESTING + # push: + # branches: + # - kp_GHA_Terra_auth_PD-2682 + pull_request: + branches: [ "develop", "staging", "master" ] + # Only run if files in these paths changed: + #################################### + # SET PIPELINE SPECIFIC PATHS HERE # + #################################### + paths: + - 'pipelines/broad/dna_seq/germline/joint_genotyping/reblocking/**' + - 'tasks/broad/GermlineVariantDiscovery.wdl' + - 'tasks/broad/Qc.wdl' + - 'tasks/broad/Utilities.wdl' + - 'verification/VerifyGvcf.wdl' + - 'verification/VerifyTasks.wdl' + - 'verification/test-wdls/TestReblockGVCF.wdl' + - 'tasks/broad/TerraCopyFilesFromCloudToCloud.wdl' + - '.github/workflows/test_reblockGVCF.yml' + + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + inputs: + 
useCallCache: + description: 'Use call cache (default: true)' + required: false + default: "true" + updateTruth: + description: 'Update truth files (default: false)' + required: false + default: "false" + testType: + description: 'Specify the type of test (Plumbing or Scientific)' + required: true + truthBranch: + description: 'Specify the branch for truth files (default: master)' + required: false + default: "master" + + +env: + # pipeline configuration + PROJECT_NAME: WARP + PIPELINE_NAME: TestReblockGVCF + DOCKSTORE_PIPELINE_NAME: ReblockGVCF + PIPELINE_DIR: "pipelines/broad/dna_seq/germline/joint_genotyping/reblocking" + + # workspace configuration + TESTING_WORKSPACE: WARP Tests + WORKSPACE_NAMESPACE: warp-pipelines + + # github repo configuration + REPOSITORY_NAME: ${{ github.event.repository.name }} + + # service account configuration + SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} + USER: pdt-tester@warp-pipeline-dev.iam.gserviceaccount.com + + +jobs: + run_pipeline: + runs-on: ubuntu-latest + # Add "id-token" with the intended permissions. + permissions: + contents: 'read' + id-token: 'write' + + steps: + # actions/checkout MUST come before auth action + - uses: actions/checkout@v3 + with: + ref: ${{ github.ref }} + + - name: Set up python + id: setup-python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + cd scripts/firecloud_api/ + pip install -r requirements.txt + + - name: Set Branch Name + id: set_branch + run: | + if [ -z "${{ github.head_ref }}" ]; then + echo "Branch name is missing, using ${GITHUB_REF##*/}" + echo "BRANCH_NAME=${GITHUB_REF##*/}" >> $GITHUB_ENV + else + echo "Branch name from PR: ${{ github.head_ref }}" + echo "BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV + fi + + - name: Determine Github Commit Hash + id: determine_github_commit_hash + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo "Using github.sha for manually triggered workflow." + echo "GITHUB_COMMIT_HASH=${{ github.sha }}" >> $GITHUB_ENV + elif [ "${{ github.event_name }}" == "pull_request" ]; then + echo "Using github.event.pull_request.head.sha for PR-triggered workflow." + echo "GITHUB_COMMIT_HASH=${{ github.event.pull_request.head.sha }}" >> $GITHUB_ENV + else + echo "Unsupported event type: ${{ github.event_name }}" + exit 1 + fi + + - name: Fetch Dockstore Workflow Commit Hash + run: | + # Wait 5.5 minutes for Dockstore to update + sleep 330 + + DOCKSTORE_COMMIT_HASH_FROM_FETCH=$(python scripts/dockstore_api/fetch_dockstore_commit.py \ + $DOCKSTORE_TOKEN \ + $DOCKSTORE_PIPELINE_NAME \ + $BRANCH_NAME) + + # Export the commit hash as an environment variable + echo "DOCKSTORE_COMMIT_HASH=$DOCKSTORE_COMMIT_HASH_FROM_FETCH" >> $GITHUB_ENV + echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH_FROM_FETCH" + env: + ## TODO NEED TO ADD DOCKSTORE_TOKEN FOR SERVICE ACCOUNT ## + DOCKSTORE_TOKEN: ${{ secrets.DOCKSTORE_TOKEN }} + DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} + BRANCH_NAME: ${{ env.BRANCH_NAME }} + + - name: Compare Dockstore and Commit Hashes + id: compare_hashes + run: | + echo "Comparing hashes..." + echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH" + echo "GitHub Commit Hash: $GITHUB_COMMIT_HASH" + + if [ "$DOCKSTORE_COMMIT_HASH" != "$GITHUB_COMMIT_HASH" ]; then + echo "Error: The Dockstore Commit Hash does not match the GitHub Commit Hash!" 
+ echo "Mismatch found: $DOCKSTORE_COMMIT_HASH != $GITHUB_COMMIT_HASH" + exit 1 + else + echo "Success: The Dockstore Commit Hash matches the GitHub Commit Hash." + fi + env: + DOCKSTORE_COMMIT_HASH: ${{ env.DOCKSTORE_COMMIT_HASH }} + GITHUB_COMMIT_HASH: ${{ env.GITHUB_COMMIT_HASH }} + + - name: Set Test Type + id: set_test_type + run: | + if [ "${{ github.event_name }}" == "pull_request" ]; then + # For PRs, set based on target branch + if [ "${{ github.base_ref }}" == "master" ]; then + echo "testType=Scientific" >> $GITHUB_ENV + echo "testType=Scientific" + else + echo "testType=Plumbing" >> $GITHUB_ENV + echo "testType=Plumbing" + fi + else + # For workflow_dispatch, use provided test type + echo "testType=${{ github.event.inputs.testType }}" >> $GITHUB_ENV + echo "testType=${{ github.event.inputs.testType }}" + fi + + + - name: Create new method configuration + run: | + echo "Creating new method configuration for branch: $BRANCH_NAME" + + METHOD_CONFIG_NAME=$(python3 scripts/firecloud_api/firecloud_api.py \ + create_new_method_config \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --pipeline_name "$PIPELINE_NAME" \ + --branch_name "$BRANCH_NAME" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER") + + echo "METHOD_CONFIG_NAME=$METHOD_CONFIG_NAME" >> $GITHUB_ENV + env: + PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + USER: ${{ env.USER }} + + - name: Update test inputs, Upload to Terra, Submit, Monitor and Retrieve Outputs + run: | + UPDATE_TRUTH="${{ github.event.inputs.updateTruth || 'false' }}" + USE_CALL_CACHE="${{ github.event.inputs.useCallCache || 'true' }}" + TRUTH_BRANCH="${{ github.event.inputs.truthBranch || 'master' }}" + CURRENT_TIME=$(date +"%Y-%m-%d-%H-%M-%S") + MAX_RETRIES=2 + RETRY_DELAY=300 # 300 seconds = 5 minutes + # Initialize variables to aggregate statuses and outputs + ALL_WORKFLOW_STATUSES="Workflow ID | Status"$'\n'"--- | ---" + ALL_OUTPUTS="" + # Initialize arrays to track submission and workflow statuses + declare -a SUBMISSION_IDS + declare -A WORKFLOW_STATUSES + + + # Convert UPDATE_TRUTH and USE_CALL_CACHE to a boolean-friendly format ("true" -> true, "false" -> false) + if [ "$UPDATE_TRUTH" = "true" ]; then + UPDATE_TRUTH_BOOL=true + else + UPDATE_TRUTH_BOOL=false + fi + + if [ "$USE_CALL_CACHE" == "true" ]; then + USE_CALL_CACHE_BOOL=true + else + USE_CALL_CACHE_BOOL=false + fi + + TEST_TYPE="${{ env.testType }}" + INPUTS_DIR="$PIPELINE_DIR/test_inputs/$TEST_TYPE" + echo "Running tests with test type: $TEST_TYPE" + + TRUTH_PATH="gs://broad-gotc-test-storage/$DOCKSTORE_PIPELINE_NAME/truth/$(echo "$TEST_TYPE" | tr '[:upper:]' '[:lower:]')/$TRUTH_BRANCH" + echo "Truth path: $TRUTH_PATH" + RESULTS_PATH="gs://broad-gotc-test-storage/$DOCKSTORE_PIPELINE_NAME/results/$CURRENT_TIME" + + # Create the submission_data.json file which will be the same for all inputs + SUBMISSION_DATA_FILE="submission_data.json" + + # Use a heredoc to generate the JSON file content dynamically + cat < "$SUBMISSION_DATA_FILE" + { + "methodConfigurationNamespace": "$WORKSPACE_NAMESPACE", + "methodConfigurationName": "$METHOD_CONFIG_NAME", + "useCallCache": $USE_CALL_CACHE_BOOL, + "deleteIntermediateOutputFiles": false, + "useReferenceDisks": true, + "memoryRetryMultiplier": 1.2, + "workflowFailureMode": "NoNewCalls", + "userComment": "Automated submission", + "ignoreEmptyOutputs": false + } + EOF + + echo "Created submission data file: 
$SUBMISSION_DATA_FILE" + + # 1. Submit all jobs first and store their submission IDs + for input_file in "$INPUTS_DIR"/*.json; do + test_input_file=$(python3 scripts/firecloud_api/UpdateTestInputs.py --truth_path "$TRUTH_PATH" \ + --results_path "$RESULTS_PATH" \ + --inputs_json "$input_file" \ + --update_truth "$UPDATE_TRUTH_BOOL" \ + --branch_name "$BRANCH_NAME" ) + echo "Uploading the test input file: $test_input_file" + python3 scripts/firecloud_api/firecloud_api.py \ + upload_test_inputs \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --pipeline_name "$PIPELINE_NAME" \ + --test_input_file "$test_input_file" \ + --branch_name "$BRANCH_NAME" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" + + attempt=1 + while [ $attempt -le $MAX_RETRIES ]; do + SUBMISSION_ID=$(python3 scripts/firecloud_api/firecloud_api.py submit_job \ + --workspace-namespace "$WORKSPACE_NAMESPACE" \ + --workspace-name "$TESTING_WORKSPACE" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" \ + --submission_data_file "$SUBMISSION_DATA_FILE") + + echo "Submission ID: $SUBMISSION_ID" + + if [[ "$SUBMISSION_ID" == *"404"* || -z "$SUBMISSION_ID" ]]; then + echo "Error in submission, retrying in $RETRY_DELAY seconds..." + ((attempt++)) + if [ $attempt -gt $MAX_RETRIES ]; then + echo "Max retries reached. Exiting..." + exit 1 + fi + sleep $RETRY_DELAY + continue + fi + + echo "Submission successful. Submission ID: $SUBMISSION_ID" + SUBMISSION_IDS+=("$SUBMISSION_ID") + break + done + done + + echo "All jobs have been submitted. Starting to poll for statuses..." + + # 2. After all submissions are done, start polling for statuses of all jobs + for SUBMISSION_ID in "${SUBMISSION_IDS[@]}"; do + attempt=1 + while [ $attempt -le $MAX_RETRIES ]; do + echo "Polling for Submission ID: $SUBMISSION_ID" + RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py poll_job_status \ + --submission_id "$SUBMISSION_ID" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" \ + --workspace-namespace "$WORKSPACE_NAMESPACE" \ + --workspace-name "$TESTING_WORKSPACE") + + if [ -z "$RESPONSE" ]; then + echo "Failed to retrieve Workflow IDs for submission: $SUBMISSION_ID" + ((attempt++)) + if [ $attempt -gt $MAX_RETRIES ]; then + echo "Max retries reached. Exiting..." + exit 1 + fi + sleep $RETRY_DELAY + continue + fi + + WORKFLOW_STATUSES_FOR_SUBMISSION=$(echo "$RESPONSE" | jq -r 'to_entries | map(.key + " | " + .value) | .[]') + WORKFLOW_STATUSES["$SUBMISSION_ID"]="$WORKFLOW_STATUSES_FOR_SUBMISSION" + + # retrieve workflow outputs + echo "Retrieving workflow outputs for Submission ID: $SUBMISSION_ID..." 
+ for WORKFLOW_ID in $(echo "$RESPONSE" | jq -r 'keys[]'); do + WORKFLOW_OUTPUT=$(python3 scripts/firecloud_api/firecloud_api.py get_workflow_outputs \ + --user "$USER" \ + --sa-json-b64 "$SA_JSON_B64" \ + --submission_id "$SUBMISSION_ID" \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --workflow_id "$WORKFLOW_ID" \ + --pipeline_name "$PIPELINE_NAME") + ALL_OUTPUTS+="$WORKFLOW_OUTPUT"$'\n' + done + break + done + # Generate final summary tables with hyperlinks for Submission IDs + echo "## Combined Workflow Statuses" >> $GITHUB_STEP_SUMMARY + for SUBMISSION_ID in "${!WORKFLOW_STATUSES[@]}"; do + # Generate the Terra URL for the submission + SUBMISSION_URL="https://app.terra.bio/#workspaces/$WORKSPACE_NAMESPACE/WARP%20Tests/job_history/$SUBMISSION_ID" + + # Add the Submission ID as a hyperlink + echo "[Submission ID: $SUBMISSION_ID]($SUBMISSION_URL)" >> $GITHUB_STEP_SUMMARY + + # Add the workflows and statuses for this submission + echo "${WORKFLOW_STATUSES[$SUBMISSION_ID]}" >> $GITHUB_STEP_SUMMARY + + # Add a blank line for separation + echo "" >> $GITHUB_STEP_SUMMARY + done + done + env: + PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} + WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + USER: ${{ env.USER }} + DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} + PIPELINE_DIR: ${{ env.PIPELINE_DIR }} + + - name: Delete Method Configuration + if: always() # Ensures it runs regardless of success or failure + run: | + echo "Deleting method configuration for branch: $BRANCH_NAME" + DELETE_RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py delete_method_config \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --pipeline_name "$PIPELINE_NAME" \ + --branch_name "$BRANCH_NAME" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" \ + --method_config_name "$METHOD_CONFIG_NAME") + echo "Delete response: $DELETE_RESPONSE" + if [ "$DELETE_RESPONSE" == "True" ]; then + echo "Method configuration deleted successfully." + else + echo "Error: Method configuration deletion failed." 
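+            # Fail the job so a stale method configuration is not silently left behind in the workspace.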
+ exit 1 + fi + + env: + PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + BRANCH_NAME: ${{ env.BRANCH_NAME }} + SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} + METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} + WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + USER: ${{ env.USER }} + + - name: Print Summary on Success + if: success() + run: | + echo "# :white_check_mark: Pipeline Execution Summary :white_check_mark:" >> $GITHUB_STEP_SUMMARY + + - name: Print Summary on Failure + if: failure() + run: | + echo "# :x: Pipeline Execution Summary (on Failure) :x: " >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/verification/test-wdls/TestReblockGVCF.wdl b/verification/test-wdls/TestReblockGVCF.wdl index 01607636c7..e162fcc5a3 100644 --- a/verification/test-wdls/TestReblockGVCF.wdl +++ b/verification/test-wdls/TestReblockGVCF.wdl @@ -4,7 +4,7 @@ version 1.0 import "../../pipelines/broad/dna_seq/germline/joint_genotyping/reblocking/ReblockGVCF.wdl" as ReblockGVCF import "../../verification/VerifyGvcf.wdl" as VerifyGvcf import "../../tasks/broad/Utilities.wdl" as Utilities -import "../../tasks/broad/CopyFilesFromCloudToCloud.wdl" as Copy +import "../../tasks/broad/TerraCopyFilesFromCloudToCloud.wdl" as Copy workflow TestReblockGVCF { @@ -25,8 +25,6 @@ workflow TestReblockGVCF { String truth_path String results_path Boolean update_truth - String vault_token_path - String google_account_vault_path String cloud_provider } @@ -63,21 +61,17 @@ workflow TestReblockGVCF { # Copy results of pipeline to test results bucket - call Copy.CopyFilesFromCloudToCloud as CopyToTestResults { + call Copy.TerraCopyFilesFromCloudToCloud as CopyToTestResults { input: files_to_copy = flatten([pipeline_outputs]), - vault_token_path = vault_token_path, - google_account_vault_path = google_account_vault_path, destination_cloud_path = results_path } # If updating truth then copy output to truth bucket if (update_truth){ - call Copy.CopyFilesFromCloudToCloud as CopyToTruth { + call Copy.TerraCopyFilesFromCloudToCloud as CopyToTruth { input: files_to_copy = flatten([pipeline_outputs]), - vault_token_path = vault_token_path, - google_account_vault_path = google_account_vault_path, destination_cloud_path = truth_path } } From fbdfaa5e487dad1a288169e4f1b60116cf253d76 Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 09:09:30 -0500 Subject: [PATCH 12/20] try to handle stucts --- .../test_exome_germline_single_sample.yml | 410 ++++++++++++++++++ scripts/firecloud_api/firecloud_api.py | 8 +- .../TestExomeGermlineSingleSample.wdl | 332 +++++++------- 3 files changed, 580 insertions(+), 170 deletions(-) create mode 100644 .github/workflows/test_exome_germline_single_sample.yml diff --git a/.github/workflows/test_exome_germline_single_sample.yml b/.github/workflows/test_exome_germline_single_sample.yml new file mode 100644 index 0000000000..b2e83b8686 --- /dev/null +++ b/.github/workflows/test_exome_germline_single_sample.yml @@ -0,0 +1,410 @@ +name: Nikelle's Test ExomeGermlineSingleSample + +# Controls when the workflow will run +on: + #run on push to feature branch "kp_GHA_Terra_auth_PD-2682" - REMOVE WHEN DONE TESTING + # push: + # branches: + # - kp_GHA_Terra_auth_PD-2682 + pull_request: + branches: [ "develop", "staging", "master" ] + # Only run if files in these paths changed: + #################################### + # SET PIPELINE SPECIFIC PATHS HERE # + #################################### + paths: + - 
'pipelines/broad/dna_seq/germline/single_sample/exome/**' + - 'tasks/broad/UnmappedBamToAlignedBam.wdl' + - 'tasks/broad/AggregatedBamQC.wdl' + - 'tasks/broad/Qc.wdl' + - 'tasks/broad/BamProcessing.wdl' + - 'tasks/broad/BamToCram.wdl' + - 'structs/dna_seq/DNASeqStructs.wdl' + - 'pipelines/broad/dna_seq/germline/variant_calling/VariantCalling.wdl' + - 'tasks/broad/GermlineVariantDiscovery.wdl' + - 'tasks/broad/Utilities.wdl' + - 'tasks/broad/DragenTasks.wdl' + - 'tasks/broad/Qc.wdl' + - 'tasks/broad/Utilities.wdl' + - 'verification/VerifyGermlineSingleSample.wdl' + - 'verification/VerifyMetrics.wdl' + - 'verification/VerifyTasks.wdl' + - 'verification/test-wdls/TestExomeGermlineSingleSample.wdl' + - 'tasks/broad/TerraCopyFilesFromCloudToCloud.wdl' + - '.github/workflows/test_exome_germline_single_sample.yml' + + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + inputs: + useCallCache: + description: 'Use call cache (default: true)' + required: false + default: "true" + updateTruth: + description: 'Update truth files (default: false)' + required: false + default: "false" + testType: + description: 'Specify the type of test (Plumbing or Scientific)' + required: true + truthBranch: + description: 'Specify the branch for truth files (default: master)' + required: false + default: "master" + + +env: + # pipeline configuration + PROJECT_NAME: WARP + PIPELINE_NAME: TestExomeGermlineSingleSample + DOCKSTORE_PIPELINE_NAME: ExomeGermlineSingleSample + PIPELINE_DIR: "pipelines/broad/dna_seq/germline/single_sample/exome" + + # workspace configuration + TESTING_WORKSPACE: WARP Tests + WORKSPACE_NAMESPACE: warp-pipelines + + # github repo configuration + REPOSITORY_NAME: ${{ github.event.repository.name }} + + # service account configuration + SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} + USER: pdt-tester@warp-pipeline-dev.iam.gserviceaccount.com + + +jobs: + run_pipeline: + runs-on: ubuntu-latest + # Add "id-token" with the intended permissions. + permissions: + contents: 'read' + id-token: 'write' + + steps: + # actions/checkout MUST come before auth action + - uses: actions/checkout@v3 + with: + ref: ${{ github.ref }} + + - name: Set up python + id: setup-python + uses: actions/setup-python@v4 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + cd scripts/firecloud_api/ + pip install -r requirements.txt + + - name: Set Branch Name + id: set_branch + run: | + if [ -z "${{ github.head_ref }}" ]; then + echo "Branch name is missing, using ${GITHUB_REF##*/}" + echo "BRANCH_NAME=${GITHUB_REF##*/}" >> $GITHUB_ENV + else + echo "Branch name from PR: ${{ github.head_ref }}" + echo "BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV + fi + + - name: Determine Github Commit Hash + id: determine_github_commit_hash + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + echo "Using github.sha for manually triggered workflow." + echo "GITHUB_COMMIT_HASH=${{ github.sha }}" >> $GITHUB_ENV + elif [ "${{ github.event_name }}" == "pull_request" ]; then + echo "Using github.event.pull_request.head.sha for PR-triggered workflow." 
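+            # On pull_request events github.sha points at the ephemeral merge commit, so record the PR head SHA instead.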
+ echo "GITHUB_COMMIT_HASH=${{ github.event.pull_request.head.sha }}" >> $GITHUB_ENV + else + echo "Unsupported event type: ${{ github.event_name }}" + exit 1 + fi + + - name: Fetch Dockstore Workflow Commit Hash + run: | + # Wait 5.5 minutes for Dockstore to update + sleep 330 + + DOCKSTORE_COMMIT_HASH_FROM_FETCH=$(python scripts/dockstore_api/fetch_dockstore_commit.py \ + $DOCKSTORE_TOKEN \ + $DOCKSTORE_PIPELINE_NAME \ + $BRANCH_NAME) + + # Export the commit hash as an environment variable + echo "DOCKSTORE_COMMIT_HASH=$DOCKSTORE_COMMIT_HASH_FROM_FETCH" >> $GITHUB_ENV + echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH_FROM_FETCH" + env: + ## TODO NEED TO ADD DOCKSTORE_TOKEN FOR SERVICE ACCOUNT ## + DOCKSTORE_TOKEN: ${{ secrets.DOCKSTORE_TOKEN }} + DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} + BRANCH_NAME: ${{ env.BRANCH_NAME }} + + - name: Compare Dockstore and Commit Hashes + id: compare_hashes + run: | + echo "Comparing hashes..." + echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH" + echo "GitHub Commit Hash: $GITHUB_COMMIT_HASH" + + if [ "$DOCKSTORE_COMMIT_HASH" != "$GITHUB_COMMIT_HASH" ]; then + echo "Error: The Dockstore Commit Hash does not match the GitHub Commit Hash!" + echo "Mismatch found: $DOCKSTORE_COMMIT_HASH != $GITHUB_COMMIT_HASH" + exit 1 + else + echo "Success: The Dockstore Commit Hash matches the GitHub Commit Hash." + fi + env: + DOCKSTORE_COMMIT_HASH: ${{ env.DOCKSTORE_COMMIT_HASH }} + GITHUB_COMMIT_HASH: ${{ env.GITHUB_COMMIT_HASH }} + + - name: Set Test Type + id: set_test_type + run: | + if [ "${{ github.event_name }}" == "pull_request" ]; then + # For PRs, set based on target branch + if [ "${{ github.base_ref }}" == "master" ]; then + echo "testType=Scientific" >> $GITHUB_ENV + echo "testType=Scientific" + else + echo "testType=Plumbing" >> $GITHUB_ENV + echo "testType=Plumbing" + fi + else + # For workflow_dispatch, use provided test type + echo "testType=${{ github.event.inputs.testType }}" >> $GITHUB_ENV + echo "testType=${{ github.event.inputs.testType }}" + fi + + + - name: Create new method configuration + run: | + echo "Creating new method configuration for branch: $BRANCH_NAME" + + METHOD_CONFIG_NAME=$(python3 scripts/firecloud_api/firecloud_api.py \ + create_new_method_config \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --pipeline_name "$PIPELINE_NAME" \ + --branch_name "$BRANCH_NAME" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER") + + echo "METHOD_CONFIG_NAME=$METHOD_CONFIG_NAME" >> $GITHUB_ENV + env: + PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + USER: ${{ env.USER }} + + - name: Update test inputs, Upload to Terra, Submit, Monitor and Retrieve Outputs + run: | + UPDATE_TRUTH="${{ github.event.inputs.updateTruth || 'false' }}" + USE_CALL_CACHE="${{ github.event.inputs.useCallCache || 'true' }}" + TRUTH_BRANCH="${{ github.event.inputs.truthBranch || 'master' }}" + CURRENT_TIME=$(date +"%Y-%m-%d-%H-%M-%S") + MAX_RETRIES=2 + RETRY_DELAY=300 # 300 seconds = 5 minutes + # Initialize variables to aggregate statuses and outputs + ALL_WORKFLOW_STATUSES="Workflow ID | Status"$'\n'"--- | ---" + ALL_OUTPUTS="" + # Initialize arrays to track submission and workflow statuses + declare -a SUBMISSION_IDS + declare -A WORKFLOW_STATUSES + + + # Convert UPDATE_TRUTH and USE_CALL_CACHE to a boolean-friendly format ("true" -> true, "false" -> false) + if [ "$UPDATE_TRUTH" = "true" ]; then + 
UPDATE_TRUTH_BOOL=true + else + UPDATE_TRUTH_BOOL=false + fi + + if [ "$USE_CALL_CACHE" == "true" ]; then + USE_CALL_CACHE_BOOL=true + else + USE_CALL_CACHE_BOOL=false + fi + + TEST_TYPE="${{ env.testType }}" + INPUTS_DIR="$PIPELINE_DIR/test_inputs/$TEST_TYPE" + echo "Running tests with test type: $TEST_TYPE" + + TRUTH_PATH="gs://broad-gotc-test-storage/$DOCKSTORE_PIPELINE_NAME/truth/$(echo "$TEST_TYPE" | tr '[:upper:]' '[:lower:]')/$TRUTH_BRANCH" + echo "Truth path: $TRUTH_PATH" + RESULTS_PATH="gs://broad-gotc-test-storage/$DOCKSTORE_PIPELINE_NAME/results/$CURRENT_TIME" + + # Create the submission_data.json file which will be the same for all inputs + SUBMISSION_DATA_FILE="submission_data.json" + + # Use a heredoc to generate the JSON file content dynamically + cat < "$SUBMISSION_DATA_FILE" + { + "methodConfigurationNamespace": "$WORKSPACE_NAMESPACE", + "methodConfigurationName": "$METHOD_CONFIG_NAME", + "useCallCache": $USE_CALL_CACHE_BOOL, + "deleteIntermediateOutputFiles": false, + "useReferenceDisks": true, + "memoryRetryMultiplier": 1.2, + "workflowFailureMode": "NoNewCalls", + "userComment": "Automated submission", + "ignoreEmptyOutputs": false + } + EOF + + echo "Created submission data file: $SUBMISSION_DATA_FILE" + + # 1. Submit all jobs first and store their submission IDs + for input_file in "$INPUTS_DIR"/*.json; do + test_input_file=$(python3 scripts/firecloud_api/UpdateTestInputs.py --truth_path "$TRUTH_PATH" \ + --results_path "$RESULTS_PATH" \ + --inputs_json "$input_file" \ + --update_truth "$UPDATE_TRUTH_BOOL" \ + --branch_name "$BRANCH_NAME" ) + echo "Uploading the test input file: $test_input_file" + python3 scripts/firecloud_api/firecloud_api.py \ + upload_test_inputs \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --pipeline_name "$PIPELINE_NAME" \ + --test_input_file "$test_input_file" \ + --branch_name "$BRANCH_NAME" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" + + attempt=1 + while [ $attempt -le $MAX_RETRIES ]; do + SUBMISSION_ID=$(python3 scripts/firecloud_api/firecloud_api.py submit_job \ + --workspace-namespace "$WORKSPACE_NAMESPACE" \ + --workspace-name "$TESTING_WORKSPACE" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" \ + --submission_data_file "$SUBMISSION_DATA_FILE") + + echo "Submission ID: $SUBMISSION_ID" + + if [[ "$SUBMISSION_ID" == *"404"* || -z "$SUBMISSION_ID" ]]; then + echo "Error in submission, retrying in $RETRY_DELAY seconds..." + ((attempt++)) + if [ $attempt -gt $MAX_RETRIES ]; then + echo "Max retries reached. Exiting..." + exit 1 + fi + sleep $RETRY_DELAY + continue + fi + + echo "Submission successful. Submission ID: $SUBMISSION_ID" + SUBMISSION_IDS+=("$SUBMISSION_ID") + break + done + done + + echo "All jobs have been submitted. Starting to poll for statuses..." + + # 2. After all submissions are done, start polling for statuses of all jobs + for SUBMISSION_ID in "${SUBMISSION_IDS[@]}"; do + attempt=1 + while [ $attempt -le $MAX_RETRIES ]; do + echo "Polling for Submission ID: $SUBMISSION_ID" + RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py poll_job_status \ + --submission_id "$SUBMISSION_ID" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" \ + --workspace-namespace "$WORKSPACE_NAMESPACE" \ + --workspace-name "$TESTING_WORKSPACE") + + if [ -z "$RESPONSE" ]; then + echo "Failed to retrieve Workflow IDs for submission: $SUBMISSION_ID" + ((attempt++)) + if [ $attempt -gt $MAX_RETRIES ]; then + echo "Max retries reached. Exiting..." 
+ exit 1 + fi + sleep $RETRY_DELAY + continue + fi + + WORKFLOW_STATUSES_FOR_SUBMISSION=$(echo "$RESPONSE" | jq -r 'to_entries | map(.key + " | " + .value) | .[]') + WORKFLOW_STATUSES["$SUBMISSION_ID"]="$WORKFLOW_STATUSES_FOR_SUBMISSION" + + # retrieve workflow outputs + echo "Retrieving workflow outputs for Submission ID: $SUBMISSION_ID..." + for WORKFLOW_ID in $(echo "$RESPONSE" | jq -r 'keys[]'); do + WORKFLOW_OUTPUT=$(python3 scripts/firecloud_api/firecloud_api.py get_workflow_outputs \ + --user "$USER" \ + --sa-json-b64 "$SA_JSON_B64" \ + --submission_id "$SUBMISSION_ID" \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --workflow_id "$WORKFLOW_ID" \ + --pipeline_name "$PIPELINE_NAME") + ALL_OUTPUTS+="$WORKFLOW_OUTPUT"$'\n' + done + break + done + # Generate final summary tables with hyperlinks for Submission IDs + echo "## Combined Workflow Statuses" >> $GITHUB_STEP_SUMMARY + for SUBMISSION_ID in "${!WORKFLOW_STATUSES[@]}"; do + # Generate the Terra URL for the submission + SUBMISSION_URL="https://app.terra.bio/#workspaces/$WORKSPACE_NAMESPACE/WARP%20Tests/job_history/$SUBMISSION_ID" + + # Add the Submission ID as a hyperlink + echo "[Submission ID: $SUBMISSION_ID]($SUBMISSION_URL)" >> $GITHUB_STEP_SUMMARY + + # Add the workflows and statuses for this submission + echo "${WORKFLOW_STATUSES[$SUBMISSION_ID]}" >> $GITHUB_STEP_SUMMARY + + # Add a blank line for separation + echo "" >> $GITHUB_STEP_SUMMARY + done + done + env: + PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} + WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + USER: ${{ env.USER }} + DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} + PIPELINE_DIR: ${{ env.PIPELINE_DIR }} + + - name: Delete Method Configuration + if: always() # Ensures it runs regardless of success or failure + run: | + echo "Deleting method configuration for branch: $BRANCH_NAME" + DELETE_RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py delete_method_config \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --pipeline_name "$PIPELINE_NAME" \ + --branch_name "$BRANCH_NAME" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" \ + --method_config_name "$METHOD_CONFIG_NAME") + echo "Delete response: $DELETE_RESPONSE" + if [ "$DELETE_RESPONSE" == "True" ]; then + echo "Method configuration deleted successfully." + else + echo "Error: Method configuration deletion failed." 
+ exit 1 + fi + + env: + PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + BRANCH_NAME: ${{ env.BRANCH_NAME }} + SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} + METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} + WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + USER: ${{ env.USER }} + + - name: Print Summary on Success + if: success() + run: | + echo "# :white_check_mark: Pipeline Execution Summary :white_check_mark:" >> $GITHUB_STEP_SUMMARY + + - name: Print Summary on Failure + if: failure() + run: | + echo "# :x: Pipeline Execution Summary (on Failure) :x: " >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/scripts/firecloud_api/firecloud_api.py b/scripts/firecloud_api/firecloud_api.py index f03faa8f4b..753120b1ff 100644 --- a/scripts/firecloud_api/firecloud_api.py +++ b/scripts/firecloud_api/firecloud_api.py @@ -256,7 +256,7 @@ def poll_job_status(self, submission_id): def quote_values(self, inputs_json): """ - Quote JSON values with proper array handling + Quote JSON values with proper WDL struct handling """ def format_value(val): if isinstance(val, bool): @@ -264,6 +264,12 @@ def format_value(val): if isinstance(val, list): array_items = [f'"{item}"' for item in val] return f'[{", ".join(array_items)}]' + if isinstance(val, str) and val.startswith('{') and val.endswith('}'): + # Handle WDL struct format + return f'${{{val}}}' + if isinstance(val, dict): + # Convert dict to WDL struct format + return f'${{{json.dumps(val)}}}' return f'"{val}"' return {key: format_value(value) for key, value in inputs_json.items()} diff --git a/verification/test-wdls/TestExomeGermlineSingleSample.wdl b/verification/test-wdls/TestExomeGermlineSingleSample.wdl index 59110d09be..bb6424100b 100644 --- a/verification/test-wdls/TestExomeGermlineSingleSample.wdl +++ b/verification/test-wdls/TestExomeGermlineSingleSample.wdl @@ -3,187 +3,181 @@ version 1.0 import "../../pipelines/broad/dna_seq/germline/single_sample/exome/ExomeGermlineSingleSample.wdl" as ExomeGermlineSingleSample import "../../verification/VerifyGermlineSingleSample.wdl" as VerifyGermlineSingleSample import "../../tasks/broad/Utilities.wdl" as Utilities -import "../../tasks/broad/CopyFilesFromCloudToCloud.wdl" as Copy +import "../../tasks/broad/TerraCopyFilesFromCloudToCloud.wdl" as Copy workflow TestExomeGermlineSingleSample { - input { - PapiSettings papi_settings - SampleAndUnmappedBams sample_and_unmapped_bams - DNASeqSingleSampleReferences references - VariantCallingScatterSettings scatter_settings - - File? fingerprint_genotypes_file - File? 
fingerprint_genotypes_index - - File target_interval_list - File bait_interval_list - String bait_set_name - - Boolean provide_bam_output = false - - # These values will be determined and injected into the inputs by the scala test framework - String truth_path - String results_path - Boolean update_truth - String vault_token_path - String google_account_vault_path - String cloud_provider - } - - meta { - allowNestedInputs: true - } - - # Run the pipeline - call ExomeGermlineSingleSample.ExomeGermlineSingleSample { - input: - sample_and_unmapped_bams = sample_and_unmapped_bams, - references = references, - scatter_settings = scatter_settings, - fingerprint_genotypes_file = fingerprint_genotypes_file, - fingerprint_genotypes_index = fingerprint_genotypes_index, - papi_settings = papi_settings, - target_interval_list = target_interval_list, - bait_interval_list = bait_interval_list, - bait_set_name = bait_set_name, - provide_bam_output = provide_bam_output, - cloud_provider = cloud_provider - } - - # Collect all of the pipeline outputs into a single Array[String]] - Array[String] pipeline_outputs = flatten([ - [ # File outputs - ExomeGermlineSingleSample.selfSM, - ExomeGermlineSingleSample.agg_insert_size_histogram_pdf, - ExomeGermlineSingleSample.agg_quality_distribution_pdf, - ExomeGermlineSingleSample.calculate_read_group_checksum_md5, - ExomeGermlineSingleSample.agg_insert_size_histogram_pdf, - ExomeGermlineSingleSample.agg_quality_distribution_pdf, - ExomeGermlineSingleSample.output_cram, - ExomeGermlineSingleSample.output_cram_index, - ExomeGermlineSingleSample.output_cram_md5, - ExomeGermlineSingleSample.validate_cram_file_report, - ExomeGermlineSingleSample.output_vcf, - ExomeGermlineSingleSample.output_vcf_index - ], # Array[File] outputs - ExomeGermlineSingleSample.unsorted_read_group_base_distribution_by_cycle_pdf, - ExomeGermlineSingleSample.unsorted_read_group_insert_size_histogram_pdf, - ExomeGermlineSingleSample.unsorted_read_group_quality_by_cycle_pdf, - ExomeGermlineSingleSample.unsorted_read_group_quality_distribution_pdf, - # File? outputs - select_all([ExomeGermlineSingleSample.output_bqsr_reports]), - select_all([ExomeGermlineSingleSample.output_bam]), - select_all([ExomeGermlineSingleSample.output_bam_index]), - ]) - - # Collect all of the pipeline metrics into a single Array[String] - Array[String] pipeline_metrics = flatten([ - [ # File outputs - ExomeGermlineSingleSample.read_group_alignment_summary_metrics, - ExomeGermlineSingleSample.agg_alignment_summary_metrics, - ExomeGermlineSingleSample.agg_bait_bias_detail_metrics, - ExomeGermlineSingleSample.agg_bait_bias_summary_metrics, - ExomeGermlineSingleSample.agg_insert_size_metrics, - ExomeGermlineSingleSample.agg_pre_adapter_detail_metrics, - ExomeGermlineSingleSample.agg_pre_adapter_summary_metrics, - ExomeGermlineSingleSample.agg_quality_distribution_metrics, - ExomeGermlineSingleSample.agg_error_summary_metrics, - ExomeGermlineSingleSample.duplicate_metrics, - ExomeGermlineSingleSample.gvcf_summary_metrics, - ExomeGermlineSingleSample.gvcf_detail_metrics, - ExomeGermlineSingleSample.hybrid_selection_metrics, - ], # Array[File] outputs - ExomeGermlineSingleSample.quality_yield_metrics, - ExomeGermlineSingleSample.unsorted_read_group_base_distribution_by_cycle_metrics, - ExomeGermlineSingleSample.unsorted_read_group_insert_size_metrics, - ExomeGermlineSingleSample.unsorted_read_group_quality_by_cycle_metrics, - ExomeGermlineSingleSample.unsorted_read_group_quality_distribution_metrics, - # File? 
outputs - select_all([ExomeGermlineSingleSample.cross_check_fingerprints_metrics]), - select_all([ExomeGermlineSingleSample.fingerprint_summary_metrics]), - select_all([ExomeGermlineSingleSample.fingerprint_detail_metrics]), - ]) - - # Copy results of pipeline to test results bucket - call Copy.CopyFilesFromCloudToCloud as CopyToTestResults { - input: - files_to_copy = flatten([pipeline_outputs, pipeline_metrics]), - vault_token_path = vault_token_path, - google_account_vault_path = google_account_vault_path, - contamination = ExomeGermlineSingleSample.contamination, - destination_cloud_path = results_path - } - - # If updating truth then copy pipeline results to truth bucket - if (update_truth){ - call Copy.CopyFilesFromCloudToCloud as CopyToTruth { - input: - files_to_copy = flatten([pipeline_outputs, pipeline_metrics]), - vault_token_path = vault_token_path, - google_account_vault_path = google_account_vault_path, - contamination = ExomeGermlineSingleSample.contamination, - destination_cloud_path = truth_path - } - } - - # If not updating truth then we need to collect all input for the validation WDL - # This is achieved by passing each desired file/array[files] to GetValidationInputs - if (!update_truth){ - call Utilities.GetValidationInputs as GetMetricsInputs { - input: - input_files = pipeline_metrics, - results_path = results_path, - truth_path = truth_path + input { + PapiSettings papi_settings + SampleAndUnmappedBams sample_and_unmapped_bams + DNASeqSingleSampleReferences references + VariantCallingScatterSettings scatter_settings + + File? fingerprint_genotypes_file + File? fingerprint_genotypes_index + + File target_interval_list + File bait_interval_list + String bait_set_name + + Boolean provide_bam_output = false + + # These values will be determined and injected into the inputs by the scala test framework + String truth_path + String results_path + Boolean update_truth + String cloud_provider } - call Utilities.GetValidationInputs as GetCrams { - input: - input_file = ExomeGermlineSingleSample.output_cram, - results_path = results_path, - truth_path = truth_path + meta { + allowNestedInputs: true } - call Utilities.GetValidationInputs as GetCrais { - input: - input_file = ExomeGermlineSingleSample.output_cram_index, - results_path = results_path, - truth_path = truth_path + # Run the pipeline + call ExomeGermlineSingleSample.ExomeGermlineSingleSample { + input: + sample_and_unmapped_bams = sample_and_unmapped_bams, + references = references, + scatter_settings = scatter_settings, + fingerprint_genotypes_file = fingerprint_genotypes_file, + fingerprint_genotypes_index = fingerprint_genotypes_index, + papi_settings = papi_settings, + target_interval_list = target_interval_list, + bait_interval_list = bait_interval_list, + bait_set_name = bait_set_name, + provide_bam_output = provide_bam_output, + cloud_provider = cloud_provider } - call Utilities.GetValidationInputs as GetGVCFs { - input: - input_file = ExomeGermlineSingleSample.output_vcf, - results_path = results_path, - truth_path = truth_path + # Collect all of the pipeline outputs into a single Array[String]] + Array[String] pipeline_outputs = flatten([ + [ # File outputs + ExomeGermlineSingleSample.selfSM, + ExomeGermlineSingleSample.agg_insert_size_histogram_pdf, + ExomeGermlineSingleSample.agg_quality_distribution_pdf, + ExomeGermlineSingleSample.calculate_read_group_checksum_md5, + ExomeGermlineSingleSample.agg_insert_size_histogram_pdf, + ExomeGermlineSingleSample.agg_quality_distribution_pdf, + 
ExomeGermlineSingleSample.output_cram, + ExomeGermlineSingleSample.output_cram_index, + ExomeGermlineSingleSample.output_cram_md5, + ExomeGermlineSingleSample.validate_cram_file_report, + ExomeGermlineSingleSample.output_vcf, + ExomeGermlineSingleSample.output_vcf_index + ], # Array[File] outputs + ExomeGermlineSingleSample.unsorted_read_group_base_distribution_by_cycle_pdf, + ExomeGermlineSingleSample.unsorted_read_group_insert_size_histogram_pdf, + ExomeGermlineSingleSample.unsorted_read_group_quality_by_cycle_pdf, + ExomeGermlineSingleSample.unsorted_read_group_quality_distribution_pdf, + # File? outputs + select_all([ExomeGermlineSingleSample.output_bqsr_reports]), + select_all([ExomeGermlineSingleSample.output_bam]), + select_all([ExomeGermlineSingleSample.output_bam_index]), + ]) + + # Collect all of the pipeline metrics into a single Array[String] + Array[String] pipeline_metrics = flatten([ + [ # File outputs + ExomeGermlineSingleSample.read_group_alignment_summary_metrics, + ExomeGermlineSingleSample.agg_alignment_summary_metrics, + ExomeGermlineSingleSample.agg_bait_bias_detail_metrics, + ExomeGermlineSingleSample.agg_bait_bias_summary_metrics, + ExomeGermlineSingleSample.agg_insert_size_metrics, + ExomeGermlineSingleSample.agg_pre_adapter_detail_metrics, + ExomeGermlineSingleSample.agg_pre_adapter_summary_metrics, + ExomeGermlineSingleSample.agg_quality_distribution_metrics, + ExomeGermlineSingleSample.agg_error_summary_metrics, + ExomeGermlineSingleSample.duplicate_metrics, + ExomeGermlineSingleSample.gvcf_summary_metrics, + ExomeGermlineSingleSample.gvcf_detail_metrics, + ExomeGermlineSingleSample.hybrid_selection_metrics, + ], # Array[File] outputs + ExomeGermlineSingleSample.quality_yield_metrics, + ExomeGermlineSingleSample.unsorted_read_group_base_distribution_by_cycle_metrics, + ExomeGermlineSingleSample.unsorted_read_group_insert_size_metrics, + ExomeGermlineSingleSample.unsorted_read_group_quality_by_cycle_metrics, + ExomeGermlineSingleSample.unsorted_read_group_quality_distribution_metrics, + # File? 
outputs + select_all([ExomeGermlineSingleSample.cross_check_fingerprints_metrics]), + select_all([ExomeGermlineSingleSample.fingerprint_summary_metrics]), + select_all([ExomeGermlineSingleSample.fingerprint_detail_metrics]), + ]) + + # Copy results of pipeline to test results bucket + call Copy.TerraCopyFilesFromCloudToCloud as CopyToTestResults { + input: + files_to_copy = flatten([pipeline_outputs, pipeline_metrics]), + contamination = ExomeGermlineSingleSample.contamination, + destination_cloud_path = results_path } - call Utilities.GetValidationInputs as GetGVCFIndexes { - input: - input_file = ExomeGermlineSingleSample.output_vcf_index, - results_path = results_path, - truth_path = truth_path + # If updating truth then copy pipeline results to truth bucket + if (update_truth){ + call Copy.TerraCopyFilesFromCloudToCloud as CopyToTruth { + input: + files_to_copy = flatten([pipeline_outputs, pipeline_metrics]), + contamination = ExomeGermlineSingleSample.contamination, + destination_cloud_path = truth_path + } } - - # done is dummy input to force copy completion before verification - call VerifyGermlineSingleSample.VerifyGermlineSingleSample as Verify { - input: - truth_metrics = GetMetricsInputs.truth_files, - truth_cram = GetCrams.truth_file, - truth_crai = GetCrais.truth_file, - truth_gvcf = GetGVCFs.truth_file, - truth_gvcf_index = GetGVCFIndexes.truth_file, - test_metrics = GetMetricsInputs.results_files, - test_cram = GetCrams.results_file, - test_crai = GetCrais.results_file, - test_gvcf = GetGVCFs.results_file, - test_gvcf_index = GetGVCFIndexes.results_file, - done = CopyToTestResults.done + + # If not updating truth then we need to collect all input for the validation WDL + # This is achieved by passing each desired file/array[files] to GetValidationInputs + if (!update_truth){ + call Utilities.GetValidationInputs as GetMetricsInputs { + input: + input_files = pipeline_metrics, + results_path = results_path, + truth_path = truth_path + } + + call Utilities.GetValidationInputs as GetCrams { + input: + input_file = ExomeGermlineSingleSample.output_cram, + results_path = results_path, + truth_path = truth_path + } + + call Utilities.GetValidationInputs as GetCrais { + input: + input_file = ExomeGermlineSingleSample.output_cram_index, + results_path = results_path, + truth_path = truth_path + } + + call Utilities.GetValidationInputs as GetGVCFs { + input: + input_file = ExomeGermlineSingleSample.output_vcf, + results_path = results_path, + truth_path = truth_path + } + + call Utilities.GetValidationInputs as GetGVCFIndexes { + input: + input_file = ExomeGermlineSingleSample.output_vcf_index, + results_path = results_path, + truth_path = truth_path + } + + # done is dummy input to force copy completion before verification + call VerifyGermlineSingleSample.VerifyGermlineSingleSample as Verify { + input: + truth_metrics = GetMetricsInputs.truth_files, + truth_cram = GetCrams.truth_file, + truth_crai = GetCrais.truth_file, + truth_gvcf = GetGVCFs.truth_file, + truth_gvcf_index = GetGVCFIndexes.truth_file, + test_metrics = GetMetricsInputs.results_files, + test_cram = GetCrams.results_file, + test_crai = GetCrais.results_file, + test_gvcf = GetGVCFs.results_file, + test_gvcf_index = GetGVCFIndexes.results_file, + done = CopyToTestResults.done + } } - } - output { - Array[File]? metric_comparison_report_files = Verify.metric_comparison_report_files - } + output { + Array[File]? 
metric_comparison_report_files = Verify.metric_comparison_report_files + } -} +} \ No newline at end of file From db9c669f4ea0334e2619456e8054d5f54e6c07ed Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 09:32:24 -0500 Subject: [PATCH 13/20] try to handle stucts --- scripts/firecloud_api/firecloud_api.py | 35 ++++++++++++++++++-------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/scripts/firecloud_api/firecloud_api.py b/scripts/firecloud_api/firecloud_api.py index 753120b1ff..bf60e2754f 100644 --- a/scripts/firecloud_api/firecloud_api.py +++ b/scripts/firecloud_api/firecloud_api.py @@ -256,21 +256,34 @@ def poll_job_status(self, submission_id): def quote_values(self, inputs_json): """ - Quote JSON values with proper WDL struct handling + Format JSON values with proper handling of nested structures + """ + def quote_values(self, inputs_json): + """ + Format JSON values with proper handling of nested structures """ def format_value(val): if isinstance(val, bool): return str(val).lower() - if isinstance(val, list): - array_items = [f'"{item}"' for item in val] - return f'[{", ".join(array_items)}]' - if isinstance(val, str) and val.startswith('{') and val.endswith('}'): - # Handle WDL struct format - return f'${{{val}}}' - if isinstance(val, dict): - # Convert dict to WDL struct format - return f'${{{json.dumps(val)}}}' - return f'"{val}"' + elif isinstance(val, dict): + return json.dumps(val, indent=2) + elif isinstance(val, list): + if all(isinstance(x, str) for x in val): + return json.dumps(val) + return json.dumps([format_value(x) for x in val]) + elif isinstance(val, (int, float)): + return str(val) + elif val is None: + return "" + elif isinstance(val, str): + if val.startswith("{") and val.endswith("}"): + try: + parsed = json.loads(val) + return json.dumps(parsed, indent=2) + except json.JSONDecodeError: + return f'"{val}"' + return f'"{val}"' + return f'"{str(val)}"' return {key: format_value(value) for key, value in inputs_json.items()} From a40af64c84c519af3a565308107a7a74964b0a5e Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 09:34:05 -0500 Subject: [PATCH 14/20] try to handle stucts --- scripts/firecloud_api/firecloud_api.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/scripts/firecloud_api/firecloud_api.py b/scripts/firecloud_api/firecloud_api.py index bf60e2754f..71b6b536a9 100644 --- a/scripts/firecloud_api/firecloud_api.py +++ b/scripts/firecloud_api/firecloud_api.py @@ -254,10 +254,6 @@ def poll_job_status(self, submission_id): return workflow_status_map - def quote_values(self, inputs_json): - """ - Format JSON values with proper handling of nested structures - """ def quote_values(self, inputs_json): """ Format JSON values with proper handling of nested structures From 9314d93692f1bfc182b28835069983c3cbf06871 Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 09:43:08 -0500 Subject: [PATCH 15/20] try to handle stucts --- .dockstore.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.dockstore.yml b/.dockstore.yml index fe57295cc3..beec222931 100644 --- a/.dockstore.yml +++ b/.dockstore.yml @@ -123,6 +123,10 @@ workflows: subclass: WDL primaryDescriptorPath: /pipelines/skylab/atac/atac.wdl + - name: TestExomeGermlineSingleSample + subclass: WDL + primaryDescriptorPath: /verification/test-wdls/TestExomeGermlineSingleSample.wdl + - name: TestIlluminaGenotypingArray subclass: WDL primaryDescriptorPath: /verification/test-wdls/TestIlluminaGenotypingArray.wdl From b61ab668695cb2aa2464bc7d3428f5539be73ae8 
Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 09:51:48 -0500 Subject: [PATCH 16/20] try to handle stucts --- tasks/broad/TerraCopyFilesFromCloudToCloud.wdl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tasks/broad/TerraCopyFilesFromCloudToCloud.wdl b/tasks/broad/TerraCopyFilesFromCloudToCloud.wdl index 05415985ee..b5af1a22f8 100644 --- a/tasks/broad/TerraCopyFilesFromCloudToCloud.wdl +++ b/tasks/broad/TerraCopyFilesFromCloudToCloud.wdl @@ -19,6 +19,7 @@ task TerraCopyFilesFromCloudToCloud { input { Array[String] files_to_copy String destination_cloud_path + Float? contamination } command { @@ -27,6 +28,9 @@ task TerraCopyFilesFromCloudToCloud { gcloud config set storage/process_count 16 gcloud config set storage/thread_count 2 + if ! grep -q no_contamination contamination; then + gcloud storage cp -m -L cp.log contamination ~{destination_cloud_path}.contamination + fi gcloud storage cp ~{sep=' ' files_to_copy} ~{destination_cloud_path} } From ebbba9112b3572e2e40173e9d426f9d1d8177ee0 Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 12:11:12 -0500 Subject: [PATCH 17/20] try to handle stucts --- scripts/firecloud_api/firecloud_api.py | 31 +++++++++++++------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/scripts/firecloud_api/firecloud_api.py b/scripts/firecloud_api/firecloud_api.py index 71b6b536a9..557b85e334 100644 --- a/scripts/firecloud_api/firecloud_api.py +++ b/scripts/firecloud_api/firecloud_api.py @@ -256,33 +256,34 @@ def poll_job_status(self, submission_id): def quote_values(self, inputs_json): """ - Format JSON values with proper handling of nested structures + Format JSON values with proper handling of WDL structs and nested structures """ def format_value(val): if isinstance(val, bool): return str(val).lower() - elif isinstance(val, dict): - return json.dumps(val, indent=2) - elif isinstance(val, list): - if all(isinstance(x, str) for x in val): - return json.dumps(val) - return json.dumps([format_value(x) for x in val]) elif isinstance(val, (int, float)): return str(val) elif val is None: return "" elif isinstance(val, str): - if val.startswith("{") and val.endswith("}"): - try: - parsed = json.loads(val) - return json.dumps(parsed, indent=2) - except json.JSONDecodeError: - return f'"{val}"' - return f'"{val}"' + # Check if it's already a JSON string + try: + parsed = json.loads(val) + if isinstance(parsed, dict): + # For WDL structs, return compact JSON without newlines + return json.dumps(parsed, separators=(',', ':')) + return val + except json.JSONDecodeError: + # If it's a regular string, quote it + return f'"{val}"' + elif isinstance(val, dict): + # For dictionaries, return compact JSON without newlines + return json.dumps(val, separators=(',', ':')) + elif isinstance(val, list): + return json.dumps([format_value(x) for x in val]) return f'"{str(val)}"' return {key: format_value(value) for key, value in inputs_json.items()} - def get_workflow_outputs(self, submission_id, workflow_id, pipeline_name): """ Fetches workflow outputs from the Firecloud API. 
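For reference, a minimal sketch (not part of the patch) of what the struct-aware
quote_values above emits for representative inputs; "api" stands in for an
authenticated FirecloudAPI instance and the input names are hypothetical:

    inputs = {
        "TestExomeGermlineSingleSample.update_truth": False,
        "TestExomeGermlineSingleSample.references": '{"reference_fasta": "gs://bucket/ref.fasta"}',
        "TestExomeGermlineSingleSample.bait_set_name": "whole_exome_illumina",
    }
    api.quote_values(inputs)
    # -> {'TestExomeGermlineSingleSample.update_truth': 'false',
    #     'TestExomeGermlineSingleSample.references': '{"reference_fasta":"gs://bucket/ref.fasta"}',
    #     'TestExomeGermlineSingleSample.bait_set_name': '"whole_exome_illumina"'}

Booleans become bare JSON literals, strings that parse as JSON objects are
re-serialized as compact structs, and plain strings are quoted.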
From 0723f3daa9251844cb239bcacce3567ceed6c499 Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 12:27:27 -0500 Subject: [PATCH 18/20] try to handle stucts --- .../test_exome_germline_single_sample.yml | 92 +++++++++---------- scripts/firecloud_api/firecloud_api.py | 31 +++---- 2 files changed, 61 insertions(+), 62 deletions(-) diff --git a/.github/workflows/test_exome_germline_single_sample.yml b/.github/workflows/test_exome_germline_single_sample.yml index b2e83b8686..b2049f56c1 100644 --- a/.github/workflows/test_exome_germline_single_sample.yml +++ b/.github/workflows/test_exome_germline_single_sample.yml @@ -126,7 +126,7 @@ jobs: - name: Fetch Dockstore Workflow Commit Hash run: | # Wait 5.5 minutes for Dockstore to update - sleep 330 + sleep 3 DOCKSTORE_COMMIT_HASH_FROM_FETCH=$(python scripts/dockstore_api/fetch_dockstore_commit.py \ $DOCKSTORE_TOKEN \ @@ -142,23 +142,23 @@ jobs: DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} BRANCH_NAME: ${{ env.BRANCH_NAME }} - - name: Compare Dockstore and Commit Hashes - id: compare_hashes - run: | - echo "Comparing hashes..." - echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH" - echo "GitHub Commit Hash: $GITHUB_COMMIT_HASH" - - if [ "$DOCKSTORE_COMMIT_HASH" != "$GITHUB_COMMIT_HASH" ]; then - echo "Error: The Dockstore Commit Hash does not match the GitHub Commit Hash!" - echo "Mismatch found: $DOCKSTORE_COMMIT_HASH != $GITHUB_COMMIT_HASH" - exit 1 - else - echo "Success: The Dockstore Commit Hash matches the GitHub Commit Hash." - fi - env: - DOCKSTORE_COMMIT_HASH: ${{ env.DOCKSTORE_COMMIT_HASH }} - GITHUB_COMMIT_HASH: ${{ env.GITHUB_COMMIT_HASH }} + #- name: Compare Dockstore and Commit Hashes + # id: compare_hashes + # run: | + # echo "Comparing hashes..." + # echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH" + # echo "GitHub Commit Hash: $GITHUB_COMMIT_HASH" +# + # if [ "$DOCKSTORE_COMMIT_HASH" != "$GITHUB_COMMIT_HASH" ]; then + # echo "Error: The Dockstore Commit Hash does not match the GitHub Commit Hash!" + # echo "Mismatch found: $DOCKSTORE_COMMIT_HASH != $GITHUB_COMMIT_HASH" + # exit 1 + # else + # echo "Success: The Dockstore Commit Hash matches the GitHub Commit Hash." + # fi + # env: + # DOCKSTORE_COMMIT_HASH: ${{ env.DOCKSTORE_COMMIT_HASH }} + # GITHUB_COMMIT_HASH: ${{ env.GITHUB_COMMIT_HASH }} - name: Set Test Type id: set_test_type @@ -370,34 +370,34 @@ jobs: DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} PIPELINE_DIR: ${{ env.PIPELINE_DIR }} - - name: Delete Method Configuration - if: always() # Ensures it runs regardless of success or failure - run: | - echo "Deleting method configuration for branch: $BRANCH_NAME" - DELETE_RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py delete_method_config \ - --workspace-namespace $WORKSPACE_NAMESPACE \ - --workspace-name "$TESTING_WORKSPACE" \ - --pipeline_name "$PIPELINE_NAME" \ - --branch_name "$BRANCH_NAME" \ - --sa-json-b64 "$SA_JSON_B64" \ - --user "$USER" \ - --method_config_name "$METHOD_CONFIG_NAME") - echo "Delete response: $DELETE_RESPONSE" - if [ "$DELETE_RESPONSE" == "True" ]; then - echo "Method configuration deleted successfully." - else - echo "Error: Method configuration deletion failed." 
- exit 1 - fi - - env: - PIPELINE_NAME: ${{ env.PIPELINE_NAME }} - BRANCH_NAME: ${{ env.BRANCH_NAME }} - SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} - METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} - WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} - TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} - USER: ${{ env.USER }} + #- name: Delete Method Configuration + # if: always() # Ensures it runs regardless of success or failure + # run: | + # echo "Deleting method configuration for branch: $BRANCH_NAME" + # DELETE_RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py delete_method_config \ + # --workspace-namespace $WORKSPACE_NAMESPACE \ + # --workspace-name "$TESTING_WORKSPACE" \ + # --pipeline_name "$PIPELINE_NAME" \ + # --branch_name "$BRANCH_NAME" \ + # --sa-json-b64 "$SA_JSON_B64" \ + # --user "$USER" \ + # --method_config_name "$METHOD_CONFIG_NAME") + # echo "Delete response: $DELETE_RESPONSE" + # if [ "$DELETE_RESPONSE" == "True" ]; then + # echo "Method configuration deleted successfully." + # else + # echo "Error: Method configuration deletion failed." + # exit 1 + # fi + # + # env: + # PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + # BRANCH_NAME: ${{ env.BRANCH_NAME }} + # SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} + # METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} + # WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + # TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + # USER: ${{ env.USER }} - name: Print Summary on Success if: success() diff --git a/scripts/firecloud_api/firecloud_api.py b/scripts/firecloud_api/firecloud_api.py index 557b85e334..71b6b536a9 100644 --- a/scripts/firecloud_api/firecloud_api.py +++ b/scripts/firecloud_api/firecloud_api.py @@ -256,34 +256,33 @@ def poll_job_status(self, submission_id): def quote_values(self, inputs_json): """ - Format JSON values with proper handling of WDL structs and nested structures + Format JSON values with proper handling of nested structures """ def format_value(val): if isinstance(val, bool): return str(val).lower() + elif isinstance(val, dict): + return json.dumps(val, indent=2) + elif isinstance(val, list): + if all(isinstance(x, str) for x in val): + return json.dumps(val) + return json.dumps([format_value(x) for x in val]) elif isinstance(val, (int, float)): return str(val) elif val is None: return "" elif isinstance(val, str): - # Check if it's already a JSON string - try: - parsed = json.loads(val) - if isinstance(parsed, dict): - # For WDL structs, return compact JSON without newlines - return json.dumps(parsed, separators=(',', ':')) - return val - except json.JSONDecodeError: - # If it's a regular string, quote it - return f'"{val}"' - elif isinstance(val, dict): - # For dictionaries, return compact JSON without newlines - return json.dumps(val, separators=(',', ':')) - elif isinstance(val, list): - return json.dumps([format_value(x) for x in val]) + if val.startswith("{") and val.endswith("}"): + try: + parsed = json.loads(val) + return json.dumps(parsed, indent=2) + except json.JSONDecodeError: + return f'"{val}"' + return f'"{val}"' return f'"{str(val)}"' return {key: format_value(value) for key, value in inputs_json.items()} + def get_workflow_outputs(self, submission_id, workflow_id, pipeline_name): """ Fetches workflow outputs from the Firecloud API. 
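The next patch changes UpdateTestInputs.py so that nested pipeline inputs survive
the Test-wrapper renaming. As a worked example of the scheme it introduces (the
subworkflow and input names here are hypothetical):

    ExomeGermlineSingleSample.bait_set_name
        -> TestExomeGermlineSingleSample.bait_set_name
    ExomeGermlineSingleSample.UnmappedBamToAlignedBam.reads
        -> TestExomeGermlineSingleSample.ExomeGermlineSingleSample.UnmappedBamToAlignedBam.reads

Two-part keys only swap in the Test prefix; deeper keys also re-insert the wrapped
pipeline name so the key matches how nested call inputs are addressed when
allowNestedInputs is enabled.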
From f692d509202e1767a2fa58e47c2e02f324585791 Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 13:19:45 -0500 Subject: [PATCH 19/20] handle nested inputs --- scripts/firecloud_api/UpdateTestInputs.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/scripts/firecloud_api/UpdateTestInputs.py b/scripts/firecloud_api/UpdateTestInputs.py index 8d172ff3a0..39b6df29cd 100644 --- a/scripts/firecloud_api/UpdateTestInputs.py +++ b/scripts/firecloud_api/UpdateTestInputs.py @@ -4,6 +4,10 @@ import ast def update_test_inputs(inputs_json, truth_path, results_path, update_truth, branch_name): + import json + import os + import ast + with open(inputs_json, 'r') as file: test_inputs = json.load(file) @@ -16,19 +20,28 @@ def update_test_inputs(inputs_json, truth_path, results_path, update_truth, bran # Append "Test" in front of the pipeline name test_name = f"Test{pipeline_name}" - # Update all keys and ensure arrays are preserved + # Update all keys and ensure nested inputs are handled correctly updated_inputs = {} for key, value in test_inputs.items(): - new_key = key.replace(pipeline_name, test_name) + # Split the key to analyze its structure + key_parts = key.split('.') + + # Replace the top-level component with the test_name + key_parts[0] = test_name + + # For nested keys (more than two parts), append the original pipeline name with a `.` + if len(key_parts) > 2: + key_parts[1] = f"{pipeline_name}.{key_parts[1]}" + + # Reconstruct the updated key + new_key = '.'.join(key_parts) - # Handle the case where value might be a string representation of a list + # Handle the value (ensure lists and nested values are preserved correctly) if isinstance(value, list): - # Check if any element in the list is a string representation of another list processed_value = [] for item in value: if isinstance(item, str) and item.startswith('[') and item.endswith(']'): try: - # Use ast.literal_eval to safely evaluate string representation of list inner_list = ast.literal_eval(item) processed_value.extend(inner_list) except (ValueError, SyntaxError): From 5fbfd41988780be5ab9f2a8723b9f928468209b6 Mon Sep 17 00:00:00 2001 From: npetrill Date: Mon, 6 Jan 2025 13:34:11 -0500 Subject: [PATCH 20/20] handle nested inputs --- .../test_exome_germline_single_sample.yml | 94 +++++++++---------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/.github/workflows/test_exome_germline_single_sample.yml b/.github/workflows/test_exome_germline_single_sample.yml index b2049f56c1..106a2d80b1 100644 --- a/.github/workflows/test_exome_germline_single_sample.yml +++ b/.github/workflows/test_exome_germline_single_sample.yml @@ -1,4 +1,4 @@ -name: Nikelle's Test ExomeGermlineSingleSample +name: Test ExomeGermlineSingleSample # Controls when the workflow will run on: @@ -126,7 +126,7 @@ jobs: - name: Fetch Dockstore Workflow Commit Hash run: | # Wait 5.5 minutes for Dockstore to update - sleep 3 + sleep 330 DOCKSTORE_COMMIT_HASH_FROM_FETCH=$(python scripts/dockstore_api/fetch_dockstore_commit.py \ $DOCKSTORE_TOKEN \ @@ -142,23 +142,23 @@ jobs: DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} BRANCH_NAME: ${{ env.BRANCH_NAME }} - #- name: Compare Dockstore and Commit Hashes - # id: compare_hashes - # run: | - # echo "Comparing hashes..." 
- # echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH" - # echo "GitHub Commit Hash: $GITHUB_COMMIT_HASH" -# - # if [ "$DOCKSTORE_COMMIT_HASH" != "$GITHUB_COMMIT_HASH" ]; then - # echo "Error: The Dockstore Commit Hash does not match the GitHub Commit Hash!" - # echo "Mismatch found: $DOCKSTORE_COMMIT_HASH != $GITHUB_COMMIT_HASH" - # exit 1 - # else - # echo "Success: The Dockstore Commit Hash matches the GitHub Commit Hash." - # fi - # env: - # DOCKSTORE_COMMIT_HASH: ${{ env.DOCKSTORE_COMMIT_HASH }} - # GITHUB_COMMIT_HASH: ${{ env.GITHUB_COMMIT_HASH }} + - name: Compare Dockstore and Commit Hashes + id: compare_hashes + run: | + echo "Comparing hashes..." + echo "Dockstore Commit Hash: $DOCKSTORE_COMMIT_HASH" + echo "GitHub Commit Hash: $GITHUB_COMMIT_HASH" + + if [ "$DOCKSTORE_COMMIT_HASH" != "$GITHUB_COMMIT_HASH" ]; then + echo "Error: The Dockstore Commit Hash does not match the GitHub Commit Hash!" + echo "Mismatch found: $DOCKSTORE_COMMIT_HASH != $GITHUB_COMMIT_HASH" + exit 1 + else + echo "Success: The Dockstore Commit Hash matches the GitHub Commit Hash." + fi + env: + DOCKSTORE_COMMIT_HASH: ${{ env.DOCKSTORE_COMMIT_HASH }} + GITHUB_COMMIT_HASH: ${{ env.GITHUB_COMMIT_HASH }} - name: Set Test Type id: set_test_type @@ -370,34 +370,34 @@ jobs: DOCKSTORE_PIPELINE_NAME: ${{ env.DOCKSTORE_PIPELINE_NAME }} PIPELINE_DIR: ${{ env.PIPELINE_DIR }} - #- name: Delete Method Configuration - # if: always() # Ensures it runs regardless of success or failure - # run: | - # echo "Deleting method configuration for branch: $BRANCH_NAME" - # DELETE_RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py delete_method_config \ - # --workspace-namespace $WORKSPACE_NAMESPACE \ - # --workspace-name "$TESTING_WORKSPACE" \ - # --pipeline_name "$PIPELINE_NAME" \ - # --branch_name "$BRANCH_NAME" \ - # --sa-json-b64 "$SA_JSON_B64" \ - # --user "$USER" \ - # --method_config_name "$METHOD_CONFIG_NAME") - # echo "Delete response: $DELETE_RESPONSE" - # if [ "$DELETE_RESPONSE" == "True" ]; then - # echo "Method configuration deleted successfully." - # else - # echo "Error: Method configuration deletion failed." - # exit 1 - # fi - # - # env: - # PIPELINE_NAME: ${{ env.PIPELINE_NAME }} - # BRANCH_NAME: ${{ env.BRANCH_NAME }} - # SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} - # METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} - # WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} - # TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} - # USER: ${{ env.USER }} + - name: Delete Method Configuration + if: always() # Ensures it runs regardless of success or failure + run: | + echo "Deleting method configuration for branch: $BRANCH_NAME" + DELETE_RESPONSE=$(python3 scripts/firecloud_api/firecloud_api.py delete_method_config \ + --workspace-namespace $WORKSPACE_NAMESPACE \ + --workspace-name "$TESTING_WORKSPACE" \ + --pipeline_name "$PIPELINE_NAME" \ + --branch_name "$BRANCH_NAME" \ + --sa-json-b64 "$SA_JSON_B64" \ + --user "$USER" \ + --method_config_name "$METHOD_CONFIG_NAME") + echo "Delete response: $DELETE_RESPONSE" + if [ "$DELETE_RESPONSE" == "True" ]; then + echo "Method configuration deleted successfully." + else + echo "Error: Method configuration deletion failed." 
+ exit 1 + fi + + env: + PIPELINE_NAME: ${{ env.PIPELINE_NAME }} + BRANCH_NAME: ${{ env.BRANCH_NAME }} + SA_JSON_B64: ${{ secrets.PDT_TESTER_SA_B64 }} + METHOD_CONFIG_NAME: ${{ env.METHOD_CONFIG_NAME }} + WORKSPACE_NAMESPACE: ${{ env.WORKSPACE_NAMESPACE }} + TESTING_WORKSPACE: ${{ env.TESTING_WORKSPACE }} + USER: ${{ env.USER }} - name: Print Summary on Success if: success()