diff --git a/.github/workflows/semver-check.yaml b/.github/workflows/semver-check.yaml index e3ae757b..a7a4bf6f 100644 --- a/.github/workflows/semver-check.yaml +++ b/.github/workflows/semver-check.yaml @@ -10,6 +10,6 @@ jobs: steps: - name: Comment id: comment - uses: adobe-rnd/github-semantic-release-comment-action@master + uses: adobe-rnd/github-semantic-release-comment-action@main with: - repo-token: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/package-lock.json b/package-lock.json index e6e0448f..ce6d300d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -123,6 +123,10 @@ "resolved": "packages/helix-shared-secrets", "link": true }, + "node_modules/@adobe/helix-shared-storage": { + "resolved": "packages/helix-shared-storage", + "link": true + }, "node_modules/@adobe/helix-shared-string": { "resolved": "packages/helix-shared-string", "link": true @@ -8907,10 +8911,9 @@ } }, "node_modules/mime": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/mime/-/mime-4.0.1.tgz", - "integrity": "sha512-5lZ5tyrIfliMXzFtkYyekWbtRXObT9OWa8IwQ5uxTBDHucNNwniRqo0yInflj+iYi5CBa6qxadGzGarDfuEOxA==", - "dev": true, + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/mime/-/mime-4.0.3.tgz", + "integrity": "sha512-KgUb15Oorc0NEKPbvfa0wRU+PItIEZmiv+pyAO2i0oTIVTJhlzMclU7w4RXWQrSOVH5ax/p/CkIO7KI4OyFJTQ==", "funding": [ "https://github.com/sponsors/broofa" ], @@ -13487,6 +13490,12 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true }, + "node_modules/sax": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz", + "integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==", + "dev": true + }, "node_modules/saxes": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", @@ -15772,6 +15781,28 @@ "node": ">=18" } }, + 
"node_modules/xml2js": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz", + "integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==", + "dev": true, + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, "node_modules/xmlchars": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz", @@ -15979,7 +16010,7 @@ }, "packages/helix-shared-config": { "name": "@adobe/helix-shared-config", - "version": "10.4.2", + "version": "10.4.3", "license": "Apache-2.0", "dependencies": { "@adobe/fetch": "^4.0.1", @@ -16342,6 +16373,22 @@ "resolved": "https://registry.npmjs.org/@adobe/helix-shared-wrap/-/helix-shared-wrap-1.0.5.tgz", "integrity": "sha512-g8bap0KhWI6Y6USlf9Se4t+Og0A6udYkoQH2NBdj/HOLnLozwn+60wbVLJhHIW2Ldt81xmhBqjhW0j1BtCQ3uw==" }, + "packages/helix-shared-storage": { + "name": "@adobe/helix-shared-storage", + "version": "0.0.1", + "license": "Apache-2.0", + "dependencies": { + "@adobe/fetch": "^4.1.2", + "@adobe/helix-shared-process-queue": "^3.0.4", + "@aws-sdk/client-s3": "^3.564.0", + "@smithy/node-http-handler": "^2.5.0", + "mime": "^4.0.3" + }, + "devDependencies": { + "nock": "13.5.4", + "xml2js": "^0.6.2" + } + }, "packages/helix-shared-string": { "name": "@adobe/helix-shared-string", "version": "2.0.2", @@ -16349,7 +16396,7 @@ }, "packages/helix-shared-tokencache": { "name": "@adobe/helix-shared-tokencache", - "version": "1.4.2", + "version": "1.4.3", "license": "Apache-2.0", "dependencies": { "@adobe/fetch": "^4.0.10", diff --git a/packages/helix-shared-storage/.jsdoc.json 
b/packages/helix-shared-storage/.jsdoc.json new file mode 100644 index 00000000..405090f4 --- /dev/null +++ b/packages/helix-shared-storage/.jsdoc.json @@ -0,0 +1,17 @@ +{ + "plugins": [], + "recurseDepth": 10, + "source": { + "includePattern": ".+\\.js(doc|x)?$", + "excludePattern": "(^|\\/|\\\\)_" + }, + "sourceType": "module", + "tags": { + "allowUnknownTags": true, + "dictionaries": ["jsdoc","closure"] + }, + "templates": { + "cleverLinks": false, + "monospaceLinks": false + } +} \ No newline at end of file diff --git a/packages/helix-shared-storage/.mocha-multi.json b/packages/helix-shared-storage/.mocha-multi.json new file mode 100644 index 00000000..aa2be2a2 --- /dev/null +++ b/packages/helix-shared-storage/.mocha-multi.json @@ -0,0 +1,6 @@ +{ + "reporterEnabled": "spec,xunit", + "xunitReporterOptions": { + "output": "junit/test-results.xml" + } +} diff --git a/packages/helix-shared-storage/.npmignore b/packages/helix-shared-storage/.npmignore new file mode 100644 index 00000000..868317d2 --- /dev/null +++ b/packages/helix-shared-storage/.npmignore @@ -0,0 +1,9 @@ +coverage/ +node_modules/ +junit/ +test/ +docs/ +logs/ +test-results.xml +renovate.json +.* diff --git a/packages/helix-shared-storage/.nycrc.json b/packages/helix-shared-storage/.nycrc.json new file mode 100644 index 00000000..f6bb44c2 --- /dev/null +++ b/packages/helix-shared-storage/.nycrc.json @@ -0,0 +1,10 @@ +{ + "reporter": [ + "lcov", + "text" + ], + "check-coverage": true, + "lines": 100, + "branches": 100, + "statements": 100 +} diff --git a/packages/helix-shared-storage/.releaserc.cjs b/packages/helix-shared-storage/.releaserc.cjs new file mode 120000 index 00000000..7ab45368 --- /dev/null +++ b/packages/helix-shared-storage/.releaserc.cjs @@ -0,0 +1 @@ +../../.releaserc.cjs \ No newline at end of file diff --git a/packages/helix-shared-storage/CHANGELOG.md b/packages/helix-shared-storage/CHANGELOG.md new file mode 100644 index 00000000..e69de29b diff --git 
a/packages/helix-shared-storage/LICENSE.txt b/packages/helix-shared-storage/LICENSE.txt new file mode 100644 index 00000000..883ab098 --- /dev/null +++ b/packages/helix-shared-storage/LICENSE.txt @@ -0,0 +1,264 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +APACHE JACKRABBIT SUBCOMPONENTS + +Apache Jackrabbit includes parts with separate copyright notices and license +terms. Your use of these subcomponents is subject to the terms and conditions +of the following licenses: + + XPath 2.0/XQuery 1.0 Parser: + http://www.w3.org/2002/11/xquery-xpath-applets/xgrammar.zip + + Copyright (C) 2002 World Wide Web Consortium, (Massachusetts Institute of + Technology, European Research Consortium for Informatics and Mathematics, + Keio University). All Rights Reserved. + + This work is distributed under the W3C(R) Software License in the hope + that it will be useful, but WITHOUT ANY WARRANTY; without even the + implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+ + W3C(R) SOFTWARE NOTICE AND LICENSE + http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231 + + This work (and included software, documentation such as READMEs, or + other related items) is being provided by the copyright holders under + the following license. By obtaining, using and/or copying this work, + you (the licensee) agree that you have read, understood, and will comply + with the following terms and conditions. + + Permission to copy, modify, and distribute this software and its + documentation, with or without modification, for any purpose and + without fee or royalty is hereby granted, provided that you include + the following on ALL copies of the software and documentation or + portions thereof, including modifications: + + 1. The full text of this NOTICE in a location viewable to users + of the redistributed or derivative work. + + 2. Any pre-existing intellectual property disclaimers, notices, + or terms and conditions. If none exist, the W3C Software Short + Notice should be included (hypertext is preferred, text is + permitted) within the body of any redistributed or derivative code. + + 3. Notice of any changes or modifications to the files, including + the date changes were made. (We recommend you provide URIs to the + location from which the code is derived.) + + THIS SOFTWARE AND DOCUMENTATION IS PROVIDED "AS IS," AND COPYRIGHT + HOLDERS MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, + INCLUDING BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY OR FITNESS + FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE OR + DOCUMENTATION WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, + TRADEMARKS OR OTHER RIGHTS. + + COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL + OR CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR + DOCUMENTATION. 
+ + The name and trademarks of copyright holders may NOT be used in + advertising or publicity pertaining to the software without specific, + written prior permission. Title to copyright in this software and + any associated documentation will at all times remain with + copyright holders. diff --git a/packages/helix-shared-storage/README.md b/packages/helix-shared-storage/README.md new file mode 100644 index 00000000..e3575130 --- /dev/null +++ b/packages/helix-shared-storage/README.md @@ -0,0 +1 @@ +# Helix Shared - storage diff --git a/packages/helix-shared-storage/package.json b/packages/helix-shared-storage/package.json new file mode 100644 index 00000000..7e836edc --- /dev/null +++ b/packages/helix-shared-storage/package.json @@ -0,0 +1,44 @@ +{ + "name": "@adobe/helix-shared-storage", + "version": "0.0.1", + "description": "Shared modules of the Helix Project - storage", + "main": "src/index.js", + "type": "module", + "types": "src/index.d.ts", + "scripts": { + "test": "c8 mocha", + "lint": "eslint .", + "clean": "rm -rf package-lock.json node_modules" + }, + "mocha": { + "reporter": "mocha-multi-reporters", + "reporter-options": "configFile=.mocha-multi.json", + "require": [ + "test/setup-env.js" + ] + }, + "repository": { + "type": "git", + "url": "https://github.com/adobe/helix-shared.git" + }, + "author": "", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/adobe/helix-shared/issues" + }, + "homepage": "https://github.com/adobe/helix-shared#readme", + "publishConfig": { + "access": "public" + }, + "devDependencies": { + "nock": "13.5.4", + "xml2js": "^0.6.2" + }, + "dependencies": { + "@adobe/fetch": "^4.1.2", + "@adobe/helix-shared-process-queue": "^3.0.4", + "@aws-sdk/client-s3": "^3.564.0", + "@smithy/node-http-handler": "^2.5.0", + "mime": "^4.0.3" + } +} diff --git a/packages/helix-shared-storage/src/index.d.ts b/packages/helix-shared-storage/src/index.d.ts new file mode 100644 index 00000000..8d4c7a0b --- /dev/null +++ 
b/packages/helix-shared-storage/src/index.d.ts @@ -0,0 +1,12 @@ +/* + * Copyright 2024 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ +export * from './storage'; diff --git a/packages/helix-shared-storage/src/index.js b/packages/helix-shared-storage/src/index.js new file mode 100644 index 00000000..b0a46ebf --- /dev/null +++ b/packages/helix-shared-storage/src/index.js @@ -0,0 +1,12 @@ +/* + * Copyright 2024 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ +export { HelixStorage } from './storage.js'; diff --git a/packages/helix-shared-storage/src/storage.d.ts b/packages/helix-shared-storage/src/storage.d.ts new file mode 100644 index 00000000..e28aa2f6 --- /dev/null +++ b/packages/helix-shared-storage/src/storage.d.ts @@ -0,0 +1,142 @@ +/* + * Copyright 2024 Adobe. All rights reserved. 
+ * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + +import { S3Client } from "@aws-sdk/client-s3"; + +export declare interface Bucket { + get client():S3Client; + + get bucket():string; + + get log():Console; + + get(key: string, meta?: object): Promise; + + head(path: string): Promise; + + /** + * Return an object's metadata. + * + * @param {string} key object key + * @returns object metadata or null + * @throws an error if the object could not be loaded due to an unexpected error. + */ + metadata(key: string): Promise; + + /** + * Store an object contents, along with headers. + * + * @param {string} key object key + * @param {Response} res response to store + * @returns result obtained from S3 + */ + store(key: string, res: Response): Promise; + + /** + * Store an object contents, along with metadata. + * + * @param {string} path object key + * @param {Buffer|string} body data to store + * @param {string} [contentType] content type. defaults to 'application/octet-stream' + * @param {object} [meta] metadata to store with the object. 
defaults to '{}' + * @param {boolean} [compress = true] + * @returns result obtained from S3 + */ + put(path: string, body: Buffer, contentType?: string, meta?: object, compress?: boolean): Promise; + + /** + * Updates the metadata + * @param {string} path + * @param {object} meta + * @param {object} opts + * @returns {Promise<*>} + */ + putMeta(path: string, meta: object, opts?: object): Promise; + + /** + * Copy an object in the same bucket. + * + * @param {string} src source key + * @param {string} dst destination key + * @returns result obtained from S3 + */ + copy(src: string, dst: string): Promise; + + /** + * Remove object(s) + * + * @param {string|string[]} path source key(s) + * @returns result obtained from S3 + */ + remove(path: string): Promise; + + /** + * Returns a list of objects below the given prefix + * @param {string} prefix + * @returns {Promise} + */ + list(prefix: string): Promise; + + listFolders(prefix: string): Promise; + + /** + * Copies the tree below src to dst. + * @param {string} src Source prefix + * @param {string} dst Destination prefix + * @param {ObjectFilter} filter Filter function + * @returns {Promise<*[]>} + */ + copyDeep(src: string, dst: string, filter?: Function): Promise; + + rmdir(src: string): Promise; +} + +/** + * The Helix Storage provides a factory for simplified bucket operations to S3 and R2 + */ +export class HelixStorage { + static fromContext(context:AdminContext):HelixStorage; + + s3():S3Client; + + /** + * creates a bucket instance that allows to perform storage related operations. + * @param bucketId + * @returns {Bucket} + */ + bucket(bucketId:string):Bucket; + + /** + * @returns {Bucket} + */ + contentBus():Bucket; + + /** + * @returns {Bucket} + */ + codeBus():Bucket; + + /** + * @returns {Bucket} + */ + mediaBus():Bucket; + + /** + * @returns {Bucket} + */ + configBus():Bucket; + + /** + * Close this storage. Destroys the S3 client used.
+ */ + close() +} diff --git a/packages/helix-shared-storage/src/storage.js b/packages/helix-shared-storage/src/storage.js new file mode 100644 index 00000000..127f220e --- /dev/null +++ b/packages/helix-shared-storage/src/storage.js @@ -0,0 +1,677 @@ +/* + * Copyright 2022 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + +/* eslint-disable max-classes-per-file,no-param-reassign */ +import { Agent } from 'node:https'; +import { promisify } from 'util'; +import zlib from 'zlib'; +import { + CopyObjectCommand, + DeleteObjectCommand, + DeleteObjectsCommand, + GetObjectCommand, + HeadObjectCommand, + ListObjectsV2Command, + PutObjectCommand, + S3Client, +} from '@aws-sdk/client-s3'; +import { NodeHttpHandler } from '@smithy/node-http-handler'; + +import { Response } from '@adobe/fetch'; +import mime from 'mime'; +import processQueue from '@adobe/helix-shared-process-queue'; + +const gzip = promisify(zlib.gzip); +const gunzip = promisify(zlib.gunzip); + +/** + * @typedef {import('@aws-sdk/client-s3').CommandInput} CommandInput + */ + +/** + * @typedef ObjectInfo + * @property {string} key + * @property {string} path the path to the object, w/o the prefix + * @property {string} lastModified + * @property {number} contentLength + * @property {string} contentType + */ + +/** + * @callback ObjectFilter + * @param {ObjectInfo} info of the object to filter + * @returns {boolean} {@code true} if the object is accepted + */ + 
+/** + * Header names that AWS considers system defined. + */ +const AWS_S3_SYSTEM_HEADERS = [ + 'cache-control', + 'content-type', + 'expires', +]; + +/** + * result object headers + */ +const AWS_META_HEADERS = [ + 'CacheControl', + 'ContentType', + 'ContentEncoding', + 'Expires', +]; + +/** + * Response header names that need a different metadata name. + */ +const METADATA_HEADER_MAP = new Map([ + ['last-modified', 'x-source-last-modified'], +]); + +/** + * Sanitizes the input key or path and returns a bucket relative key (without leading / ). + * @param {string} keyOrPath + * @returns {string} + */ +function sanitizeKey(keyOrPath) { + if (keyOrPath.charAt(0) === '/') { + return keyOrPath.substring(1); + } + return keyOrPath; +} + +/** + * Bucket class + */ +class Bucket { + constructor(opts) { + Object.assign(this, { + _s3: opts.s3, + _r2: opts.r2, + _log: opts.log, + _clients: [opts.s3], + _bucket: opts.bucketId, + }); + if (opts.r2) { + this._clients.push(opts.r2); + } + } + + get client() { + return this._s3; + } + + get bucket() { + return this._bucket; + } + + get log() { + return this._log; + } + + /** + * Return an object contents. + * + * @param {string} key object key + * @param {object} [meta] output object to receive metadata if specified + * @returns object contents as a Buffer or null if no found. + * @throws an error if the object could not be loaded due to an unexpected error. 
+ */ + async get(key, meta = null) { + const { log } = this; + const input = { + Bucket: this.bucket, + Key: sanitizeKey(key), + }; + + try { + const result = await this.client.send(new GetObjectCommand(input)); + log.info(`object downloaded from: ${input.Bucket}/${input.Key}`); + + const buf = await new Response(result.Body, {}).buffer(); + if (meta) { + Object.assign(meta, result.Metadata); + for (const name of AWS_META_HEADERS) { + if (name in result) { + meta[name] = result[name]; + } + } + } + if (result.ContentEncoding === 'gzip') { + return await gunzip(buf); + } + return buf; + } catch (e) { + /* c8 ignore next 3 */ + if (e.$metadata.httpStatusCode !== 404) { + throw e; + } + return null; + } + } + + async head(path) { + const input = { + Bucket: this._bucket, + Key: sanitizeKey(path), + }; + try { + const result = await this.client.send(new HeadObjectCommand(input)); + this.log.info(`Object metadata downloaded from: ${input.Bucket}/${input.Key}`); + return result; + } catch (e) { + /* c8 ignore next 3 */ + if (e.$metadata.httpStatusCode !== 404) { + throw e; + } + return null; + } + } + + /** + * Return an object's metadata. + * + * @param {string} key object key + * @returns object metadata or null + * @throws an error if the object could not be loaded due to an unexpected error. + */ + async metadata(key) { + const res = await this.head(key); + return res?.Metadata; + } + + /** + * Internal helper for sending a command to both S3 and R2 clients. 
+ * @param {function} CommandConstructor constructor of command to send to the client + * @param {CommandInput} input command input + * @returns {Promise<*>} the command result + */ + async sendToS3andR2(CommandConstructor, input) { + // send cmd to s3 and r2 (mirror) in parallel + const tasks = this._clients.map((c) => c.send(new CommandConstructor(input))); + const result = await Promise.allSettled(tasks); + + const rejected = result.filter(({ status }) => status === 'rejected'); + if (!rejected.length) { + return result[0].value; + } else { + // at least 1 cmd failed + /* c8 ignore next */ + const type = result[0].status === 'rejected' ? 'S3' : 'R2'; + const err = rejected[0].reason; + err.message = `[${type}] ${err.message}`; + throw err; + } + } + + /** + * Store an object contents, along with headers. + * + * @param {string} key object key + * @param {Response} res response to store + * @returns result obtained from S3 + */ + async store(key, res) { + const { log } = this; + const body = await res.buffer(); + const zipped = await gzip(body); + + const input = { + Body: zipped, + Bucket: this.bucket, + ContentEncoding: 'gzip', + Metadata: {}, + Key: sanitizeKey(key), + }; + + Array.from(res.headers.entries()).forEach(([name, value]) => { + if (AWS_S3_SYSTEM_HEADERS.includes(name)) { + // system headers are stored in the command itself, e.g. + // `content-type` header is stored as `ContentType` property + const property = name.split('-').map((seg) => seg.charAt(0).toUpperCase() + seg.slice(1)).join(''); + input[property] = value; + } else { + // use preferred name in metadata if any + input.Metadata[METADATA_HEADER_MAP.get(name) || name] = value; + } + }); + + // write to s3 and r2 (mirror) in parallel + await this.sendToS3andR2(PutObjectCommand, input); + log.info(`object uploaded to: ${input.Bucket}/${input.Key}`); + } + + /** + * Store an object contents, along with metadata. 
+ * + * @param {string} path object key + * @param {Buffer|string} body data to store + * @param {string} [contentType] content type. defaults to 'application/octet-stream' + * @param {object} [meta] metadata to store with the object. defaults to '{}' + * @param {boolean} [compress = true] + * @returns result obtained from S3 + */ + async put(path, body, contentType = 'application/octet-stream', meta = {}, compress = true) { + const input = { + Body: body, + Bucket: this.bucket, + ContentType: contentType, + Metadata: meta, + Key: sanitizeKey(path), + }; + if (compress) { + input.ContentEncoding = 'gzip'; + input.Body = await gzip(body); + } + // write to s3 and r2 (mirror) in parallel + const res = await this.sendToS3andR2(PutObjectCommand, input); + this.log.info(`object uploaded to: ${input.Bucket}/${input.Key}`); + return res; + } + + /** + * Updates the metadata + * @param {string} path + * @param {object} meta + * @param {object} opts + * @returns {Promise<*>} + */ + async putMeta(path, meta, opts = {}) { + const key = sanitizeKey(path); + const input = { + Bucket: this._bucket, + Key: key, + CopySource: `${this.bucket}/${key}`, + Metadata: meta, + MetadataDirective: 'REPLACE', + ...opts, + }; + + // write to s3 and r2 (mirror) in parallel + const result = await this.sendToS3andR2(CopyObjectCommand, input); + this.log.info(`Metadata updated for: ${input.CopySource}`); + return result; + } + + /** + * Copy an object in the same bucket. 
+ * + * @param {string} src source key + * @param {string} dst destination key + * @returns result obtained from S3 + */ + async copy(src, dst) { + const input = { + Bucket: this.bucket, + CopySource: `${this.bucket}/${sanitizeKey(src)}`, + Key: sanitizeKey(dst), + }; + + try { + // write to s3 and r2 (mirror) in parallel + await this.sendToS3andR2(CopyObjectCommand, input); + this.log.info(`object copied from ${input.CopySource} to: ${input.Bucket}/${input.Key}`); + } catch (e) { + /* c8 ignore next 3 */ + if (e.Code !== 'NoSuchKey') { + throw e; + } + const e2 = new Error(`source does not exist: ${input.CopySource}`); + e2.status = 404; + throw e2; + } + } + + /** + * Remove object(s) + * + * @param {string|string[]} path source key(s) + * @returns result obtained from S3 + */ + async remove(path) { + if (Array.isArray(path)) { + const input = { + Bucket: this.bucket, + Delete: { + Objects: path.map((p) => ({ Key: sanitizeKey(p) })), + }, + }; + // delete on s3 and r2 (mirror) in parallel + try { + const result = await this.sendToS3andR2(DeleteObjectsCommand, input); + this.log.info(`${result.Deleted.length} objects deleted from bucket ${input.Bucket}.`); + return result; + } catch (e) { + const msg = `removing ${input.Delete.length} objects from bucket ${input.Bucket} failed: ${e.message}`; + this.log.error(msg); + const e2 = new Error(msg); + e2.status = e.$metadata.httpStatusCode; + throw e2; + } + } + + const input = { + Bucket: this.bucket, + Key: sanitizeKey(path), + }; + // delete on s3 and r2 (mirror) in parallel + try { + const result = await this.sendToS3andR2(DeleteObjectCommand, input); + this.log.info(`object deleted: ${input.Bucket}/${input.Key}`); + return result; + } catch (e) { + const msg = `removing ${input.Bucket}/${input.Key} from storage failed: ${e.message}`; + this.log.error(msg); + const e2 = new Error(msg); + e2.status = e.$metadata.httpStatusCode; + throw e2; + } + } + + /** + * Returns a list of object below the given prefix + * @param 
{string} prefix + * @returns {Promise} + */ + async list(prefix) { + let ContinuationToken; + const objects = []; + do { + // eslint-disable-next-line no-await-in-loop + const result = await this.client.send(new ListObjectsV2Command({ + Bucket: this.bucket, + ContinuationToken, + Prefix: prefix, + })); + ContinuationToken = result.IsTruncated ? result.NextContinuationToken : ''; + (result.Contents || []).forEach((content) => { + const key = content.Key; + objects.push({ + key, + lastModified: content.LastModified, + contentLength: content.Size, + contentType: mime.getType(key), + path: `${key.substring(prefix.length)}`, + }); + }); + } while (ContinuationToken); + return objects; + } + + async listFolders(prefix) { + let ContinuationToken; + const folders = []; + do { + // eslint-disable-next-line no-await-in-loop + const result = await this.client.send(new ListObjectsV2Command({ + Bucket: this.bucket, + ContinuationToken, + Prefix: prefix, + Delimiter: '/', + })); + ContinuationToken = result.IsTruncated ? result.NextContinuationToken : ''; + (result.CommonPrefixes || []).forEach(({ Prefix }) => { + folders.push(Prefix); + }); + } while (ContinuationToken); + return folders; + } + + /** + * Copies the tree below src to dst. 
+ * @param {string} src Source prefix + * @param {string} dst Destination prefix + * @param {ObjectFilter} filter Filter function + * @returns {Promise<*[]>} + */ + async copyDeep(src, dst, filter = () => true) { + const { log } = this; + const tasks = []; + const Prefix = sanitizeKey(src); + const dstPrefix = sanitizeKey(dst); + this.log.info(`fetching list of files to copy ${this.bucket}/${Prefix} => ${dstPrefix}`); + (await this.list(Prefix)).forEach((obj) => { + const { + path, key, contentLength, contentType, + } = obj; + if (filter(obj)) { + tasks.push({ + src: key, + path, + contentLength, + contentType, + dst: `${dstPrefix}${path}`, + }); + } + }); + + let errors = 0; + const changes = []; + await processQueue(tasks, async (task) => { + log.info(`copy to ${task.dst}`); + const input = { + Bucket: this.bucket, + CopySource: `${this.bucket}/${task.src}`, + Key: task.dst, + }; + try { + // write to s3 and r2 (mirror) in parallel + await this.sendToS3andR2(CopyObjectCommand, input); + changes.push(task); + } catch (e) { + // at least 1 cmd failed + log.warn(`error while copying ${task.dst}: ${e}`); + errors += 1; + } + }, 64); + log.info(`copied ${changes.length} files to ${dst} (${errors} errors)`); + return changes; + } + + async rmdir(src) { + const { log } = this; + src = sanitizeKey(src); + log.info(`fetching list of files to delete from ${this.bucket}/${src}`); + const items = await this.list(src); + + let oks = 0; + let errors = 0; + await processQueue(items, async (item) => { + const { key } = item; + log.info(`deleting ${this.bucket}/${key}`); + const input = { + Bucket: this.bucket, + Key: key, + }; + + try { + // delete on s3 and r2 (mirror) in parallel + await this.sendToS3andR2(DeleteObjectCommand, input); + oks += 1; + } catch (e) { + // at least 1 cmd failed + log.warn(`error while deleting ${key}: ${e.$metadata.httpStatusCode}`); + errors += 1; + } + }, 64); + log.info(`deleted ${oks} files (${errors} errors)`); + } +} + +/** + * The Helix 
Storage provides a factory for simplified bucket operations to S3 and R2 + */ +export class HelixStorage { + static fromContext(context) { + if (!context.attributes.storage) { + const { + HELIX_HTTP_CONNECTION_TIMEOUT: connectionTimeout = 5000, + HELIX_HTTP_SOCKET_TIMEOUT: socketTimeout = 15000, + HELIX_HTTP_S3_KEEP_ALIVE: keepAlive, + CLOUDFLARE_ACCOUNT_ID: r2AccountId, + CLOUDFLARE_R2_ACCESS_KEY_ID: r2AccessKeyId, + CLOUDFLARE_R2_SECRET_ACCESS_KEY: r2SecretAccessKey, + } = context.env; + + context.attributes.storage = new HelixStorage({ + connectionTimeout, + socketTimeout, + r2AccountId, + r2AccessKeyId, + r2SecretAccessKey, + keepAlive: String(keepAlive) === 'true', + log: context.log, + }); + } + return context.attributes.storage; + } + + static AWS_S3_SYSTEM_HEADERS = { + 'content-type': 'ContentType', + 'content-disposition': 'ContentDisposition', + 'content-encoding': 'ContentEncoding', + 'content-language': 'ContentLanguage', + }; + + /** + * Create an instance + * + * @param {object} [opts] options + * @param {string} [opts.region] AWS region + * @param {string} [opts.accessKeyId] AWS access key + * @param {string} [opts.secretAccessKey] AWS secret access key + * @param {strong} [opts.r2AccountId] + * @param {strong} [opts.r2AccessKeyId] + * @param {strong} [opts.r2SecretAccessKey] + * @param {object} [opts.log] logger + */ + constructor(opts = {}) { + const { + region, accessKeyId, secretAccessKey, + connectionTimeout, socketTimeout, + r2AccountId, r2AccessKeyId, r2SecretAccessKey, + log = console, + keepAlive = true, + } = opts; + + if (region && accessKeyId && secretAccessKey) { + log.debug('Creating S3Client with credentials'); + this._s3 = new S3Client({ + region, + credentials: { + accessKeyId, + secretAccessKey, + }, + requestHandler: new NodeHttpHandler({ + httpsAgent: new Agent({ + keepAlive, + }), + connectionTimeout, + socketTimeout, + }), + }); + } else { + log.debug('Creating S3Client without credentials'); + this._s3 = new S3Client({ + 
requestHandler: new NodeHttpHandler({ + httpsAgent: new Agent({ + keepAlive, + }), + connectionTimeout, + socketTimeout, + }), + }); + } + + // initializing the R2 client which is used for mirroring all S3 writes to R2 + log.debug('Creating R2 S3Client'); + this._r2 = new S3Client({ + endpoint: `https://${r2AccountId}.r2.cloudflarestorage.com`, + region: 'us-east-1', // https://github.com/aws/aws-sdk-js-v3/issues/1845#issuecomment-754832210 + credentials: { + accessKeyId: r2AccessKeyId, + secretAccessKey: r2SecretAccessKey, + }, + requestHandler: new NodeHttpHandler({ + httpsAgent: new Agent({ + keepAlive, + }), + connectionTimeout, + socketTimeout, + }), + }); + this._log = log; + } + + s3() { + return this._s3; + } + + /** + * creates a bucket instance that allows to perform storage related operations. + * @param bucketId + * @returns {Bucket} + */ + bucket(bucketId) { + if (!this._s3) { + throw new Error('storage already closed.'); + } + if (!bucketId) { + throw new Error('bucketId is required.'); + } + return new Bucket({ + bucketId, + s3: this._s3, + r2: this._r2, + log: this._log, + }); + } + + /** + * @returns {Bucket} + */ + contentBus() { + return this.bucket('helix-content-bus'); + } + + /** + * @returns {Bucket} + */ + codeBus() { + return this.bucket('helix-code-bus'); + } + + /** + * @returns {Bucket} + */ + mediaBus() { + return this.bucket('helix-media-bus'); + } + + /** + * @returns {Bucket} + */ + configBus() { + return this.bucket('helix-config-bus'); + } + + /** + * Close this storage. Destroys the S3 client used. 
+ */ + close() { + this._s3?.destroy(); + this._r2?.destroy(); + delete this._s3; + delete this._r2; + } +} diff --git a/packages/helix-shared-storage/test/fixtures/list-folders-reply.json b/packages/helix-shared-storage/test/fixtures/list-folders-reply.json new file mode 100644 index 00000000..cc7f1c7e --- /dev/null +++ b/packages/helix-shared-storage/test/fixtures/list-folders-reply.json @@ -0,0 +1,29 @@ +[ + { + "ListBucketResult": { + "Name": "helix-code-bus", + "Prefix": "", + "NextContinuationToken": "next", + "KeyCount": 1, + "MaxKeys": 1, + "Delimiter": "/", + "IsTruncated": true, + "CommonPrefixes": [{ + "Prefix": "owner/" + }] + } + }, + { + "ListBucketResult": { + "Name": "helix-code-bus", + "Prefix": "", + "KeyCount": 1, + "MaxKeys": 1, + "Delimiter": "/", + "IsTruncated": false, + "CommonPrefixes": [{ + "Prefix": "other/" + }] + } + } +] diff --git a/packages/helix-shared-storage/test/fixtures/list-reply-copy.json b/packages/helix-shared-storage/test/fixtures/list-reply-copy.json new file mode 100644 index 00000000..df3b6e0d --- /dev/null +++ b/packages/helix-shared-storage/test/fixtures/list-reply-copy.json @@ -0,0 +1,4 @@ +[ + "\nhelix-code-busowner/repo/ref/1/s4dr7BSKNScrN4njX9+CpBNimYkuEzMWg3niTSAPMdculBmycyUPM6kv0xi46j4hdc1lFPkE/ICI8TxG+VNV9Hh91Ou0hqeBYzqTRzSBSs=55trueowner/repo/ref/.circleci/config.yml2021-05-05T08:00:30.000Z"f278c0035a9b4398629613a33abe6451"1875STANDARDowner/repo/ref/.gitignore2021-05-05T08:00:30.000Z"4a3bf29ef03ede1a8ae22475e661d3c8"111STANDARDowner/repo/ref/.vscode/launch.json2021-05-05T08:00:30.000Z"b372a503c30e88e0cc3bf3dbc02c3518"395STANDARDowner/repo/ref/.vscode/settings.json2021-05-05T08:00:30.000Z"296a9b1d1b4b21dda32793f7561164fe"97STANDARDowner/repo/ref/README.md2021-05-05T04:14:59.000Z"afb94b3baee5ec44f0848c2d88856236"91STANDARD", + 
"\nhelix-code-busowner/repo/ref/1/s4dr7BSKNScrN4njX9+CpBNimYkuEzMWg3niTSAPMdculBmycyUPM6kv0xi46j4hdc1lFPkE/ICI8TxG+VNV9Hh91Ou0hqeBYzqTRzSBSs=55falseowner/repo/ref/helix_logo.png2021-05-05T08:00:30.000Z"c6b6f1bd5bcf4f5147839ceb547719ee"4695STANDARDowner/repo/ref/htdocs/favicon.ico2021-05-05T08:00:30.000Z"5e53b594547162b5c66bc5fb1448730b"921STANDARDowner/repo/ref/htdocs/style.css2021-05-05T08:00:30.000Z"c5239c45de68c51e17585ae6e199b17b"711STANDARDowner/repo/ref/index.md2021-05-05T08:00:30.000Z"4b0a049c9a829275193ffcc3d74642c7"1421STANDARDowner/repo/ref/src/html.pre.js2021-05-05T08:00:30.000Z"234f53aaafe3384bbd4d4dbfa3f79b9f"1076STANDARD" +] diff --git a/packages/helix-shared-storage/test/fixtures/list-reply.json b/packages/helix-shared-storage/test/fixtures/list-reply.json new file mode 100644 index 00000000..a2f465ba --- /dev/null +++ b/packages/helix-shared-storage/test/fixtures/list-reply.json @@ -0,0 +1,25 @@ +{ + "scope": "https://helix-code-bus.s3.us-east-1.amazonaws.com:443", + "method": "GET", + "path": "/?list-type=2&prefix=owner%2Frepo%2Fref%2F", + "body": "", + "status": 200, + "response": 
"\nhelix-code-busowner/repo/ref/101000falseowner/repo/ref/.circleci/config.yml2021-05-05T08:00:30.000Z"f278c0035a9b4398629613a33abe6451"1875STANDARDowner/repo/ref/.gitignore2021-05-05T08:00:30.000Z"4a3bf29ef03ede1a8ae22475e661d3c8"111STANDARDowner/repo/ref/.vscode/launch.json2021-05-05T08:00:30.000Z"b372a503c30e88e0cc3bf3dbc02c3518"395STANDARDowner/repo/ref/.vscode/settings.json2021-05-05T08:00:30.000Z"296a9b1d1b4b21dda32793f7561164fe"97STANDARDowner/repo/ref/README.md2021-05-05T04:14:59.000Z"afb94b3baee5ec44f0848c2d88856236"91STANDARDowner/repo/ref/helix_logo.png2021-05-05T08:00:30.000Z"c6b6f1bd5bcf4f5147839ceb547719ee"4695STANDARDowner/repo/ref/htdocs/favicon.ico2021-05-05T08:00:30.000Z"5e53b594547162b5c66bc5fb1448730b"921STANDARDowner/repo/ref/htdocs/style.css2021-05-05T08:00:30.000Z"c5239c45de68c51e17585ae6e199b17b"711STANDARDowner/repo/ref/index.md2021-05-05T08:00:30.000Z"4b0a049c9a829275193ffcc3d74642c7"1421STANDARDowner/repo/ref/src/html.pre.js2021-05-05T08:00:30.000Z"234f53aaafe3384bbd4d4dbfa3f79b9f"1076STANDARD", + "rawHeaders": [ + "x-amz-id-2", + "xDfRvc5H7Hbm9+0s1zUHhCGosfeEc9eO5ussnALVPN+uc8gJcQ95YEtZws+/Qg7bSE2Xm4EruDU=", + "x-amz-request-id", + "EZFM2AP27PP97F85", + "Date", + "Wed, 05 May 2021 08:37:22 GMT", + "x-amz-bucket-region", + "us-east-1", + "Content-Type", + "application/xml", + "Transfer-Encoding", + "chunked", + "Server", + "AmazonS3" + ], + "responseIsBinary": false +} diff --git a/packages/helix-shared-storage/test/setup-env.js b/packages/helix-shared-storage/test/setup-env.js new file mode 100644 index 00000000..3385334c --- /dev/null +++ b/packages/helix-shared-storage/test/setup-env.js @@ -0,0 +1,24 @@ +/* + * Copyright 2021 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ +import { resolve } from 'path'; +import { fileURLToPath } from 'url'; + +// eslint-disable-next-line no-console +console.log('Forcing HTTP/1.1 for @adobe/fetch'); +process.env.HELIX_FETCH_FORCE_HTTP1 = 'true'; +process.env.HELIX_ONEDRIVE_LOCAL_AUTH_CACHE = 'true'; +process.env.HELIX_ONEDRIVE_NO_SHARE_LINK_CACHE = 'true'; + +// eslint-disable-next-line no-underscore-dangle +global.__rootdir = resolve(fileURLToPath(import.meta.url), '..', '..'); +// eslint-disable-next-line no-underscore-dangle +global.__testdir = resolve(fileURLToPath(import.meta.url), '..'); diff --git a/packages/helix-shared-storage/test/storage.test.js b/packages/helix-shared-storage/test/storage.test.js new file mode 100644 index 00000000..98368265 --- /dev/null +++ b/packages/helix-shared-storage/test/storage.test.js @@ -0,0 +1,616 @@ +/* + * Copyright 2021 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ + +/* eslint-env mocha */ +import { Response } from '@adobe/fetch'; +import assert from 'assert'; +import { promises as fs } from 'fs'; +import path from 'path'; +import { promisify } from 'util'; +import xml2js from 'xml2js'; +import zlib from 'zlib'; +import { Nock } from './utils.js'; +import { HelixStorage } from '../src/storage.js'; + +const gzip = promisify(zlib.gzip); + +const AWS_REGION = 'fake'; +const AWS_ACCESS_KEY_ID = 'fake'; +const AWS_SECRET_ACCESS_KEY = 'fake'; + +const CLOUDFLARE_ACCOUNT_ID = 'fake'; +const CLOUDFLARE_R2_ACCESS_KEY_ID = 'fake'; +const CLOUDFLARE_R2_SECRET_ACCESS_KEY = 'fake'; + +const TEST_HEADERS = [ + 'content-type', + 'content-encoding', + 'x-amz-meta-myid', +]; + +describe('Storage test', () => { + let nock; + let storage; + beforeEach(() => { + nock = new Nock().env(); + storage = new HelixStorage({ + region: AWS_REGION, + accessKeyId: AWS_ACCESS_KEY_ID, + secretAccessKey: AWS_SECRET_ACCESS_KEY, + r2AccountId: CLOUDFLARE_ACCOUNT_ID, + r2AccessKeyId: CLOUDFLARE_R2_ACCESS_KEY_ID, + r2SecretAccessKey: CLOUDFLARE_R2_SECRET_ACCESS_KEY, + }); + }); + + afterEach(() => { + nock.done(); + storage.close(); + }); + + it('bucket() needs bucket', () => { + assert.throws(() => storage.bucket(), Error('bucketId is required.')); + }); + + it('contentBus() fails on closed storage', () => { + storage.close(); + assert.throws(() => storage.contentBus(), Error('storage already closed.')); + }); + + it('creates a storage from context', () => { + const ctx = { + env: { + AWS_REGION, + AWS_ACCESS_KEY_ID, + AWS_SECRET_ACCESS_KEY, + CLOUDFLARE_ACCOUNT_ID, + CLOUDFLARE_R2_ACCESS_KEY_ID, + CLOUDFLARE_R2_SECRET_ACCESS_KEY, + }, + attributes: {}, + }; + const stor = HelixStorage.fromContext(ctx); + assert.ok(stor); + }); + + it('can get the s3 client', () => { + assert.ok(storage.s3()); + }); + + it('can get the content-bus', () => { + assert.strictEqual(storage.contentBus().bucket, 'helix-content-bus'); + }); + + it('can get the code-bus', () => { 
+ assert.strictEqual(storage.codeBus().bucket, 'helix-code-bus'); + }); + + it('can get the media-bus', () => { + assert.strictEqual(storage.mediaBus().bucket, 'helix-media-bus'); + }); + + it('can get the config-bus', () => { + assert.strictEqual(storage.configBus().bucket, 'helix-config-bus'); + }); + + it('can get an object', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/foo?x-id=GetObject') + .reply(200, 'hello, world.'); + const bus = storage.codeBus(); + const ret = await bus.get('/foo'); + assert.strictEqual(ret.toString(), 'hello, world.'); + }); + + it('get compressed object and populates the meta object', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/foo?x-id=GetObject') + .reply(200, await gzip('hello, world.'), { + 'content-type': 'text/plain', + 'content-encoding': 'gzip', + 'cache-control': 'no-store', + expires: 'Thu, 23 Nov 2023 10:35:10 GMT', + 'x-amz-meta-x-source-location': 'github', + }); + const bus = storage.codeBus(); + const meta = {}; + const ret = await bus.get('foo', meta); + assert.strictEqual(ret.toString(), 'hello, world.'); + assert.deepStrictEqual(meta, { + 'x-source-location': 'github', + CacheControl: 'no-store', + ContentEncoding: 'gzip', + ContentType: 'text/plain', + Expires: new Date('Thu, 23 Nov 2023 10:35:10 GMT'), + }); + }); + + it('get returns null for not found', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/foo?x-id=GetObject') + .reply(404); + const bus = storage.codeBus(); + const ret = await bus.get('/foo'); + assert.strictEqual(ret, null); + }); + + it('get throws error', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/foo?x-id=GetObject') + .reply(401); + const bus = storage.codeBus(); + const error = Error('UnknownError'); + error.name = '401'; + await assert.rejects(bus.get('/foo'), error); + }); + + it('can get metadata of an object', async () => { + 
nock('https://helix-code-bus.s3.fake.amazonaws.com') + .head('/foo') + .reply(200, 'hello, world.', { + 'content-type': 'text/plain', + 'x-amz-meta-test-location': 'some-location', + }); + const bus = storage.codeBus(); + const ret = await bus.metadata('foo'); + assert.deepStrictEqual(ret, { + 'test-location': 'some-location', + }); + }); + + it('can get metadata of an object (404)', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .head('/foo') + .reply(404); + const bus = storage.codeBus(); + const ret = await bus.metadata('foo'); + assert.strictEqual(ret, undefined); + }); + + it('can put object', async () => { + const reqs = { s3: {}, r2: {} }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .put('/foo?x-id=PutObject') + .reply(function cb(uri) { + reqs.s3[uri] = { + body: Buffer.concat(this.req.requestBodyBuffers), + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [201]; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put('/foo?x-id=PutObject') + .reply(function cb(uri) { + reqs.r2[uri] = { + body: Buffer.concat(this.req.requestBodyBuffers), + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [201]; + }); + + const bus = storage.codeBus(); + await bus.put('/foo', 'hello, world.', 'text/plain', { + myid: '1234', + }); + + const req = { + '/foo?x-id=PutObject': { + body: await gzip(Buffer.from('hello, world.', 'utf-8')), + headers: { + 'content-encoding': 'gzip', + 'content-type': 'text/plain', + 'x-amz-meta-myid': '1234', + }, + }, + }; + assert.deepEqual(reqs.s3, req); + assert.deepEqual(reqs.r2, req); + }); + + it('can put object uncompressed', async () => { + const reqs = { s3: {}, r2: {} }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .put('/foo?x-id=PutObject') + .reply(function cb(uri) { + reqs.s3[uri] = { + 
body: Buffer.concat(this.req.requestBodyBuffers), + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [201]; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put('/foo?x-id=PutObject') + .reply(function cb(uri) { + reqs.r2[uri] = { + body: Buffer.concat(this.req.requestBodyBuffers), + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [201]; + }); + + const bus = storage.codeBus(); + await bus.put('/foo', 'hello, world.', 'text/plain', { + myid: '1234', + }, false); + + const req = { + '/foo?x-id=PutObject': { + body: Buffer.from('hello, world.', 'utf-8'), + headers: { + 'content-type': 'text/plain', + 'x-amz-meta-myid': '1234', + }, + }, + }; + assert.deepEqual(reqs.s3, req); + assert.deepEqual(reqs.r2, req); + }); + + it('can store object', async () => { + const reqs = { s3: {}, r2: {} }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .put('/foo?x-id=PutObject') + .reply(function cb(uri) { + reqs.s3[uri] = { + body: Buffer.concat(this.req.requestBodyBuffers), + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [201]; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put('/foo?x-id=PutObject') + .reply(function cb(uri) { + reqs.r2[uri] = { + body: Buffer.concat(this.req.requestBodyBuffers), + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [201]; + }); + + const bus = storage.codeBus(); + const data = new Response('hello, world.', { + headers: { + 'content-type': 'text/plain', + myid: '1234', + }, + }); + await bus.store('/foo', data); + + const req = { + '/foo?x-id=PutObject': { + body: await gzip(Buffer.from('hello, world.', 'utf-8')), + headers: { + 
'content-encoding': 'gzip', + 'content-type': 'text/plain', + 'x-amz-meta-myid': '1234', + }, + }, + }; + assert.deepEqual(reqs.s3, req); + assert.deepEqual(reqs.r2, req); + }); + + it('can remove object', async () => { + const reqs = { s3: {}, r2: {} }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .delete('/foo?x-id=DeleteObject') + .reply(function cb(uri, body) { + reqs.s3[uri] = { + body, + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [204]; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .delete('/foo?x-id=DeleteObject') + .reply(function cb(uri, body) { + reqs.r2[uri] = { + body, + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [204]; + }); + + const bus = storage.codeBus(); + await bus.remove('/foo'); + const req = { + '/foo?x-id=DeleteObject': { + body: '', + headers: {}, + }, + }; + assert.deepEqual(reqs.s3, req); + assert.deepEqual(reqs.r2, req); + }); + + it('can update metadata', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .put('/owner/repo/ref?x-id=CopyObject') + .reply(function test() { + assert.strictEqual(this.req.headers['x-amz-copy-source'], 'helix-code-bus/owner/repo/ref'); + assert.strictEqual(this.req.headers['x-amz-meta-source-location'], 'new-location'); + return [200, '\n2021-05-05T08:37:23.000Z"f278c0035a9b4398629613a33abe6451"']; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put('/owner/repo/ref?x-id=CopyObject') + .reply(function test() { + assert.strictEqual(this.req.headers['x-amz-copy-source'], 'helix-code-bus/owner/repo/ref'); + assert.strictEqual(this.req.headers['x-amz-meta-source-location'], 'new-location'); + return [200, '\n2021-05-05T08:37:23.000Z"f278c0035a9b4398629613a33abe6451"']; + }); + + const bus = storage.codeBus(); + const res = 
await bus.putMeta('/owner/repo/ref', { + 'source-location': 'new-location', + }); + assert.strictEqual(res.$metadata.httpStatusCode, 200); + }); + + it('remove non-existing object fails', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .delete('/does-not-exist?x-id=DeleteObject') + .reply(404); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .delete('/does-not-exist?x-id=DeleteObject') + .reply(404); + + const bus = storage.codeBus(); + await assert.rejects(async () => bus.remove('/does-not-exist')); + }); + + it('remove objects can fail', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .post('/?delete=&x-id=DeleteObjects') + .reply(404); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .post('/?delete=&x-id=DeleteObjects') + .reply(404); + + const bus = storage.codeBus(); + await assert.rejects(async () => bus.remove(['/foo', '/bar'])); + }); + + it('can remove objects', async () => { + const reqs = { s3: {}, r2: {} }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .post('/?delete=&x-id=DeleteObjects') + .reply(function cb(uri, body) { + reqs.s3[uri] = { + body, + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [200, '\n/foo/bar']; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .post('/?delete=&x-id=DeleteObjects') + .reply(function cb(uri, body) { + reqs.r2[uri] = { + body, + headers: Object.fromEntries(Object.entries(this.req.headers) + .filter(([key]) => TEST_HEADERS.indexOf(key) >= 0)), + }; + return [200, '\n/foo/bar']; + }); + + const bus = storage.codeBus(); + await bus.remove(['/foo', '/bar']); + + const req = { + '/?delete=&x-id=DeleteObjects': { + body: 'foobar', + headers: { + 'content-type': 'application/xml', + }, + }, + }; + assert.deepEqual(reqs.s3, req); + assert.deepEqual(reqs.r2, req); + }); + + 
it('can copy objects', async () => { + const listReply = JSON.parse(await fs.readFile(path.resolve(__testdir, 'fixtures', 'list-reply-copy.json'), 'utf-8')); + const puts = { s3: [], r2: [] }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/?list-type=2&prefix=owner%2Frepo%2Fref%2F') + .reply(200, listReply[0]) + .get('/?continuation-token=1%2Fs4dr7BSKNScrN4njX9%2BCpBNimYkuEzMWg3niTSAPMdculBmycyUPM6kv0xi46j4hdc1lFPkE%2FICI8TxG%2BVNV9Hh91Ou0hqeBYzqTRzSBSs%3D&list-type=2&prefix=owner%2Frepo%2Fref%2F') + .reply(200, listReply[1]) + .put(/.*/) + .times(10) + .reply((uri) => { + puts.s3.push(uri); + // reject first uri + if (puts.s3.length === 1) { + return [404]; + } + return [200, '\n2021-05-05T08:37:23.000Z"f278c0035a9b4398629613a33abe6451"']; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put(/.*/) + .times(10) + .reply((uri) => { + puts.r2.push(uri); + // reject first uri + if (puts.s3.length === 1) { + return [404]; + } + return [200, '\n2021-05-05T08:37:23.000Z"f278c0035a9b4398629613a33abe6451"']; + }); + + const bus = storage.codeBus(); + await bus.copyDeep('/owner/repo/ref/', '/bar/'); + + puts.s3.sort(); + puts.r2.sort(); + const expectedPuts = [ + '/bar/.circleci/config.yml?x-id=CopyObject', + '/bar/.gitignore?x-id=CopyObject', + '/bar/.vscode/launch.json?x-id=CopyObject', + '/bar/.vscode/settings.json?x-id=CopyObject', + '/bar/README.md?x-id=CopyObject', + '/bar/helix_logo.png?x-id=CopyObject', + '/bar/htdocs/favicon.ico?x-id=CopyObject', + '/bar/htdocs/style.css?x-id=CopyObject', + '/bar/index.md?x-id=CopyObject', + '/bar/src/html.pre.js?x-id=CopyObject', + ]; + assert.deepEqual(puts.s3, expectedPuts); + assert.deepEqual(puts.r2, expectedPuts); + }); + + it('can copy object (non deep)', async () => { + const puts = { s3: [], r2: [] }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .put('/owner/repo/ref/foo/bar.md?x-id=CopyObject') + .reply((uri) => { + puts.s3.push(uri); + return [200, 
'\n2021-05-05T08:37:23.000Z"f278c0035a9b4398629613a33abe6451"']; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put('/owner/repo/ref/foo/bar.md?x-id=CopyObject') + .reply((uri) => { + puts.r2.push(uri); + return [200, '\n2021-05-05T08:37:23.000Z"f278c0035a9b4398629613a33abe6451"']; + }); + + const bus = storage.codeBus(); + await bus.copy('/owner/repo/ref/foo.md', '/owner/repo/ref/foo/bar.md'); + + puts.s3.sort(); + puts.r2.sort(); + const expectedPuts = [ + '/owner/repo/ref/foo/bar.md?x-id=CopyObject', + ]; + assert.deepEqual(puts.s3, expectedPuts); + assert.deepEqual(puts.r2, expectedPuts); + }); + + it('can copy object can fail (non deep)', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .put('/owner/repo/ref/foo/bar.md?x-id=CopyObject') + .reply(404); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put('/owner/repo/ref/foo/bar.md?x-id=CopyObject') + .reply(404); + + const bus = storage.codeBus(); + await assert.rejects(bus.copy('/owner/repo/ref/foo.md', '/owner/repo/ref/foo/bar.md')); + }); + + it('can copy object can fail if not found (non deep)', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .put('/owner/repo/ref/foo/bar.md?x-id=CopyObject') + .reply(200, 'NoSuchKey'); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .put('/owner/repo/ref/foo/bar.md?x-id=CopyObject') + .reply(200, 'NoSuchKey'); + + const bus = storage.codeBus(); + await assert.rejects(bus.copy('/owner/repo/ref/foo.md', '/owner/repo/ref/foo/bar.md')); + }); + + it('can delete objects', async () => { + const listReply = JSON.parse(await fs.readFile(path.resolve(__testdir, 'fixtures', 'list-reply.json'), 'utf-8')); + const deletes = { s3: [], r2: [] }; + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/?list-type=2&prefix=owner%2Frepo%2Fnew-branch%2F') + .reply(200, listReply.response) + .delete(/.*/) + .times(10) + 
.reply((uri) => { + deletes.s3.push(uri); + // reject first uri + if (deletes.s3.length === 1) { + return [404]; + } + return [204]; + }); + nock(`https://helix-code-bus.${CLOUDFLARE_ACCOUNT_ID}.r2.cloudflarestorage.com`) + .delete(/.*/) + .times(10) + .reply((uri) => { + deletes.r2.push(uri); + // reject first uri + if (deletes.r2.length === 1) { + return [404]; + } + return [204]; + }); + + const bus = storage.codeBus(); + await bus.rmdir('/owner/repo/new-branch/'); + + deletes.s3.sort(); + deletes.r2.sort(); + const expectedDeletes = [ + '/owner/repo/ref/.circleci/config.yml?x-id=DeleteObject', + '/owner/repo/ref/.gitignore?x-id=DeleteObject', + '/owner/repo/ref/.vscode/launch.json?x-id=DeleteObject', + '/owner/repo/ref/.vscode/settings.json?x-id=DeleteObject', + '/owner/repo/ref/README.md?x-id=DeleteObject', + '/owner/repo/ref/helix_logo.png?x-id=DeleteObject', + '/owner/repo/ref/htdocs/favicon.ico?x-id=DeleteObject', + '/owner/repo/ref/htdocs/style.css?x-id=DeleteObject', + '/owner/repo/ref/index.md?x-id=DeleteObject', + '/owner/repo/ref/src/html.pre.js?x-id=DeleteObject', + ]; + assert.deepEqual(deletes.s3, expectedDeletes); + assert.deepEqual(deletes.r2, expectedDeletes); + }); + + it('rmdir works for empty dir', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/?list-type=2&prefix=owner%2Frepo%2Fnew-branch%2F') + .reply(200, '\nhelix-code-busowner/repo/new-branch/01000false'); + + const bus = storage.codeBus(); + await bus.rmdir('/owner/repo/new-branch/'); + }); + + it('can list folders', async () => { + const listReply = JSON.parse(await fs.readFile(path.resolve(__testdir, 'fixtures', 'list-folders-reply.json'), 'utf-8')); + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/?delimiter=%2F&list-type=2&prefix=') + .reply(200, new xml2js.Builder().buildObject(listReply[0])) + .get('/?continuation-token=next&delimiter=%2F&list-type=2&prefix=') + .reply(200, new xml2js.Builder().buildObject(listReply[1])); + + const bus = 
storage.codeBus(); + const folders = await bus.listFolders(''); + + assert.deepStrictEqual(folders, ['owner/', 'other/']); + }); + + it('can return an empty list of folders', async () => { + nock('https://helix-code-bus.s3.fake.amazonaws.com') + .get('/?delimiter=%2F&list-type=2&prefix=foo%2f') + .reply(200, ` + + 0 + +`); + + const bus = storage.codeBus(); + const folders = await bus.listFolders('foo/'); + + assert.deepStrictEqual(folders, []); + }); +}); diff --git a/packages/helix-shared-storage/test/utils.js b/packages/helix-shared-storage/test/utils.js new file mode 100644 index 00000000..6c5bab34 --- /dev/null +++ b/packages/helix-shared-storage/test/utils.js @@ -0,0 +1,79 @@ +/* + * Copyright 2020 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ +import assert from 'assert'; +// eslint-disable-next-line import/no-extraneous-dependencies +import nock from 'nock'; + +/** + * @constructor + */ +export function Nock() { + /** @type {Record} */ + let savedEnv; + + function noMatchHandler(req) { + unmatched.push(req); + } + + /** + * @param {string} url + * @returns {nock.Scope} + */ + function nocker(url) { + let scope = scopes[url]; + if (!scope) { + scope = nock(url); + scopes[url] = scope; + } + if (!unmatched) { + unmatched = []; + nock.emitter.on('no match', noMatchHandler); + } + nock.disableNetConnect(); + return scope; + } + + nocker.env = (overrides = {}) => { + savedEnv = { ...process.env }; + Object.assign(process.env, { + AWS_REGION: 'us-east-1', + AWS_ACCESS_KEY_ID: 'dummy-id', + AWS_SECRET_ACCESS_KEY: 'dummy-key', + ...overrides, + }); + return nocker; + }; + + nocker.done = () => { + if (savedEnv) { + process.env = savedEnv; + } + + if (unmatched) { + assert.deepStrictEqual(unmatched.map((req) => req.options || req), []); + nock.emitter.off('no match', noMatchHandler); + } + try { + Object.values(scopes).forEach((s) => s.done()); + } finally { + nock.cleanAll(); + } + }; + + return nocker; +}