diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2463901a91..7c56b7cc38 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,7 +6,7 @@ on: branches: - '**' env: - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 PARSE_SERVER_TEST_TIMEOUT: 20000 jobs: check-ci: @@ -101,47 +101,57 @@ jobs: strategy: matrix: include: + - name: MongoDB 5.2, ReplicaSet, WiredTiger + MONGODB_VERSION: 5.2.1 + MONGODB_TOPOLOGY: replicaset + MONGODB_STORAGE_ENGINE: wiredTiger + NODE_VERSION: 14.19.1 + - name: MongoDB 5.1, ReplicaSet, WiredTiger + MONGODB_VERSION: 5.1.1 + MONGODB_TOPOLOGY: replicaset + MONGODB_STORAGE_ENGINE: wiredTiger + NODE_VERSION: 14.19.1 - name: MongoDB 5.0, ReplicaSet, WiredTiger - MONGODB_VERSION: 5.0.3 + MONGODB_VERSION: 5.0.6 MONGODB_TOPOLOGY: replicaset MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: MongoDB 4.4, ReplicaSet, WiredTiger - MONGODB_VERSION: 4.4.10 + MONGODB_VERSION: 4.4.13 MONGODB_TOPOLOGY: replicaset MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: MongoDB 4.2, ReplicaSet, WiredTiger - MONGODB_VERSION: 4.2.17 + MONGODB_VERSION: 4.2.19 MONGODB_TOPOLOGY: replicaset MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: MongoDB 4.0, ReplicaSet, WiredTiger - MONGODB_VERSION: 4.0.27 + MONGODB_VERSION: 4.0.28 MONGODB_TOPOLOGY: replicaset MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: MongoDB 4.0, Standalone, MMAPv1 - MONGODB_VERSION: 4.0.27 + MONGODB_VERSION: 4.0.28 MONGODB_TOPOLOGY: standalone MONGODB_STORAGE_ENGINE: mmapv1 - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: Redis Cache PARSE_SERVER_TEST_CACHE: redis - MONGODB_VERSION: 4.4.10 + MONGODB_VERSION: 4.4.13 MONGODB_TOPOLOGY: standalone MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: Node 12 - MONGODB_VERSION: 4.4.10 + MONGODB_VERSION: 4.4.13 MONGODB_TOPOLOGY: standalone MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 12.22.7 + NODE_VERSION: 12.22.11 - name: Node 14 - MONGODB_VERSION: 4.4.10 + MONGODB_VERSION: 4.4.13 MONGODB_TOPOLOGY: standalone MONGODB_STORAGE_ENGINE: wiredTiger - NODE_VERSION: 14.18.1 + NODE_VERSION: 14.19.1 fail-fast: false name: ${{ matrix.name }} timeout-minutes: 15 @@ -183,22 +193,22 @@ jobs: include: - name: PostgreSQL 11, PostGIS 3.0 POSTGRES_IMAGE: postgis/postgis:11-3.0 - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: PostgreSQL 11, PostGIS 3.1 POSTGRES_IMAGE: postgis/postgis:11-3.1 - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: PostgreSQL 11, PostGIS 3.2 POSTGRES_IMAGE: postgis/postgis:11-3.2 - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: PostgreSQL 12, PostGIS 3.2 POSTGRES_IMAGE: postgis/postgis:12-3.2 - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: PostgreSQL 13, PostGIS 3.2 POSTGRES_IMAGE: postgis/postgis:13-3.2 - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 - name: PostgreSQL 14, PostGIS 3.2 POSTGRES_IMAGE: postgis/postgis:14-3.2 - NODE_VERSION: 16.13.0 + NODE_VERSION: 16.14.2 fail-fast: false name: ${{ matrix.name }} timeout-minutes: 15 diff --git a/DEPRECATIONS.md b/DEPRECATIONS.md index dc913dd0e7..347fc2a9cd 100644 --- a/DEPRECATIONS.md +++ b/DEPRECATIONS.md @@ -8,6 +8,7 @@ The following is a list of deprecations, according to the [Deprecation Policy](h | DEPPS2 | Config option `directAccess` defaults to `true` | [#6636](https://github.com/parse-community/parse-server/pull/6636) | 5.0.0 (2022) | 
6.0.0 (2023) | deprecated | - | | DEPPS3 | Config option `enforcePrivateUsers` defaults to `true` | [#7319](https://github.com/parse-community/parse-server/pull/7319) | 5.0.0 (2022) | 6.0.0 (2023) | deprecated | - | | DEPPS4 | Remove convenience method for http request `Parse.Cloud.httpRequest` | [#7589](https://github.com/parse-community/parse-server/pull/7589) | 5.0.0 (2022) | 6.0.0 (2023) | deprecated | - | +| DEPPS5 | Remove file triggers in preference of normal `Parse.Cloud` syntax `Parse.Cloud.beforeDelete(Parse.File` | [#7927](https://github.com/parse-community/parse-server/pull/7927) |6.0.0 (2022) | 6.0.0 (2023) | deprecated | - | [i_deprecation]: ## "The version and date of the deprecation." [i_removal]: ## "The version and date of the planned removal." diff --git a/README.md b/README.md index 7b507e9c80..b321392fa5 100644 --- a/README.md +++ b/README.md @@ -112,32 +112,34 @@ Before you start make sure you have installed: #### Node.js Parse Server is continuously tested with the most recent releases of Node.js to ensure compatibility. We follow the [Node.js Long Term Support plan](https://github.com/nodejs/Release) and only test against versions that are officially supported and have not reached their end-of-life date. -| Version | Latest Version | End-of-Life | Compatible | -|------------|----------------|-------------|---------------| -| Node.js 12 | 12.22.7 | April 2022 | ✅ Yes | -| Node.js 14 | 14.18.1 | April 2023 | ✅ Yes | -| Node.js 16 | 16.13.0 | April 2024 | ✅ Yes | +| Version | Latest Version | End-of-Life | Compatible | +|------------|----------------|-------------|--------------| +| Node.js 12 | 12.22.11 | April 2022 | ✅ Yes | +| Node.js 14 | 14.19.1 | April 2023 | ✅ Yes | +| Node.js 16 | 16.14.2 | April 2024 | ✅ Yes | | Node.js 17 | 17.x | June 2022 | ❌ Not tested | #### MongoDB Parse Server is continuously tested with the most recent releases of MongoDB to ensure compatibility. We follow the [MongoDB support schedule](https://www.mongodb.com/support-policy) and only test against versions that are officially supported and have not reached their end-of-life date. -| Version | Latest Version | End-of-Life | Compatible | -|-------------|----------------|--------------|------------| -| MongoDB 4.0 | 4.0.27 | April 2022 | ✅ Yes | -| MongoDB 4.2 | 4.2.17 | TBD | ✅ Yes | -| MongoDB 4.4 | 4.4.10 | TBD | ✅ Yes | -| MongoDB 5.0 | 5.0.3 | January 2024 | ✅ Yes | - +| Version | Latest Version | End-of-Life | Compatible | +|-------------|----------------|-------------|------------| +| MongoDB 4.0 | 4.0.28 | April 2022 | ✅ Yes | +| MongoDB 4.2 | 4.2.19 | TBD | ✅ Yes | +| MongoDB 4.4 | 4.4.13 | TBD | ✅ Yes | +| MongoDB 5.0 | 5.0.6 | TBD | ✅ Yes | +| MongoDB 5.1 | 5.1.1 | TBD | ✅ Yes | +| MongoDB 5.2 | 5.2.1 | TBD | ✅ Yes | + #### PostgreSQL Parse Server is continuously tested with the most recent releases of PostgreSQL and PostGIS to ensure compatibility, using [PostGIS docker images](https://registry.hub.docker.com/r/postgis/postgis/tags?page=1&ordering=last_updated). We follow the [PostgreSQL support schedule](https://www.postgresql.org/support/versioning) and [PostGIS support schedule](https://www.postgis.net/eol_policy/) and only test against versions that are officially supported and have not reached their end-of-life date. Due to the extensive PostgreSQL support duration of 5 years, Parse Server drops support if a version is older than 3.5 years and a newer version has been available for at least 2.5 years. 
| Version | PostGIS Version | End-of-Life | Parse Server Support End | Compatible | |-------------|-----------------|---------------|--------------------------|------------| -| Postgres 11 | 3.0, 3.1, 3.2 | November 2023 | April 2022 | ✅ Yes | -| Postgres 12 | 3.2 | November 2024 | April 2023 | ✅ Yes | -| Postgres 13 | 3.2 | November 2025 | April 2024 | ✅ Yes | -| Postgres 14 | 3.2 | November 2026 | April 2025 | ✅ Yes | +| Postgres 11 | 3.0, 3.1, 3.2 | November 2023 | April 2022 | ✅ Yes | +| Postgres 12 | 3.2 | November 2024 | April 2023 | ✅ Yes | +| Postgres 13 | 3.2 | November 2025 | April 2024 | ✅ Yes | +| Postgres 14 | 3.2 | November 2026 | April 2025 | ✅ Yes | ### Locally ```bash diff --git a/changelogs/CHANGELOG_alpha.md b/changelogs/CHANGELOG_alpha.md index 6938ec3c74..dcd26cb178 100644 --- a/changelogs/CHANGELOG_alpha.md +++ b/changelogs/CHANGELOG_alpha.md @@ -1,3 +1,59 @@ +# [5.3.0-alpha.6](https://github.com/parse-community/parse-server/compare/5.3.0-alpha.5...5.3.0-alpha.6) (2022-04-11) + + +### Bug Fixes + +* peer dependency mismatch for GraphQL dependencies ([#7934](https://github.com/parse-community/parse-server/issues/7934)) ([b7a1d76](https://github.com/parse-community/parse-server/commit/b7a1d7617b4bcac677cecedfeb6ac4a27447083b)) + +# [5.3.0-alpha.5](https://github.com/parse-community/parse-server/compare/5.3.0-alpha.4...5.3.0-alpha.5) (2022-04-09) + + +### Bug Fixes + +* security upgrade moment from 2.29.1 to 2.29.2 ([#7931](https://github.com/parse-community/parse-server/issues/7931)) ([6b68593](https://github.com/parse-community/parse-server/commit/6b68593eaec17e8b183899d2b92699c9ede7625b)) + +# [5.3.0-alpha.4](https://github.com/parse-community/parse-server/compare/5.3.0-alpha.3...5.3.0-alpha.4) (2022-04-04) + + +### Bug Fixes + +* custom database options are not passed to MongoDB GridFS ([#7911](https://github.com/parse-community/parse-server/issues/7911)) ([a72b384](https://github.com/parse-community/parse-server/commit/a72b384f76137a3d83ffb69f65cb25aff1bbab4f)) + +# [5.3.0-alpha.3](https://github.com/parse-community/parse-server/compare/5.3.0-alpha.2...5.3.0-alpha.3) (2022-03-27) + + +### Features + +* add MongoDB 5.2 support ([#7894](https://github.com/parse-community/parse-server/issues/7894)) ([6b4b358](https://github.com/parse-community/parse-server/commit/6b4b358f0842ae920e45652f5e8b2afebc6caf3a)) + +# [5.3.0-alpha.2](https://github.com/parse-community/parse-server/compare/5.3.0-alpha.1...5.3.0-alpha.2) (2022-03-27) + + +### Bug Fixes + +* security upgrade parse push adapter from 4.1.0 to 4.1.2 ([#7893](https://github.com/parse-community/parse-server/issues/7893)) ([ef56e98](https://github.com/parse-community/parse-server/commit/ef56e98ef65041b4d3b7b82cce3473269c27f6fd)) + +# [5.3.0-alpha.1](https://github.com/parse-community/parse-server/compare/5.2.1-alpha.2...5.3.0-alpha.1) (2022-03-27) + + +### Features + +* add MongoDB 5.1 compatibility ([#7682](https://github.com/parse-community/parse-server/issues/7682)) ([90155cf](https://github.com/parse-community/parse-server/commit/90155cf1680e5e0499b0000e071c6cb0ce3aef96)) + +## [5.2.1-alpha.2](https://github.com/parse-community/parse-server/compare/5.2.1-alpha.1...5.2.1-alpha.2) (2022-03-26) + + +### Performance Improvements + +* reduce database operations when using the constant parameter in Cloud Function validation ([#7892](https://github.com/parse-community/parse-server/issues/7892)) 
([48bd512](https://github.com/parse-community/parse-server/commit/48bd512eeb47666967dff8c5e723ddc5b7801daa)) + +## [5.2.1-alpha.1](https://github.com/parse-community/parse-server/compare/5.2.0...5.2.1-alpha.1) (2022-03-26) + + +### Bug Fixes + +* return correct response when revert is used in beforeSave ([#7839](https://github.com/parse-community/parse-server/issues/7839)) ([f63fb2b](https://github.com/parse-community/parse-server/commit/f63fb2b338c908f0e7a648d338c26b9daa50c8f2)) + # [5.2.0-alpha.3](https://github.com/parse-community/parse-server/compare/5.2.0-alpha.2...5.2.0-alpha.3) (2022-03-24) diff --git a/ci/nodeEngineCheck.js b/ci/nodeEngineCheck.js index da68f314b1..a68f2c593c 100644 --- a/ci/nodeEngineCheck.js +++ b/ci/nodeEngineCheck.js @@ -75,17 +75,21 @@ class NodeEngineCheck { // For each file for (const file of files) { - // Get node version const contentString = await fs.readFile(file, 'utf-8'); - const contentJson = JSON.parse(contentString); - const version = ((contentJson || {}).engines || {}).node; - - // Add response - response.push({ - file: file, - nodeVersion: version - }); + try { + const contentJson = JSON.parse(contentString); + const version = ((contentJson || {}).engines || {}).node; + + // Add response + response.push({ + file: file, + nodeVersion: version + }); + } catch(e) { + console.log(`Ignoring file because it is not valid JSON: ${file}`); + core.warning(`Ignoring file because it is not valid JSON: ${file}`); + } } // If results should be cleaned by removing undefined node versions diff --git a/package-lock.json b/package-lock.json index 32800119bb..8a30449748 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "parse-server", - "version": "5.2.0", + "version": "5.3.0-alpha.6", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -11,9 +11,9 @@ "dev": true }, "@apollo/client": { - "version": "3.5.8", - "resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.5.8.tgz", - "integrity": "sha512-MAm05+I1ullr64VLpZwon/ISnkMuNLf6vDqgo9wiMhHYBGT4yOAbAIseRdjCHZwfSx/7AUuBgaTNOssZPIr6FQ==", + "version": "3.5.9", + "resolved": "https://registry.npmjs.org/@apollo/client/-/client-3.5.9.tgz", + "integrity": "sha512-Qq3OE3GpyPG2fYXBzi1n4QXcKZ11c6jHdrXK2Kkn9SD+vUymSrllXsldqnKUK9tslxKqkKzNrkCXkLv7PxwfSQ==", "requires": { "@graphql-typed-document-node/core": "^3.0.0", "@wry/context": "^0.6.0", @@ -1109,11 +1109,11 @@ } }, "@babel/runtime-corejs3": { - "version": "7.14.6", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.14.6.tgz", - "integrity": "sha512-Xl8SPYtdjcMoCsIM4teyVRg7jIcgl8F2kRtoCcXuHzXswt9UxZCS6BzRo8fcnCuP6u2XtPgvyonmEPF57Kxo9Q==", + "version": "7.14.7", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.14.7.tgz", + "integrity": "sha512-Wvzcw4mBYbTagyBVZpAJWI06auSIj033T/yNE0Zn1xcup83MieCddZA7ls3kme17L4NOGBrQ09Q+nKB41RLWBA==", "requires": { - "core-js-pure": "^3.14.0", + "core-js-pure": "^3.15.0", "regenerator-runtime": "^0.13.4" } }, @@ -1198,20 +1198,20 @@ } }, "@graphql-tools/batch-execute": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-8.3.2.tgz", - "integrity": "sha512-ICWqM+MvEkIPHm18Q0cmkvm134zeQMomBKmTRxyxMNhL/ouz6Nqld52/brSlaHnzA3fczupeRJzZ0YatruGBcQ==", + "version": "8.3.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/batch-execute/-/batch-execute-8.3.3.tgz", + "integrity": 
"sha512-22q/uCMUf+z3EWoM3ZM6DopDBGkni2TsfUb/mJIysunh5u8btAuXeju++De7RFwwUw+awdJXfunFQJG+OoH5Dg==", "requires": { - "@graphql-tools/utils": "^8.6.2", + "@graphql-tools/utils": "8.6.3", "dataloader": "2.0.0", "tslib": "~2.3.0", "value-or-promise": "1.0.11" }, "dependencies": { "@graphql-tools/utils": { - "version": "8.6.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-8.6.2.tgz", - "integrity": "sha512-x1DG0cJgpJtImUlNE780B/dfp8pxvVxOD6UeykFH5rHes26S4kGokbgU8F1IgrJ1vAPm/OVBHtd2kicTsPfwdA==", + "version": "8.6.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-8.6.3.tgz", + "integrity": "sha512-CNyP7Uu7dlVMQ32IpHWOxz4yic9BYXXVkDhG0UdTKSszvzHdgMilemE9MpUrGzzBPsTe3aYTtNGyPUkyh9yTXA==", "requires": { "tslib": "~2.3.0" } @@ -1242,12 +1242,12 @@ } }, "@graphql-tools/links": { - "version": "8.2.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/links/-/links-8.2.2.tgz", - "integrity": "sha512-lWyRvG4KqVj/3dpuQzZN34TXs9+5ETaT1MxhPHe6LIF/DdNQk4Q4Y7VeET/fZ8ZhbzgweMy0AA+ZkrS2HxBcgw==", + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/links/-/links-8.2.4.tgz", + "integrity": "sha512-110WRC6VORKl+/9olmJva3gEhhK/qB4kJ6rrKVP2l9YfNdPQ50y/NemtixHNSP6d+WQ77sdvDPDXqLaT/v+q1w==", "requires": { - "@graphql-tools/delegate": "^8.5.1", - "@graphql-tools/utils": "^8.6.2", + "@graphql-tools/delegate": "8.5.4", + "@graphql-tools/utils": "8.6.3", "apollo-upload-client": "17.0.0", "form-data": "^4.0.0", "node-fetch": "^2.6.5", @@ -1255,43 +1255,43 @@ }, "dependencies": { "@graphql-tools/delegate": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-8.5.1.tgz", - "integrity": "sha512-/YPmVxitt57F8sH50pnfXASzOOjEfaUDkX48eF5q6f16+JBncej2zeu+Zm2c68q8MbIxhPlEGfpd0QZeqTvAxw==", + "version": "8.5.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/delegate/-/delegate-8.5.4.tgz", + "integrity": "sha512-+3BCgSPCp/HoeOBjhz6X7RY7HMCNBanz/wkxo0/e4rk8TqJ3sjZCH470SHvsxCsBIlMwx4FYwkmxePgX/V+0Cg==", "requires": { - "@graphql-tools/batch-execute": "^8.3.2", - "@graphql-tools/schema": "^8.3.2", - "@graphql-tools/utils": "^8.6.2", + "@graphql-tools/batch-execute": "8.3.3", + "@graphql-tools/schema": "8.3.3", + "@graphql-tools/utils": "8.6.3", "dataloader": "2.0.0", - "graphql-executor": "0.0.18", + "graphql-executor": "0.0.19", "tslib": "~2.3.0", "value-or-promise": "1.0.11" } }, "@graphql-tools/merge": { - "version": "8.2.3", - "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-8.2.3.tgz", - "integrity": "sha512-XCSmL6/Xg8259OTWNp69B57CPWiVL69kB7pposFrufG/zaAlI9BS68dgzrxmmSqZV5ZHU4r/6Tbf6fwnEJGiSw==", + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/@graphql-tools/merge/-/merge-8.2.4.tgz", + "integrity": "sha512-hiNRTsS948F+BB4Q7CZXLaGFOIHQzmimVq3EEI/+PQZsPb7kYDzg0Ow0GyV4conDdEiooLqHf7I1dWzTYwvs0A==", "requires": { - "@graphql-tools/utils": "^8.6.2", + "@graphql-tools/utils": "8.6.3", "tslib": "~2.3.0" } }, "@graphql-tools/schema": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-8.3.2.tgz", - "integrity": "sha512-77feSmIuHdoxMXRbRyxE8rEziKesd/AcqKV6fmxe7Zt+PgIQITxNDew2XJJg7qFTMNM43W77Ia6njUSBxNOkwg==", + "version": "8.3.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/schema/-/schema-8.3.3.tgz", + "integrity": "sha512-OrRLU9/7UmkDemeyNUy62uH+FofgV3bpVVZJprc9bhe3gZsY7kQNIdY7H1unINlepjLvGOgk7u7iLo2+EhjyWw==", "requires": { - "@graphql-tools/merge": "^8.2.3", - "@graphql-tools/utils": "^8.6.2", + 
"@graphql-tools/merge": "8.2.4", + "@graphql-tools/utils": "8.6.3", "tslib": "~2.3.0", "value-or-promise": "1.0.11" } }, "@graphql-tools/utils": { - "version": "8.6.2", - "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-8.6.2.tgz", - "integrity": "sha512-x1DG0cJgpJtImUlNE780B/dfp8pxvVxOD6UeykFH5rHes26S4kGokbgU8F1IgrJ1vAPm/OVBHtd2kicTsPfwdA==", + "version": "8.6.3", + "resolved": "https://registry.npmjs.org/@graphql-tools/utils/-/utils-8.6.3.tgz", + "integrity": "sha512-CNyP7Uu7dlVMQ32IpHWOxz4yic9BYXXVkDhG0UdTKSszvzHdgMilemE9MpUrGzzBPsTe3aYTtNGyPUkyh9yTXA==", "requires": { "tslib": "~2.3.0" } @@ -1850,20 +1850,20 @@ "dev": true }, "@parse/node-apn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@parse/node-apn/-/node-apn-5.1.0.tgz", - "integrity": "sha512-WT3iVwr1Y/Jf4nq4RGNwBdLwm3gTodsb+g3IY98MPSJ7LCNf+R81Nj/nQO5r/twJfN1v5B8cAgfvPGs2rPelvg==", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@parse/node-apn/-/node-apn-5.1.3.tgz", + "integrity": "sha512-Bwhmbm895lEIF2772PJ8dSvBjrtOG9/q/TDMxmX40IgZxQFoXS73+JUIKTq3CA7SUB/Szu5roJINQ0L2U/1MJw==", "requires": { - "debug": "4.3.2", + "debug": "4.3.3", "jsonwebtoken": "8.5.1", - "node-forge": "0.10.0", - "verror": "1.10.0" + "node-forge": "1.3.0", + "verror": "1.10.1" }, "dependencies": { "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", "requires": { "ms": "2.1.2" } @@ -1872,6 +1872,16 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "verror": { + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.1.tgz", + "integrity": "sha512-veufcmxri4e3XSrT0xwfUR7kguIkaxBeosDg00yDWhk49wdwkSUrvvsm7nc75e1PUyvIeZj6nS8VQRYz2/S4Xg==", + "requires": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } } } }, @@ -1886,42 +1896,36 @@ } }, "@parse/push-adapter": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/@parse/push-adapter/-/push-adapter-4.1.0.tgz", - "integrity": "sha512-8SOU4zgIr3+wn6Hbge4X/zAYAcJR7puJ3aY2ri+8fqMARgBria4JkIeAyKaTG/mUMHw6Qy5DpYYRe0LjImjZNw==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/@parse/push-adapter/-/push-adapter-4.1.2.tgz", + "integrity": "sha512-034vZTlAzgdfefIY4+Q4j8DHS/VwUAIVoh1JeRkHNfyQmUQ++uKbQbUQdJ/nf11HHS69kwLENs13BmhlHMpyHQ==", "requires": { - "@parse/node-apn": "5.1.0", + "@parse/node-apn": "5.1.3", "@parse/node-gcm": "1.0.2", "npmlog": "4.1.2", - "parse": "3.3.0" + "parse": "3.4.0" }, "dependencies": { "@babel/runtime": { - "version": "7.14.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.14.6.tgz", - "integrity": "sha512-/PCB2uJ7oM44tz8YhC4Z/6PeOKXp4K588f+5M3clr1M4zbqztlo0XEfJ2LEzj/FgwfgGcIdl8n7YYjTCI0BYwg==", + "version": "7.15.4", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.15.4.tgz", + "integrity": "sha512-99catp6bHCaxr4sJ/DbTGgHS4+Rs2RVd2g7iOap6SLGPDknRK9ztKNsE/Fg6QhSeh1FGE5f6gHGQmvvn3I3xhw==", "requires": { "regenerator-runtime": "^0.13.4" } }, - "crypto-js": { - "version": "4.0.0", - "resolved": 
"https://registry.npmjs.org/crypto-js/-/crypto-js-4.0.0.tgz", - "integrity": "sha512-bzHZN8Pn+gS7DQA6n+iUmBfl0hO5DJq++QP3U6uTucDtk/0iGpXd/Gg7CGR0p8tJhofJyaKoWBuJI4eAO00BBg==", - "optional": true - }, "parse": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/parse/-/parse-3.3.0.tgz", - "integrity": "sha512-SQkTDupU7JQBJpYFIpO8TlQjUtjboUdkXaak57pjoC1ZVbhaiNyLsdYbrlM0B+sNYhlvcMh7zwZW48u10+zm0A==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/parse/-/parse-3.4.0.tgz", + "integrity": "sha512-FMZLxPW6PvrBgxkXc9AmnYsFKvPwiS4G2n9OI4mdfiSoNzIVLc+bXzlUdJ+I7hiqHsBTP0BrdQczw2/cnVkJ6w==", "requires": { - "@babel/runtime": "7.14.6", - "@babel/runtime-corejs3": "7.14.6", - "crypto-js": "4.0.0", + "@babel/runtime": "7.15.4", + "@babel/runtime-corejs3": "7.14.7", + "crypto-js": "4.1.1", "idb-keyval": "5.0.6", "react-native-crypto-js": "1.0.0", "uuid": "3.4.0", - "ws": "7.5.0", + "ws": "7.5.1", "xmlhttprequest": "1.8.0" } }, @@ -1931,9 +1935,9 @@ "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, "ws": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.0.tgz", - "integrity": "sha512-6ezXvzOZupqKj4jUqbQ9tXuJNo+BR2gU8fFRk3XCP3e0G6WT414u5ELe6Y0vtp7kmSJ3F7YWObSNr1ESsgi4vw==" + "version": "7.5.1", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.1.tgz", + "integrity": "sha512-2c6faOUH/nhoQN6abwMloF7Iyl0ZS2E9HGtsiLrWn0zOOMWlhtDmdf/uihDt6jnuCxgtwGBNy6Onsoy2s2O2Ow==" } } }, @@ -7503,14 +7507,14 @@ "dev": true }, "graphql": { - "version": "15.7.1", - "resolved": "https://registry.npmjs.org/graphql/-/graphql-15.7.1.tgz", - "integrity": "sha512-x34S6gC0/peBZnlK60zCJox/d45A7p6At9oN9EPA3qhoIAlR4LNZmXRLkICBckwwTMJzVdA8cx3QIQZMOl606A==" + "version": "15.8.0", + "resolved": "https://registry.npmjs.org/graphql/-/graphql-15.8.0.tgz", + "integrity": "sha512-5gghUc24tP9HRznNpV2+FIoq3xKkj5dTQqf4v0CpdPbFVwFkWoxOM+o+2OC9ZSvjEMTjfmG9QT+gcvggTwW1zw==" }, "graphql-executor": { - "version": "0.0.18", - "resolved": "https://registry.npmjs.org/graphql-executor/-/graphql-executor-0.0.18.tgz", - "integrity": "sha512-upUSl7tfZCZ5dWG1XkOvpG70Yk3duZKcCoi/uJso4WxJVT6KIrcK4nZ4+2X/hzx46pL8wAukgYHY6iNmocRN+g==" + "version": "0.0.19", + "resolved": "https://registry.npmjs.org/graphql-executor/-/graphql-executor-0.0.19.tgz", + "integrity": "sha512-AFOcsk/yMtl9jcO/f/0Our7unWxJ5m3FS5HjWfsXRHCyjjaubXpSHiOZO/hSYv6brayIrupDoVAzCuJpBc3elg==" }, "graphql-extensions": { "version": "0.15.0", @@ -10438,26 +10442,26 @@ } }, "moment": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz", - "integrity": "sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==" + "version": "2.29.2", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", + "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==" }, "mongodb": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.3.1.tgz", - "integrity": "sha512-sNa8APSIk+r4x31ZwctKjuPSaeKuvUeNb/fu/3B6dRM02HpEgig7hTHM8A/PJQTlxuC/KFWlDlQjhsk/S43tBg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-4.4.1.tgz", + "integrity": "sha512-IAD3nFtCR4s22vi5qjqkCBnuyDDrOW8WVSSmgHquOvGaP1iTD+XpC5tr8wAUbZ2EeZkaswwBKQFHDvl4qYcKqQ==", "requires": { "bson": "^4.6.1", "denque": "^2.0.1", - "mongodb-connection-string-url": "^2.4.1", + "mongodb-connection-string-url": "^2.5.2", "saslprep": "^1.0.3", - 
"socks": "^2.6.1" + "socks": "^2.6.2" }, "dependencies": { "bson": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/bson/-/bson-4.6.1.tgz", - "integrity": "sha512-I1LQ7Hz5zgwR4QquilLNZwbhPw0Apx7i7X9kGMBTsqPdml/03Q9NBtD9nt/19ahjlphktQImrnderxqpzeVDjw==", + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/bson/-/bson-4.6.2.tgz", + "integrity": "sha512-VeJKHShcu1b/ugl0QiujlVuBepab714X9nNyBdA1kfekuDGecxgpTA2Z6nYbagrWFeiIyzSWIOzju3lhj+RNyQ==", "requires": { "buffer": "^5.6.0" } @@ -10470,9 +10474,9 @@ } }, "mongodb-connection-string-url": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.4.1.tgz", - "integrity": "sha512-d5Kd2bVsKcSA7YI/yo57fSTtMwRQdFkvc5IZwod1RRxJtECeWPPSo7zqcUGJELifRA//Igs4spVtYAmvFCatug==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.5.2.tgz", + "integrity": "sha512-tWDyIG8cQlI5k3skB6ywaEA5F9f5OntrKKsT/Lteub2zgwSUlhqEN2inGgBTm8bpYJf8QYBdA/5naz65XDpczA==", "requires": { "@types/whatwg-url": "^8.2.1", "whatwg-url": "^11.0.0" @@ -10899,9 +10903,9 @@ } }, "node-forge": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", - "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==" + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.0.tgz", + "integrity": "sha512-08ARB91bUi6zNKzVmaj3QO7cr397uiDT2nJ63cHjyNtCTWIgvS47j3eT0WfzUwS9+6Z5YshRaoasFkXCKrIYbA==" }, "node-netstat": { "version": "1.8.0", @@ -15532,12 +15536,12 @@ } }, "socks": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.1.tgz", - "integrity": "sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA==", + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/socks/-/socks-2.6.2.tgz", + "integrity": "sha512-zDZhHhZRY9PxRruRMR7kMhnf3I8hDs4S3f9RecfnGxvcBHQcKcIH/oUcEWffsfl1XxdYlA7nnlGbbTvPz9D8gA==", "requires": { "ip": "^1.1.5", - "smart-buffer": "^4.1.0" + "smart-buffer": "^4.2.0" } }, "sort-keys": { diff --git a/package.json b/package.json index 2bc25fba02..6236fa9795 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "parse-server", - "version": "5.2.0", + "version": "5.3.0-alpha.6", "description": "An express module providing a Parse-compatible API server", "main": "lib/index.js", "repository": { @@ -19,13 +19,13 @@ ], "license": "BSD-3-Clause", "dependencies": { + "@apollo/client": "3.5.9", "@apollographql/graphql-playground-html": "1.6.29", - "@graphql-tools/links": "8.2.2", - "@apollo/client": "3.5.8", + "@graphql-tools/links": "8.2.4", "@graphql-tools/stitch": "6.2.4", "@graphql-tools/utils": "6.2.4", "@parse/fs-files-adapter": "1.2.1", - "@parse/push-adapter": "4.1.0", + "@parse/push-adapter": "4.1.2", "apollo-server-express": "2.25.2", "bcryptjs": "2.4.3", "body-parser": "1.19.1", @@ -34,7 +34,7 @@ "deepcopy": "2.1.0", "express": "4.17.2", "follow-redirects": "1.14.8", - "graphql": "15.7.1", + "graphql": "15.8.0", "graphql-list-fields": "2.0.2", "graphql-relay": "0.7.0", "graphql-tag": "2.12.6", @@ -46,7 +46,7 @@ "lodash": "4.17.21", "lru-cache": "6.0.0", "mime": "3.0.0", - "mongodb": "4.3.1", + "mongodb": "4.4.1", "mustache": "4.2.0", "parse": "3.4.1", "pg-monitor": "1.4.1", @@ -116,16 +116,18 @@ "test:mongodb:runnerstart": "cross-env 
MONGODB_VERSION=${MONGODB_VERSION:=$npm_config_dbversion} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner start", "test:mongodb:testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=$npm_config_dbversion} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 jasmine", "test:mongodb": "npm run test:mongodb:runnerstart --dbversion=$npm_config_dbversion && npm run test:mongodb:testonly --dbversion=$npm_config_dbversion", - "test:mongodb:4.0.27": "npm run test:mongodb --dbversion=4.0.27", - "test:mongodb:4.2.17": "npm run test:mongodb --dbversion=4.2.17", - "test:mongodb:4.4.10": "npm run test:mongodb --dbversion=4.4.10", - "test:mongodb:5.0.5": "npm run test:mongodb --dbversion=5.0.5", + "test:mongodb:4.0.28": "npm run test:mongodb --dbversion=4.0.28", + "test:mongodb:4.2.19": "npm run test:mongodb --dbversion=4.2.19", + "test:mongodb:4.4.13": "npm run test:mongodb --dbversion=4.4.13", + "test:mongodb:5.0.6": "npm run test:mongodb --dbversion=5.0.6", + "test:mongodb:5.1.1": "npm run test:mongodb --dbversion=5.1.1", + "test:mongodb:5.2.1": "npm run test:mongodb --dbversion=5.2.1", "posttest:mongodb": "mongodb-runner stop", - "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.0.5} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner start", - "testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.0.5} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 jasmine", + "pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.2.1} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner start", + "testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.2.1} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 jasmine", "test": "npm run testonly", - "posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.0.5} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner stop", - "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.0.5} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 nyc jasmine", + "posttest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.2.1} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} mongodb-runner stop", + "coverage": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.2.1} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} MONGODB_STORAGE_ENGINE=${MONGODB_STORAGE_ENGINE:=wiredTiger} TESTING=1 nyc jasmine", "start": "node ./bin/parse-server", "prettier": "prettier --write {src,spec}/{**/*,*}.js", "prepare": "npm run build", diff --git a/resources/buildConfigDefinitions.js b/resources/buildConfigDefinitions.js index 670aafad38..b41c53f07c 100644 --- a/resources/buildConfigDefinitions.js +++ b/resources/buildConfigDefinitions.js @@ -23,23 +23,25 @@ const nestedOptionTypes = [ 'PagesRoute', 'PasswordPolicyOptions', 'SecurityOptions', + 'SchemaOptions', ]; /** The prefix of environment variables for nested options. 
*/ const nestedOptionEnvPrefix = { - 'AccountLockoutOptions' : 'PARSE_SERVER_ACCOUNT_LOCKOUT_', - 'CustomPagesOptions' : 'PARSE_SERVER_CUSTOM_PAGES_', + 'AccountLockoutOptions': 'PARSE_SERVER_ACCOUNT_LOCKOUT_', + 'CustomPagesOptions': 'PARSE_SERVER_CUSTOM_PAGES_', 'DatabaseOptions': 'PARSE_SERVER_DATABASE_', - 'FileUploadOptions' : 'PARSE_SERVER_FILE_UPLOAD_', - 'IdempotencyOptions' : 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_', - 'LiveQueryOptions' : 'PARSE_SERVER_LIVEQUERY_', - 'LiveQueryServerOptions' : 'PARSE_LIVE_QUERY_SERVER_', - 'PagesCustomUrlsOptions' : 'PARSE_SERVER_PAGES_CUSTOM_URL_', - 'PagesOptions' : 'PARSE_SERVER_PAGES_', + 'FileUploadOptions': 'PARSE_SERVER_FILE_UPLOAD_', + 'IdempotencyOptions': 'PARSE_SERVER_EXPERIMENTAL_IDEMPOTENCY_', + 'LiveQueryOptions': 'PARSE_SERVER_LIVEQUERY_', + 'LiveQueryServerOptions': 'PARSE_LIVE_QUERY_SERVER_', + 'PagesCustomUrlsOptions': 'PARSE_SERVER_PAGES_CUSTOM_URL_', + 'PagesOptions': 'PARSE_SERVER_PAGES_', 'PagesRoute': 'PARSE_SERVER_PAGES_ROUTE_', - 'ParseServerOptions' : 'PARSE_SERVER_', - 'PasswordPolicyOptions' : 'PARSE_SERVER_PASSWORD_POLICY_', + 'ParseServerOptions': 'PARSE_SERVER_', + 'PasswordPolicyOptions': 'PARSE_SERVER_PASSWORD_POLICY_', 'SecurityOptions': 'PARSE_SERVER_SECURITY_', + 'SchemaOptions': 'PARSE_SERVER_SCHEMA_', }; function last(array) { @@ -50,7 +52,7 @@ const letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' function toENV(key) { let str = ''; let previousIsUpper = false; - for(let i = 0; i < key.length; i++) { + for (let i = 0; i < key.length; i++) { const char = key[i]; if (letters.indexOf(char) >= 0) { if (!previousIsUpper) { @@ -273,8 +275,8 @@ function inject(t, list) { return { results, comments }; } -const makeRequire = function(variableName, module, t) { - const decl = t.variableDeclarator(t.identifier(variableName), t.callExpression(t.identifier('require'), [t.stringLiteral(module)])); +const makeRequire = function (variableName, module, t) { + const decl = t.variableDeclarator(t.identifier(variableName), t.callExpression(t.identifier('require'), [t.stringLiteral(module)])); return t.variableDeclaration('var', [decl]) } let docs = ``; @@ -283,14 +285,14 @@ const plugin = function (babel) { const moduleExports = t.memberExpression(t.identifier('module'), t.identifier('exports')); return { visitor: { - ImportDeclaration: function(path) { + ImportDeclaration: function (path) { path.remove(); }, - Program: function(path) { + Program: function (path) { // Inject the parser's loader path.unshiftContainer('body', makeRequire('parsers', './parsers', t)); }, - ExportDeclaration: function(path) { + ExportDeclaration: function (path) { // Export declaration on an interface if (path.node && path.node.declaration && path.node.declaration.type == 'InterfaceDeclaration') { const { results, comments } = inject(t, doInterface(path.node.declaration)); @@ -313,6 +315,6 @@ Do not edit manually, but update Options/index.js ` const babel = require("@babel/core"); -const res = babel.transformFileSync('./src/Options/index.js', { plugins: [ plugin, '@babel/transform-flow-strip-types' ], babelrc: false, auxiliaryCommentBefore, sourceMaps: false }); +const res = babel.transformFileSync('./src/Options/index.js', { plugins: [plugin, '@babel/transform-flow-strip-types'], babelrc: false, auxiliaryCommentBefore, sourceMaps: false }); require('fs').writeFileSync('./src/Options/Definitions.js', res.code + '\n'); require('fs').writeFileSync('./src/Options/docs.js', docs); diff --git a/spec/CloudCode.spec.js b/spec/CloudCode.spec.js index 
4b8df9f9c9..a96702e019 100644 --- a/spec/CloudCode.spec.js +++ b/spec/CloudCode.spec.js @@ -1494,6 +1494,110 @@ describe('Cloud Code', () => { }); }); + it('before save can revert fields', async () => { + Parse.Cloud.beforeSave('TestObject', ({ object }) => { + object.revert('foo'); + return object; + }); + + Parse.Cloud.afterSave('TestObject', ({ object }) => { + expect(object.get('foo')).toBeUndefined(); + return object; + }); + + const obj = new TestObject(); + obj.set('foo', 'bar'); + await obj.save(); + + expect(obj.get('foo')).toBeUndefined(); + await obj.fetch(); + + expect(obj.get('foo')).toBeUndefined(); + }); + + it('before save can revert fields with existing object', async () => { + Parse.Cloud.beforeSave( + 'TestObject', + ({ object }) => { + object.revert('foo'); + return object; + }, + { + skipWithMasterKey: true, + } + ); + + Parse.Cloud.afterSave( + 'TestObject', + ({ object }) => { + expect(object.get('foo')).toBe('bar'); + return object; + }, + { + skipWithMasterKey: true, + } + ); + + const obj = new TestObject(); + obj.set('foo', 'bar'); + await obj.save(null, { useMasterKey: true }); + + expect(obj.get('foo')).toBe('bar'); + obj.set('foo', 'yolo'); + await obj.save(); + expect(obj.get('foo')).toBe('bar'); + }); + + it('can unset in afterSave', async () => { + Parse.Cloud.beforeSave('TestObject', ({ object }) => { + if (!object.existed()) { + object.set('secret', true); + return object; + } + object.revert('secret'); + }); + + Parse.Cloud.afterSave('TestObject', ({ object }) => { + object.unset('secret'); + }); + + Parse.Cloud.beforeFind( + 'TestObject', + ({ query }) => { + query.exclude('secret'); + }, + { + skipWithMasterKey: true, + } + ); + + const obj = new TestObject(); + await obj.save(); + expect(obj.get('secret')).toBeUndefined(); + await obj.fetch(); + expect(obj.get('secret')).toBeUndefined(); + await obj.fetch({ useMasterKey: true }); + expect(obj.get('secret')).toBe(true); + }); + + it('should revert in beforeSave', async () => { + Parse.Cloud.beforeSave('MyObject', ({ object }) => { + if (!object.existed()) { + object.set('count', 0); + return object; + } + object.revert('count'); + return object; + }); + const obj = await new Parse.Object('MyObject').save(); + expect(obj.get('count')).toBe(0); + obj.set('count', 10); + await obj.save(); + expect(obj.get('count')).toBe(0); + await obj.fetch(); + expect(obj.get('count')).toBe(0); + }); + it('beforeSave should not sanitize database', async done => { const { adapter } = Config.get(Parse.applicationId).database; const spy = spyOn(adapter, 'findOneAndUpdate').and.callThrough(); @@ -1860,6 +1964,36 @@ describe('afterSave hooks', () => { const myObject = new MyObject(); myObject.save().then(() => done()); }); + + it('should unset in afterSave', async () => { + Parse.Cloud.afterSave( + 'MyObject', + ({ object }) => { + object.unset('secret'); + }, + { + skipWithMasterKey: true, + } + ); + const obj = new Parse.Object('MyObject'); + obj.set('secret', 'bar'); + await obj.save(); + expect(obj.get('secret')).toBeUndefined(); + await obj.fetch(); + expect(obj.get('secret')).toBe('bar'); + }); + + it('should unset', async () => { + Parse.Cloud.beforeSave('MyObject', ({ object }) => { + object.set('secret', 'hidden'); + }); + + Parse.Cloud.afterSave('MyObject', ({ object }) => { + object.unset('secret'); + }); + const obj = await new Parse.Object('MyObject').save(); + expect(obj.get('secret')).toBeUndefined(); + }); }); describe('beforeDelete hooks', () => { @@ -3241,7 +3375,71 @@ describe('afterLogin hook', () => { 
}); }); +const str = 'Hello World!'; +const fileData = []; +for (let i = 0; i < str.length; i++) { + fileData.push(str.charCodeAt(i)); +} + describe('saveFile hooks', () => { + it('file hooks should be called', async () => { + const triggers = { + beforeSave({ file }) { + file.setTags({ tagA: 'some-tag' }); + file.setMetadata({ foo: 'bar' }); + }, + afterSave(req) { + expect(req.master).toBe(true); + expect(req.file._tags).toEqual({ tagA: 'some-tag' }); + expect(req.file._metadata).toEqual({ foo: 'bar' }); + }, + beforeCreate(req) { + expect(req.file).toBeDefined(); + expect(req.file._name.includes('hello.txt')).toBeTrue(); + expect(req.master).toBeFalse(); + expect(req.contentType).toBe('text/plain'); + }, + beforeFind(req) { + expect(req.file).toBeDefined(); + expect(req.file._name.includes('hello.txt')).toBeTrue(); + expect(req.master).toBeFalse(); + expect(req.contentType).toBe('text/plain'); + }, + afterFind(req) { + expect(req.file).toBeDefined(); + expect(req.fileSize).toBe(12); + expect(req.master).toBeFalse(); + expect(req.download).toBeFalse(); + expect(req.contentType).toBe('text/plain'); + req.download = true; + }, + }; + for (const key in triggers) { + spyOn(triggers, key).and.callThrough(); + } + Parse.Cloud.beforeCreate(Parse.File, triggers.beforeCreate); + Parse.Cloud.beforeSave(Parse.File, triggers.beforeSave); + Parse.Cloud.afterSave(Parse.File, triggers.afterSave); + Parse.Cloud.beforeFind(Parse.File, triggers.beforeFind); + Parse.Cloud.afterFind(Parse.File, triggers.afterFind); + + const file = new Parse.File('hello.txt', fileData, 'text/plain'); + await file.save({ useMasterKey: true }); + + const fileObject = await new Parse.Object('File', { file }).save(); + await fileObject.fetch(); + + const response = await request({ + url: file.url(), + }); + expect(response.text).toBe('Hello World!'); + expect(Object.keys(response.headers)).toContain('content-disposition'); + await new Promise(resolve => setTimeout(resolve, 100)); + for (const key in triggers) { + expect(triggers[key]).toHaveBeenCalled(); + } + }); + it('beforeSaveFile should return file that is already saved and not save anything to files adapter', async () => { await reconfigureServer({ filesAdapter: mockAdapter }); const createFileSpy = spyOn(mockAdapter, 'createFile').and.callThrough(); diff --git a/spec/FilesController.spec.js b/spec/FilesController.spec.js index 9ea66da6ea..8fee5aca2f 100644 --- a/spec/FilesController.spec.js +++ b/spec/FilesController.spec.js @@ -43,6 +43,20 @@ describe('FilesController', () => { done(); }); + it_only_db('mongo')('should pass databaseOptions to GridFSBucketAdapter', async () => { + await reconfigureServer({ + databaseURI: 'mongodb://localhost:27017/parse', + filesAdapter: null, + databaseAdapter: null, + databaseOptions: { + retryWrites: true, + }, + }); + const config = Config.get(Parse.applicationId); + expect(config.database.adapter._mongoOptions.retryWrites).toBeTrue(); + expect(config.filesController.adapter._mongoOptions.retryWrites).toBeTrue(); + }); + it('should create a server log on failure', done => { const logController = new LoggerController(new WinstonLoggerAdapter()); diff --git a/spec/MongoStorageAdapter.spec.js b/spec/MongoStorageAdapter.spec.js index a31a6134f7..130971a535 100644 --- a/spec/MongoStorageAdapter.spec.js +++ b/spec/MongoStorageAdapter.spec.js @@ -308,7 +308,7 @@ describe_only_db('mongo')('MongoStorageAdapter', () => { await expectAsync(adapter.getClass('UnknownClass')).toBeRejectedWith(undefined); }); - it('should use index for 
caseInsensitive query', async () => { + it_only_mongodb_version('<5.1')('should use index for caseInsensitive query', async () => { const user = new Parse.User(); user.set('username', 'Bugs'); user.set('password', 'Bunny'); @@ -342,6 +342,40 @@ describe_only_db('mongo')('MongoStorageAdapter', () => { expect(postIndexPlan.executionStats.executionStages.stage).toBe('FETCH'); }); + it_only_mongodb_version('>=5.1')('should use index for caseInsensitive query', async () => { + const user = new Parse.User(); + user.set('username', 'Bugs'); + user.set('password', 'Bunny'); + await user.signUp(); + + const database = Config.get(Parse.applicationId).database; + await database.adapter.dropAllIndexes('_User'); + + const preIndexPlan = await database.find( + '_User', + { username: 'bugs' }, + { caseInsensitive: true, explain: true } + ); + + const schema = await new Parse.Schema('_User').get(); + + await database.adapter.ensureIndex( + '_User', + schema, + ['username'], + 'case_insensitive_username', + true + ); + + const postIndexPlan = await database.find( + '_User', + { username: 'bugs' }, + { caseInsensitive: true, explain: true } + ); + expect(preIndexPlan.queryPlanner.winningPlan.queryPlan.stage).toBe('COLLSCAN'); + expect(postIndexPlan.queryPlanner.winningPlan.queryPlan.stage).toBe('FETCH'); + }); + it('should delete field without index', async () => { const database = Config.get(Parse.applicationId).database; const obj = new Parse.Object('MyObject'); diff --git a/spec/ParseQuery.hint.spec.js b/spec/ParseQuery.hint.spec.js index 2685137801..db45106359 100644 --- a/spec/ParseQuery.hint.spec.js +++ b/spec/ParseQuery.hint.spec.js @@ -27,7 +27,7 @@ describe_only_db('mongo')('Parse.Query hint', () => { await TestUtils.destroyAllDataPermanently(false); }); - it('query find with hint string', async () => { + it_only_mongodb_version('<5.1')('query find with hint string', async () => { const object = new TestObject(); await object.save(); @@ -39,7 +39,18 @@ describe_only_db('mongo')('Parse.Query hint', () => { expect(explain.queryPlanner.winningPlan.inputStage.indexName).toBe('_id_'); }); - it('query find with hint object', async () => { + it_only_mongodb_version('>=5.1')('query find with hint string', async () => { + const object = new TestObject(); + await object.save(); + + const collection = await config.database.adapter._adaptiveCollection('TestObject'); + const explain = await collection._rawFind({ _id: object.id }, { hint: '_id_', explain: true }); + expect(explain.queryPlanner.winningPlan.queryPlan.stage).toBe('FETCH'); + expect(explain.queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('IXSCAN'); + expect(explain.queryPlanner.winningPlan.queryPlan.inputStage.indexName).toBe('_id_'); + }); + + it_only_mongodb_version('<5.1')('query find with hint object', async () => { const object = new TestObject(); await object.save(); @@ -53,6 +64,20 @@ describe_only_db('mongo')('Parse.Query hint', () => { }); }); + it_only_mongodb_version('>=5.1')('query find with hint object', async () => { + const object = new TestObject(); + await object.save(); + + const collection = await config.database.adapter._adaptiveCollection('TestObject'); + const explain = await collection._rawFind( + { _id: object.id }, + { hint: { _id: 1 }, explain: true } + ); + expect(explain.queryPlanner.winningPlan.queryPlan.stage).toBe('FETCH'); + expect(explain.queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('IXSCAN'); + expect(explain.queryPlanner.winningPlan.queryPlan.inputStage.keyPattern).toEqual({ _id: 1 }); + }); 
+ it_only_mongodb_version('<4.4')('query aggregate with hint string', async () => { const object = new TestObject({ foo: 'bar' }); await object.save(); @@ -73,7 +98,7 @@ describe_only_db('mongo')('Parse.Query hint', () => { expect(queryPlanner.winningPlan.inputStage.indexName).toBe('_id_'); }); - it_only_mongodb_version('>=4.4')('query aggregate with hint string', async () => { + it_only_mongodb_version('>=4.4<5.1')('query aggregate with hint string', async () => { const object = new TestObject({ foo: 'bar' }); await object.save(); @@ -97,6 +122,54 @@ describe_only_db('mongo')('Parse.Query hint', () => { expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_'); }); + it_only_mongodb_version('>=5.1<5.2')('query aggregate with hint string', async () => { + const object = new TestObject({ foo: 'bar' }); + await object.save(); + + const collection = await config.database.adapter._adaptiveCollection('TestObject'); + let result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + explain: true, + }); + let { queryPlanner } = result[0].stages[0].$cursor; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('PROJECTION_SIMPLE'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('COLLSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage).toBeUndefined(); + + result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + hint: '_id_', + explain: true, + }); + queryPlanner = result[0].stages[0].$cursor.queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('PROJECTION_SIMPLE'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('FETCH'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.stage).toBe('IXSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.indexName).toBe('_id_'); + }); + + it_only_mongodb_version('>=5.2')('query aggregate with hint string', async () => { + const object = new TestObject({ foo: 'bar' }); + await object.save(); + + const collection = await config.database.adapter._adaptiveCollection('TestObject'); + let result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + explain: true, + }); + let queryPlanner = result[0].queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('GROUP'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('COLLSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage).toBeUndefined(); + + result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + hint: '_id_', + explain: true, + }); + queryPlanner = result[0].queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('GROUP'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('FETCH'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.stage).toBe('IXSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.indexName).toBe('_id_'); + }); + it_only_mongodb_version('<4.4')('query aggregate with hint object', async () => { const object = new TestObject({ foo: 'bar' }); await object.save(); @@ -117,7 +190,7 @@ describe_only_db('mongo')('Parse.Query hint', () => { expect(queryPlanner.winningPlan.inputStage.keyPattern).toEqual({ _id: 1 }); }); - it_only_mongodb_version('>=4.4')('query aggregate with hint object', async () => { + it_only_mongodb_version('>=4.4<5.1')('query aggregate with hint object', async () => { const object = new TestObject({ foo: 'bar' }); await object.save(); @@ -142,7 +215,57 @@ 
describe_only_db('mongo')('Parse.Query hint', () => { expect(queryPlanner.winningPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 }); }); - it('query find with hint (rest)', async () => { + it_only_mongodb_version('>=5.1<5.2')('query aggregate with hint object', async () => { + const object = new TestObject({ foo: 'bar' }); + await object.save(); + + const collection = await config.database.adapter._adaptiveCollection('TestObject'); + let result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + explain: true, + }); + let { queryPlanner } = result[0].stages[0].$cursor; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('PROJECTION_SIMPLE'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('COLLSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage).toBeUndefined(); + + result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + hint: { _id: 1 }, + explain: true, + }); + queryPlanner = result[0].stages[0].$cursor.queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('PROJECTION_SIMPLE'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('FETCH'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.stage).toBe('IXSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.indexName).toBe('_id_'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 }); + }); + + it_only_mongodb_version('>=5.2')('query aggregate with hint object', async () => { + const object = new TestObject({ foo: 'bar' }); + await object.save(); + + const collection = await config.database.adapter._adaptiveCollection('TestObject'); + let result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + explain: true, + }); + let queryPlanner = result[0].queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('GROUP'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('COLLSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage).toBeUndefined(); + + result = await collection.aggregate([{ $group: { _id: '$foo' } }], { + hint: { _id: 1 }, + explain: true, + }); + queryPlanner = result[0].queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('GROUP'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('FETCH'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.stage).toBe('IXSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.indexName).toBe('_id_'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 }); + }); + + it_only_mongodb_version('<5.1')('query find with hint (rest)', async () => { const object = new TestObject(); await object.save(); let options = Object.assign({}, masterKeyOptions, { @@ -167,6 +290,31 @@ describe_only_db('mongo')('Parse.Query hint', () => { expect(explain.queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_'); }); + it_only_mongodb_version('>=5.1')('query find with hint (rest)', async () => { + const object = new TestObject(); + await object.save(); + let options = Object.assign({}, masterKeyOptions, { + url: Parse.serverURL + '/classes/TestObject', + qs: { + explain: true, + }, + }); + let response = await request(options); + let explain = response.data.results; + expect(explain.queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('COLLSCAN'); + + options = Object.assign({}, masterKeyOptions, { + url: Parse.serverURL + '/classes/TestObject', 
+ qs: { + explain: true, + hint: '_id_', + }, + }); + response = await request(options); + explain = response.data.results; + expect(explain.queryPlanner.winningPlan.queryPlan.inputStage.inputStage.indexName).toBe('_id_'); + }); + it_only_mongodb_version('<4.4')('query aggregate with hint (rest)', async () => { const object = new TestObject({ foo: 'bar' }); await object.save(); @@ -194,7 +342,7 @@ describe_only_db('mongo')('Parse.Query hint', () => { expect(queryPlanner.winningPlan.inputStage.keyPattern).toEqual({ _id: 1 }); }); - it_only_mongodb_version('>=4.4')('query aggregate with hint (rest)', async () => { + it_only_mongodb_version('>=4.4<5.1')('query aggregate with hint (rest)', async () => { const object = new TestObject({ foo: 'bar' }); await object.save(); let options = Object.assign({}, masterKeyOptions, { @@ -226,4 +374,70 @@ describe_only_db('mongo')('Parse.Query hint', () => { expect(queryPlanner.winningPlan.inputStage.inputStage.indexName).toBe('_id_'); expect(queryPlanner.winningPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 }); }); + + it_only_mongodb_version('>=5.1<5.2')('query aggregate with hint (rest)', async () => { + const object = new TestObject({ foo: 'bar' }); + await object.save(); + let options = Object.assign({}, masterKeyOptions, { + url: Parse.serverURL + '/aggregate/TestObject', + qs: { + explain: true, + group: JSON.stringify({ objectId: '$foo' }), + }, + }); + let response = await request(options); + let { queryPlanner } = response.data.results[0].stages[0].$cursor; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('PROJECTION_SIMPLE'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('COLLSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage).toBeUndefined(); + + options = Object.assign({}, masterKeyOptions, { + url: Parse.serverURL + '/aggregate/TestObject', + qs: { + explain: true, + hint: '_id_', + group: JSON.stringify({ objectId: '$foo' }), + }, + }); + response = await request(options); + queryPlanner = response.data.results[0].stages[0].$cursor.queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('PROJECTION_SIMPLE'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('FETCH'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.stage).toBe('IXSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.indexName).toBe('_id_'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 }); + }); + + it_only_mongodb_version('>=5.2')('query aggregate with hint (rest)', async () => { + const object = new TestObject({ foo: 'bar' }); + await object.save(); + let options = Object.assign({}, masterKeyOptions, { + url: Parse.serverURL + '/aggregate/TestObject', + qs: { + explain: true, + group: JSON.stringify({ objectId: '$foo' }), + }, + }); + let response = await request(options); + let queryPlanner = response.data.results[0].queryPlanner; + expect(queryPlanner.winningPlan.queryPlan.stage).toBe('GROUP'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('COLLSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage).toBeUndefined(); + + options = Object.assign({}, masterKeyOptions, { + url: Parse.serverURL + '/aggregate/TestObject', + qs: { + explain: true, + hint: '_id_', + group: JSON.stringify({ objectId: '$foo' }), + }, + }); + response = await request(options); + queryPlanner = response.data.results[0].queryPlanner; + 
expect(queryPlanner.winningPlan.queryPlan.stage).toBe('GROUP'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.stage).toBe('FETCH'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.stage).toBe('IXSCAN'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.indexName).toBe('_id_'); + expect(queryPlanner.winningPlan.queryPlan.inputStage.inputStage.keyPattern).toEqual({ _id: 1 }); + }); }); diff --git a/src/Controllers/FilesController.js b/src/Controllers/FilesController.js index aaff8511fe..611c63ecf2 100644 --- a/src/Controllers/FilesController.js +++ b/src/Controllers/FilesController.js @@ -2,14 +2,11 @@ import { randomHexString } from '../cryptoUtils'; import AdaptableController from './AdaptableController'; import { validateFilename, FilesAdapter } from '../Adapters/Files/FilesAdapter'; +import { maybeRunFileTrigger, Types } from '../triggers'; import path from 'path'; import mime from 'mime'; const Parse = require('parse').Parse; -const legacyFilesRegex = new RegExp( - '^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}-.*' -); - export class FilesController extends AdaptableController { getFileData(config, filename) { return this.adapter.getFileData(filename); @@ -55,36 +52,31 @@ export class FilesController extends AdaptableController { * with the current mount point and app id. * Object may be a single object or list of REST-format objects. */ - expandFilesInObject(config, object) { + async expandFilesInObject(config, object, auth) { if (object instanceof Array) { - object.map(obj => this.expandFilesInObject(config, obj)); + await Promise.all(object.map(async obj => await this.expandFilesInObject(config, obj, auth))); return; } if (typeof object !== 'object') { return; } for (const key in object) { - const fileObject = object[key]; + let fileObject = object[key]; if (fileObject && fileObject['__type'] === 'File') { if (fileObject['url']) { continue; } const filename = fileObject['name']; - // all filenames starting with "tfss-" should be from files.parsetfss.com - // all filenames starting with a "-" seperated UUID should be from files.parse.com - // all other filenames have been migrated or created from Parse Server - if (config.fileKey === undefined) { - fileObject['url'] = this.adapter.getFileLocation(config, filename); - } else { - if (filename.indexOf('tfss-') === 0) { - fileObject['url'] = - 'http://files.parsetfss.com/' + config.fileKey + '/' + encodeURIComponent(filename); - } else if (legacyFilesRegex.test(filename)) { - fileObject['url'] = - 'http://files.parse.com/' + config.fileKey + '/' + encodeURIComponent(filename); - } else { - fileObject['url'] = this.adapter.getFileLocation(config, filename); - } + fileObject['url'] = this.adapter.getFileLocation(config, filename); + const contentType = mime.getType(filename); + const triggerResult = await maybeRunFileTrigger( + Types.beforeCreate, + { file: new Parse.File(filename, [], contentType), contentType }, + config, + auth + ); + if (triggerResult instanceof Parse.File) { + fileObject = triggerResult.toJSON(); } } } diff --git a/src/Controllers/index.js b/src/Controllers/index.js index b2feff0fc2..0a9b3db57d 100644 --- a/src/Controllers/index.js +++ b/src/Controllers/index.js @@ -91,12 +91,20 @@ export function getLoggerController(options: ParseServerOptions): LoggerControll } export function getFilesController(options: ParseServerOptions): FilesController { - const { appId, databaseURI, filesAdapter, databaseAdapter, preserveFileName, fileKey } = options; + 
const { + appId, + databaseURI, + databaseOptions = {}, + filesAdapter, + databaseAdapter, + preserveFileName, + fileKey, + } = options; if (!filesAdapter && databaseAdapter) { throw 'When using an explicit database adapter, you must also use an explicit filesAdapter.'; } const filesControllerAdapter = loadAdapter(filesAdapter, () => { - return new GridFSBucketAdapter(databaseURI, {}, fileKey); + return new GridFSBucketAdapter(databaseURI, databaseOptions, fileKey); }); return new FilesController(filesControllerAdapter, appId, { preserveFileName, diff --git a/src/Options/Definitions.js b/src/Options/Definitions.js index 1edd704bd6..f8b8eab633 100644 --- a/src/Options/Definitions.js +++ b/src/Options/Definitions.js @@ -5,6 +5,51 @@ Do not edit manually, but update Options/index.js */ var parsers = require('./parsers'); +module.exports.SchemaOptions = { + afterMigration: { + env: 'PARSE_SERVER_SCHEMA_AFTER_MIGRATION', + help: 'Execute a callback after running schema migrations.', + }, + beforeMigration: { + env: 'PARSE_SERVER_SCHEMA_BEFORE_MIGRATION', + help: 'Execute a callback before running schema migrations.', + }, + definitions: { + env: 'PARSE_SERVER_SCHEMA_DEFINITIONS', + help: + 'Rest representation of Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema', + required: true, + action: parsers.objectParser, + default: [], + }, + deleteExtraFields: { + env: 'PARSE_SERVER_SCHEMA_DELETE_EXTRA_FIELDS', + help: + 'Is true if Parse Server should delete any fields not defined in a schema definition. This should only be used during development.', + action: parsers.booleanParser, + default: false, + }, + lockSchemas: { + env: 'PARSE_SERVER_SCHEMA_LOCK_SCHEMAS', + help: + 'Is true if Parse Server will reject any attempts to modify the schema while the server is running.', + action: parsers.booleanParser, + default: false, + }, + recreateModifiedFields: { + env: 'PARSE_SERVER_SCHEMA_RECREATE_MODIFIED_FIELDS', + help: + 'Is true if Parse Server should recreate any fields that are different between the current database schema and the schema definition. This should only be used during development.', + action: parsers.booleanParser, + default: false, + }, + strict: { + env: 'PARSE_SERVER_SCHEMA_STRICT', + help: 'Is true if Parse Server should exit if a schema update fails.', + action: parsers.booleanParser, + default: false, + }, +}; module.exports.ParseServerOptions = { accountLockout: { env: 'PARSE_SERVER_ACCOUNT_LOCKOUT', @@ -385,6 +430,11 @@ module.exports.ParseServerOptions = { action: parsers.booleanParser, default: false, }, + schema: { + env: 'PARSE_SERVER_SCHEMA', + help: 'Defined schema', + action: parsers.objectParser, + }, security: { env: 'PARSE_SERVER_SECURITY', help: 'The security options to identify and report weak security settings.', @@ -464,45 +514,6 @@ module.exports.SecurityOptions = { default: false, }, }; -module.exports.SchemaOptions = { - definitions: { - help: 'The schema definitions.', - default: [], - }, - strict: { - env: 'PARSE_SERVER_SCHEMA_STRICT', - help: 'Is true if Parse Server should exit if schema update fail.', - action: parsers.booleanParser, - default: true, - }, - deleteExtraFields: { - env: 'PARSE_SERVER_SCHEMA_DELETE_EXTRA_FIELDS', - help: - 'Is true if Parse Server should delete any fields not defined in a schema definition. 
This should only be used during development.', - action: parsers.booleanParser, - default: false, - }, - recreateModifiedFields: { - env: 'PARSE_SERVER_SCHEMA_RECREATE_MODIFIED_FIELDS', - help: - 'Is true if Parse Server should recreate any fields that are different between the current database schema and theschema definition. This should only be used during development.', - action: parsers.booleanParser, - default: false, - }, - lockSchemas: { - env: 'PARSE_SERVER_SCHEMA_LOCK', - help: - 'Is true if Parse Server will reject any attempts to modify the schema while the server is running.', - action: parsers.booleanParser, - default: false, - }, - beforeMigration: { - help: 'Execute a callback before running schema migrations.', - }, - afterMigration: { - help: 'Execute a callback after running schema migrations.', - }, -}; module.exports.PagesOptions = { customRoutes: { env: 'PARSE_SERVER_PAGES_CUSTOM_ROUTES', diff --git a/src/Options/docs.js b/src/Options/docs.js index fc0ff3b799..24b60c46a9 100644 --- a/src/Options/docs.js +++ b/src/Options/docs.js @@ -1,3 +1,14 @@ +/** + * @interface SchemaOptions + * @property {Function} afterMigration Execute a callback after running schema migrations. + * @property {Function} beforeMigration Execute a callback before running schema migrations. + * @property {Any} definitions Rest representation of Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema + * @property {Boolean} deleteExtraFields Is true if Parse Server should delete any fields not defined in a schema definition. This should only be used during development. + * @property {Boolean} lockSchemas Is true if Parse Server will reject any attempts to modify the schema while the server is running. + * @property {Boolean} recreateModifiedFields Is true if Parse Server should recreate any fields that are different between the current database schema and the schema definition. This should only be used during development. + * @property {Boolean} strict Is true if Parse Server should exit if a schema update fails. + */ + /** * @interface ParseServerOptions * @property {AccountLockoutOptions} accountLockout The account lockout policy for failed login attempts. @@ -68,6 +79,7 @@ * @property {String} restAPIKey Key for REST calls * @property {Boolean} revokeSessionOnPasswordReset When a user changes their password, either through the reset password email or while logged in, all sessions are revoked if this is true. Set to false if you don't want to revoke sessions. * @property {Boolean} scheduledPush Configuration for push scheduling, defaults to false. + * @property {SchemaOptions} schema Defined schema * @property {SecurityOptions} security The security options to identify and report weak security settings. 
* @property {Function} serverCloseComplete Callback when server has closed * @property {Function} serverStartComplete Callback when server has started diff --git a/src/Options/index.js b/src/Options/index.js index 3482d88c50..8124446f99 100644 --- a/src/Options/index.js +++ b/src/Options/index.js @@ -8,7 +8,28 @@ import { MailAdapter } from '../Adapters/Email/MailAdapter'; import { PubSubAdapter } from '../Adapters/PubSub/PubSubAdapter'; import { WSSAdapter } from '../Adapters/WebSocketServer/WSSAdapter'; import { CheckGroup } from '../Security/CheckGroup'; -import type { SchemaOptions } from '../SchemaMigrations/Migrations'; + +export interface SchemaOptions { + /* Rest representation of Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema + :DEFAULT: [] */ + definitions: any; + /* Is true if Parse Server should exit if a schema update fails. + :DEFAULT: false */ + strict: ?boolean; + /* Is true if Parse Server should delete any fields not defined in a schema definition. This should only be used during development. + :DEFAULT: false */ + deleteExtraFields: ?boolean; + /* Is true if Parse Server should recreate any fields that are different between the current database schema and the schema definition. This should only be used during development. + :DEFAULT: false */ + recreateModifiedFields: ?boolean; + /* Is true if Parse Server will reject any attempts to modify the schema while the server is running. + :DEFAULT: false */ + lockSchemas: ?boolean; + /* Execute a callback before running schema migrations. */ + beforeMigration: ?() => void | Promise; + /* Execute a callback after running schema migrations. */ + afterMigration: ?() => void | Promise; +} type Adapter = string | any | T; type NumberOrBoolean = number | boolean; @@ -246,7 +267,9 @@ export interface ParseServerOptions { playgroundPath: ?string; /* Callback when server has started */ serverStartComplete: ?(error: ?Error) => void; - /* Rest representation on Parse.Schema https://docs.parseplatform.org/rest/guide/#adding-a-schema */ + /* Defined schema + :ENV: PARSE_SERVER_SCHEMA + */ schema: ?SchemaOptions; /* Callback when server has closed */ serverCloseComplete: ?() => void; diff --git a/src/RestQuery.js b/src/RestQuery.js index be96683451..b6fba29e02 100644 --- a/src/RestQuery.js +++ b/src/RestQuery.js @@ -640,7 +640,7 @@ RestQuery.prototype.replaceEquality = function () { // Returns a promise for whether it was successful. // Populates this.response with an object that only has 'results'. 
-RestQuery.prototype.runFind = function (options = {}) { +RestQuery.prototype.runFind = async function (options = {}) { if (this.findOptions.limit === 0) { this.response = { results: [] }; return Promise.resolve(); @@ -654,24 +654,25 @@ RestQuery.prototype.runFind = function (options = {}) { if (options.op) { findOptions.op = options.op; } - return this.config.database - .find(this.className, this.restWhere, findOptions, this.auth) - .then(results => { - if (this.className === '_User' && !findOptions.explain) { - for (var result of results) { - cleanResultAuthData(result); - } - } - - this.config.filesController.expandFilesInObject(this.config, results); + const results = await this.config.database.find( + this.className, + this.restWhere, + findOptions, + this.auth + ); + if (this.className === '_User' && !findOptions.explain) { + for (var result of results) { + cleanResultAuthData(result); + } + } + await this.config.filesController.expandFilesInObject(this.config, results, this.auth); - if (this.redirectClassName) { - for (var r of results) { - r.className = this.redirectClassName; - } - } - this.response = { results: results }; - }); + if (this.redirectClassName) { + for (var r of results) { + r.className = this.redirectClassName; + } + } + this.response = { results: results }; }; // Returns a promise for whether it was successful. diff --git a/src/RestWrite.js b/src/RestWrite.js index 8b728731da..22a8228aa7 100644 --- a/src/RestWrite.js +++ b/src/RestWrite.js @@ -95,6 +95,7 @@ function RestWrite(config, auth, className, query, data, originalData, clientSDK // Shared SchemaController to be reused to reduce the number of loadSchema() calls per request // Once set the schemaData should be immutable this.validSchemaController = null; + this.pendingOps = {}; } // A convenient method to perform all the steps of processing the @@ -225,18 +226,11 @@ RestWrite.prototype.runBeforeSaveTrigger = function () { return Promise.resolve(); } - // Cloud code gets a bit of extra data for its objects - var extraData = { className: this.className }; - if (this.query && this.query.objectId) { - extraData.objectId = this.query.objectId; - } + const { originalObject, updatedObject } = this.buildParseObjects(); - let originalObject = null; - const updatedObject = this.buildUpdatedObject(extraData); - if (this.query && this.query.objectId) { - // This is an update for existing object. - originalObject = triggers.inflate(extraData, this.originalData); - } + const stateController = Parse.CoreManager.getObjectStateController(); + const [pending] = stateController.getPendingOps(updatedObject._getStateIdentifier()); + this.pendingOps = { ...pending }; return Promise.resolve() .then(() => { @@ -311,7 +305,7 @@ RestWrite.prototype.runBeforeLoginTrigger = async function (userData) { const extraData = { className: this.className }; // Expand file objects - this.config.filesController.expandFilesInObject(this.config, userData); + await this.config.filesController.expandFilesInObject(this.config, userData, this.auth); const user = triggers.inflate(extraData, userData); @@ -1312,7 +1306,11 @@ RestWrite.prototype.handleInstallation = function () { RestWrite.prototype.expandFilesForExistingObjects = function () { // Check whether we have a short-circuited response - only then run expansion. 
if (this.response && this.response.response) { - this.config.filesController.expandFilesInObject(this.config, this.response.response); + return this.config.filesController.expandFilesInObject( + this.config, + this.response.response, + this.auth + ); } }; @@ -1531,20 +1529,7 @@ RestWrite.prototype.runAfterSaveTrigger = function () { return Promise.resolve(); } - var extraData = { className: this.className }; - if (this.query && this.query.objectId) { - extraData.objectId = this.query.objectId; - } - - // Build the original object, we only do this for a update write. - let originalObject; - if (this.query && this.query.objectId) { - originalObject = triggers.inflate(extraData, this.originalData); - } - - // Build the inflated object, different from beforeSave, originalData is not empty - // since developers can change data in the beforeSave. - const updatedObject = this.buildUpdatedObject(extraData); + const { originalObject, updatedObject } = this.buildParseObjects(); updatedObject._handleSaveResponse(this.response.response, this.response.status || 200); this.config.database.loadSchema().then(schemaController => { @@ -1569,8 +1554,15 @@ RestWrite.prototype.runAfterSaveTrigger = function () { this.context ) .then(result => { - if (result && typeof result === 'object') { + const jsonReturned = result && !result._toFullJSON; + if (jsonReturned) { + this.pendingOps = {}; this.response.response = result; + } else { + this.response.response = this._updateResponseWithData( + (result || updatedObject)._toFullJSON(), + this.data + ); } }) .catch(function (err) { @@ -1604,7 +1596,13 @@ RestWrite.prototype.sanitizedData = function () { }; // Returns an updated copy of the object -RestWrite.prototype.buildUpdatedObject = function (extraData) { +RestWrite.prototype.buildParseObjects = function () { + const extraData = { className: this.className, objectId: this.query?.objectId }; + let originalObject; + if (this.query && this.query.objectId) { + originalObject = triggers.inflate(extraData, this.originalData); + } + const className = Parse.Object.fromJSON(extraData); const readOnlyAttributes = className.constructor.readOnlyAttributes ? className.constructor.readOnlyAttributes() @@ -1642,7 +1640,7 @@ RestWrite.prototype.buildUpdatedObject = function (extraData) { delete sanitized[attribute]; } updatedObject.set(sanitized); - return updatedObject; + return { updatedObject, originalObject }; }; RestWrite.prototype.cleanUserAuthData = function () { @@ -1662,6 +1660,15 @@ RestWrite.prototype.cleanUserAuthData = function () { }; RestWrite.prototype._updateResponseWithData = function (response, data) { + const { updatedObject } = this.buildParseObjects(); + const stateController = Parse.CoreManager.getObjectStateController(); + const [pending] = stateController.getPendingOps(updatedObject._getStateIdentifier()); + for (const key in this.pendingOps) { + if (!pending[key]) { + data[key] = this.originalData ? 
this.originalData[key] : { __op: 'Delete' }; + this.storage.fieldsChangedByTrigger.push(key); + } + } if (_.isEmpty(this.storage.fieldsChangedByTrigger)) { return response; } diff --git a/src/Routers/FilesRouter.js b/src/Routers/FilesRouter.js index c0c7e00f13..d96141653d 100644 --- a/src/Routers/FilesRouter.js +++ b/src/Routers/FilesRouter.js @@ -64,31 +64,49 @@ export class FilesRouter { return router; } - getHandler(req, res) { + async getHandler(req, res) { const config = Config.get(req.params.appId); const filesController = config.filesController; const filename = req.params.filename; const contentType = mime.getType(filename); - if (isFileStreamable(req, filesController)) { - filesController.handleFileStream(config, filename, req, res, contentType).catch(() => { - res.status(404); - res.set('Content-Type', 'text/plain'); - res.end('File not found.'); - }); - } else { - filesController - .getFileData(config, filename) - .then(data => { - res.status(200); - res.set('Content-Type', contentType); - res.set('Content-Length', data.length); - res.end(data); - }) - .catch(() => { - res.status(404); - res.set('Content-Type', 'text/plain'); - res.end('File not found.'); - }); + try { + await triggers.maybeRunFileTrigger( + triggers.Types.beforeFind, + { file: new Parse.File(filename, [], contentType), contentType }, + config, + req.auth + ); + if (isFileStreamable(req, filesController)) { + filesController.handleFileStream(config, filename, req, res, contentType); + return; + } + const data = await filesController.getFileData(config, filename); + const base64 = data.toString('base64'); + const file = new Parse.File(filename, { base64 }, contentType); + const fileSize = Buffer.byteLength(data); + const fileObject = { file, fileSize, download: false, contentType }; + const triggerResult = await triggers.maybeRunFileTrigger( + triggers.Types.afterFind, + fileObject, + config, + req.auth + ); + if (triggerResult instanceof Parse.File) { + fileObject.file = triggerResult; + } + res.status(200); + res.set('Content-Type', fileObject.file.contentType); + if (fileObject.download) { + res.set('Content-Disposition', `attachment;filename=${fileObject.file.name()}`); + } + const bufferData = Buffer.from(fileObject.file._data, 'base64'); + res.set('Content-Length', bufferData.length); + res.end(bufferData); + } catch (e) { + console.log(e); + res.status(404); + res.set('Content-Type', 'text/plain'); + res.end((e && e.message) || 'File not found.'); } } @@ -141,7 +159,7 @@ export class FilesRouter { try { // run beforeSaveFile trigger const triggerResult = await triggers.maybeRunFileTrigger( - triggers.Types.beforeSaveFile, + triggers.Types.beforeSave, fileObject, config, req.auth @@ -194,12 +212,7 @@ export class FilesRouter { }; } // run afterSaveFile trigger - await triggers.maybeRunFileTrigger( - triggers.Types.afterSaveFile, - fileObject, - config, - req.auth - ); + await triggers.maybeRunFileTrigger(triggers.Types.afterSave, fileObject, config, req.auth); res.status(201); res.set('Location', saveResult.url); res.json(saveResult); @@ -222,7 +235,7 @@ export class FilesRouter { file._url = filesController.adapter.getFileLocation(req.config, filename); const fileObject = { file, fileSize: null }; await triggers.maybeRunFileTrigger( - triggers.Types.beforeDeleteFile, + triggers.Types.beforeDelete, fileObject, req.config, req.auth @@ -231,7 +244,7 @@ export class FilesRouter { await filesController.deleteFile(req.config, filename); // run afterDeleteFile trigger await triggers.maybeRunFileTrigger( - 
triggers.Types.afterDeleteFile, + triggers.Types.afterDelete, fileObject, req.config, req.auth diff --git a/src/Routers/UsersRouter.js b/src/Routers/UsersRouter.js index cdce6a1348..d1556c71b3 100644 --- a/src/Routers/UsersRouter.js +++ b/src/Routers/UsersRouter.js @@ -217,7 +217,7 @@ export class UsersRouter extends ClassesRouter { // Remove hidden properties. UsersRouter.removeHiddenProperties(user); - req.config.filesController.expandFilesInObject(req.config, user); + await req.config.filesController.expandFilesInObject(req.config, user, req.auth); // Before login trigger; throws if failure await maybeRunTrigger( diff --git a/src/SchemaMigrations/Migrations.js b/src/SchemaMigrations/Migrations.js index 9142cdbcde..8768911189 100644 --- a/src/SchemaMigrations/Migrations.js +++ b/src/SchemaMigrations/Migrations.js @@ -1,5 +1,15 @@ // @flow +export interface SchemaOptions { + definitions: JSONSchema[]; + strict: ?boolean; + deleteExtraFields: ?boolean; + recreateModifiedFields: ?boolean; + lockSchemas: ?boolean; + beforeMigration: ?() => void | Promise; + afterMigration: ?() => void | Promise; +} + export type FieldValueType = | 'String' | 'Boolean' @@ -35,17 +45,6 @@ export interface IndexesInterface { [key: string]: IndexInterface; } -export interface SchemaOptions { - definitions: JSONSchema[]; - strict: ?boolean; - deleteExtraFields: ?boolean; - recreateModifiedFields: ?boolean; - lockSchemas: ?boolean; - /* Callback when server has started and before running schemas migration operations if schemas key provided */ - beforeMigration: ?() => void | Promise; - afterMigration: ?() => void | Promise; -} - export type CLPOperation = 'find' | 'count' | 'get' | 'update' | 'create' | 'delete'; // @Typescript 4.1+ // type CLPPermission = 'requiresAuthentication' | '*' | `user:${string}` | `role:${string}` diff --git a/src/cloud-code/Parse.Cloud.js b/src/cloud-code/Parse.Cloud.js index 9fb437cada..61026a2952 100644 --- a/src/cloud-code/Parse.Cloud.js +++ b/src/cloud-code/Parse.Cloud.js @@ -440,128 +440,65 @@ ParseCloud.afterFind = function (parseClass, handler, validationHandler) { ); }; -/** - * Registers a before save file function. - * - * **Available in Cloud Code only.** - * - * ``` - * Parse.Cloud.beforeSaveFile(async (request) => { - * // code here - * }, (request) => { - * // validation code here - * }); - * - * Parse.Cloud.beforeSaveFile(async (request) => { - * // code here - * }, { ...validationObject }); - *``` - * - * @method beforeSaveFile - * @name Parse.Cloud.beforeSaveFile - * @param {Function} func The function to run before saving a file. This function can be async and should take just one parameter, {@link Parse.Cloud.FileTriggerRequest}. - * @param {(Object|Function)} validator An optional function to help validating cloud code. This function can be an async function and should take one parameter a {@link Parse.Cloud.FileTriggerRequest}, or a {@link Parse.Cloud.ValidatorObject}. - */ ParseCloud.beforeSaveFile = function (handler, validationHandler) { - validateValidator(validationHandler); - triggers.addFileTrigger( - triggers.Types.beforeSaveFile, - handler, - Parse.applicationId, - validationHandler - ); + Deprecator.logRuntimeDeprecation({ + usage: 'Parse.Cloud.beforeSaveFile', + solution: 'Use Parse.Cloud.beforeSave(Parse.File).', + }); + ParseCloud.beforeSave(Parse.File, handler, validationHandler); }; -/** - * Registers an after save file function. 
- * - * **Available in Cloud Code only.** - * - * ``` - * Parse.Cloud.afterSaveFile(async (request) => { - * // code here - * }, (request) => { - * // validation code here - * }); - * - * Parse.Cloud.afterSaveFile(async (request) => { - * // code here - * }, { ...validationObject }); - *``` - * - * @method afterSaveFile - * @name Parse.Cloud.afterSaveFile - * @param {Function} func The function to run after saving a file. This function can be async and should take just one parameter, {@link Parse.Cloud.FileTriggerRequest}. - * @param {(Object|Function)} validator An optional function to help validating cloud code. This function can be an async function and should take one parameter a {@link Parse.Cloud.FileTriggerRequest}, or a {@link Parse.Cloud.ValidatorObject}. - */ ParseCloud.afterSaveFile = function (handler, validationHandler) { - validateValidator(validationHandler); - triggers.addFileTrigger( - triggers.Types.afterSaveFile, - handler, - Parse.applicationId, - validationHandler - ); + Deprecator.logRuntimeDeprecation({ + usage: 'Parse.Cloud.afterSaveFile', + solution: 'Use Parse.Cloud.afterSave(Parse.File).', + }); + ParseCloud.afterSave(Parse.File, handler, validationHandler); }; -/** - * Registers a before delete file function. - * - * **Available in Cloud Code only.** - * - * ``` - * Parse.Cloud.beforeDeleteFile(async (request) => { - * // code here - * }, (request) => { - * // validation code here - * }); - * - * Parse.Cloud.beforeDeleteFile(async (request) => { - * // code here - * }, { ...validationObject }); - *``` - * - * @method beforeDeleteFile - * @name Parse.Cloud.beforeDeleteFile - * @param {Function} func The function to run before deleting a file. This function can be async and should take just one parameter, {@link Parse.Cloud.FileTriggerRequest}. - * @param {(Object|Function)} validator An optional function to help validating cloud code. This function can be an async function and should take one parameter a {@link Parse.Cloud.FileTriggerRequest}, or a {@link Parse.Cloud.ValidatorObject}. - */ ParseCloud.beforeDeleteFile = function (handler, validationHandler) { - validateValidator(validationHandler); - triggers.addFileTrigger( - triggers.Types.beforeDeleteFile, - handler, - Parse.applicationId, - validationHandler - ); + Deprecator.logRuntimeDeprecation({ + usage: 'Parse.Cloud.beforeDeleteFile', + solution: 'Use Parse.Cloud.beforeDelete(Parse.File).', + }); + ParseCloud.beforeDelete(Parse.File, handler, validationHandler); +}; + +ParseCloud.afterDeleteFile = function (handler, validationHandler) { + Deprecator.logRuntimeDeprecation({ + usage: 'Parse.Cloud.afterDeleteFile', + solution: 'Use Parse.Cloud.afterDelete(Parse.File).', + }); + ParseCloud.afterDelete(Parse.File, handler, validationHandler); +}; /** - * Registers an after delete file function. + * + * Registers a before create function * * **Available in Cloud Code only.** * * ``` - * Parse.Cloud.afterDeleteFile(async (request) => { + * Parse.Cloud.beforeCreate(Parse.File, (request) => { * // code here * }, (request) => { * // validation code here * }); * - * Parse.Cloud.afterDeleteFile(async (request) => { - * // code here - * }, { ...validationObject }); - *``` + * ``` * - * @method afterDeleteFile - * @name Parse.Cloud.afterDeleteFile - * @param {Function} func The function to after before deleting a file. This function can be async and should take just one parameter, {@link Parse.Cloud.FileTriggerRequest}. - * @param {(Object|Function)} validator An optional function to help validating cloud code. 
This function can be an async function and should take one parameter a {@link Parse.Cloud.FileTriggerRequest}, or a {@link Parse.Cloud.ValidatorObject}. + * @method beforeCreate + * @name Parse.Cloud.beforeCreate + * @param {(String|Parse.Object)} arg1 The Parse.Object subclass to register the before create function for. This can instead be a String that is the className of the subclass. + * @param {Function} func The function to run before a create. This function can be async and should take one parameter a {@link Parse.Cloud.TriggerRequest}; + * @param {(Object|Function)} validator An optional function to help validating cloud code. This function can be an async function and should take one parameter a {@link Parse.Cloud.TriggerRequest}, or a {@link Parse.Cloud.ValidatorObject}. */ -ParseCloud.afterDeleteFile = function (handler, validationHandler) { +ParseCloud.beforeCreate = function (_, handler, validationHandler) { + const className = triggers.getClassName(Parse.File); validateValidator(validationHandler); - triggers.addFileTrigger( - triggers.Types.afterDeleteFile, + triggers.addTrigger( + triggers.Types.beforeCreate, + className, handler, Parse.applicationId, validationHandler diff --git a/src/triggers.js b/src/triggers.js index 8320b5fb74..664b2ed4de 100644 --- a/src/triggers.js +++ b/src/triggers.js @@ -12,10 +12,7 @@ export const Types = { afterDelete: 'afterDelete', beforeFind: 'beforeFind', afterFind: 'afterFind', - beforeSaveFile: 'beforeSaveFile', - afterSaveFile: 'afterSaveFile', - beforeDeleteFile: 'beforeDeleteFile', - afterDeleteFile: 'afterDeleteFile', + beforeCreate: 'beforeCreate', beforeConnect: 'beforeConnect', beforeSubscribe: 'beforeSubscribe', afterEvent: 'afterEvent', @@ -50,6 +47,9 @@ export function getClassName(parseClass) { if (parseClass && parseClass.className) { return parseClass.className; } + if (parseClass && parseClass.name) { + return parseClass.name.replace('Parse', '@'); + } return parseClass; } @@ -140,11 +140,6 @@ export function addTrigger(type, className, handler, applicationId, validationHa add(Category.Validators, `${type}.${className}`, validationHandler, applicationId); } -export function addFileTrigger(type, handler, applicationId, validationHandler) { - add(Category.Triggers, `${type}.${FileClassName}`, handler, applicationId); - add(Category.Validators, `${type}.${FileClassName}`, validationHandler, applicationId); -} - export function addConnectTrigger(type, handler, applicationId, validationHandler) { add(Category.Triggers, `${type}.${ConnectClassName}`, handler, applicationId); add(Category.Validators, `${type}.${ConnectClassName}`, validationHandler, applicationId); @@ -738,7 +733,7 @@ async function builtInTriggerValidator(options, request, auth) { } if (opt.constant && request.object) { if (request.original) { - request.object.set(key, request.original.get(key)); + request.object.revert(key); } else if (opt.default != null) { request.object.set(key, opt.default); } @@ -970,6 +965,9 @@ export async function maybeRunFileTrigger(triggerType, fileObject, config, auth) return fileObject; } const result = await fileTrigger(request); + if (request.download) { + fileObject.download = true; + } logTriggerSuccessBeforeHook( triggerType, 'Parse.File',