diff --git a/.env.example b/.env.example
index febae2d29..78a8fbebb 100644
--- a/.env.example
+++ b/.env.example
@@ -1,38 +1,44 @@
-# =====
-# Dev Configuration
-# The devShell reads this file to set defaults, so changing values here
-# affects local development.
-# =====
+# -----------------------------------------------------------------------------
+# Server Configuration (dev defaults, required in all environments)
+# -----------------------------------------------------------------------------

-# Server port - used by both the server and E2E tests
+# Port the registry server listens on
+# - Dev/Test: 9000 (from this file)
+# - Prod: Set in deployment config
SERVER_PORT=9000

# SQLite database path (relative to working directory)
+# - Dev: Uses local ./db directory
+# - Test: Overridden to use temp state directory
+# - Prod: Set to production database path
DATABASE_URL="sqlite:db/registry.sqlite3"

-# =====
-# Dev Secrets
-# these must be set in .env when running scripts like legacy-importer
-# =====
+# -----------------------------------------------------------------------------
+# Secrets (required for production, use dummy values for local dev)
+# -----------------------------------------------------------------------------
+# IMPORTANT: Never commit real secrets. The values below are dummies for testing.

-# GitHub personal access token for API requests when running scripts
-GITHUB_TOKEN="ghp_your_personal_access_token"
-
-# =====
-# Prod Secrets
-# these must be set in .env to run the production server and some scripts
-# =====
-
-# DigitalOcean Spaces credentials for S3-compatible storage
-SPACES_KEY="digitalocean_spaces_key"
-SPACES_SECRET="digitalocean_spaces_secret"
-
-# Pacchettibotti bot account credentials
-# Used for automated registry operations (commits, releases, etc.)
+# GitHub personal access token for pacchettibotti bot
+# Used for: commits to registry repos, issue management
PACCHETTIBOTTI_TOKEN="ghp_pacchettibotti_token"

# Pacchettibotti SSH keys (base64-encoded)
+# Used for: signing authenticated operations (unpublish, transfer)
# Generate with: ssh-keygen -t ed25519 -C "pacchettibotti@purescript.org"
# Encode with: cat key | base64 | tr -d '\n'
PACCHETTIBOTTI_ED25519_PUB="c3NoLWVkMjU1MTkgYWJjeHl6IHBhY2NoZXR0aWJvdHRpQHB1cmVzY3JpcHQub3Jn"
PACCHETTIBOTTI_ED25519="YWJjeHl6"
+
+# DigitalOcean Spaces credentials for S3-compatible storage
+# Used for: uploading/downloading package tarballs
+SPACES_KEY="digitalocean_spaces_key"
+SPACES_SECRET="digitalocean_spaces_secret"
+
+
+# -----------------------------------------------------------------------------
+# Script-only Secrets (not used by server, used by scripts like legacy-importer)
+# -----------------------------------------------------------------------------
+
+# Personal GitHub token for API requests when running scripts
+# This is YOUR token, not pacchettibotti's
+GITHUB_TOKEN="ghp_your_personal_access_token"
diff --git a/AGENTS.md b/AGENTS.md
index 43e474c2a..5ce5268dc 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -10,26 +10,63 @@ This project uses Nix with direnv. You should already be in the Nix shell automa
nix develop
```

-### Build and Test
+Watch out for these Nix quirks:
+- If Nix tries to fetch from git during a build, it is likely that spago.yaml files were changed but the lockfile was not updated; if so, update the lockfile with `spago build` (see the example after this list)
+- If a Nix build appears to be stale, it is likely that files were modified but are untracked by Git; if so, add the modified files with `git add` and retry.
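+
+For example, a minimal recovery sequence covering both quirks (assuming you are inside the dev shell):
+
+```sh
+spago build   # rebuilds and refreshes spago.lock after spago.yaml changes
+git add -A    # tracks modified/untracked files so the Nix build sees them
+```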
-The registry is implemented in PureScript. Use spago to build it and run PureScript tests. These are cheap and fast and should be used when working on the registry packages.
+### Build
+
+The registry is implemented in PureScript. Use spago to build it.

```sh
spago build # Build all PureScript code
-spago test # Run unit tests
```

-Integration tests require two terminals (or the use of test-env in detached mode). The integration tests are only necessary to run if working on the server (app).
+The registry infrastructure is defined in Nix. Build it with Nix:
+
+```sh
+nix build .#server
+```
+
+### Test
+
+The registry contains a mixture of unit tests, e2e tests, and nix flake checks. When you complete a change, you should generally run the unit tests. When working on the server, you should generally also run the e2e tests. If you are on a Linux system, you can run `nix flake check -L` before committing to ensure everything passes.
+
+#### Unit Tests
+
+Unit tests can be run with `spago`. They are fast and cheap.
+
+```sh
+spago test # Run all unit tests
+spago test -p <package> # Run tests for a specific package
+```
+
+#### End-to-End Tests
+
+The end-to-end (integration) tests are in `app-e2e`. They can be run via Nix on Linux:
+
+```sh
+nix build .#checks.x86_64-linux.integration
+```
+
+Alternatively, on macOS or for more iterative test development, they can be run using two terminals: one to start the test env and one to execute the tests.

```sh
# Terminal 1: Start test environment (wiremock mocks + registry server on port 9000)
nix run .#test-env

# Terminal 2: Run E2E tests once server is ready
-spago run -p registry-app-e2e
+spago-test-e2e
```

-Options: `nix run .#test-env -- --tui` for interactive TUI, `-- --detached` for background mode.
+Options: `nix run .#test-env -- --tui` for an interactive TUI, `-- --detached` for background mode (so you can use a single terminal).
+
+State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env`. To examine state after a test run (for debugging), stop the test-env but don't restart it. This is useful, for example, for reading the logs of the most recent run:
+
+```sh
+# After a test run, see the logs (log name is today's date)
+cat /tmp/registry-test-env/scratch/logs/*.log
+```

#### Smoke Test (Linux only)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 92f5f9dcf..ebe38a0dd 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -72,20 +72,29 @@ nix build .#checks.x86_64-linux.smoke -L

### Integration Test

+You can run the integration tests on Linux with:
+
+```sh
+nix build .#checks.x86_64-linux.integration -L
+```
+
+On macOS, or for iterative development, you can instead start the test environment and run the tests separately.
+
```sh
# Terminal 1: Start the test environment (wiremock mocks + registry server)
nix run .#test-env

-# Terminal 2: Once the server is ready, run the E2E tests
-spago run -p registry-app-e2e
+# Terminal 2: Run E2E tests once server is ready
+spago-test-e2e
```

The test environment:
- Starts wiremock services mocking GitHub, S3, Pursuit, etc.
-- Starts the registry server on port 9000 with a temporary SQLite database
+- Starts the registry server with a temporary SQLite database
- Uses fixture data from `app/fixtures/`
+- State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env` (see the example below)

-Press `Ctrl+C` in Terminal 1 to stop all services. State is cleaned up automatically.
+Press `Ctrl+C` in Terminal 1 to stop all services.
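+
+For example, after stopping the test environment you can inspect the leftover database directly (a sketch, assuming the `sqlite3` CLI is available in the dev shell):
+
+```sh
+# job_info is the parent table that all job tables reference
+sqlite3 /tmp/registry-test-env/db/registry.sqlite3 'SELECT * FROM job_info;'
+```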
All arguments after `--` are passed directly to process-compose: @@ -101,7 +110,11 @@ process-compose attach # Attach TUI process-compose down # Stop all services ``` -You can also set `STATE_DIR` to use a persistent state directory instead of a temp dir. +To examine state after a test run (e.g., for debugging), stop the test-env but don't restart it. The state remains in `/tmp/registry-test-env`: +- `db/registry.sqlite3` — SQLite database +- `scratch/registry/` — Local registry clone with metadata +- `scratch/registry-index/` — Local manifest index clone +- `repo-fixtures/` — Git fixture repositories ## Available Nix Commands diff --git a/SPEC.md b/SPEC.md index 423d0d80d..54c627d05 100644 --- a/SPEC.md +++ b/SPEC.md @@ -197,6 +197,7 @@ All packages in the registry contain a `purs.json` manifest file in their root d - `version`: a valid [`Version`](#version) - `license`: a valid [`License`](#license) - `location`: a valid [`Location`](#location) +- `ref`: a `string` representing the reference (e.g., a Git commit or Git tag) at the `location` that was used to fetch this version's source code - `owners` (optional): a non-empty array of [`Owner`](#owner) - `description` (optional): a description of your library as a plain text string, not markdown, up to 300 characters - `includeFiles` (optional): a non-empty array of globs, where globs are used to match file paths (in addition to the `src` directory and other [always-included files](#always-included-files)) that you want included in your package tarball @@ -221,6 +222,7 @@ For example: "githubOwner": "purescript", "githubRepo": "purescript-control" }, + "ref": "v4.2.0", "include": ["test/**/*.purs"], "exclude": ["test/graphs"], "dependencies": { "newtype": ">=3.0.0 <4.0.0", "prelude": ">=4.0.0 <5.0.0" } diff --git a/app-e2e/spago.yaml b/app-e2e/spago.yaml index 1fa902f14..fb3804b90 100644 --- a/app-e2e/spago.yaml +++ b/app-e2e/spago.yaml @@ -5,16 +5,27 @@ package: dependencies: - aff - arrays + - codec-json - console - datetime - - effect - - either - - maybe - - prelude + - exceptions + - fetch + - integers + - json + - node-child-process + - node-execa + - node-fs + - node-path + - node-process + - ordered-collections + - registry-app + - registry-foreign - registry-lib - registry-test-utils + - routing-duplex - spec - spec-node - strings + - transformers run: main: Test.E2E.Main diff --git a/app-e2e/src/Test/E2E/Endpoint/Jobs.purs b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs new file mode 100644 index 000000000..e02b623b5 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs @@ -0,0 +1,63 @@ +module Test.E2E.Endpoint.Jobs (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Registry.API.V1 (JobId(..)) +import Registry.API.V1 as V1 +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Status endpoint" do + Spec.it "can reach the status endpoint" do + Client.getStatus + + Spec.describe "Jobs API" do + Spec.it "query parameters and filtering work correctly" do + -- Publish once and test all Jobs API features + { jobId } <- Client.publish Fixtures.effectPublishData + job <- Env.pollJobOrFail jobId + let info = V1.jobInfo job + + -- Test: include_completed filtering + recentJobs <- Client.getJobsWith Client.ActiveOnly + allJobs <- Client.getJobsWith Client.IncludeCompleted + let allCount = 
Array.length allJobs + Assert.shouldSatisfy allCount (_ > 0) + let recentCount = Array.length recentJobs + Assert.shouldSatisfy recentCount (_ <= allCount) + let completedJob = Array.find (\j -> isJust (V1.jobInfo j).finishedAt) allJobs + case completedJob of + Just completed -> do + let + completedId = (V1.jobInfo completed).jobId + inRecent = Array.any (\j -> (V1.jobInfo j).jobId == completedId) recentJobs + when inRecent do + Assert.fail $ "Completed job " <> unwrap completedId <> " should be excluded from include_completed=false results" + Nothing -> pure unit + + -- Test: query parameters (level and since) + baseJob <- Client.getJob jobId Nothing Nothing + Assert.shouldEqual (V1.jobInfo baseJob).jobId info.jobId + debugJob <- Client.getJob jobId (Just V1.Debug) Nothing + Assert.shouldEqual (V1.jobInfo debugJob).jobId info.jobId + let sinceTime = fromMaybe info.createdAt info.finishedAt + sinceJob <- Client.getJob jobId Nothing (Just sinceTime) + Assert.shouldEqual (V1.jobInfo sinceJob).jobId info.jobId + + Spec.it "returns HTTP 404 for non-existent job ID" do + let fakeJobId = JobId "nonexistent-job-id-12345" + result <- Client.tryGetJob fakeJobId Nothing Nothing + case result of + Right _ -> + Assert.fail "Expected HTTP 404 for non-existent job" + Left err -> + case Client.clientErrorStatus err of + Just 404 -> pure unit + _ -> Assert.fail $ "Expected HTTP 404, got: " <> Client.printClientError err diff --git a/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs new file mode 100644 index 000000000..502853fbd --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs @@ -0,0 +1,52 @@ +module Test.E2E.Endpoint.PackageSets (spec) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask) +import Effect.Aff as Aff +import Registry.API.V1 as V1 +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Package Sets endpoint" do + Spec.it "accepts unauthenticated add/upgrade requests" do + { jobId } <- Client.packageSets Fixtures.packageSetAddRequest + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + Spec.it "rejects unauthenticated compiler change requests" do + result <- Client.tryPackageSets Fixtures.packageSetCompilerChangeRequest + case result of + Left err -> do + Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400) + Right _ -> + Assert.fail "Expected 400 error for unauthenticated compiler change" + + Spec.it "rejects unauthenticated package removal requests" do + result <- Client.tryPackageSets Fixtures.packageSetRemoveRequest + case result of + Left err -> do + Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400) + Right _ -> + Assert.fail "Expected 400 error for unauthenticated package removal" + + Spec.it "accepts authenticated compiler change requests" do + { privateKey } <- ask + case Fixtures.signPackageSet privateKey Fixtures.packageSetCompilerChangeRequest of + Left err -> + liftAff $ Aff.throwError $ Aff.error $ "Failed to sign request: " <> err + Right signedRequest -> do + { jobId } <- Client.packageSets signedRequest + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + Spec.it "returns existing job for duplicate requests" do + { jobId: firstJobId } <- Client.packageSets 
Fixtures.packageSetAddRequest + { jobId: secondJobId } <- Client.packageSets Fixtures.packageSetAddRequest + Assert.shouldEqual firstJobId secondJobId diff --git a/app-e2e/src/Test/E2E/Endpoint/Publish.purs b/app-e2e/src/Test/E2E/Endpoint/Publish.purs new file mode 100644 index 000000000..47e51c959 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Publish.purs @@ -0,0 +1,76 @@ +module Test.E2E.Endpoint.Publish (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NEA +import Data.Map as Map +import Data.Set as Set +import Data.String as String +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.Manifest (Manifest(..)) +import Registry.Metadata (Metadata(..)) +import Registry.Sha256 as Sha256 +import Registry.Test.Assert as Assert +import Registry.Version as Version +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Publish workflow" do + Spec.it "can publish effect@4.0.0 and verify all state changes" do + { jobId } <- Client.publish Fixtures.effectPublishData + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + uploadOccurred <- Env.hasStorageUpload Fixtures.effect + unless uploadOccurred do + storageRequests <- WireMock.getStorageRequests + WireMock.failWithRequests "Expected S3 PUT for effect/4.0.0.tar.gz" storageRequests + + Metadata metadata <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadata.published of + Nothing -> Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in metadata published versions" + Just publishedMeta -> do + Assert.shouldSatisfy (Sha256.print publishedMeta.hash) (not <<< String.null) + + manifestEntries <- Env.readManifestIndexEntry Fixtures.effect.name + let hasVersion = Array.any (\(Manifest m) -> m.version == Fixtures.effect.version) manifestEntries + unless hasVersion do + Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in manifest index" + + Env.waitForAllMatrixJobs Fixtures.effect + + -- Collect the compilers from the matrix jobs that ran for this package + allJobs <- Client.getJobsWith Client.IncludeCompleted + let + matrixCompilers = Array.mapMaybe + ( case _ of + MatrixJob { packageName, packageVersion, compilerVersion } -> + if packageName == Fixtures.effect.name && packageVersion == Fixtures.effect.version then Just compilerVersion + else Nothing + _ -> Nothing + ) + allJobs + -- The expected compilers are: the publish compiler + all matrix job compilers + expectedCompilers = Set.fromFoldable $ Array.cons Fixtures.effectPublishData.compiler matrixCompilers + + Metadata metadataAfter <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadataAfter.published of + Nothing -> Assert.fail "Version missing after matrix jobs" + Just publishedMetaAfter -> do + let actualCompilers = Set.fromFoldable $ NEA.toArray publishedMetaAfter.compilers + Assert.shouldEqual actualCompilers expectedCompilers + + Spec.describe "Publish state machine" do + Spec.it "returns same jobId for duplicate publish requests" do + { jobId: id1 } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail id1 + { jobId: id2 } <- Client.publish Fixtures.effectPublishData + Assert.shouldEqual id1 id2 diff 
--git a/app-e2e/src/Test/E2E/Endpoint/Transfer.purs b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs new file mode 100644 index 000000000..6e3d49eef --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs @@ -0,0 +1,51 @@ +module Test.E2E.Endpoint.Transfer (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Registry.API.V1 as V1 +import Registry.Location (Location(..)) +import Registry.Metadata (Metadata(..)) +import Registry.PackageName as PackageName +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Transfer workflow" do + Spec.it "can transfer effect to a new location with full state verification" do + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail publishJobId + -- Note: we don't wait for matrix jobs - transfer only modifies metadata + + Metadata originalMetadata <- Env.readMetadata Fixtures.effect.name + case originalMetadata.location of + GitHub { owner } -> Assert.shouldEqual owner "purescript" + Git _ -> Assert.fail "Expected GitHub location, got Git" + + -- clear the publish PUT so we can verify transfers leave storage unaffected + WireMock.clearStorageRequests + + authData <- Env.signTransferOrFail Fixtures.effectTransferData + { jobId: transferJobId } <- Client.transfer authData + transferJob <- Env.pollJobOrFail transferJobId + Assert.shouldSatisfy (V1.jobInfo transferJob).finishedAt isJust + + Metadata newMetadata <- Env.readMetadata Fixtures.effect.name + case newMetadata.location of + GitHub { owner } -> Assert.shouldEqual owner "new-owner" + Git _ -> Assert.fail "Expected GitHub location after transfer, got Git" + + storageRequests <- WireMock.getStorageRequests + let + packagePath = PackageName.print Fixtures.effect.name + putOrDeleteRequests = Array.filter + (\r -> (r.method == "PUT" || r.method == "DELETE") && WireMock.filterByUrlContaining packagePath [ r ] /= []) + storageRequests + unless (Array.null putOrDeleteRequests) do + WireMock.failWithRequests "Transfer should not PUT or DELETE to storage" putOrDeleteRequests diff --git a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs new file mode 100644 index 000000000..c58e88ea6 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs @@ -0,0 +1,95 @@ +module Test.E2E.Endpoint.Unpublish (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Map as Map +import Data.String as String +import Registry.API.V1 as V1 +import Registry.Metadata (Metadata(..)) +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Publish-Unpublish workflow" do + Spec.it "can publish then unpublish with full state verification" do + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail publishJobId + + existsBefore <- Env.manifestIndexEntryExists Fixtures.effect + unless existsBefore do + Assert.fail "Expected version to exist in manifest index before unpublish" + + authData <- Env.signUnpublishOrFail 
Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + unpublishJob <- Env.pollJobOrFail unpublishJobId + Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust + + Metadata metadata <- Env.readMetadata Fixtures.effect.name + + case Map.lookup Fixtures.effect.version metadata.unpublished of + Nothing -> + Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata" + Just unpublishedInfo -> + Assert.shouldSatisfy unpublishedInfo.reason (not <<< String.null) + + when (Map.member Fixtures.effect.version metadata.published) do + Assert.fail "Version 4.0.0 should not be in 'published' metadata after unpublish" + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + unless deleteOccurred do + storageRequests <- WireMock.getStorageRequests + WireMock.failWithRequests "Expected S3 DELETE for effect/4.0.0.tar.gz" storageRequests + + existsAfter <- Env.manifestIndexEntryExists Fixtures.effect + when existsAfter do + Assert.fail "Expected version to be removed from manifest index after unpublish" + + -- Test race condition: submit unpublish while publish is still running. + -- Job priority (Unpublish > Matrix) ensures unpublish runs before matrix jobs. + Spec.it "unpublishing before matrix jobs complete causes them to fail gracefully" do + -- Submit publish, don't wait for it to complete + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + + -- Immediately submit unpublish - it will be queued and run after publish + -- but BEFORE matrix jobs due to job priority ordering + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + + -- Now wait for publish to complete + _ <- Env.pollJobOrFail publishJobId + + -- Wait for unpublish to complete + unpublishJob <- Env.pollJobOrFail unpublishJobId + Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust + + -- Verify unpublish succeeded + Metadata metadata <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadata.unpublished of + Nothing -> + Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata" + Just _ -> pure unit + + -- Wait for matrix jobs to complete + Env.waitForAllMatrixJobs Fixtures.effect + + -- Verify matrix jobs failed (they tried to download deleted tarball) + jobs <- Client.getJobs + let + matrixJobs = Array.filter (Env.isMatrixJobFor Fixtures.effect) jobs + allFailed = Array.all (\j -> not (V1.jobInfo j).success) matrixJobs + + unless (Array.null matrixJobs || allFailed) do + Assert.fail "Expected matrix jobs to fail after unpublish deleted the tarball" + + -- Critical: verify no bad writes occurred - the version should NOT be + -- back in published metadata (Map.update on missing key is a no-op) + Metadata metadataAfterMatrix <- Env.readMetadata Fixtures.effect.name + when (Map.member Fixtures.effect.version metadataAfterMatrix.published) do + Assert.fail "Matrix job incorrectly wrote to published metadata for unpublished version" diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs new file mode 100644 index 000000000..c4598313a --- /dev/null +++ b/app-e2e/src/Test/E2E/GitHubIssue.purs @@ -0,0 +1,149 @@ +-- | End-to-end tests for the GitHubIssue workflow. +-- | Tests the full flow: parsing GitHub event → submitting to registry API → +-- | polling for completion → posting comments. 
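+-- |
+-- | These tests construct minimal GitHub issue event payloads of the shape
+-- | `{ "sender": { "login": .. }, "issue": { "number": .., "body": .. } }`
+-- | (see `githubEventCodec` below), run the workflow against the wiremock
+-- | GitHub mock, and assert on the captured requests.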
+module Test.E2E.GitHubIssue (spec) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask) +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import JSON as JSON +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Node.Process as Process +import Registry.App.GitHubIssue as GitHubIssue +import Registry.Foreign.Tmp as Tmp +import Registry.Operation as Operation +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2E, E2ESpec) +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "GitHubIssue end-to-end" do + Spec.it "handles publish via GitHub issue, posts comments, and closes issue on success" do + requests <- runWorkflow $ mkPublishEvent Fixtures.effectPublishData + assertComment "Job started" requests + assertComment "Job completed successfully" requests + assertClosed requests + + Spec.it "posts failure comment and leaves issue open when job fails" do + requests <- runWorkflow $ mkAuthenticatedEvent "random-user" Fixtures.failingTransferData + assertComment "Job started" requests + assertComment "Job failed" requests + assertNoComment "Job completed successfully" requests + assertOpen requests + + Spec.it "calls Teams API to verify trustee membership for authenticated operation" do + requests <- runWorkflow $ mkAuthenticatedEvent packagingTeamUser Fixtures.trusteeAuthenticatedData + assertComment "Job started" requests + assertTeamsApiCalled requests + + Spec.it "posts error comment when issue body contains invalid JSON" do + requests <- runWorkflow Fixtures.invalidJsonIssueEvent + assertComment "malformed" requests + assertOpen requests + +-- Constants +testIssueNumber :: Int +testIssueNumber = 101 + +packagingTeamUser :: String +packagingTeamUser = "packaging-team-user" + +-- Event builders +githubEventCodec :: CJ.Codec { sender :: { login :: String }, issue :: { number :: Int, body :: String } } +githubEventCodec = CJ.named "GitHubEvent" $ CJ.Record.object + { sender: CJ.Record.object { login: CJ.string } + , issue: CJ.Record.object { number: CJ.int, body: CJ.string } + } + +mkPublishEvent :: Operation.PublishData -> String +mkPublishEvent publishData = + let + body = "```json\n" <> JSON.print (CJ.encode Operation.publishCodec publishData) <> "\n```" + in + JSON.print $ CJ.encode githubEventCodec + { sender: { login: packagingTeamUser }, issue: { number: testIssueNumber, body } } + +mkAuthenticatedEvent :: String -> Operation.AuthenticatedData -> String +mkAuthenticatedEvent username authData = + let + body = "```json\n" <> JSON.print (CJ.encode Operation.authenticatedCodec authData) <> "\n```" + in + JSON.print $ CJ.encode githubEventCodec + { sender: { login: username }, issue: { number: testIssueNumber, body } } + +-- Workflow runner +runWorkflow :: String -> E2E (Array WireMock.WireMockRequest) +runWorkflow eventJson = do + { stateDir } <- ask + + Client.getStatus + + tmpDir <- liftAff Tmp.mkTmpDir + let eventPath = Path.concat [ tmpDir, "github-event.json" ] + liftAff $ FS.Aff.writeTextFile UTF8 eventPath eventJson + liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath + + originalCwd <- liftEffect Process.cwd + liftEffect $ Process.chdir stateDir + + envResult <- liftAff GitHubIssue.initializeGitHub + for_ envResult \env -> do + let testEnv = env { pollConfig = { maxAttempts: 60, interval: 
Milliseconds 500.0 }, logVerbosity = Quiet } + liftAff $ void $ GitHubIssue.runGitHubIssue testEnv + + liftEffect $ Process.chdir originalCwd + + WireMock.getGithubRequests + +-- Assertions (all operate on captured requests) +assertComment :: String -> Array WireMock.WireMockRequest -> E2E Unit +assertComment text requests = do + let + comments = requests # Array.filter \r -> + r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url + unless (Array.any (bodyContains text) comments) do + WireMock.failWithRequests ("Expected '" <> text <> "' comment but not found") requests + +assertNoComment :: String -> Array WireMock.WireMockRequest -> E2E Unit +assertNoComment text requests = do + let + comments = requests # Array.filter \r -> + r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url + when (Array.any (bodyContains text) comments) do + WireMock.failWithRequests ("Did not expect '" <> text <> "' comment") requests + +assertClosed :: Array WireMock.WireMockRequest -> E2E Unit +assertClosed requests = do + let + closes = requests # Array.filter \r -> + r.method == "PATCH" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url + when (Array.null closes) do + WireMock.failWithRequests "Expected issue to be closed" requests + +assertOpen :: Array WireMock.WireMockRequest -> E2E Unit +assertOpen requests = do + let + closes = requests # Array.filter \r -> + r.method == "PATCH" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url + unless (Array.null closes) do + WireMock.failWithRequests "Expected issue to remain open" requests + +assertTeamsApiCalled :: Array WireMock.WireMockRequest -> E2E Unit +assertTeamsApiCalled requests = do + let + teams = requests # Array.filter \r -> + r.method == "GET" && String.contains (String.Pattern "/orgs/purescript/teams/packaging/members") r.url + when (Array.null teams) do + WireMock.failWithRequests "Expected Teams API to be called" requests + +bodyContains :: String -> WireMock.WireMockRequest -> Boolean +bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body) diff --git a/app-e2e/src/Test/E2E/Main.purs b/app-e2e/src/Test/E2E/Main.purs deleted file mode 100644 index 7bc030d76..000000000 --- a/app-e2e/src/Test/E2E/Main.purs +++ /dev/null @@ -1,22 +0,0 @@ -module Test.E2E.Main (main) where - -import Prelude - -import Data.Maybe (Maybe(..)) -import Data.Time.Duration (Milliseconds(..)) -import Effect (Effect) -import Test.E2E.Publish as Test.E2E.Publish -import Test.Spec as Spec -import Test.Spec.Reporter.Console (consoleReporter) -import Test.Spec.Runner.Node (runSpecAndExitProcess') -import Test.Spec.Runner.Node.Config as Cfg - -main :: Effect Unit -main = runSpecAndExitProcess' config [ consoleReporter ] do - Spec.describe "E2E Tests" do - Spec.describe "Publish" Test.E2E.Publish.spec - where - config = - { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 120_000.0 } - , parseCLIOptions: false - } diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs deleted file mode 100644 index f7bd1d63e..000000000 --- a/app-e2e/src/Test/E2E/Publish.purs +++ /dev/null @@ -1,84 +0,0 @@ --- | End-to-end tests for the Publish API endpoint. --- | These tests exercise the actual registry server via HTTP requests. 
-module Test.E2E.Publish (spec) where - -import Prelude - -import Data.Array as Array -import Data.Either (Either(..)) -import Data.Maybe (Maybe(..), isJust) -import Data.String as String -import Effect.Aff (Aff) -import Effect.Class (liftEffect) -import Effect.Class.Console as Console -import Registry.API.V1 as V1 -import Registry.Location as Registry.Location -import Registry.Test.Assert as Assert -import Registry.Test.E2E.Client as Client -import Registry.Test.Utils as Utils -import Test.Spec (Spec) -import Test.Spec as Spec - --- | Get client config from environment -getConfig :: Aff Client.Config -getConfig = liftEffect Client.configFromEnv - -spec :: Spec Unit -spec = do - Spec.describe "Server connectivity" do - Spec.it "can reach the status endpoint" do - config <- getConfig - result <- Client.getStatus config - case result of - Left err -> Assert.fail $ "Failed to reach status endpoint: " <> Client.printClientError err - Right _ -> pure unit - - Spec.it "can list jobs (initially empty)" do - config <- getConfig - result <- Client.getJobs config - case result of - Left err -> Assert.fail $ "Failed to list jobs: " <> Client.printClientError err - Right _ -> pure unit -- Jobs list may not be empty if other tests ran - - Spec.describe "Publish workflow" do - Spec.it "can publish effect@4.0.0" do - config <- getConfig - let - -- Location must match what's in the fixture metadata - effectLocation = Registry.Location.GitHub - { owner: "purescript" - , repo: "purescript-effect" - , subdir: Nothing - } - publishData = - { name: Utils.unsafePackageName "effect" - , location: Just effectLocation - , ref: "v4.0.0" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - } - - -- Submit publish request - publishResult <- Client.publish config publishData - case publishResult of - Left err -> Assert.fail $ "Failed to submit publish request: " <> Client.printClientError err - Right { jobId } -> do - -- Poll until job completes - job <- Client.pollJob config jobId - - -- If job failed, print logs for debugging - unless job.success do - Console.log "Job failed! Logs:" - let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) job.logs - Console.log $ String.joinWith "\n" logMessages - - -- Verify job completed successfully - when (not job.success) do - let errorLogs = Array.filter (\l -> l.level == V1.Error) job.logs - let errorMessages = map _.message errorLogs - Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages - - Assert.shouldSatisfy job.finishedAt isJust - Assert.shouldEqual job.jobType V1.PublishJob - Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") - Assert.shouldEqual job.ref "v4.0.0" diff --git a/app-e2e/src/Test/E2E/Support/Client.purs b/app-e2e/src/Test/E2E/Support/Client.purs new file mode 100644 index 000000000..3c1c02e62 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Client.purs @@ -0,0 +1,211 @@ +-- | HTTP client for making requests to the registry server during E2E tests. +-- | This module provides typed helpers for interacting with the Registry API. +-- | +-- | All client functions operate in the E2E monad (ReaderT TestEnv Aff) and +-- | throw on HTTP or parse errors. Use the `try*` variants (e.g., `tryGetJob`) +-- | when testing error responses - they return `Either ClientError a` with +-- | typed HTTP status codes. +module Test.E2E.Support.Client + ( ClientError(..) + , JobFilter(..) 
+ , getJobs + , getJobsWith + , getJob + , tryGetJob + , getStatus + , publish + , unpublish + , transfer + , packageSets + , tryPackageSets + , pollJob + , printClientError + , clientErrorStatus + ) where + +import Registry.App.Prelude + +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Monad.Reader (ask) +import Data.Codec.JSON as CJ +import Data.DateTime (DateTime) +import Data.Int as Int +import Effect.Aff (delay) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Effect.Exception (Error) +import Effect.Exception as Exception +import Fetch (Method(..)) +import Fetch as Fetch +import JSON as JSON +import Registry.API.V1 (Job, JobId, LogLevel, Route(..)) +import Registry.API.V1 as V1 +import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest, PublishData) +import Registry.Operation as Operation +import Routing.Duplex as Routing +import Test.E2E.Support.Types (E2E) + +-- | Errors that can occur during client operations +data ClientError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + | Timeout String + +printClientError :: ClientError -> String +printClientError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + Timeout msg -> "Timeout: " <> msg + +-- | Extract the HTTP status code from a ClientError, if it's an HttpError +clientErrorStatus :: ClientError -> Maybe Int +clientErrorStatus = case _ of + HttpError { status } -> Just status + _ -> Nothing + +-- | Convert a ClientError to an Effect Error for throwing +toError :: ClientError -> Error +toError = Exception.error <<< printClientError + +-- | Throw a ClientError as an Aff error +throw :: forall a. ClientError -> Aff a +throw = Aff.throwError <<< toError + +-- | Print a Route to its URL path using the route codec +printRoute :: Route -> String +printRoute = Routing.print V1.routes + +-- | Make a GET request and decode the response, returning Either on error. +tryGet :: forall a. CJ.Codec a -> String -> String -> Aff (Either ClientError a) +tryGet codec baseUrl path = do + response <- Fetch.fetch (baseUrl <> path) { method: GET } + body <- response.text + if response.status >= 200 && response.status < 300 then + case parseJson codec body of + Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: body } + Right a -> pure $ Right a + else + pure $ Left $ HttpError { status: response.status, body } + +-- | Make a GET request and decode the response. Throws on error. +get :: forall a. CJ.Codec a -> String -> String -> Aff a +get codec baseUrl path = tryGet codec baseUrl path >>= either throw pure + +-- | Make a POST request with JSON body, returning Either on error. +tryPost :: forall req res. 
CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff (Either ClientError res) +tryPost reqCodec resCodec baseUrl path reqBody = do + let jsonBody = JSON.print $ CJ.encode reqCodec reqBody + response <- Fetch.fetch (baseUrl <> path) + { method: POST + , headers: { "Content-Type": "application/json" } + , body: jsonBody + } + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case parseJson resCodec responseBody of + Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: responseBody } + Right a -> pure $ Right a + else + pure $ Left $ HttpError { status: response.status, body: responseBody } + +-- | Make a POST request with JSON body and decode the response. Throws on error. +post :: forall req res. CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff res +post reqCodec resCodec baseUrl path reqBody = tryPost reqCodec resCodec baseUrl path reqBody >>= either throw pure + +data JobFilter = ActiveOnly | IncludeCompleted + +-- | Get the list of jobs with a configurable filter +getJobsWith :: JobFilter -> E2E (Array Job) +getJobsWith filter = do + { clientConfig } <- ask + let + includeCompleted = case filter of + ActiveOnly -> Just false + IncludeCompleted -> Just true + route = Jobs { since: Nothing, include_completed: includeCompleted } + liftAff $ get (CJ.array V1.jobCodec) clientConfig.baseUrl (printRoute route) + +-- | Get the list of jobs (includes completed jobs) +getJobs :: E2E (Array Job) +getJobs = getJobsWith IncludeCompleted + +-- | Get a specific job by ID, with optional log filtering +getJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E Job +getJob jobId level since = do + { clientConfig } <- ask + let route = Job jobId { level, since } + liftAff $ get V1.jobCodec clientConfig.baseUrl (printRoute route) + +-- | Try to get a specific job by ID, returning Left on HTTP/parse errors. +-- | Use this when testing error responses (e.g., expecting 404). 
+tryGetJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E (Either ClientError Job) +tryGetJob jobId level since = do + { clientConfig } <- ask + let route = Job jobId { level, since } + liftAff $ tryGet V1.jobCodec clientConfig.baseUrl (printRoute route) + +-- | Check if the server is healthy +getStatus :: E2E Unit +getStatus = do + { clientConfig } <- ask + liftAff do + response <- Fetch.fetch (clientConfig.baseUrl <> printRoute Status) { method: GET } + if response.status == 200 then + pure unit + else do + body <- response.text + throw $ HttpError { status: response.status, body } + +-- | Publish a package +publish :: PublishData -> E2E V1.JobCreatedResponse +publish reqBody = do + { clientConfig } <- ask + liftAff $ post Operation.publishCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Publish) reqBody + +-- | Unpublish a package (requires authentication) +unpublish :: AuthenticatedData -> E2E V1.JobCreatedResponse +unpublish authData = do + { clientConfig } <- ask + liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Unpublish) authData + +-- | Transfer a package to a new location (requires authentication) +transfer :: AuthenticatedData -> E2E V1.JobCreatedResponse +transfer authData = do + { clientConfig } <- ask + liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Transfer) authData + +-- | Submit a package set update request +packageSets :: PackageSetUpdateRequest -> E2E V1.JobCreatedResponse +packageSets request = do + { clientConfig } <- ask + liftAff $ post Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request + +-- | Try to submit a package set update, returning Left on HTTP/parse errors. +-- | Use this when testing error responses (e.g., expecting 400 for unauthorized restricted ops). +tryPackageSets :: PackageSetUpdateRequest -> E2E (Either ClientError V1.JobCreatedResponse) +tryPackageSets request = do + { clientConfig } <- ask + liftAff $ tryPost Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request + +-- | Poll a job until it completes or times out. +-- | +-- | This is the recommended way to wait for job completion in E2E tests. +-- | Do not implement custom polling loops; use this function or the higher-level +-- | helpers in Test.E2E.Support.Env (pollJobOrFail, pollJobExpectFailure). +pollJob :: JobId -> E2E Job +pollJob jobId = do + { clientConfig } <- ask + go clientConfig 1 + where + go config attempt + | attempt > config.maxPollAttempts = + liftAff $ throw $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts" + | otherwise = do + liftAff $ delay config.pollInterval + job <- getJob jobId (Just V1.Debug) Nothing + case (V1.jobInfo job).finishedAt of + Just _ -> pure job + Nothing -> do + when (attempt `mod` 10 == 0) do + Console.log $ "Polling job " <> unwrap jobId <> " (attempt " <> Int.toStringAs Int.decimal attempt <> ")" + go config (attempt + 1) diff --git a/app-e2e/src/Test/E2E/Support/Env.purs b/app-e2e/src/Test/E2E/Support/Env.purs new file mode 100644 index 000000000..06c8d47b9 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Env.purs @@ -0,0 +1,312 @@ +-- | Shared environment and helper functions for E2E tests. 
+-- | +-- | This module provides: +-- | - TestEnv type and E2E monad for test helpers (re-exported from Types) +-- | - Environment construction from env vars (mkTestEnv) +-- | - WireMock reset helpers for test isolation +-- | - Job polling with automatic failure handling +-- | - Git and metadata state inspection +-- | +-- | All functions operate in the E2E monad (ReaderT TestEnv Aff), so they +-- | have access to the shared test environment without explicit passing. +module Test.E2E.Support.Env + ( module ReExports + , mkTestEnv + , runE2E + , resetTestState + , resetDatabase + , resetGitFixtures + , resetLogs + , resetGitHubRequestCache + , pollJobOrFail + , pollJobExpectFailure + , signUnpublishOrFail + , signTransferOrFail + , gitStatus + , isCleanGitStatus + , waitForAllMatrixJobs + , isMatrixJobFor + , readMetadata + , readManifestIndexEntry + , manifestIndexEntryExists + , assertReposClean + , hasStorageUpload + , hasStorageDelete + ) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask, runReaderT) +import Data.Array as Array +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Node.ChildProcess.Types (Exit(..)) +import Node.FS.Aff as FS.Aff +import Node.Library.Execa as Execa +import Node.Path as Path +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Env as Env +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Manifest (Manifest(..)) +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata (Metadata) +import Registry.Metadata as Metadata +import Registry.Operation (AuthenticatedData, TransferData, UnpublishData) +import Registry.PackageName as PackageName +import Registry.Test.Assert as Assert +import Registry.Version as Version +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Fixtures (PackageFixture) +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.Types (ClientConfig, E2E, E2ESpec, TestEnv, WireMockConfig) as ReExports +import Test.E2E.Support.Types (E2E, TestEnv) +import Test.E2E.Support.WireMock as WireMock + +-- | Build the test environment from environment variables. +-- | Called once at startup in Main, before running any tests. +mkTestEnv :: Effect TestEnv +mkTestEnv = do + port <- Env.lookupRequired Env.serverPort + let + clientConfig = + { baseUrl: "http://localhost:" <> show port + , pollInterval: Milliseconds 2000.0 + , maxPollAttempts: 30 + } + + githubUrl <- Env.lookupRequired Env.githubApiUrl + storageUrl <- Env.lookupRequired Env.s3ApiUrl + let + githubWireMock = { baseUrl: githubUrl } + storageWireMock = { baseUrl: storageUrl } + + stateDir <- Env.lookupRequired Env.stateDir + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + + pure { clientConfig, githubWireMock, storageWireMock, stateDir, privateKey } + +-- | Run an E2E computation with a given environment. +-- | Primarily used by hoistSpec in Main. +runE2E :: forall a. TestEnv -> E2E a -> Aff a +runE2E env = flip runReaderT env + +-- | Reset all test state for isolation between tests. +-- | This is the recommended way to set up test isolation in Spec.before_. +-- | Resets: database, git fixtures, storage mock, and logs. 
+resetTestState :: E2E Unit +resetTestState = do + resetDatabase + resetGitFixtures + WireMock.clearStorageRequests + WireMock.resetStorageScenarios + WireMock.clearGithubRequests + resetGitHubRequestCache + resetLogs + +-- | Reset the database by clearing all job-related tables. +-- | +-- | This works because all job tables (publish_jobs, unpublish_jobs, transfer_jobs, +-- | matrix_jobs, package_set_jobs, logs) have foreign keys to job_info with +-- | ON DELETE CASCADE. See db/schema.sql for the schema definition. +resetDatabase :: E2E Unit +resetDatabase = do + { stateDir } <- ask + let dbPath = Path.concat [ stateDir, "db", "registry.sqlite3" ] + result <- liftAff $ _.getResult =<< Execa.execa "sqlite3" [ dbPath, "DELETE FROM job_info;" ] identity + case result.exit of + Normally 0 -> pure unit + _ -> liftAff $ Aff.throwError $ Aff.error $ "Failed to reset database: " <> result.stderr + +-- | Reset the git fixtures to restore original state. +-- | This restores metadata files modified by unpublish/transfer operations. +-- | +-- | Strategy: Reset the origin repos to their initial-fixture tag (created during +-- | setup), then delete the server's scratch git clones. The server will +-- | re-clone fresh copies on the next operation, ensuring a clean cache state. +resetGitFixtures :: E2E Unit +resetGitFixtures = do + { stateDir } <- ask + fixturesDir <- liftEffect $ Env.lookupRequired Env.repoFixturesDir + let + registryOrigin = Path.concat [ fixturesDir, "purescript", "registry" ] + registryIndexOrigin = Path.concat [ fixturesDir, "purescript", "registry-index" ] + scratchDir = Path.concat [ stateDir, "scratch" ] + resetOrigin registryOrigin + resetOrigin registryIndexOrigin + deleteGitClones scratchDir + where + resetOrigin dir = do + void $ gitOrFail [ "reset", "--hard", "initial-fixture" ] dir + void $ gitOrFail [ "clean", "-fd" ] dir + + deleteGitClones scratchDir = do + liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry" ] + liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry-index" ] + +-- | Clear server log files for test isolation. +-- | Deletes *.log files from the scratch/logs directory but preserves the directory itself. +resetLogs :: E2E Unit +resetLogs = do + { stateDir } <- ask + let logsDir = Path.concat [ stateDir, "scratch", "logs" ] + let cmd = "rm -f '" <> logsDir <> "'/*.log 2>/dev/null || true" + result <- liftAff $ _.getResult =<< Execa.execa "sh" [ "-c", cmd ] identity + case result.exit of + Normally _ -> pure unit + _ -> pure unit + +-- | Clear cached GitHub API requests from the scratch cache directory. +-- | This ensures each test makes fresh API calls rather than using cached responses. +resetGitHubRequestCache :: E2E Unit +resetGitHubRequestCache = do + { stateDir } <- ask + let cacheDir = Path.concat [ stateDir, "scratch", ".cache" ] + liftAff $ Aff.attempt (FS.Aff.readdir cacheDir) >>= case _ of + Left _ -> pure unit + Right files -> for_ files \file -> + when (String.Pattern "Request__" `String.contains` file) do + FS.Extra.remove (Path.concat [ cacheDir, file ]) + +-- | Poll a job until completion, failing the test if the job fails. +-- | Prints error logs on failure for debugging. +pollJobOrFail :: V1.JobId -> E2E V1.Job +pollJobOrFail jobId = do + job <- Client.pollJob jobId + unless (V1.jobInfo job).success do + Console.log "Job failed! 
Logs:" + let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs + Console.log $ String.joinWith "\n" logMessages + let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs + let errorMessages = map _.message errorLogs + Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages + pure job + +-- | Poll a job until completion, expecting it to fail. +-- | Returns the job for further assertions on error messages. +pollJobExpectFailure :: V1.JobId -> E2E V1.Job +pollJobExpectFailure jobId = do + job <- Client.pollJob jobId + when (V1.jobInfo job).success do + Assert.fail "Expected job to fail, but it succeeded" + pure job + +-- | Sign an unpublish operation using the pacchettibotti private key from environment. +signUnpublishOrFail :: UnpublishData -> E2E AuthenticatedData +signUnpublishOrFail unpublishData = do + { privateKey } <- ask + case Fixtures.signUnpublish privateKey unpublishData of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign unpublish: " <> err + Right authData -> pure authData + +-- | Sign a transfer operation using the pacchettibotti private key from environment. +signTransferOrFail :: TransferData -> E2E AuthenticatedData +signTransferOrFail transferData = do + { privateKey } <- ask + case Fixtures.signTransfer privateKey transferData of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign transfer: " <> err + Right authData -> pure authData + +-- | Run git status --porcelain in a directory and return the output. +gitStatus :: String -> E2E String +gitStatus cwd = gitOrFail [ "status", "--porcelain" ] cwd + +-- | Run a git command, throwing an exception on failure. +gitOrFail :: Array String -> FilePath -> E2E String +gitOrFail args cwd = liftAff $ Git.gitCLI args (Just cwd) >>= case _ of + Left err -> Aff.throwError $ Aff.error err + Right out -> pure out + +-- | Check if git status output indicates a clean working tree (no changes). +isCleanGitStatus :: String -> Boolean +isCleanGitStatus status = String.null status + +-- | Wait for all matrix jobs for a package to complete. +waitForAllMatrixJobs :: PackageFixture -> E2E Unit +waitForAllMatrixJobs pkg = go 120 0 + where + go :: Int -> Int -> E2E Unit + go 0 _ = liftAff $ Aff.throwError $ Aff.error "Timed out waiting for matrix jobs to complete" + go attempts lastCount = do + jobs <- Client.getJobs + let + matrixJobs = Array.filter (isMatrixJobFor pkg) jobs + totalCount = Array.length matrixJobs + finishedCount = Array.length $ Array.filter (\j -> isJust (V1.jobInfo j).finishedAt) matrixJobs + allFinished = finishedCount == totalCount + stillCreating = totalCount > lastCount + if totalCount >= 1 && allFinished && not stillCreating then + pure unit + else do + when (attempts `mod` 10 == 0) do + Console.log $ "Waiting for matrix jobs: " <> show finishedCount <> "/" <> show totalCount <> " finished" + liftAff $ Aff.delay (Milliseconds 1000.0) + go (attempts - 1) totalCount + +-- | Check if a job is a matrix job for the given package. +isMatrixJobFor :: PackageFixture -> Job -> Boolean +isMatrixJobFor pkg = case _ of + MatrixJob { packageName, packageVersion } -> + packageName == pkg.name && packageVersion == pkg.version + _ -> false + +-- | Read and parse the metadata file for a package from the server's scratch clone. 
+readMetadata :: PackageName -> E2E Metadata +readMetadata packageName = do + { stateDir } <- ask + let metadataPath = Path.concat [ stateDir, "scratch", "registry", "metadata", PackageName.print packageName <> ".json" ] + liftAff (readJsonFile Metadata.codec metadataPath) >>= case _ of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to read metadata for " <> PackageName.print packageName <> ": " <> err + Right metadata -> pure metadata + +-- | Read and parse the manifest index entry for a package from the server's scratch clone. +readManifestIndexEntry :: PackageName -> E2E (Array Manifest) +readManifestIndexEntry packageName = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath packageName >>= case _ of + Left err -> Aff.throwError $ Aff.error $ "Failed to read manifest index for " <> PackageName.print packageName <> ": " <> err + Right manifests -> pure $ Array.fromFoldable manifests + +-- | Check if a specific package version exists in the manifest index. +manifestIndexEntryExists :: PackageFixture -> E2E Boolean +manifestIndexEntryExists pkg = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath pkg.name >>= case _ of + Left _ -> pure false + Right manifests -> pure $ Array.any (\(Manifest m) -> m.version == pkg.version) $ Array.fromFoldable manifests + +-- | Assert that both git repos (registry and registry-index) have no uncommitted changes. +assertReposClean :: E2E Unit +assertReposClean = do + { stateDir } <- ask + let scratchRegistry = Path.concat [ stateDir, "scratch", "registry" ] + let scratchRegistryIndex = Path.concat [ stateDir, "scratch", "registry-index" ] + registryStatus <- gitStatus scratchRegistry + registryIndexStatus <- gitStatus scratchRegistryIndex + unless (isCleanGitStatus registryStatus) do + Assert.fail $ "registry repo has uncommitted changes:\n" <> registryStatus + unless (isCleanGitStatus registryIndexStatus) do + Assert.fail $ "registry-index repo has uncommitted changes:\n" <> registryIndexStatus + +-- | Check if a storage upload (PUT) occurred for a specific package. +hasStorageUpload :: PackageFixture -> E2E Boolean +hasStorageUpload pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + putRequests = WireMock.filterByMethod "PUT" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) putRequests + +-- | Check if a storage delete (DELETE) occurred for a specific package. +hasStorageDelete :: PackageFixture -> E2E Boolean +hasStorageDelete pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + deleteRequests = WireMock.filterByMethod "DELETE" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) deleteRequests diff --git a/app-e2e/src/Test/E2E/Support/Fixtures.purs b/app-e2e/src/Test/E2E/Support/Fixtures.purs new file mode 100644 index 000000000..7fe0b556a --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Fixtures.purs @@ -0,0 +1,286 @@ +-- | Test fixtures for E2E tests. +-- | Contains package operation data used across multiple test suites. 
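+-- |
+-- | The signing helpers (`signUnpublish`, `signTransfer`, `signPackageSet`)
+-- | all expect the base64-decoded PACCHETTIBOTTI_ED25519 private key from
+-- | the environment.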
+module Test.E2E.Support.Fixtures + ( PackageFixture + , effect + , console + , prelude + , effectPublishData + , effectPublishDataDifferentLocation + , consolePublishData + , failingTransferData + , nonexistentTransferData + , trusteeAuthenticatedData + , effectUnpublishData + , effectTransferData + , nonexistentUnpublishData + , preludeUnpublishData + , signUnpublish + , signTransfer + , packageSetAddRequest + , packageSetCompilerChangeRequest + , packageSetRemoveRequest + , signPackageSet + , invalidJsonIssueEvent + ) where + +import Registry.App.Prelude + +import Data.Codec.JSON as CJ +import Data.Map as Map +import JSON as JSON +import Registry.Location (Location(..)) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetOperation(..), PackageSetUpdateRequest, TransferData, UnpublishData) +import Registry.Operation as Operation +import Registry.PackageName (PackageName) +import Registry.SSH as SSH +import Registry.Test.Utils as Utils +import Registry.Version (Version) + +type PackageFixture = { name :: PackageName, version :: Version } + +-- | effect@4.0.0 fixture package +effect :: PackageFixture +effect = { name: Utils.unsafePackageName "effect", version: Utils.unsafeVersion "4.0.0" } + +-- | console@6.1.0 fixture package +console :: PackageFixture +console = { name: Utils.unsafePackageName "console", version: Utils.unsafeVersion "6.1.0" } + +-- | prelude@6.0.1 fixture package +prelude :: PackageFixture +prelude = { name: Utils.unsafePackageName "prelude", version: Utils.unsafeVersion "6.0.1" } + +-- | Standard publish data for effect@4.0.0, used by E2E tests. +-- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 +effectPublishData :: Operation.PublishData +effectPublishData = + { name: effect.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-effect" + , subdir: Nothing + } + , ref: "v4.0.0" + , compiler: Utils.unsafeVersion "0.15.10" + , resolutions: Nothing + , version: effect.version + } + +-- | Publish data for effect@99.0.0 with a DIFFERENT location. +-- | Uses a non-existent version to avoid duplicate job detection, +-- | but still targets an existing package to test location conflicts. +effectPublishDataDifferentLocation :: Operation.PublishData +effectPublishDataDifferentLocation = + effectPublishData + { location = Just $ GitHub + { owner: "someone-else" + , repo: "purescript-effect" + , subdir: Nothing + } + , version = Utils.unsafeVersion "99.0.0" + , ref = "v99.0.0" + } + +-- | Publish data for console@6.1.0, used for concurrency tests. +-- | Console depends on effect ^4.0.0 and prelude ^6.0.0. +-- | This matches the fixtures in app/fixtures/github-packages/console-6.1.0 +consolePublishData :: Operation.PublishData +consolePublishData = + { name: console.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-console" + , subdir: Nothing + } + , ref: "v6.1.0" + , compiler: Utils.unsafeVersion "0.15.10" + , resolutions: Nothing + , version: console.version + } + +-- | Unpublish data for effect@4.0.0, used for publish-then-unpublish tests. +effectUnpublishData :: UnpublishData +effectUnpublishData = + { name: effect.name + , version: effect.version + , reason: "Testing unpublish flow" + } + +-- | Transfer data for effect, used for transfer tests. +-- | Transfers effect to a different GitHub owner. 
+effectTransferData :: TransferData +effectTransferData = + { name: effect.name + , newLocation: GitHub + { owner: "new-owner" + , repo: "purescript-effect" + , subdir: Nothing + } + } + +-- | Unpublish data for a nonexistent package. +-- | Used to test error handling when unpublishing an unknown package. +nonexistentUnpublishData :: UnpublishData +nonexistentUnpublishData = + { name: Utils.unsafePackageName "nonexistent-package" + , version: Utils.unsafeVersion "1.0.0" + , reason: "Testing error handling for unknown package" + } + +-- | Unpublish data for prelude@6.0.1. +-- | This package was published long ago (in fixtures), so it should fail +-- | the 48-hour time limit check. +preludeUnpublishData :: UnpublishData +preludeUnpublishData = + { name: prelude.name + , version: prelude.version + , reason: "Testing 48-hour limit enforcement" + } + +-- | Sign an unpublish operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signUnpublish :: String -> UnpublishData -> Either String AuthenticatedData +signUnpublish privateKey unpublishData = do + let rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishData + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private rawPayload + pure + { payload: Unpublish unpublishData + , rawPayload + , signature + } + +-- | Authenticated transfer data for prelude, which has no owners in fixtures. +-- | Used to test failure scenarios in E2E tests - will fail because no owners +-- | are listed to verify the signature against. +failingTransferData :: AuthenticatedData +failingTransferData = do + let + transferPayload :: TransferData + transferPayload = + { name: prelude.name + , newLocation: GitHub + { owner: "someone-else" + , repo: "purescript-prelude" + , subdir: Nothing + } + } + + rawPayload :: String + rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload + + { payload: Transfer transferPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Authenticated data with an intentionally invalid signature. +-- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. +-- | If re-signing works, the job succeeds; if not, signature verification fails. +-- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. +trusteeAuthenticatedData :: AuthenticatedData +trusteeAuthenticatedData = do + let + unpublishPayload :: UnpublishData + unpublishPayload = + { name: prelude.name + , version: prelude.version + , reason: "Testing trustee re-signing" + } + rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload + + { payload: Unpublish unpublishPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Transfer data for a nonexistent package. +-- | Used to test error handling when transferring an unknown package. +-- | Job should fail with "has not been published before" error. +nonexistentTransferData :: TransferData +nonexistentTransferData = + { name: Utils.unsafePackageName "nonexistent-package" + , newLocation: GitHub + { owner: "someone" + , repo: "purescript-nonexistent" + , subdir: Nothing + } + } + +-- | Sign a transfer operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. 
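+-- |
+-- | For example (a hypothetical sketch - `Client.transfer` is assumed to be
+-- | the corresponding Test.E2E.Support.Client helper):
+-- |
+-- | ```purescript
+-- | case signTransfer privateKey effectTransferData of
+-- |   Left err -> Assert.fail $ "Could not sign transfer: " <> err
+-- |   Right auth -> void $ Client.transfer auth
+-- | ```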
+signTransfer :: String -> TransferData -> Either String AuthenticatedData +signTransfer privateKey transferData = do + let rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferData + private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + let signature = SSH.sign private rawPayload + pure + { payload: Transfer transferData + , rawPayload + , signature + } + +-- | type-equality@4.0.1 fixture package (exists in registry-index but not in initial package set) +typeEquality :: PackageFixture +typeEquality = { name: Utils.unsafePackageName "type-equality", version: Utils.unsafeVersion "4.0.1" } + +-- | Package set request to add type-equality@4.0.1. +-- | This is an unauthenticated request (no signature) since adding packages +-- | doesn't require trustee authentication. +packageSetAddRequest :: PackageSetUpdateRequest +packageSetAddRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton typeEquality.name (Just typeEquality.version) + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to change the compiler version. +-- | This requires authentication (pacchettibotti signature) since changing +-- | the compiler is a restricted operation. +packageSetCompilerChangeRequest :: PackageSetUpdateRequest +packageSetCompilerChangeRequest = + let + payload = PackageSetUpdate + { compiler: Just (Utils.unsafeVersion "0.15.11") + , packages: Map.empty + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to remove a package. +-- | This requires authentication (pacchettibotti signature) since removing +-- | packages is a restricted operation. +packageSetRemoveRequest :: PackageSetUpdateRequest +packageSetRemoveRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton effect.name Nothing + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Sign a package set update request using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signPackageSet :: String -> PackageSetUpdateRequest -> Either String PackageSetUpdateRequest +signPackageSet privateKey request = do + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private request.rawPayload + pure request { signature = Just signature } + +-- | GitHub issue event with invalid JSON in the body. +-- | Used to test that malformed JSON is handled gracefully with an error comment. +-- | Note: The inner JSON has a trailing comma (`"v1.0.0",}`) which is intentionally +-- | malformed to trigger a parse error. +invalidJsonIssueEvent :: String +invalidJsonIssueEvent = + """{"sender":{"login":"packaging-team-user"},"issue":{"number":101,"body":"```json\n{\"name\": \"effect\", \"ref\": \"v1.0.0\",}\n```"}}""" diff --git a/app-e2e/src/Test/E2E/Support/Types.purs b/app-e2e/src/Test/E2E/Support/Types.purs new file mode 100644 index 000000000..2e4429057 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Types.purs @@ -0,0 +1,48 @@ +-- | Core types for E2E tests. +-- | +-- | This module defines the shared environment and monad types used by all +-- | E2E test helpers. 
It's kept separate to avoid circular dependencies +-- | between Env, Client, and WireMock modules. +module Test.E2E.Support.Types + ( TestEnv + , ClientConfig + , WireMockConfig + , E2E + , E2ESpec + ) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ReaderT) +import Effect.Aff (Milliseconds) +import Test.Spec (SpecT) + +-- | Configuration for the E2E test client +type ClientConfig = + { baseUrl :: String + , pollInterval :: Milliseconds + , maxPollAttempts :: Int + } + +-- | Configuration for connecting to WireMock admin API +type WireMockConfig = + { baseUrl :: String + } + +-- | The shared test environment available to all E2E helpers. +-- | Constructed once at startup from environment variables. +type TestEnv = + { clientConfig :: ClientConfig + , githubWireMock :: WireMockConfig + , storageWireMock :: WireMockConfig + , stateDir :: String + , privateKey :: String + } + +-- | The base monad for E2E test helpers. +-- | All Client, Env, and WireMock functions operate in this monad. +type E2E = ReaderT TestEnv Aff + +-- | The spec type for E2E tests. +-- | Test modules export `spec :: E2ESpec` instead of `spec :: Spec Unit`. +type E2ESpec = SpecT E2E Unit Identity Unit diff --git a/app-e2e/src/Test/E2E/Support/WireMock.purs b/app-e2e/src/Test/E2E/Support/WireMock.purs new file mode 100644 index 000000000..4e3789fca --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/WireMock.purs @@ -0,0 +1,173 @@ +-- | WireMock admin API client for verifying HTTP requests in E2E tests. +-- | +-- | This module provides helpers to query WireMock's request journal, allowing +-- | tests to assert on what HTTP requests were made to mock services. +-- | +-- | Also provides helpers for managing WireMock scenarios (stateful mocking). +-- | Scenarios allow responses to change based on state transitions - e.g., a +-- | package tarball returns 404 until it's been "uploaded" via PUT, after which +-- | it returns 200. +module Test.E2E.Support.WireMock + ( WireMockRequest + , WireMockError(..) 
+ , getGithubRequests + , getStorageRequests + , clearGithubRequests + , clearStorageRequests + , resetStorageScenarios + , filterByMethod + , filterByUrlContaining + , printWireMockError + , formatRequests + , failWithRequests + ) where + +import Registry.App.Prelude + +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Monad.Error.Class (class MonadThrow, throwError) +import Control.Monad.Except (runExceptT) +import Control.Monad.Reader (ask) +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.Int as Int +import Data.String as String +import Effect.Aff as Aff +import Effect.Exception (Error) +import Effect.Exception as Effect.Exception +import Fetch (Method(..)) +import Fetch as Fetch +import JSON as JSON +import Test.E2E.Support.Types (E2E) + +-- | A recorded request from WireMock's journal +type WireMockRequest = + { method :: String + , url :: String + , body :: Maybe String + } + +-- | Error type for WireMock operations +data WireMockError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + +printWireMockError :: WireMockError -> String +printWireMockError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + +-- | Codec for a single request entry in WireMock's response +requestCodec :: CJ.Codec WireMockRequest +requestCodec = CJ.named "WireMockRequest" $ CJ.Record.object + { method: CJ.string + , url: CJ.string + , body: CJ.Record.optional CJ.string + } + +-- | Codec for the nested request object in WireMock's journal response +journalEntryCodec :: CJ.Codec { request :: WireMockRequest } +journalEntryCodec = CJ.named "JournalEntry" $ CJ.Record.object + { request: requestCodec + } + +-- | Codec for the full journal response +journalCodec :: CJ.Codec { requests :: Array { request :: WireMockRequest } } +journalCodec = CJ.named "Journal" $ CJ.Record.object + { requests: CJ.array journalEntryCodec + } + +-- | Parse JSON response body using a codec +parseResponse :: forall a. 
CJ.Codec a -> String -> Either String a +parseResponse codec body = do + json <- lmap (append "JSON parse error: ") $ JSON.parse body + lmap CJ.DecodeError.print $ CJ.decode codec json + +-- | Get all recorded requests from a WireMock instance +getRequestsFrom :: String -> Aff (Either WireMockError (Array WireMockRequest)) +getRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: GET } + body <- lift response.text + if response.status == 200 then + case parseResponse journalCodec body of + Left err -> throwError $ ParseError { msg: err, raw: body } + Right journal -> pure $ map _.request journal.requests + else + throwError $ HttpError { status: response.status, body } + +-- | Clear all recorded requests from a WireMock instance +clearRequestsFrom :: String -> Aff (Either WireMockError Unit) +clearRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: DELETE } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Reset all scenarios to initial state on a WireMock instance +resetScenariosOn :: String -> Aff (Either WireMockError Unit) +resetScenariosOn baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/scenarios/reset") { method: POST } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Helper to run a WireMock operation and throw on error +orFail :: forall a. String -> Either WireMockError a -> Aff a +orFail context = case _ of + Left err -> Aff.throwError $ Aff.error $ context <> ": " <> printWireMockError err + Right a -> pure a + +-- | Get captured requests from the GitHub WireMock. +getGithubRequests :: E2E (Array WireMockRequest) +getGithubRequests = do + { githubWireMock } <- ask + liftAff $ getRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to get GitHub WireMock requests" + +-- | Get captured requests from the storage WireMock (S3, Pursuit). +getStorageRequests :: E2E (Array WireMockRequest) +getStorageRequests = do + { storageWireMock } <- ask + liftAff $ getRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to get storage WireMock requests" + +-- | Clear the GitHub WireMock request journal. +clearGithubRequests :: E2E Unit +clearGithubRequests = do + { githubWireMock } <- ask + liftAff $ clearRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to clear GitHub WireMock requests" + +-- | Clear the storage WireMock request journal. +clearStorageRequests :: E2E Unit +clearStorageRequests = do + { storageWireMock } <- ask + liftAff $ clearRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to clear storage WireMock requests" + +-- | Reset all storage WireMock scenarios to their initial state. 
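+-- | After a reset, stateful stubs serve their initial responses again - e.g. a
+-- | package tarball that was "uploaded" via PUT goes back to returning 404, as
+-- | described in the module documentation above.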
+resetStorageScenarios :: E2E Unit +resetStorageScenarios = do + { storageWireMock } <- ask + liftAff $ resetScenariosOn storageWireMock.baseUrl >>= orFail "Failed to reset storage WireMock scenarios" + +-- | Filter requests by HTTP method +filterByMethod :: String -> Array WireMockRequest -> Array WireMockRequest +filterByMethod method = Array.filter (\r -> r.method == method) + +-- | Filter requests by URL substring +filterByUrlContaining :: String -> Array WireMockRequest -> Array WireMockRequest +filterByUrlContaining substring = Array.filter (\r -> String.contains (String.Pattern substring) r.url) + +-- | Format an array of requests for debugging output +formatRequests :: Array WireMockRequest -> String +formatRequests = String.joinWith "\n" <<< map formatRequest + where + formatRequest req = req.method <> " " <> req.url <> case req.body of + Nothing -> "" + Just body -> "\n Body: " <> body + +-- | Fail a test with a message and debug info about captured requests. +failWithRequests :: forall m a. MonadThrow Error m => String -> Array WireMockRequest -> m a +failWithRequests msg requests = throwError $ Effect.Exception.error $ String.joinWith "\n" [ msg, "\nCaptured requests:", formatRequests requests ] diff --git a/app-e2e/src/Test/E2E/Workflow.purs b/app-e2e/src/Test/E2E/Workflow.purs new file mode 100644 index 000000000..a03be0562 --- /dev/null +++ b/app-e2e/src/Test/E2E/Workflow.purs @@ -0,0 +1,88 @@ +-- | End-to-end tests for multi-operation workflows. +-- | +-- | These tests verify complex scenarios involving multiple operations, +-- | specifically dependency state validation across publish/unpublish sequences. +module Test.E2E.Workflow (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Map as Map +import Data.String as String +import Registry.API.V1 as V1 +import Registry.Metadata (Metadata(..)) +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Dependency and unpublish interactions" do + Spec.it "publishing a package fails when its dependency was unpublished" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + _ <- Env.pollJobOrFail unpublishJobId + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + unless deleteOccurred do + Assert.fail "Expected tarball delete from S3 for effect@4.0.0" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + when manifestExists do + Assert.fail "Expected effect@4.0.0 to be removed from manifest index after unpublish" + + WireMock.clearStorageRequests + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + consoleJob <- Env.pollJobExpectFailure consoleJobId + + let + logs = (V1.jobInfo consoleJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "Could not produce valid dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected dependency resolution error, got:\n" <> String.joinWith "\n" logMessages + + consoleUploadOccurred <- Env.hasStorageUpload Fixtures.console + when consoleUploadOccurred do + Assert.fail "Expected no tarball upload 
for console@6.1.0 after failed publish" + + Spec.it "unpublishing a package fails when dependents exist in manifest index" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + _ <- Env.pollJobOrFail consoleJobId + + WireMock.clearStorageRequests + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + unpublishJob <- Env.pollJobExpectFailure unpublishJobId + + let + logs = (V1.jobInfo unpublishJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "unsatisfied dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected unsatisfied dependencies error, got:\n" <> + String.joinWith "\n" logMessages + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + when deleteOccurred do + Assert.fail "Expected no tarball delete for effect@4.0.0 after failed unpublish" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + unless manifestExists do + Assert.fail "Expected effect@4.0.0 to still exist in manifest index after failed unpublish" + + Metadata effectMeta <- Env.readMetadata Fixtures.effect.name + unless (isJust $ Map.lookup Fixtures.effect.version effectMeta.published) do + Assert.fail "Expected effect@4.0.0 to still be in published metadata after failed unpublish" diff --git a/app-e2e/src/Test/Main.purs b/app-e2e/src/Test/Main.purs new file mode 100644 index 000000000..a5b18d43c --- /dev/null +++ b/app-e2e/src/Test/Main.purs @@ -0,0 +1,40 @@ +module Test.E2E.Main (main) where + +import Registry.App.Prelude + +import Data.Time.Duration (Milliseconds(..)) +import Test.E2E.Endpoint.Jobs as Jobs +import Test.E2E.Endpoint.PackageSets as PackageSets +import Test.E2E.Endpoint.Publish as Publish +import Test.E2E.Endpoint.Transfer as Transfer +import Test.E2E.Endpoint.Unpublish as Unpublish +import Test.E2E.GitHubIssue as GitHubIssue +import Test.E2E.Support.Env (assertReposClean, mkTestEnv, resetTestState, runE2E) +import Test.E2E.Workflow as Workflow +import Test.Spec (hoistSpec) +import Test.Spec as Spec +import Test.Spec.Reporter.Console (consoleReporter) +import Test.Spec.Runner.Node (runSpecAndExitProcess') +import Test.Spec.Runner.Node.Config as Cfg + +main :: Effect Unit +main = do + env <- mkTestEnv + runSpecAndExitProcess' config [ consoleReporter ] $ hoistE2E env do + Spec.before_ resetTestState $ Spec.after_ assertReposClean $ Spec.describe "E2E Tests" do + Spec.describe "Endpoints" do + Spec.describe "Publish" Publish.spec + Spec.describe "Jobs" Jobs.spec + Spec.describe "Unpublish" Unpublish.spec + Spec.describe "Transfer" Transfer.spec + Spec.describe "PackageSets" PackageSets.spec + + Spec.describe "Workflows" do + Spec.describe "GitHubIssue" GitHubIssue.spec + Spec.describe "Multi-operation" Workflow.spec + where + hoistE2E env = hoistSpec identity (\_ m -> runE2E env m) + config = + { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 60_000.0 } + , parseCLIOptions: false + } diff --git a/app/fixtures/addition_issue_created.json b/app/fixtures/addition_issue_created.json index d0b205555..b0aa93e6c 100644 --- a/app/fixtures/addition_issue_created.json +++ b/app/fixtures/addition_issue_created.json @@ -5,7 +5,7 @@ "assignee": null, "assignees": [], "author_association": "CONTRIBUTOR", - "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": 
\"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", + "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"version\": \"5.0.0\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", "closed_at": null, "comments": 0, "comments_url": "https://api.github.com/repos/purescript/registry/issues/149/comments", diff --git a/app/fixtures/github-packages/console-6.1.0/LICENSE b/app/fixtures/github-packages/console-6.1.0/LICENSE new file mode 100644 index 000000000..311379c1e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/LICENSE @@ -0,0 +1,26 @@ +Copyright 2018 PureScript + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/app/fixtures/github-packages/console-6.1.0/bower.json b/app/fixtures/github-packages/console-6.1.0/bower.json new file mode 100644 index 000000000..da93c7f6e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/bower.json @@ -0,0 +1,22 @@ +{ + "name": "purescript-console", + "homepage": "https://github.com/purescript/purescript-console", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-console.git" + }, + "ignore": [ + "**/.*", + "bower_components", + "node_modules", + "output", + "test", + "bower.json", + "package.json" + ], + "dependencies": { + "purescript-effect": "^4.0.0", + "purescript-prelude": "^6.0.0" + } +} diff --git a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js new file mode 100644 index 000000000..432a4241b --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js @@ -0,0 +1,9 @@ +export const log = s => () => console.log(s); +export const warn = s => () => console.warn(s); +export const error = s => () => console.error(s); +export const info = s => () => console.info(s); +export const debug = s => () => console.debug(s); +export const time = s => () => console.time(s); +export const timeLog = s => () => console.timeLog(s); +export const timeEnd = s => () => console.timeEnd(s); +export const clear = () => console.clear(); diff --git a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs new file mode 100644 index 000000000..364ee2b1c --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs @@ -0,0 +1,46 @@ +-- | This module provides functions for outputting strings to the console. +module Effect.Console + ( log + , logShow + , warn + , warnShow + , error + , errorShow + , info + , infoShow + , debug + , debugShow + , time + , timeLog + , timeEnd + , clear + ) where + +import Prelude + +import Effect (Effect) + +foreign import log :: String -> Effect Unit +foreign import warn :: String -> Effect Unit +foreign import error :: String -> Effect Unit +foreign import info :: String -> Effect Unit +foreign import debug :: String -> Effect Unit +foreign import time :: String -> Effect Unit +foreign import timeLog :: String -> Effect Unit +foreign import timeEnd :: String -> Effect Unit +foreign import clear :: Effect Unit + +logShow :: forall a. Show a => a -> Effect Unit +logShow = log <<< show + +warnShow :: forall a. Show a => a -> Effect Unit +warnShow = warn <<< show + +errorShow :: forall a. Show a => a -> Effect Unit +errorShow = error <<< show + +infoShow :: forall a. Show a => a -> Effect Unit +infoShow = info <<< show + +debugShow :: forall a. 
Show a => a -> Effect Unit +debugShow = debug <<< show diff --git a/app/fixtures/package-sets/latest-compatible-sets.json b/app/fixtures/package-sets/latest-compatible-sets.json index ceba8dd7a..5cdbbb9c2 100644 --- a/app/fixtures/package-sets/latest-compatible-sets.json +++ b/app/fixtures/package-sets/latest-compatible-sets.json @@ -1,3 +1,3 @@ { - "0.15.9": "psc-0.15.9-20230105" + "0.15.10": "psc-0.15.10-20230105" } diff --git a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz index 2ef880dff..c06e9b276 100644 Binary files a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz and b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz differ diff --git a/app/fixtures/registry-index/pr/el/prelude b/app/fixtures/registry-index/pr/el/prelude index e6c7d0759..8a01e7d6c 100644 --- a/app/fixtures/registry-index/pr/el/prelude +++ b/app/fixtures/registry-index/pr/el/prelude @@ -1 +1 @@ -{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"description":"The PureScript Prelude","dependencies":{}} +{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"ref":"v6.0.1","description":"The PureScript Prelude","dependencies":{}} diff --git a/app/fixtures/registry-index/ty/pe/type-equality b/app/fixtures/registry-index/ty/pe/type-equality index 8fbce8f14..8d5fc1d6e 100644 --- a/app/fixtures/registry-index/ty/pe/type-equality +++ b/app/fixtures/registry-index/ty/pe/type-equality @@ -1 +1 @@ -{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"dependencies":{}} \ No newline at end of file +{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"ref":"v4.0.1","dependencies":{}} \ No newline at end of file diff --git a/app/fixtures/registry-storage/console-6.1.0.tar.gz b/app/fixtures/registry-storage/console-6.1.0.tar.gz new file mode 100644 index 000000000..52c94f426 Binary files /dev/null and b/app/fixtures/registry-storage/console-6.1.0.tar.gz differ diff --git a/app/fixtures/registry-storage/effect-4.0.0.tar.gz b/app/fixtures/registry-storage/effect-4.0.0.tar.gz new file mode 100644 index 000000000..e86537b25 Binary files /dev/null and b/app/fixtures/registry-storage/effect-4.0.0.tar.gz differ diff --git a/app/fixtures/registry-storage/prelude-6.0.1.tar.gz b/app/fixtures/registry-storage/prelude-6.0.1.tar.gz index 1df21a580..87d64d2c1 100644 Binary files a/app/fixtures/registry-storage/prelude-6.0.1.tar.gz and b/app/fixtures/registry-storage/prelude-6.0.1.tar.gz differ diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index d25e9a0f6..8c14057ad 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -7,8 +7,8 @@ "6.0.1": { "bytes": 31129, "compilers": [ - "0.15.9", - "0.15.10" + "0.15.10", + "0.15.11" ], "hash": "sha256-EbbFV0J5xV0WammfgCv6HRFSK7Zd803kkofE8aEoam0=", "publishedTime": "2022-08-18T20:04:00.000Z", diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index b57b9fd09..e51b52614 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -7,8 +7,8 @@ "4.0.1": { "bytes": 
2179, "compilers": [ - "0.15.9", - "0.15.10" + "0.15.10", + "0.15.11" ], "hash": "sha256-3lDTQdbTM6/0oxav/0V8nW9fWn3lsSM3b2XxwreDxqs=", "publishedTime": "2022-04-27T18:00:18.000Z", diff --git a/app/fixtures/registry/package-sets/0.0.1.json b/app/fixtures/registry/package-sets/0.0.1.json new file mode 100644 index 000000000..cc82ad7d4 --- /dev/null +++ b/app/fixtures/registry/package-sets/0.0.1.json @@ -0,0 +1,8 @@ +{ + "version": "0.0.1", + "compiler": "0.15.10", + "published": "2024-01-01", + "packages": { + "prelude": "6.0.1" + } +} diff --git a/app/fixtures/update_issue_comment.json b/app/fixtures/update_issue_comment.json index 5400a7c2e..c5673c4da 100644 --- a/app/fixtures/update_issue_comment.json +++ b/app/fixtures/update_issue_comment.json @@ -2,7 +2,7 @@ "action": "created", "comment": { "author_association": "MEMBER", - "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", + "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"version\": \"1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", "created_at": "2021-03-09T02:03:56Z", "html_url": "https://github.com/purescript/registry/issues/43#issuecomment-793265839", "id": 793265839, diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 8d78d7ca7..8ebc66ba5 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -9,11 +9,10 @@ module Registry.App.API , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions - , installBuildPlan + , getPacchettiBotti , packageSetUpdate , packagingTeam , publish - , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -31,7 +30,7 @@ import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Newtype (over, unwrap) +import Data.Newtype (over) import Data.Number.Format as Number.Format import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet @@ -52,6 +51,7 @@ import Parsing as Parsing import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String +import Registry.API.V1 (PackageSetJobData) import Registry.App.Auth as Auth import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs @@ -61,12 +61,9 @@ import Registry.App.Effect.Archive (ARCHIVE) import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (class FsEncodable, Cache) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.PackageSets (Change(..), PACKAGE_SETS) @@ -85,18 +82,18 @@ import Registry.App.Legacy.Manifest (LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import 
Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, ignoredFiles, ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob -import Registry.Foreign.Octokit (IssueNumber(..), Team) -import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Octokit (Team) import Registry.Foreign.Tmp as Tmp import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest import Registry.Metadata as Metadata -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetUpdateData, PublishData) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData) import Registry.Operation as Operation import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as Operation.Validation @@ -116,18 +113,17 @@ import Run.Except (EXCEPT) import Run.Except as Except import Safe.Coerce as Safe.Coerce -type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +-- | Effect row for package set updates. Authentication is done at the API +-- | boundary, so we don't need GITHUB or GITHUB_EVENT_ENV effects here. +type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + LOG + EXCEPT String + r) --- | Process a package set update. Package set updates are only processed via --- | GitHub and not the HTTP API, so they require access to the GitHub env. -packageSetUpdate :: forall r. PackageSetUpdateData -> Run (PackageSetUpdateEffects + r) Unit -packageSetUpdate payload = do - { issue, username } <- Env.askGitHubEvent +-- | Process a package set update from a queued job. Authentication has already +-- | been verified at the API boundary, so we don't need to check team membership. +packageSetUpdate :: forall r. PackageSetJobData -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate details = do + let Operation.PackageSetUpdate payload = details.payload - Log.debug $ Array.fold - [ "Package set update created from issue " <> show (un IssueNumber issue) <> " by user " <> username - , " with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload - ] + Log.debug $ "Package set update job starting with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload latestPackageSet <- Registry.readLatestPackageSet >>= case _ of Nothing -> do @@ -139,34 +135,8 @@ packageSetUpdate payload = do let prevCompiler = (un PackageSet latestPackageSet).compiler let prevPackages = (un PackageSet latestPackageSet).packages - Log.debug "Determining whether authentication is required (the compiler changed or packages were removed)..." - let didChangeCompiler = maybe false (not <<< eq prevCompiler) payload.compiler - let didRemovePackages = any isNothing payload.packages - - -- Changing the compiler version or removing packages are both restricted - -- to only the packaging team. We throw here if this is an authenticated - -- operation and we can't verify they are a member of the packaging team. - when (didChangeCompiler || didRemovePackages) do - Log.debug "Authentication is required. 
Verifying the user can take authenticated actions..." - GitHub.listTeamMembers packagingTeam >>= case _ of - Left githubError -> do - Log.error $ "Failed to retrieve the members of the packaging team from GitHub: " <> Octokit.printGitHubError githubError - Except.throw $ Array.fold - [ "This package set update changes the compiler version or removes a " - , "package from the package set. Only members of the " - , "@purescript/packaging team can take these actions, but we were " - , "unable to authenticate your account." - ] - Right members -> do - unless (Array.elem username members) do - Log.error $ "Username " <> username <> " is not a member of the packaging team, aborting..." - Except.throw $ Array.fold - [ "This package set update changes the compiler version or " - , "removes a package from the package set. Only members of the " - , "@purescript/packaging team can take these actions, but your " - , "username is not a member of the packaging team." - ] - Log.debug $ "Authentication verified for package set update by user " <> username + -- Note: authentication for restricted operations (compiler change, package removal) + -- is handled at the API boundary in the Router, not here. -- The compiler version cannot be downgraded. for_ payload.compiler \version -> when (version < prevCompiler) do @@ -224,18 +194,18 @@ packageSetUpdate payload = do Except.throw "No packages in the suggested batch can be processed (all failed validation checks) and the compiler version was not upgraded, so there is no upgrade to perform." let changeSet = candidates.accepted <#> maybe Remove Update - Comment.comment "Attempting to build package set update." + Log.notice "Attempting to build package set update." PackageSets.upgradeAtomic latestPackageSet (fromMaybe prevCompiler payload.compiler) changeSet >>= case _ of Left error -> Except.throw $ "The package set produced from this suggested update does not compile:\n\n" <> error Right packageSet -> do let commitMessage = PackageSets.commitMessage latestPackageSet changeSet (un PackageSet packageSet).version Registry.writePackageSet packageSet commitMessage - Comment.comment "Built and released a new package set! Now mirroring to the package-sets repo..." + Log.notice "Built and released a new package set! Now mirroring to the package-sets repo..." Registry.mirrorPackageSet packageSet - Comment.comment "Mirrored a new legacy package set." + Log.notice "Mirrored a new legacy package set." -type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + LOG + EXCEPT String + AFF + EFFECT + r) -- | Run an authenticated package operation, ie. an unpublish or a transfer. authenticated :: forall r. AuthenticatedData -> Run (AuthenticatedEffects + r) Unit @@ -292,10 +262,14 @@ authenticated auth = case auth.payload of { published = Map.delete payload.version prev.published , unpublished = Map.insert payload.version unpublished prev.unpublished } + -- Delete the manifest entry first so ManifestIndex.delete can fail if other + -- packages still depend on this version. This way, we detect dependency + -- violations before performing any irreversible side effects like deleting + -- the tarball from storage. 
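+      -- The multi-operation E2E tests in Test.E2E.Workflow assert this exact
+      -- ordering: a failed unpublish must leave the tarball in storage and the
+      -- manifest index entry intact.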
+ Registry.deleteManifest payload.name payload.version Storage.delete payload.name payload.version Registry.writeMetadata payload.name updated - Registry.deleteManifest payload.name payload.version - Comment.comment $ "Unpublished " <> formatted <> "!" + Log.notice $ "Unpublished " <> formatted <> "!" Transfer payload -> do Log.debug $ "Processing authorized transfer operation with payload: " <> stringifyJson Operation.authenticatedCodec auth @@ -326,11 +300,11 @@ authenticated auth = case auth.payload of Log.debug $ "Successfully authenticated ownership of " <> PackageName.print payload.name <> ", transferring..." let updated = metadata # over Metadata _ { location = payload.newLocation } Registry.writeMetadata payload.name updated - Comment.comment "Successfully transferred your package!" + Log.notice "Successfully transferred your package!" Registry.mirrorLegacyRegistry payload.name payload.newLocation - Comment.comment "Mirrored registry operation to the legacy registry." + Log.notice "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + ARCHIVE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + ARCHIVE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be @@ -340,7 +314,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe { dependencies :: Map PackageName Range, version :: Version }) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -472,13 +446,13 @@ publish maybeLegacyIndex payload = do pure manifest else if hasSpagoYaml then do - Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." + Log.notice $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." SpagoYaml.readSpagoYaml packageSpagoYaml >>= case _ of Left readErr -> Except.throw $ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:\n" <> readErr - Right config -> case SpagoYaml.spagoYamlToManifest config of + Right config -> case SpagoYaml.spagoYamlToManifest payload.ref config of Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -487,7 +461,7 @@ publish maybeLegacyIndex payload = do pure manifest else do - Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." + Log.notice $ "Package source does not have a purs.json file. 
Creating one from your bower.json and/or spago.dhall files..." version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -502,8 +476,8 @@ publish maybeLegacyIndex payload = do ] Right legacyManifest -> do Log.debug $ "Successfully produced a legacy manifest from the package source." - let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest - Comment.comment $ Array.fold + let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location payload.ref legacyManifest + Log.notice $ Array.fold [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -578,22 +552,23 @@ publish maybeLegacyIndex payload = do ] Nothing | payload.compiler < Purs.minPursuitPublish -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish , ". Please try with a later compiler." ] + pure Nothing Nothing -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan verifiedResolutions installedResolutions + MatrixBuilder.installBuildPlan verifiedResolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -601,7 +576,7 @@ publish maybeLegacyIndex payload = do } case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." Right _ -> do @@ -617,22 +592,23 @@ publish maybeLegacyIndex payload = do Left publishErr -> Except.throw publishErr Right _ -> do FS.Extra.remove tmp - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure Nothing -- In this case the package version has not been published, so we proceed -- with ordinary publishing. Nothing -> do Log.info "Verifying the package build plan..." - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions - Comment.comment "Verifying unused and/or missing dependencies..." + Log.notice "Verifying unused and/or missing dependencies..." 
-- First we install the resolutions and call 'purs graph' to adjust the -- manifest as needed, but we defer compilation until after this check -- in case the package manifest and resolutions are adjusted. let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan validatedResolutions installedResolutions + MatrixBuilder.installBuildPlan validatedResolutions installedResolutions let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] @@ -715,7 +691,7 @@ publish maybeLegacyIndex payload = do -- Now that we have the package source contents we can verify we can compile -- the package with exactly what is going to be uploaded. - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Verifying package compiles using compiler " , Version.print payload.compiler , " and resolutions:\n" @@ -727,7 +703,7 @@ publish maybeLegacyIndex payload = do -- We clear the installation directory so that no old installed resolutions -- stick around. Run.liftAff $ FS.Extra.remove installedResolutions - installBuildPlan resolutions installedResolutions + MatrixBuilder.installBuildPlan resolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -736,11 +712,11 @@ publish maybeLegacyIndex payload = do case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error Right _ -> pure unit - Comment.comment "Package source is verified! Packaging tarball and uploading to the storage backend..." + Log.notice "Package source is verified! Packaging tarball and uploading to the storage backend..." let tarballName = packageDirname <> ".tar.gz" let tarballPath = Path.concat [ tmp, tarballName ] Tar.create { cwd: tmp, folderName: packageDirname } @@ -751,7 +727,7 @@ publish maybeLegacyIndex payload = do Operation.Validation.ExceedsMaximum maxPackageBytes -> Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." Operation.Validation.WarnPackageSize maxWarnBytes -> - Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." + Log.notice $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." -- If a package has under ~30 bytes it's about guaranteed that packaging the -- tarball failed. This can happen if the system running the API has a non- @@ -766,11 +742,11 @@ publish maybeLegacyIndex payload = do Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file." 
- let newPublishedVersion = { hash, ref: payload.ref, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes } + let newPublishedVersion = { hash, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes } let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + Log.notice "Successfully uploaded package to the registry! 🎉 🚀" -- We write to the registry index if possible. If this fails, the packaging -- team should manually insert the entry. @@ -778,7 +754,7 @@ publish maybeLegacyIndex payload = do Registry.writeManifest manifest Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry!" + Log.notice "Mirrored registry operation to the legacy registry!" Log.debug "Uploading package documentation to Pursuit" if payload.compiler >= Purs.minPursuitPublish then @@ -788,38 +764,45 @@ publish maybeLegacyIndex payload = do publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of Left publishErr -> do Log.error publishErr - Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Log.notice $ "Failed to publish package docs to Pursuit: " <> publishErr Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" , Version.print payload.compiler , "). If you want to publish documentation, please try again with a later compiler." ] - Comment.comment "Determining all valid compiler versions for this package..." - allCompilers <- PursVersions.pursVersions - { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of - Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } - Just try -> do - found <- findAllCompilers - { source: packageSource - , manifest - , compilers: try - } - pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + -- Note: this only runs for the Legacy Importer. In daily circumstances (i.e. + -- when running the server) this will be taken care of by followup jobs invoking + -- the MatrixBuilder for each compiler version + for_ maybeLegacyIndex \_idx -> do + Log.notice "Determining all valid compiler versions for this package..." 
+ allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } - unless (Map.isEmpty invalidCompilers) do - Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) - Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + Log.notice $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } + + Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.notice "Wrote completed metadata to the registry!" - Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp + pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -904,32 +887,30 @@ findAllCompilers . { source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult findAllCompilers { source, manifest, compilers } = do - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of Left solverErrors -> do Log.info $ "Failed to solve with compiler " <> Version.print target pure $ Left $ Tuple target (Left solverErrors) - Right (Tuple mbCompiler resolutions) -> do + Right (Tuple compiler resolutions) -> do Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions - case mbCompiler of - Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" 
- Just selected | selected /= target -> Except.throw $ Array.fold + when (compiler /= target) do + Except.throw $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" - , Version.print selected + , Version.print compiler , ") that differs from the target compiler (" , Version.print target , ")." ] - Just _ -> pure unit Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of Nothing -> do Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] FS.Extra.ensureDirectory installed - installBuildPlan resolutions installed + MatrixBuilder.installBuildPlan resolutions installed result <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } , version: Just target @@ -938,7 +919,7 @@ findAllCompilers { source, manifest, compilers } = do FS.Extra.remove workdir case result of Left err -> do - Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> MatrixBuilder.printCompilerFailure target err Right _ -> do Log.debug $ "Compilation succeeded with compiler " <> Version.print target Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } @@ -949,49 +930,6 @@ findAllCompilers { source, manifest, compilers } = do let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } -printCompilerFailure :: Version -> CompilerFailure -> String -printCompilerFailure compiler = case _ of - MissingCompiler -> Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - CompilationError errs -> String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - UnknownError err -> String.joinWith "\n" - [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" - , "```" - , err - , "```" - ] - --- | Install all dependencies indicated by the build plan to the specified --- | directory. Packages will be installed at 'dir/package-name-x.y.z'. -installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit -installBuildPlan resolutions dependenciesDir = do - Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir - -- We fetch every dependency at its resolved version, unpack the tarball, and - -- store the resulting source code in a specified directory for dependencies. - forWithIndex_ resolutions \name version -> do - let - -- This filename uses the format the directory name will have once - -- unpacked, ie. 
package-name-major.minor.patch
-      filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz"
-      filepath = Path.concat [ dependenciesDir, filename ]
-    Storage.download name version filepath
-    Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of
-      Left error -> do
-        Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error
-        Except.throw "Failed to unpack dependency tarball, cannot continue."
-      Right _ ->
-        Log.debug $ "Unpacked " <> filename
-    Run.liftAff $ FS.Aff.unlink filepath
-    Log.debug $ "Installed " <> formatPackageVersion name version
-
 -- | Parse the name and version from a path to a module installed in the standard
 -- | form: '<name>-<version>...'
 parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version }
@@ -1027,7 +965,7 @@ type PublishToPursuit =
 publishToPursuit
   :: forall r
    . PublishToPursuit
-  -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit)
+  -> Run (PURSUIT + LOG + AFF + EFFECT + r) (Either String Unit)
 publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do
   Log.debug "Generating a resolutions file"
   tmp <- Tmp.mkTmpDir
@@ -1062,7 +1000,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep
   publishJson <- case compilerOutput of
     Left error ->
-      Except.throw $ printCompilerFailure compiler error
+      Except.throw $ MatrixBuilder.printCompilerFailure compiler error
     Right publishResult -> do
       -- The output contains plenty of diagnostic lines, ie. "Compiling ..."
       -- but we only want the final JSON payload.
@@ -1209,13 +1147,6 @@ getPacchettiBotti = do
 packagingTeam :: Team
 packagingTeam = { org: "purescript", team: "packaging" }
 
-readCompilerIndex :: forall r. 
Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex -readCompilerIndex = do - metadata <- Registry.readAllMetadata - manifests <- Registry.readAllManifests - allCompilers <- PursVersions.pursVersions - pure $ Solver.buildCompilerIndex allCompilers manifests metadata - type AdjustManifest = { source :: FilePath , compiler :: Version @@ -1235,7 +1166,7 @@ conformLegacyManifest -> CompilerIndex -> Solver.TransitivizedRegistry -> ValidateDepsError - -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) + -> Run (LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do let manifestRequired :: SemigroupMap PackageName Intersection @@ -1332,7 +1263,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p UnusedDependencies names -> do Tuple deps resolutions <- fixUnused names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1341,7 +1272,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p MissingDependencies names -> do Tuple deps resolutions <- fixMissing names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1352,7 +1283,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p let trimmed = Map.difference manifest.dependencies unused' Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) let newManifest = Manifest (manifest { dependencies = newDeps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" diff --git a/app/src/App/Auth.purs b/app/src/App/Auth.purs index c8647304f..f9303fea8 100644 --- a/app/src/App/Auth.purs +++ b/app/src/App/Auth.purs @@ -1,6 +1,7 @@ module Registry.App.Auth ( SignAuthenticated , signPayload + , verifyPackageSetPayload , verifyPayload ) where @@ -8,7 +9,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.String as String -import Registry.Operation (AuthenticatedData) +import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest) import Registry.SSH as SSH -- We take pacchettibotti as an extra owner because pacchettibotti can always @@ -35,3 +36,20 @@ signPayload :: SignAuthenticated -> Either String SSH.Signature signPayload { privateKey, rawPayload } = do private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } pure $ SSH.sign private rawPayload + +-- | Verify a package set update request using pacchettibotti's key. +-- | Returns an error if the signature is invalid or missing. 
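+-- | Unlike 'verifyPayload', which checks a payload against a package's listed
+-- | owners, this checks a single owner, since package set updates may only be
+-- | signed with the pacchettibotti key.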
+verifyPackageSetPayload :: Owner -> PackageSetUpdateRequest -> Aff (Either String Unit) +verifyPackageSetPayload pacchettiBotti request = do + case request.signature of + Nothing -> + pure $ Left "Package set update requires a signature for restricted operations." + Just signature -> do + let eitherKey = SSH.parsePublicKey (formatOwner pacchettiBotti) + pure do + key <- eitherKey + unless (SSH.verify key request.rawPayload signature) do + Left "The pacchettibotti signature is not valid for this payload." + where + formatOwner (Owner owner) = + String.joinWith " " [ owner.keytype, owner.public, fromMaybe "id" owner.id ] diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ac9ffc398..baf513748 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -214,8 +214,8 @@ gitCommit { address: { owner, repo }, committer, commit, message } cwd = Except. -- Git will error if we try to commit without any changes actually staged, -- so the below command lists file paths (--name-only) that have changed -- between the index and current HEAD (--cached), only including files that - -- have been added or modified (--diff-filter=AM). - staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AM" ] \error -> + -- have been added, modified, or deleted (--diff-filter=AMD). + staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AMD" ] \error -> "Failed to check whether any changes are staged " <> inRepoErr error -- If there are no staged files, then we have nothing to commit. diff --git a/app/src/App/Effect/Archive.purs b/app/src/App/Effect/Archive.purs index 8c26092ad..17ca0675e 100644 --- a/app/src/App/Effect/Archive.purs +++ b/app/src/App/Effect/Archive.purs @@ -35,13 +35,13 @@ import Node.Buffer as Buffer import Node.FS.Aff as FS.Aff import Node.Path as Path import Registry.App.CLI.Tar as Tar -import Registry.Foreign.FSExtra as FS.Extra import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Legacy.Types (RawVersion(..)) import Registry.Constants as Constants +import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tar as Foreign.Tar import Registry.Internal.Format as Internal.Format diff --git a/app/src/App/Effect/Comment.purs b/app/src/App/Effect/Comment.purs deleted file mode 100644 index 848a1b3ae..000000000 --- a/app/src/App/Effect/Comment.purs +++ /dev/null @@ -1,68 +0,0 @@ --- | An effect for notifying users of important events in the application, such --- | as failures that prevent their package from being uploaded, or successful --- | events that indicate progress. --- | --- | This is not a general logging effect. For that, you should use the Log --- | effect. This effect should be used sparingly to notify registry users of --- | events with formatted, human-readable messages providing context. 
-module Registry.App.Effect.Comment where - -import Registry.App.Prelude - -import Ansi.Codes (GraphicsParam) -import Data.Int as Int -import Dodo (Doc) -import Dodo as Dodo -import Dodo.Ansi as Ansi -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.Foreign.Octokit (Address, IssueNumber(..), Octokit) -import Registry.Foreign.Octokit as Octokit -import Run (AFF, EFFECT, Run) -import Run as Run - -data Comment a = Comment (Doc GraphicsParam) a - -derive instance Functor Comment - --- | An effect for notifying consumers of important events in the application -type COMMENT r = (comment :: Comment | r) - -_comment :: Proxy "comment" -_comment = Proxy - -comment :: forall a r. Log.Loggable a => a -> Run (COMMENT + r) Unit -comment message = Run.lift _comment (Comment (Log.toLog message) unit) - -interpret :: forall r a. (Comment ~> Run r) -> Run (COMMENT + r) a -> Run r a -interpret handler = Run.interpret (Run.on _comment handler Run.send) - --- | Handle a notification by converting it to an info-level LOG -handleLog :: forall a r. Comment a -> Run (LOG + r) a -handleLog = case _ of - Comment message next -> do - Log.info $ Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTIFY] ") <> message - pure next - -type CommentGitHubEnv = - { octokit :: Octokit - , issue :: IssueNumber - , registry :: Address - } - --- | Handle a notification by commenting on the relevant GitHub issue. -handleGitHub :: forall a r. CommentGitHubEnv -> Comment a -> Run (LOG + AFF + EFFECT + r) a -handleGitHub env = case _ of - Comment message next -> do - let issueNumber = Int.toStringAs Int.decimal $ un IssueNumber env.issue - Log.debug $ "Commenting via a GitHub comment on issue " <> issueNumber - handleLog (Comment message unit) - let body = Dodo.print Dodo.plainText Dodo.twoSpaces (Log.toLog message) - let request = Octokit.createCommentRequest { address: env.registry, issue: env.issue, body } - Octokit.request env.octokit request >>= case _ of - Left error -> do - Log.error $ "Could not send comment to GitHub due to an unexpected error." - Log.debug $ Octokit.printGitHubError error - Right _ -> - Log.debug $ "Created GitHub comment on issue " <> issueNumber - pure next diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..96b75ca94 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -5,13 +5,16 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.String as String -import Registry.API.V1 (JobId, LogLevel, LogLine) +import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageSetJob, InsertPublishJob, InsertTransferJob, InsertUnpublishJob, MatrixJobDetails, PackageSetJobDetails, PublishJobDetails, SQLite, SelectJobRequest, SelectJobsRequest, StartJob, TransferJobDetails, UnpublishJobDetails) import Registry.App.SQLite as SQLite +import Registry.Operation (PackageSetOperation) import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. 
Also, for now these
 -- simply lift their Effect-based equivalents in the SQLite module, but ideally
@@ -21,13 +24,29 @@ import Run as Run
 -- Also, this does not currently include setup and teardown (those are handled
 -- outside the effect), but we may wish to add those in the future if they'll
 -- be part of app code we want to test.
+
 data Db a
-  = InsertLog LogLine a
-  | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a)
-  | CreateJob NewJob a
-  | FinishJob JobResult a
-  | SelectJob JobId (Either String SQLite.Job -> a)
-  | RunningJobForPackage PackageName (Either String SQLite.Job -> a)
+  = InsertPublishJob InsertPublishJob (JobId -> a)
+  | InsertUnpublishJob InsertUnpublishJob (JobId -> a)
+  | InsertTransferJob InsertTransferJob (JobId -> a)
+  | InsertMatrixJob InsertMatrixJob (JobId -> a)
+  | InsertPackageSetJob InsertPackageSetJob (JobId -> a)
+  | FinishJob FinishJob a
+  | StartJob StartJob a
+  | SelectJob SelectJobRequest (Either String (Maybe Job) -> a)
+  | SelectJobs SelectJobsRequest (Array Job -> a)
+  | SelectNextPublishJob (Either String (Maybe PublishJobDetails) -> a)
+  | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a)
+  | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a)
+  | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a)
+  | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a)
+  | SelectPublishJob PackageName Version (Either String (Maybe PublishJobDetails) -> a)
+  | SelectUnpublishJob PackageName Version (Either String (Maybe UnpublishJobDetails) -> a)
+  | SelectTransferJob PackageName (Either String (Maybe TransferJobDetails) -> a)
+  | SelectPackageSetJobByPayload PackageSetOperation (Either String (Maybe PackageSetJobDetails) -> a)
+  | InsertLogLine LogLine a
+  | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a)
+  | ResetIncompleteJobs a
 
 derive instance Functor Db
 
@@ -39,28 +58,87 @@ _db = Proxy
 
 -- | Insert a new log line into the database.
 insertLog :: forall r. LogLine -> Run (DB + r) Unit
-insertLog log = Run.lift _db (InsertLog log unit)
+insertLog log = Run.lift _db (InsertLogLine log unit)
 
--- | Select all logs for a given job, filtered by loglevel and a time cutoff.
-selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine)
+-- | Select all logs for a given job, filtered by log level and a time cutoff.
+selectLogsByJob :: forall r. JobId -> LogLevel -> DateTime -> Run (DB + r) (Array LogLine)
 selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity)
 
--- | Create a new job in the database.
-createJob :: forall r. NewJob -> Run (DB + r) Unit
-createJob newJob = Run.lift _db (CreateJob newJob unit)
-
 -- | Set a job in the database to the 'finished' state.
-finishJob :: forall r. JobResult -> Run (DB + r) Unit
-finishJob jobResult = Run.lift _db (FinishJob jobResult unit)
+finishJob :: forall r. FinishJob -> Run (DB + r) Unit
+finishJob job = Run.lift _db (FinishJob job unit)
 
 -- | Select a job by ID from the database.
-selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job)
-selectJob jobId = Run.lift _db (SelectJob jobId identity)
+selectJob :: forall r. SelectJobRequest -> Run (DB + EXCEPT String + r) (Maybe Job)
+selectJob request = Run.lift _db (SelectJob request identity) >>= Except.rethrow
+
+-- | Select a list of the latest jobs from the database.
+selectJobs :: forall r. 
SelectJobsRequest -> Run (DB + EXCEPT String + r) (Array Job)
+selectJobs request = Run.lift _db (SelectJobs request identity)
+
+-- | Insert a new publish job into the database.
+insertPublishJob :: forall r. InsertPublishJob -> Run (DB + r) JobId
+insertPublishJob job = Run.lift _db (InsertPublishJob job identity)
+
+-- | Insert a new unpublish job into the database.
+insertUnpublishJob :: forall r. InsertUnpublishJob -> Run (DB + r) JobId
+insertUnpublishJob job = Run.lift _db (InsertUnpublishJob job identity)
+
+-- | Insert a new transfer job into the database.
+insertTransferJob :: forall r. InsertTransferJob -> Run (DB + r) JobId
+insertTransferJob job = Run.lift _db (InsertTransferJob job identity)
+
+-- | Insert a new matrix job into the database.
+insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId
+insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity)
+
+-- | Insert a new package set job into the database.
+insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) JobId
+insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity)
+
+-- | Start a job in the database.
+startJob :: forall r. StartJob -> Run (DB + r) Unit
+startJob job = Run.lift _db (StartJob job unit)
+
+-- | Select the next publish job from the database.
+selectNextPublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PublishJobDetails)
+selectNextPublishJob = Run.lift _db (SelectNextPublishJob identity) >>= Except.rethrow
 
--- | Select a job by package name from the database, failing if there is no
--- | current job available for that package name.
-runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job)
-runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity)
+-- | Select the next unpublish job from the database.
+selectNextUnpublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails)
+selectNextUnpublishJob = Run.lift _db (SelectNextUnpublishJob identity) >>= Except.rethrow
+
+-- | Select the next transfer job from the database.
+selectNextTransferJob :: forall r. Run (DB + EXCEPT String + r) (Maybe TransferJobDetails)
+selectNextTransferJob = Run.lift _db (SelectNextTransferJob identity) >>= Except.rethrow
+
+-- | Select the next matrix job from the database.
+selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails)
+selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow
+
+-- | Select the next package set job from the database.
+selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails)
+selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow
+
+-- | Lookup a publish job from the database by name and version.
+selectPublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe PublishJobDetails)
+selectPublishJob packageName packageVersion = Run.lift _db (SelectPublishJob packageName packageVersion identity) >>= Except.rethrow
+
+-- | Lookup an unpublish job from the database by name and version.
+selectUnpublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails)
+selectUnpublishJob packageName packageVersion = Run.lift _db (SelectUnpublishJob packageName packageVersion identity) >>= Except.rethrow
+
+-- | Lookup a transfer job from the database by name.
+selectTransferJob :: forall r. 
PackageName -> Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) +selectTransferJob packageName = Run.lift _db (SelectTransferJob packageName identity) >>= Except.rethrow + +-- | Lookup a pending package set job from the database by payload (for duplicate detection). +selectPackageSetJobByPayload :: forall r. PackageSetOperation -> Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectPackageSetJobByPayload payload = Run.lift _db (SelectPackageSetJobByPayload payload identity) >>= Except.rethrow + +-- | Delete all incomplete jobs from the database. +resetIncompleteJobs :: forall r. Run (DB + r) Unit +resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit) interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -70,28 +148,92 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertLog log next -> do - Run.liftEffect $ SQLite.insertLog env.db log - pure next + InsertPublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPublishJob env.db job + pure $ reply result - SelectLogsByJob jobId logLevel since reply -> do - logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since - unless (Array.null logs.fail) do - Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail - pure $ reply logs.success + InsertUnpublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertUnpublishJob env.db job + pure $ reply result - CreateJob newJob next -> do - Run.liftEffect $ SQLite.createJob env.db newJob + InsertTransferJob job reply -> do + result <- Run.liftEffect $ SQLite.insertTransferJob env.db job + pure $ reply result + + InsertMatrixJob job reply -> do + result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure $ reply result + + InsertPackageSetJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPackageSetJob env.db job + pure $ reply result + + FinishJob job next -> do + Run.liftEffect $ SQLite.finishJob env.db job pure next - FinishJob jobResult next -> do - Run.liftEffect $ SQLite.finishJob env.db jobResult + StartJob job next -> do + Run.liftEffect $ SQLite.startJob env.db job pure next - SelectJob jobId reply -> do - job <- Run.liftEffect $ SQLite.selectJob env.db jobId + SelectJob request reply -> do + { unreadableLogs, job } <- Run.liftEffect $ SQLite.selectJob env.db request + unless (Array.null unreadableLogs) do + Log.warn $ "Some logs were not readable: " <> String.joinWith "\n" unreadableLogs pure $ reply job - RunningJobForPackage name reply -> do - job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name - pure $ reply job + SelectJobs request reply -> do + { failed, jobs } <- Run.liftEffect $ SQLite.selectJobs env.db request + unless (Array.null failed) do + Log.warn $ "Some jobs were not readable: " <> String.joinWith "\n" failed + pure $ reply jobs + + SelectNextPublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPublishJob env.db + pure $ reply result + + SelectNextUnpublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextUnpublishJob env.db + pure $ reply result + + SelectNextTransferJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextTransferJob env.db + pure $ reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + 
SelectNextPackageSetJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + pure $ reply result + + SelectPublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectPublishJob env.db packageName packageVersion + pure $ reply result + + SelectUnpublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectUnpublishJob env.db packageName packageVersion + pure $ reply result + + SelectTransferJob packageName reply -> do + result <- Run.liftEffect $ SQLite.selectTransferJob env.db packageName + pure $ reply result + + SelectPackageSetJobByPayload payload reply -> do + result <- Run.liftEffect $ SQLite.selectPackageSetJobByPayload env.db payload + pure $ reply result + + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log + pure next + + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success + + ResetIncompleteJobs next -> do + Run.liftEffect $ SQLite.resetIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/Env.purs b/app/src/App/Effect/Env.purs index e832d4b84..873162264 100644 --- a/app/src/App/Effect/Env.purs +++ b/app/src/App/Effect/Env.purs @@ -30,6 +30,7 @@ type ResourceEnv = , s3BucketUrl :: URL , githubApiUrl :: URL , pursuitApiUrl :: URL + , registryApiUrl :: URL , healthchecksUrl :: Maybe URL } @@ -55,6 +56,7 @@ lookupResourceEnv = do s3BucketUrlEnv <- lookupWithDefault s3BucketUrl productionS3BucketUrl githubApiUrlEnv <- lookupWithDefault githubApiUrl productionGitHubApiUrl pursuitApiUrlEnv <- lookupWithDefault pursuitApiUrl productionPursuitApiUrl + registryApiUrlEnv <- lookupWithDefault registryApiUrl productionRegistryApiUrl -- Optional - if not set, healthcheck pinging is disabled healthchecksUrlEnv <- lookupOptional healthchecksUrl @@ -65,6 +67,7 @@ lookupResourceEnv = do , s3BucketUrl: s3BucketUrlEnv , githubApiUrl: githubApiUrlEnv , pursuitApiUrl: pursuitApiUrlEnv + , registryApiUrl: registryApiUrlEnv , healthchecksUrl: healthchecksUrlEnv } @@ -209,6 +212,12 @@ githubApiUrl = EnvKey { key: "GITHUB_API_URL", decode: pure } pursuitApiUrl :: EnvKey URL pursuitApiUrl = EnvKey { key: "PURSUIT_API_URL", decode: pure } +-- | Override for the Registry API URL. +-- | If not set, uses productionRegistryApiUrl. +-- | Set this to point to the local server during testing. +registryApiUrl :: EnvKey URL +registryApiUrl = EnvKey { key: "REGISTRY_API_URL", decode: pure } + -- Production URL defaults (only used by the app, not exposed to library users) -- | The URL of the package storage backend (S3-compatible) @@ -227,6 +236,10 @@ productionGitHubApiUrl = "https://api.github.com" productionPursuitApiUrl :: URL productionPursuitApiUrl = "https://pursuit.purescript.org" +-- | The Registry API base URL +productionRegistryApiUrl :: URL +productionRegistryApiUrl = "https://registry.purescript.org/api" + -- | The URL of the health checks endpoint. -- | Optional - if not set, healthcheck pinging is disabled. healthchecksUrl :: EnvKey URL @@ -272,6 +285,16 @@ pacchettibottiED25519Pub = EnvKey githubEventPath :: EnvKey FilePath githubEventPath = EnvKey { key: "GITHUB_EVENT_PATH", decode: pure } +-- Test environment variables (used by E2E tests) + +-- | Root directory for test state (database, scratch repos, etc). 
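+-- | Tests should point this at a throwaway directory so that runs stay
+-- | isolated from one another and from local development state.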
+stateDir :: EnvKey FilePath +stateDir = EnvKey { key: "STATE_DIR", decode: pure } + +-- | Directory containing git repository fixtures for tests. +repoFixturesDir :: EnvKey FilePath +repoFixturesDir = EnvKey { key: "REPO_FIXTURES_DIR", decode: pure } + decodeDatabaseUrl :: String -> Either String DatabaseUrl decodeDatabaseUrl input = do let prefix = "sqlite:" diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..b99af947d 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -1,6 +1,6 @@ -- | A general logging effect suitable for recording events as they happen in --- | the application, including debugging logs. Should not be used to report --- | important events to registry users; for that, use the Comment effect. +-- | the application, including debugging logs. Use the `notice` level to report +-- | important events to registry users (these are posted as GitHub comments). module Registry.App.Effect.Log where import Registry.App.Prelude @@ -65,6 +65,9 @@ info = log Info <<< toLog warn :: forall a r. Loggable a => a -> Run (LOG + r) Unit warn = log Warn <<< toLog +notice :: forall a r. Loggable a => a -> Run (LOG + r) Unit +notice = log Notice <<< toLog + error :: forall a r. Loggable a => a -> Run (LOG + r) Unit error = log Error <<< toLog @@ -80,6 +83,7 @@ handleTerminal verbosity = case _ of Debug -> Ansi.foreground Ansi.Blue message Info -> message Warn -> Ansi.foreground Ansi.Yellow (Dodo.text "[WARNING] ") <> message + Notice -> Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTICE] ") <> message Error -> Ansi.foreground Ansi.Red (Dodo.text "[ERROR] ") <> message Run.liftEffect case verbosity of @@ -134,5 +138,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index bd406ff25..48fbdf4a8 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -388,6 +388,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Right Git.Changed -> do Log.info "Registry repo has changed, clearing metadata cache..." + Cache.delete _registryCache AllMetadata resetFromDisk WriteMetadata name metadata reply -> map (map reply) Except.runExcept do @@ -501,10 +502,9 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Log.info $ "Mirroring legacy package set " <> name <> " to the legacy package sets repo" manifests <- Except.rethrow =<< handle env (ReadAllManifests identity) - metadata <- Except.rethrow =<< handle env (ReadAllMetadata identity) Log.debug $ "Converting package set..." - converted <- case Legacy.PackageSet.convertPackageSet manifests metadata set of + converted <- case Legacy.PackageSet.convertPackageSet manifests set of Left error -> Except.throw $ "Failed to convert package set " <> name <> " to a legacy package set: " <> error Right converted -> pure converted @@ -733,17 +733,30 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << result <- Git.gitPull { address, pullMode: env.pull } path pure result - now <- nowUTC - debouncers <- Run.liftEffect $ Ref.read env.debouncer - case Map.lookup path debouncers of - -- We will be behind the upstream by at most this amount of time. 
- Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> - pure $ Right Git.NoChange - -- If we didn't debounce, then we should fetch the upstream. - _ -> do + -- Check if the repo directory exists before consulting the debouncer. + -- This ensures that if the scratch directory is deleted (e.g., for test + -- isolation), we always re-clone rather than returning a stale NoChange. + repoExists <- Run.liftAff $ Aff.attempt (FS.Aff.stat path) + case repoExists of + Left _ -> do + -- Repo doesn't exist, bypass debouncer entirely and clone fresh result <- fetchLatest + now <- nowUTC Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer pure result + Right _ -> do + -- Repo exists, check debouncer + now <- nowUTC + debouncers <- Run.liftEffect $ Ref.read env.debouncer + case Map.lookup path debouncers of + -- We will be behind the upstream by at most this amount of time. + Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> + pure $ Right Git.NoChange + -- If we didn't debounce, then we should fetch the upstream. + _ -> do + result <- fetchLatest + Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer + pure result -- | Commit the file(s) indicated by the commit key with a commit message. commit :: CommitKey -> String -> Run _ (Either String GitResult) diff --git a/app/src/App/Effect/Storage.purs b/app/src/App/Effect/Storage.purs index c9a52a7bb..b6d6a0ad4 100644 --- a/app/src/App/Effect/Storage.purs +++ b/app/src/App/Effect/Storage.purs @@ -199,6 +199,7 @@ handleS3 env = Cache.interpret _storageCache (Cache.handleFs env.cache) <<< case Except.throw $ "Could not delete package " <> package <> " due to an error connecting to the storage backend." Succeeded _ -> do Log.debug $ "Deleted release of " <> package <> " from S3 at the path " <> packagePath + Cache.delete _storageCache (Package name version) pure unit else do Log.error $ packagePath <> " does not exist on S3 (available: " <> String.joinWith ", " published <> ")" diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index c0bb2750b..b0ab0f02c 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -1,3 +1,12 @@ +-- | A thin client that proxies GitHub issue operations to the registry API server. +-- | +-- | When a GitHub issue is created or commented on in the purescript/registry repo, +-- | this module: +-- | 1. Parses the issue body to determine the operation type +-- | 2. Re-signs authenticated operations with pacchettibotti keys if submitted by a trustee +-- | 3. POSTs the operation to the registry API server +-- | 4. Polls for job completion, posting logs as GitHub comments +-- | 5. 
Closes the issue on success module Registry.App.GitHubIssue where import Registry.App.Prelude @@ -5,123 +14,249 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array import Data.Codec.JSON as CJ -import Data.Foldable (traverse_) +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as DateTime import Data.String as String import Effect.Aff as Aff import Effect.Class.Console as Console -import Effect.Ref as Ref +import Fetch (Method(..)) +import Fetch as Fetch import JSON as JSON import JSON.Object as CJ.Object import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV) +import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.Effect.PackageSets as PackageSets -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.Constants as Constants -import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.JsonRepair as JsonRepair import Registry.Foreign.Octokit (GitHubToken, IssueNumber(..), Octokit) import Registry.Foreign.Octokit as Octokit -import Registry.Foreign.S3 (SpaceKey) -import Registry.Operation (AuthenticatedData, PackageOperation(..), PackageSetOperation(..)) +import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageOperation(..), PackageSetOperation(..)) import Registry.Operation as Operation -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except main :: Effect Unit main = launchAff_ $ do - -- For now we only support GitHub events, and no formal API, so we'll jump - -- straight into the GitHub event workflow. - initializeGitHub >>= traverse_ \env -> do - let - run = case env.operation of - Left packageSetOperation -> case packageSetOperation of - PackageSetUpdate payload -> - API.packageSetUpdate payload - - Right packageOperation -> case packageOperation of - Publish payload -> - API.publish Nothing payload - Authenticated payload -> do - -- If we receive an authenticated operation via GitHub, then we - -- re-sign it with pacchettibotti credentials if and only if the - -- operation was opened by a trustee. 
- signed <- signPacchettiBottiIfTrustee payload - API.authenticated signed - - -- Caching - let cache = Path.concat [ scratchDir, ".cache" ] - FS.Extra.ensureDirectory cache - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - -- Registry env - debouncer <- Registry.newDebouncer - let - registryEnv :: Registry.RegistryEnv - registryEnv = - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.token) - , workdir: scratchDir - , debouncer - , cacheRef: registryCacheRef - } - - -- Package sets - let workdir = Path.concat [ scratchDir, "package-sets-work" ] - FS.Extra.ensureDirectory workdir + initializeGitHub >>= case _ of + Nothing -> pure unit + Just env -> do + result <- runGitHubIssue env + case result of + Left err -> do + -- Post error as comment and exit with failure + void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "❌ " <> err + } + liftEffect $ Process.exit' 1 + Right _ -> + -- Issue closing is handled inside runGitHubIssue + pure unit - thrownRef <- liftEffect $ Ref.new false +runGitHubIssue :: GitHubEventEnv -> Aff (Either String Boolean) +runGitHubIssue env = do + let cache = Path.concat [ scratchDir, ".cache" ] + githubCacheRef <- Cache.newCacheRef - run - -- App effects - # PackageSets.interpret (PackageSets.handle { workdir }) - # Registry.interpret (Registry.handle registryEnv) - # Archive.interpret Archive.handle - # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) - # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret (Source.handle Source.Recent) + let + run :: forall a. Run (GITHUB + RESOURCE_ENV + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff (Either String a) + run action = action # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) - -- Caching & logging - # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) - # Cache.interpret API._compilerCache (Cache.handleFs cache) - # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) - # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) - # Log.interpret (Log.handleTerminal Verbose) - -- Environments + # Except.runExcept # Env.runResourceEnv env.resourceEnv # Env.runGitHubEventEnv { username: env.username, issue: env.issue } # Env.runPacchettiBottiEnv { publicKey: env.publicKey, privateKey: env.privateKey } - -- Base effects + # Log.interpret (Log.handleTerminal env.logVerbosity) # Run.runBaseAff' - liftEffect (Ref.read thrownRef) >>= case _ of - true -> - liftEffect $ Process.exit' 1 - _ -> do - -- After the run, close the issue. If an exception was thrown then the issue will remain open. 
- _ <- Octokit.request env.octokit (Octokit.closeIssueRequest { address: Constants.registry, issue: env.issue }) - pure unit + run do + -- Determine endpoint and prepare the JSON payload + { endpoint, jsonBody } <- case env.operation of + Left packageSetOp@(PackageSetUpdate payload) -> do + -- Sign with pacchettibotti if submitter is a trustee + request <- signPackageSetIfTrustee packageSetOp payload + pure + { endpoint: "/v1/package-sets" + , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateRequestCodec request + } + + Right (Publish payload) -> pure + { endpoint: "/v1/publish" + , jsonBody: JSON.print $ CJ.encode Operation.publishCodec payload + } + + Right (Authenticated auth) -> do + -- Re-sign with pacchettibotti if submitter is a trustee + signed <- signPacchettiBottiIfTrustee auth + let + endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" + pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } + + -- Submit to the registry API + let registryApiUrl = env.resourceEnv.registryApiUrl + Log.debug $ "Submitting to " <> registryApiUrl <> endpoint + submitResult <- Run.liftAff $ submitJob (registryApiUrl <> endpoint) jsonBody + case submitResult of + Left err -> Except.throw $ "Failed to submit job: " <> err + Right { jobId } -> do + let jobIdStr = unwrap jobId + Log.debug $ "Job created: " <> jobIdStr + + -- Post initial comment with job ID + Run.liftAff $ void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "Job started: `" <> jobIdStr <> "`\nLogs: " <> registryApiUrl <> "/v1/jobs/" <> jobIdStr + } + + -- Poll for completion, posting logs as comments + pollAndReport env.octokit env.issue env.pollConfig registryApiUrl jobId + +-- | Submit a job to the registry API +submitJob :: String -> String -> Aff (Either String V1.JobCreatedResponse) +submitJob url body = do + result <- Aff.attempt $ Fetch.fetch url + { method: POST + , headers: { "Content-Type": "application/json" } + , body + } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCreatedResponseCodec json) of + Left err -> pure $ Left $ "Failed to parse response: " <> err + Right r -> pure $ Right r + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Poll a job until it completes, posting logs as GitHub comments. +-- | Returns true if the job succeeded, false otherwise. +pollAndReport + :: forall r + . 
Octokit + -> IssueNumber + -> PollConfig + -> URL + -> V1.JobId + -> Run (LOG + EXCEPT String + AFF + r) Boolean +pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 + where + maxConsecutiveErrors :: Int + maxConsecutiveErrors = 5 + + go :: Maybe DateTime -> Int -> Int -> Run (LOG + EXCEPT String + AFF + r) Boolean + go lastTimestamp attempt consecutiveErrors + | attempt >= pollConfig.maxAttempts = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "⏱️ Job timed out" + } + pure false + | consecutiveErrors >= maxConsecutiveErrors = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "❌ Failed to poll job status after " <> show maxConsecutiveErrors <> " consecutive errors" + } + pure false + | otherwise = do + Run.liftAff $ Aff.delay pollConfig.interval + result <- Run.liftAff $ fetchJob registryApiUrl jobId lastTimestamp + case result of + Left err -> do + Log.error $ "Error polling job: " <> err + go lastTimestamp (attempt + 1) (consecutiveErrors + 1) + Right job -> do + let info = V1.jobInfo job + + -- Post any new logs (filtered to Notice level and above, and after lastTimestamp) + let + newLogs = Array.filter isNewLog info.logs + isNewLog l = l.level >= V1.Notice && case lastTimestamp of + Nothing -> true + Just ts -> l.timestamp > ts + unless (Array.null newLogs) do + let + formatLog l = "[" <> V1.printLogLevel l.level <> "] " <> l.message + logText = String.joinWith "\n" $ map formatLog newLogs + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "```\n" <> logText <> "\n```" + } + + -- Check if job is done + case info.finishedAt of + Just _ -> do + let statusMsg = if info.success then "✅ Job completed successfully" else "❌ Job failed" + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: statusMsg + } + -- Close the issue on success, leave open on failure + when info.success do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.closeIssueRequest + { address: Constants.registry + , issue + } + pure info.success + Nothing -> do + -- Continue polling with updated timestamp, reset consecutive errors on success + let newTimestamp = Array.last newLogs <#> _.timestamp + go (newTimestamp <|> lastTimestamp) (attempt + 1) 0 + +-- | Fetch job status from the API +fetchJob :: String -> V1.JobId -> Maybe DateTime -> Aff (Either String V1.Job) +fetchJob registryApiUrl (V1.JobId jobId) since = do + let + baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId + url = case since of + Nothing -> baseUrl <> "?level=NOTICE" + Just ts -> baseUrl <> "?level=NOTICE&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + result <- Aff.attempt $ Fetch.fetch url { method: GET } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status == 200 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCodec json) of + Left err -> pure $ Left $ "Failed to parse job: " <> err + Right job -> pure $ Right job + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Configuration for polling job status +type PollConfig = + { maxAttempts :: Int + , interval :: Aff.Milliseconds + } + +-- | Default poll config: 30 
minutes at 5 second intervals
+defaultPollConfig :: PollConfig
+defaultPollConfig =
+  { maxAttempts: 360
+  , interval: Aff.Milliseconds 5000.0
+  }
 
 type GitHubEventEnv =
   { octokit :: Octokit
@@ -129,10 +264,11 @@ type GitHubEventEnv =
   , issue :: IssueNumber
   , username :: String
   , operation :: Either PackageSetOperation PackageOperation
-  , spacesConfig :: SpaceKey
   , publicKey :: String
   , privateKey :: String
   , resourceEnv :: Env.ResourceEnv
+  , pollConfig :: PollConfig
+  , logVerbosity :: LogVerbosity
   }
 
 initializeGitHub :: Aff (Maybe GitHubEventEnv)
@@ -140,17 +276,12 @@ initializeGitHub = do
   token <- Env.lookupRequired Env.pacchettibottiToken
   publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub
   privateKey <- Env.lookupRequired Env.pacchettibottiED25519
-  spacesKey <- Env.lookupRequired Env.spacesKey
-  spacesSecret <- Env.lookupRequired Env.spacesSecret
   resourceEnv <- Env.lookupResourceEnv
   eventPath <- Env.lookupRequired Env.githubEventPath
   octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl
 
   readOperation eventPath >>= case _ of
-    -- If the issue body is not just a JSON string, then we don't consider it
-    -- to be an attempted operation and it is presumably just an issue on the
-    -- registry repository.
     NotJson ->
       pure Nothing
 
@@ -175,10 +306,11 @@
       , issue
       , username
      , operation
-      , spacesConfig: { key: spacesKey, secret: spacesSecret }
       , publicKey
       , privateKey
       , resourceEnv
+      , pollConfig: defaultPollConfig
+      , logVerbosity: Verbose
       }
 
 data OperationDecoding
@@ -201,9 +333,6 @@ readOperation eventPath = do
     pure event
 
   let
-    -- TODO: Right now we parse all operations from GitHub issues, but we should
-    -- in the future only parse out package set operations. The others should be
-    -- handled via a HTTP API.
     decodeOperation :: JSON -> Either CJ.DecodeError (Either PackageSetOperation PackageOperation)
     decodeOperation json = do
      object <- CJ.decode CJ.jobject json
@@ -243,7 +372,7 @@ firstObject input = fromMaybe input do
   after <- String.lastIndexOf (String.Pattern "}") start
   pure (String.take (after + 1) start)
 
--- | An event triggered by a GitHub workflow, specifically via an issue comment
+-- | An event triggered by a GitHub workflow, specifically via an issue comment
 -- | or issue creation.
 -- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment
 newtype IssueEvent = IssueEvent
@@ -302,3 +431,32 @@ signPacchettiBottiIfTrustee auth = do
     else do
       Log.info "Authenticated payload not submitted by a registry trustee, continuing with original signature."
       pure auth
+
+-- | Sign a package set update with pacchettibotti's key if the submitter is a trustee.
+-- | Non-trustees get an unsigned request (signature = Nothing).
+signPackageSetIfTrustee
+  :: forall r
+   . 
PackageSetOperation + -> Operation.PackageSetUpdateData + -> Run (GITHUB + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) Operation.PackageSetUpdateRequest +signPackageSetIfTrustee packageSetOp payload = do + let rawPayload = JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload + GitHub.listTeamMembers API.packagingTeam >>= case _ of + Left githubError -> do + Log.warn $ Array.fold + [ "Unable to fetch members of packaging team, not signing package set request: " + , Octokit.printGitHubError githubError + ] + pure { payload: packageSetOp, rawPayload, signature: Nothing } + Right members -> do + { username } <- Env.askGitHubEvent + if Array.elem username members then do + Log.info "Package set update submitted by a registry trustee, signing with pacchettibotti keys." + { privateKey } <- Env.askPacchettiBotti + signature <- case Auth.signPayload { privateKey, rawPayload } of + Left _ -> Except.throw "Error signing package set update. cc: @purescript/packaging" + Right sig -> pure sig + pure { payload: packageSetOp, rawPayload, signature: Just signature } + else do + Log.info "Package set update not submitted by a registry trustee, sending unsigned request." + pure { payload: packageSetOp, rawPayload, signature: Nothing } diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 65aad78ec..8d997342f 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -59,13 +59,13 @@ type LegacyManifest = , dependencies :: Map PackageName Range } -toManifest :: PackageName -> Version -> Location -> LegacyManifest -> Manifest -toManifest name version location legacy = do +toManifest :: PackageName -> Version -> Location -> String -> LegacyManifest -> Manifest +toManifest name version location ref legacy = do let { license, description, dependencies } = patchLegacyManifest name version legacy let includeFiles = Nothing let excludeFiles = Nothing let owners = Nothing - Manifest { name, version, location, license, description, dependencies, includeFiles, excludeFiles, owners } + Manifest { name, version, location, ref, license, description, dependencies, includeFiles, excludeFiles, owners } -- | Attempt to retrieve a license, description, and set of dependencies from a -- | PureScript repo that does not have a Registry-supported manifest, but does diff --git a/app/src/App/Legacy/PackageSet.purs b/app/src/App/Legacy/PackageSet.purs index eb1ce8021..62b718d7c 100644 --- a/app/src/App/Legacy/PackageSet.purs +++ b/app/src/App/Legacy/PackageSet.purs @@ -102,8 +102,8 @@ printPscTag (PscTag { compiler, date }) = , Format.DateTime.format pscDateFormat (DateTime date bottom) ] -convertPackageSet :: ManifestIndex -> Map PackageName Metadata -> PackageSet -> Either String ConvertedLegacyPackageSet -convertPackageSet index metadataMap (PackageSet { compiler, packages, published, version }) = do +convertPackageSet :: ManifestIndex -> PackageSet -> Either String ConvertedLegacyPackageSet +convertPackageSet index (PackageSet { compiler, packages, published, version }) = do converted <- case separate $ mapWithIndex convertPackage packages of { left, right } | Map.isEmpty left -> Right right { left } -> do @@ -130,17 +130,14 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versions <- note noIndexPackageError $ Map.lookup packageName $ ManifestIndex.toMap index Manifest manifest <- note noIndexVersionError $ Map.lookup packageVersion versions - Metadata metadata <- note noMetadataPackageError $ 
Map.lookup packageName metadataMap - { ref } <- note noMetadataVersionError $ Map.lookup packageVersion metadata.published - - repo <- case metadata.location of + repo <- case manifest.location of GitHub { owner, repo, subdir: Nothing } -> Right $ "https://github.com/" <> owner <> "/" <> repo <> ".git" Git { url, subdir: Nothing } -> Right url GitHub _ -> Left usesSubdirError Git _ -> Left usesSubdirError pure - { version: RawVersion ref + { version: RawVersion manifest.ref , dependencies: Array.fromFoldable $ Map.keys $ manifest.dependencies , repo } @@ -149,8 +146,6 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versionStr = Version.print packageVersion noIndexPackageError = "No registry index entry found for " <> nameStr noIndexVersionError = "Found registry index entry for " <> nameStr <> " but none for version " <> versionStr - noMetadataPackageError = "No metadata entry found for " <> nameStr - noMetadataVersionError = "Found metadata entry for " <> nameStr <> " but no published version for " <> versionStr usesSubdirError = "Package " <> nameStr <> " uses the 'subdir' key, which is not supported for legacy package sets." printDhall :: LegacyPackageSet -> String diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..e638cc684 --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,90 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.DateTime (diff) +import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import Node.Process as Process +import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor +import Registry.App.Server.Router as Router + +main :: Effect Unit +main = do + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + case env.vars.resourceEnv.healthchecksUrl of + Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" + Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + Aff.launchAff_ $ jobExecutor env + Router.runRouter env + where + healthcheck :: String -> Aff Unit + healthcheck healthchecksUrl = loop limit + where + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + + loop n = do + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> do + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." 
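+
+    -- Note that the failure budget above is shared across all failure kinds
+    -- and is never replenished after a success: once it is exhausted,
+    -- healthcheck pinging stops for the lifetime of the process.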
+ + jobExecutor :: ServerEnv -> Aff Unit + jobExecutor env = do + loop initialRestartDelay + where + initialRestartDelay = Milliseconds 100.0 + + loop restartDelay = do + start <- nowUTC + result <- JobExecutor.runJobExecutor env + end <- nowUTC + + Console.error case result of + Left error -> "Job executor failed: " <> Aff.message error + Right _ -> "Job executor exited for no reason." + + -- This is a heuristic: if the executor keeps crashing immediately, we + -- restart with an exponentially increasing delay, but once the executor + -- had a run longer than a minute, we start over with a small delay. + let + nextRestartDelay + | end `diff` start > Seconds 60.0 = initialRestartDelay + | otherwise = restartDelay <> restartDelay + + Aff.delay nextRestartDelay + loop nextRestartDelay diff --git a/app/src/App/Manifest/SpagoYaml.purs b/app/src/App/Manifest/SpagoYaml.purs index 1d701e57c..66ffa1c48 100644 --- a/app/src/App/Manifest/SpagoYaml.purs +++ b/app/src/App/Manifest/SpagoYaml.purs @@ -27,9 +27,10 @@ import Registry.Range (Range) import Registry.Range as Range import Registry.Version as Version --- | Attempt to convert a spago.yaml file to a Manifest -spagoYamlToManifest :: SpagoYaml -> Either String Manifest -spagoYamlToManifest config = do +-- | Attempt to convert a spago.yaml file to a Manifest. The ref parameter is +-- | the Git reference (tag or commit) used to fetch this version's source. +spagoYamlToManifest :: String -> SpagoYaml -> Either String Manifest +spagoYamlToManifest ref config = do package@{ name, description, dependencies: spagoDependencies } <- note "No 'package' key found in config." config.package publish@{ version, license, owners } <- note "No 'publish' key found under the 'package' key in config." package.publish location <- note "No 'location' key found under the 'publish' key in config." 
publish.location
@@ -43,6 +44,7 @@ spagoYamlToManifest config = do
   , description
   , license
   , location
+  , ref
   , owners
   , includeFiles
   , excludeFiles
diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs
index 7a046414d..5e586ebae 100644
--- a/app/src/App/Prelude.purs
+++ b/app/src/App/Prelude.purs
@@ -60,7 +60,7 @@ import Data.List (List) as Extra
 import Data.Map (Map) as Extra
 import Data.Map as Map
 import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe
-import Data.Newtype (class Newtype, un) as Extra
+import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra
 import Data.Newtype as Newtype
 import Data.Nullable (Nullable, toMaybe, toNullable) as Extra
 import Data.Set (Set) as Extra
diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js
index 8158695fc..0ff5bd696 100644
--- a/app/src/App/SQLite.js
+++ b/app/src/App/SQLite.js
@@ -1,5 +1,13 @@
 import Database from "better-sqlite3";
 
+const JOB_INFO_TABLE = 'job_info'
+const LOGS_TABLE = 'logs'
+const PUBLISH_JOBS_TABLE = 'publish_jobs';
+const UNPUBLISH_JOBS_TABLE = 'unpublish_jobs';
+const TRANSFER_JOBS_TABLE = 'transfer_jobs';
+const MATRIX_JOBS_TABLE = 'matrix_jobs';
+const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs';
+
 export const connectImpl = (path, logger) => {
   logger("Connecting to database at " + path);
   let db = new Database(path, {
@@ -11,49 +19,224 @@ export const connectImpl = (path, logger) => {
   return db;
 };
 
-export const insertLogImpl = (db, logLine) => {
-  db.prepare(
-    "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)"
-  ).run(logLine);
+export const selectJobInfoImpl = (db, jobId) => {
+  const stmt = db.prepare(`
+    SELECT * FROM ${JOB_INFO_TABLE}
+    WHERE jobId = ? LIMIT 1
+  `);
+  return stmt.get(jobId);
+}
+
+// A generic helper function for inserting a new package, matrix, or package set
+// job. Not exported because this should always be done as part of a more general
+// job insertion. A job is expected to always include a 'jobId' and 'createdAt'
+// field, though other fields will be required depending on the job.
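+//
+// For example, a publish job is passed as a row shaped like
+//   { jobId, createdAt, packageName, packageVersion, payload }
+// where jobId and createdAt populate the shared job_info table and the
+// job-specific columns (including jobId again, as the join key) go to the
+// publish_jobs table.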
+const _insertJob = (db, table, columns, job) => { + const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns])); + const missingFields = requiredFields.filter(field => !(field in job)); + const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field)); + + if (missingFields.length > 0) { + throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`); + } + + if (extraFields.length > 0) { + throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`); + } + + const insertInfo = db.prepare(` + INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success) + VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success) + `); + + const insertJob = db.prepare(` + INSERT INTO ${table} (${columns.join(', ')}) + VALUES (${columns.map(col => `@${col}`).join(', ')}) + `); + + const insert = db.transaction((job) => { + insertInfo.run({ + jobId: job.jobId, + createdAt: job.createdAt, + startedAt: null, + finishedAt: null, + success: 0 + }); + insertJob.run(job); + }); + + return insert(job); +}; + +export const insertPublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, PUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertUnpublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, UNPUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertTransferJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'payload'] + return _insertJob(db, TRANSFER_JOBS_TABLE, columns, job); +}; + +export const insertMatrixJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload'] + return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); +}; + +export const insertPackageSetJobImpl = (db, job) => { + const columns = ['jobId', 'payload', 'rawPayload', 'signature'] + return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); +}; + +const _selectJob = (db, { table, jobId, packageName, packageVersion }) => { + const params = []; + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + `; + + if (jobId != null) { + query += ` WHERE info.jobId = ?`; + params.push(jobId); + } else if (packageName != null) { + query += ` WHERE job.packageName = ?`; + params.push(packageName); + if (packageVersion != null) { + query += ` AND job.packageVersion = ?`; + params.push(packageVersion); + } + } else { + query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`; + } + + query += ` ORDER BY info.createdAt ASC LIMIT 1`; + const stmt = db.prepare(query); + + return stmt.get(...params); +} + +export const selectPublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); +}; + +export const selectUnpublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); +}; + +export const selectTransferJobImpl = (db, { jobId, packageName }) => { + return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId, packageName }); }; -export const selectLogsByJobImpl = (db, jobId, logLevel) => { - const row = db - .prepare( - "SELECT * FROM logs WHERE jobId = ? AND level >= ? 
ORDER BY timestamp ASC" - ) - .all(jobId, logLevel); - return row; +export const selectMatrixJobImpl = (db, jobId) => { + return _selectJob(db, { table: MATRIX_JOBS_TABLE, jobId }); }; -export const createJobImpl = (db, job) => { - db.prepare( - "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)" - ).run(job); +export const selectPackageSetJobImpl = (db, jobId) => { + return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId }); }; -export const finishJobImpl = (db, result) => { - db.prepare( - "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId" - ).run(result); +// Find a pending package set job by payload (for duplicate detection) +export const selectPackageSetJobByPayloadImpl = (db, payload) => { + const stmt = db.prepare(` + SELECT job.*, info.* + FROM ${PACKAGE_SET_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE job.payload = ? AND info.finishedAt IS NULL + ORDER BY info.createdAt ASC LIMIT 1 + `); + return stmt.get(payload); +}; + +const _selectJobs = (db, { table, since, includeCompleted }) => { + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.createdAt >= ? + `; + let params = [since]; + + if (includeCompleted === false) { + query += ` AND info.finishedAt IS NULL`; + } + + query += ` ORDER BY info.createdAt ASC LIMIT 100`; + const stmt = db.prepare(query); + + return stmt.all(...params); +} + +export const selectPublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: PUBLISH_JOBS_TABLE, since, includeCompleted }); }; -export const selectJobImpl = (db, jobId) => { - const row = db - .prepare("SELECT * FROM jobs WHERE jobId = ? LIMIT 1") - .get(jobId); - return row; +export const selectUnpublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: UNPUBLISH_JOBS_TABLE, since, includeCompleted }); }; -export const runningJobForPackageImpl = (db, packageName) => { - const row = db - .prepare( - "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? ORDER BY createdAt ASC LIMIT 1" - ) - .get(packageName); - return row; +export const selectTransferJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: TRANSFER_JOBS_TABLE, since, includeCompleted }); }; -export const deleteIncompleteJobsImpl = (db) => { - db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run(); +export const selectMatrixJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: MATRIX_JOBS_TABLE, since, includeCompleted }); +}; + +export const selectPackageSetJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: PACKAGE_SET_JOBS_TABLE, since, includeCompleted }); +}; + +export const startJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = @startedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +export const finishJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET success = @success, finishedAt = @finishedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +// TODO I think we should keep track of this somehow. 
So either we save +// how many times this is being retried and give up at some point, notifying +// the trustees, or we notify right away for any retry so we can look at them +export const resetIncompleteJobsImpl = (db) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = NULL + WHERE finishedAt IS NULL + AND startedAt IS NOT NULL`); + return stmt.run(); +}; + +export const insertLogLineImpl = (db, logLine) => { + const stmt = db.prepare(` + INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp) + VALUES (@jobId, @level, @message, @timestamp) + `); + return stmt.run(logLine); +}; + +export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { + let query = ` + SELECT * FROM ${LOGS_TABLE} + WHERE jobId = ? AND level >= ? AND timestamp >= ? + ORDER BY timestamp ASC LIMIT 100 + `; + + const stmt = db.prepare(query); + return stmt.all(jobId, logLevel, since); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..e51196d47 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,828 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob + ( ConnectOptions + , FinishJob + , InsertMatrixJob + , InsertPackageSetJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , JobInfo + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails , SQLite + , SelectJobRequest + , SelectJobsRequest + , StartJob + , TransferJobDetails + , UnpublishJobDetails , connect - , createJob - , deleteIncompleteJobs , finishJob - , insertLog - , runningJobForPackage + , insertLogLine + , insertMatrixJob + , insertPackageSetJob + , insertPublishJob + , insertTransferJob + , insertUnpublishJob + , resetIncompleteJobs , selectJob + , selectJobs , selectLogsByJob + , selectNextMatrixJob + , selectNextPackageSetJob + , selectNextPublishJob + , selectNextTransferJob + , selectNextUnpublishJob + , selectPackageSetJobByPayload + , selectPublishJob + , selectTransferJob + , selectUnpublishJob + , startJob ) where import Registry.App.Prelude +import Codec.JSON.DecodeError as JSON.DecodeError +import Control.Monad.Except (runExceptT) +import Data.Array (sortBy, take) import Data.Array as Array import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Function (on) +import Data.Nullable (notNull, null) +import Data.Nullable as Nullable +import Data.UUID.Random as UUID +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Record as Record +import Registry.API.V1 (Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 +import Registry.API.V1 as V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.SSH (Signature(..)) +import Registry.Version as Version +-- | An active database connection acquired with 
`connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-------------------------------------------------------------------------------- +-- job_info table -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. +type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +-- jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +-- jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do +-- created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt +-- started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) +-- finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) +-- isSuccess <- toSuccess success +-- pure +-- { jobId: JobId jobId +-- , createdAt: created +-- , startedAt: started +-- , finishedAt: finished +-- , success: isSuccess +-- } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +-- selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +-- selectJobInfo db (JobId jobId) = do +-- maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId +-- pure $ traverse jobInfoFromJSRep maybeJobInfo + +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSStartJob = + { jobId :: String + , startedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat 
Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: fromSuccess success + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +resetIncompleteJobs :: SQLite -> Effect Unit +resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl -type NewJob = +newJobId :: forall m. MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id + +fromSuccess :: Boolean -> Int +fromSuccess success = if success then 1 else 0 + +toSuccess :: Int -> Either String Boolean +toSuccess success = case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + +type SelectJobRequest = + { level :: Maybe LogLevel + , since :: DateTime + , jobId :: JobId + } + +selectJob :: SQLite -> SelectJobRequest -> Effect { unreadableLogs :: Array String, job :: Either String (Maybe Job) } +selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do + let logLevel = fromMaybe Error maybeLogLevel + { fail: unreadableLogs, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since + -- Failing to decode a log should not prevent us from returning a job, so we pass + -- failures through to be handled by application code + job <- runExceptT $ firstJust + [ selectPublishJobById logs + , selectMatrixJobById logs + , selectTransferJobById logs + , selectPackageSetJobById logs + , selectUnpublishJobById logs + ] + pure { job, unreadableLogs } + where + firstJust :: Array (ExceptT String Effect (Maybe Job)) -> ExceptT String Effect (Maybe Job) + firstJust = Array.foldl go (pure Nothing) + where + go acc next = acc >>= case _ of + Just job -> pure (Just job) + Nothing -> next + + selectPublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } + pure $ traverse + ( map (PublishJob <<< Record.merge { logs, jobType: Proxy :: _ "publish" }) + <<< publishJobDetailsFromJSRep + ) + maybeJobDetails + + selectUnpublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 
selectUnpublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } + pure $ traverse + ( map (UnpublishJob <<< Record.merge { logs, jobType: Proxy :: _ "unpublish" }) + <<< unpublishJobDetailsFromJSRep + ) + maybeJobDetails + + selectTransferJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db + { jobId: notNull jobId, packageName: null } + pure $ traverse + ( map (TransferJob <<< Record.merge { logs, jobType: Proxy :: _ "transfer" }) + <<< transferJobDetailsFromJSRep + ) + maybeJobDetails + + selectMatrixJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (MatrixJob <<< Record.merge { logs, jobType: Proxy :: _ "matrix" }) + <<< matrixJobDetailsFromJSRep + ) + maybeJobDetails + + selectPackageSetJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (notNull jobId) + pure $ traverse + ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) + <<< packageSetJobDetailsFromJSRep + ) + maybeJobDetails + +type SelectJobsRequest = + { since :: DateTime + , includeCompleted :: Boolean + } + +selectJobs :: SQLite -> SelectJobsRequest -> Effect { failed :: Array String, jobs :: Array Job } +selectJobs db { since, includeCompleted } = do + publishJobs <- selectPublishJobs + unpublishJobs <- selectUnpublishJobs + transferJobs <- selectTransferJobs + matrixJobs <- selectMatrixJobs + packageSetJobs <- selectPackageSetJobs + let + { fail: failedJobs, success: allJobs } = partitionEithers + (publishJobs <> unpublishJobs <> transferJobs <> matrixJobs <> packageSetJobs) + pure { failed: failedJobs, jobs: take 100 $ sortBy (compare `on` (V1.jobInfo >>> _.createdAt)) allJobs } + + where + selectPublishJobs = do + jobs <- Uncurried.runEffectFn3 selectPublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" }) <<< publishJobDetailsFromJSRep) jobs + + selectUnpublishJobs = do + jobs <- Uncurried.runEffectFn3 selectUnpublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" }) <<< unpublishJobDetailsFromJSRep) jobs + + selectTransferJobs = do + jobs <- Uncurried.runEffectFn3 selectTransferJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" }) <<< transferJobDetailsFromJSRep) jobs + + selectMatrixJobs = do + jobs <- Uncurried.runEffectFn3 selectMatrixJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" }) <<< matrixJobDetailsFromJSRep) jobs + + selectPackageSetJobs = do + jobs <- Uncurried.runEffectFn3 selectPackageSetJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" }) <<< packageSetJobDetailsFromJSRep) jobs + +-------------------------------------------------------------------------------- +-- publish_jobs table + +type PublishJobDetails = { jobId :: JobId - , jobType :: JobType , createdAt :: DateTime + , startedAt :: Maybe DateTime 
+ , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PublishData } -type JSNewJob = +type JSPublishJobDetails = { jobId :: String - , jobType :: String , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId - , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt - , packageName: PackageName.print packageName - , ref +publishJobDetailsFromJSRep :: JSPublishJobDetails -> Either String PublishJobDetails +publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.publishCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +type SelectPublishParams = + { jobId :: Nullable String + , packageName :: Nullable String + , packageVersion :: Nullable String + } + +foreign import selectPublishJobImpl :: EffectFn2 SQLite SelectPublishParams (Nullable JSPublishJobDetails) + +foreign import selectPublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPublishJobDetails) + +selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) +selectNextPublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db { jobId: null, packageName: null, packageVersion: null } + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +selectPublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe PublishJobDetails)) +selectPublishJob db packageName packageVersion = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + , packageVersion: notNull $ Version.print packageVersion + } + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +type InsertPublishJob = + { payload :: PublishData + } + +type JSInsertPublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String + } + +insertPublishJobToJSRep :: JobId -> DateTime -> InsertPublishJob -> JSInsertPublishJob +insertPublishJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.publishCodec payload + , createdAt: DateTime.format Internal.Format.iso8601DateTime now } -type JobResult = +foreign import insertPublishJobImpl :: EffectFn2 SQLite JSInsertPublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
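+-- |
+-- | A hypothetical usage sketch (the 'publishData' value of type PublishData
+-- | is assumed to come from a decoded request body):
+-- |
+-- | ```purescript
+-- | jobId <- liftEffect $ insertPublishJob db { payload: publishData }
+-- | ```
+-- |
+-- | A fresh job id is generated internally, and rows are written to both the
+-- | publish_jobs and job_info tables.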
+insertPublishJob :: SQLite -> InsertPublishJob -> Effect JobId +insertPublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPublishJobImpl db $ insertPublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- unpublish_jobs table + +type UnpublishJobDetails = { jobId :: JobId - , finishedAt :: DateTime + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData } -type JSJobResult = +type JSUnpublishJobDetails = { jobId :: String - , finishedAt :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String , success :: Int + , packageName :: String + , packageVersion :: String + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +unpublishJobDetailsFromJSRep :: JSUnpublishJobDetails -> Either String UnpublishJobDetails +unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +type SelectUnpublishParams = + { jobId :: Nullable String + , packageName :: Nullable String + , packageVersion :: Nullable String } -type Job = +foreign import selectUnpublishJobImpl :: EffectFn2 SQLite SelectUnpublishParams (Nullable JSUnpublishJobDetails) + +foreign import selectUnpublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSUnpublishJobDetails) + +selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails)) +selectNextUnpublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db { jobId: null, packageName: null, packageVersion: null } + pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails + +selectUnpublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe UnpublishJobDetails)) +selectUnpublishJob db packageName packageVersion = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + , packageVersion: notNull $ Version.print packageVersion + } + pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails + +type InsertUnpublishJob = + { payload :: UnpublishData + , rawPayload :: String + , signature :: Signature + } + +type JSInsertUnpublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String + } + +insertUnpublishJobToJSRep :: JobId -> DateTime -> InsertUnpublishJob -> JSInsertUnpublishJob +insertUnpublishJobToJSRep 
jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Unpublish payload + , rawPayload + , signature + } + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertUnpublishJobImpl :: EffectFn2 SQLite JSInsertUnpublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertUnpublishJob :: SQLite -> InsertUnpublishJob -> Effect JobId +insertUnpublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertUnpublishJobImpl db $ insertUnpublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- transfer_jobs table + +type TransferJobDetails = { jobId :: JobId - , jobType :: JobType + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName - , ref :: String + , payload :: AuthenticatedData + } + +type JSTransferJobDetails = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , payload :: String + } + +transferJobDetailsFromJSRep :: JSTransferJobDetails -> Either String TransferJobDetails +transferJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , payload: parsed + } + +type SelectTransferParams = { jobId :: Nullable String, packageName :: Nullable String } + +foreign import selectTransferJobImpl :: EffectFn2 SQLite SelectTransferParams (Nullable JSTransferJobDetails) + +foreign import selectTransferJobsImpl :: EffectFn3 SQLite String Boolean (Array JSTransferJobDetails) + +selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails)) +selectNextTransferJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db { jobId: null, packageName: null } + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails + +selectTransferJob :: SQLite -> PackageName -> Effect (Either String (Maybe TransferJobDetails)) +selectTransferJob db packageName = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + } + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails + +type InsertTransferJob = + { payload :: TransferData + , rawPayload :: String + , signature :: Signature + } + +type JSInsertTransferJob = + { jobId :: String + , packageName :: String + , payload :: String + , createdAt :: String + } + +insertTransferJobToJSRep :: JobId -> DateTime -> InsertTransferJob -> JSInsertTransferJob +insertTransferJobToJSRep jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , 
packageName: PackageName.print payload.name + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Transfer payload, rawPayload, signature } + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertTransferJobImpl :: EffectFn2 SQLite JSInsertTransferJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertTransferJob :: SQLite -> InsertTransferJob -> Effect JobId +insertTransferJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertTransferJobImpl db $ insertTransferJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- matrix_jobs table + +type InsertMatrixJob = + { packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + } + +type JSInsertMatrixJob = + { jobId :: String + , createdAt :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +insertMatrixJobToJSRep :: JobId -> DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep jobId now { packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload + } + +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect JobId +insertMatrixJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep jobId now job + pure jobId + +type MatrixJobDetails = + { jobId :: JobId , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version } -type JSJob = +type JSMatrixJobDetails = { jobId :: String - , jobType :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + } + +foreign import selectMatrixJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable 
JSMatrixJobDetails) + +foreign import selectMatrixJobsImpl :: EffectFn3 SQLite String Boolean (Array JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db Nullable.null + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +-------------------------------------------------------------------------------- +-- package_set_jobs table + +type PackageSetJobDetails = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , payload :: PackageSetOperation + } + +type JSPackageSetJobDetails = + { jobId :: String , createdAt :: String + , startedAt :: Nullable String , finishedAt :: Nullable String , success :: Int + , payload :: String + } + +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , payload: parsed + } + +foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) + +foreign import selectPackageSetJobByPayloadImpl :: EffectFn2 SQLite String (Nullable JSPackageSetJobDetails) + +foreign import selectPackageSetJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db null + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails + +-- | Find a pending package set job by payload (for duplicate detection) +selectPackageSetJobByPayload :: SQLite -> PackageSetOperation -> Effect (Either String (Maybe PackageSetJobDetails)) +selectPackageSetJobByPayload db payload = do + let payloadStr = stringifyJson Operation.packageSetOperationCodec payload + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobByPayloadImpl db payloadStr + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails + +type InsertPackageSetJob = + { payload :: PackageSetOperation + , rawPayload :: String + , signature :: Maybe Signature + } + +type JSInsertPackageSetJob = + { jobId :: String + , createdAt :: String + , payload :: String + , rawPayload :: String + , signature :: Nullable String + } + +insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , payload: stringifyJson Operation.packageSetOperationCodec payload + , rawPayload + , signature: Nullable.toNullable $ map (\(Signature s) -> s) signature + } + +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> 
InsertPackageSetJob -> Effect JobId
+insertPackageSetJob db job = do
+  jobId <- newJobId
+  now <- nowUTC
+  Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep jobId now job
+  pure jobId
+
+--------------------------------------------------------------------------------
+-- logs table
+
+type JSLogLine =
+  { level :: Int
+  , message :: String
+  , jobId :: String
+  , timestamp :: String
+  }
+
+logLineToJSRep :: LogLine -> JSLogLine
+logLineToJSRep { level, message, jobId, timestamp } =
+  { level: API.V1.logLevelToPriority level
+  , message
+  , jobId: un JobId jobId
+  , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp
   }
 
-jsJobToJob :: JSJob -> Either String Job
-jsJobToJob raw = do
-  let jobId = JobId raw.jobId
-  jobType <- API.V1.parseJobType raw.jobType
-  packageName <- PackageName.parse raw.packageName
-  createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt
-  finishedAt <- case toMaybe raw.finishedAt of
-    Nothing -> pure Nothing
-    Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt
-  success <- case raw.success of
-    0 -> Right false
-    1 -> Right true
-    _ -> Left $ "Invalid success value " <> show raw.success
-  pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref }
-
-createJob :: SQLite -> NewJob -> Effect Unit
-createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob
-
-finishJob :: SQLite -> JobResult -> Effect Unit
-finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult
-
-selectJob :: SQLite -> JobId -> Effect (Either String Job)
-selectJob db (JobId jobId) = do
-  maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId
-  pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob
-
-runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job)
-runningJobForPackage db packageName = do
-  let pkgStr = PackageName.print packageName
-  maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr
-  pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob
-
-deleteIncompleteJobs :: SQLite -> Effect Unit
-deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl
+logLineFromJSRep :: JSLogLine -> Either String LogLine
+logLineFromJSRep { level, message, jobId, timestamp } = do
+  logLevel <- API.V1.logLevelFromPriority level
+  time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp
+  pure
+    { level: logLevel
+    , message
+    , jobId: JobId jobId
+    , timestamp: time
+    }
+
+foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit
+
+insertLogLine :: SQLite -> LogLine -> Effect Unit
+insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep
+
+foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int String (Array JSLogLine)
+
+-- | Select up to 100 log lines for a given job, keeping only logs at or above
+-- | the indicated log level and recorded at or after the given timestamp. To
+-- | include logs of every level, pass the DEBUG log level.
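+-- |
+-- | For example (hypothetical values, assuming the Debug constructor from
+-- | Registry.API.V1), fetching every retained log line for a job from the
+-- | beginning of time:
+-- |
+-- | ```purescript
+-- | { fail, success } <- liftEffect $ selectLogsByJob db jobId Debug bottom
+-- | ```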
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = DateTime.format Internal.Format.iso8601DateTime since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + timestamp + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index a6e8c3e1f..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,346 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Data.Codec.JSON as CJ -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Newtype (unwrap) -import Data.String as String -import Data.UUID.Random as UUID -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API (COMPILER_CACHE, _compilerCache) -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Archive (ARCHIVE) -import Registry.App.Effect.Archive as Archive -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv, serverPort) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except - -newJobId :: forall m. 
MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish Nothing publish - - Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." - - Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , 
resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. - -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - -- Start healthcheck ping loop if URL is configured - case env.vars.resourceEnv.healthchecksUrl of - Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." 
- Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - pure unit - - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional serverPort - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env router - } - pure unit - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Archive.interpret Archive.handle - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret (Source.handle Source.Recent) - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 000000000..70e5698f5 --- /dev/null +++ 
b/app/src/App/Server/Env.purs @@ -0,0 +1,191 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.PackageSets (PACKAGE_SETS) +import Registry.App.Effect.PackageSets as PackageSets +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: 
vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. + -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. + liftEffect $ SQLite.resetIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + PACKAGE_SETS + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # PackageSets.interpret (PackageSets.handle { workdir: scratchDir }) + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Archive.interpret Archive.handle + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # 
Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs new file mode 100644 index 000000000..4970fa935 --- /dev/null +++ b/app/src/App/Server/JobExecutor.purs @@ -0,0 +1,180 @@ +module Registry.App.Server.JobExecutor + ( runJobExecutor + ) where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) +import Control.Parallel as Parallel +import Data.Array as Array +import Data.DateTime (DateTime) +import Data.Map as Map +import Data.Set as Set +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Record as Record +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex +import Registry.PackageName as PackageName +import Registry.Version as Version +import Run (Run) +import Run.Except (EXCEPT) + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = runEffects env do + Log.info "Starting Job Executor" + -- Before starting the executor we check if we need to run a whole-registry + -- compiler update: whenever a new compiler is published we need to see which + -- packages are compatible with it; this is a responsibility of the MatrixBuilder, + -- but it needs to be triggered to know there's a new version out. + -- To do that, we ask PursVersions what the compilers are, then we look for + -- the compatibility list of the latest `prelude` version. If the new compiler + -- is missing, then we know that we have not attempted to check compatibility + -- with it (since the latest `prelude` has to be compatible by definition), + -- and we can enqueue a "compile everything" here, which will be the first + -- thing that the JobExecutor picks up + void $ MatrixBuilder.checkIfNewCompiler + >>= traverse upgradeRegistryToNewCompiler + Db.resetIncompleteJobs + loop + where + loop = do + maybeJob <- findNextAvailableJob + case maybeJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 1000.0) + loop + + Just job -> do + now <- nowUTC + let + jobId = (V1.jobInfo job).jobId + + Db.startJob { jobId, startedAt: now } + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + -- Note: we set env.jobId so that logs are written to the database. + jobResult <- liftAff do + let envWithJobId = env { jobId = Just jobId } + let execute = Just <$> (runEffects envWithJobId $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> unwrap jobId <> " timed out." + pure false + + Just (Left err) -> do + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err + pure false + + Just (Right _) -> do + Log.info $ "Job " <> unwrap jobId <> " succeeded." 
+            pure true
+
+        finishedAt <- nowUTC
+        Db.finishJob { jobId, finishedAt, success }
+        loop
+
+-- TODO: here we only get a single package for each operation, but really we should
+-- have all of them and topologically sort them. There is something in ManifestIndex,
+-- but it's not clear that's what we need.
+findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe Job)
+findNextAvailableJob = runMaybeT
+  $ (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" } <$> MaybeT Db.selectNextPublishJob)
+  <|> (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" } <$> MaybeT Db.selectNextUnpublishJob)
+  <|> (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" } <$> MaybeT Db.selectNextTransferJob)
+  <|> (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" } <$> MaybeT Db.selectNextMatrixJob)
+  <|> (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" } <$> MaybeT Db.selectNextPackageSetJob)
+
+executeJob :: DateTime -> Job -> Run ServerEffects Unit
+executeJob _ = case _ of
+  PublishJob { payload: payload@{ name } } -> do
+    maybeResult <- API.publish Nothing payload
+    -- The above operation throws if unsuccessful, and returns a map of the
+    -- package's dependencies only if it has not been published before.
+    for_ maybeResult \{ dependencies, version } -> do
+      -- At this point this package has been verified with one compiler only.
+      -- So we need to enqueue compilation jobs for (1) same package, all the other
+      -- compilers, and (2) same compiler, all packages that depend on this one.
+      -- TODO: we build the compiler index here, but we should really cache it.
+      compilerIndex <- MatrixBuilder.readCompilerIndex
+      let solverData = { compiler: payload.compiler, name, version, dependencies, compilerIndex }
+      samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData
+      sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData
+      for (Array.fromFoldable $ Set.union samePackageAllCompilers sameCompilerAllDependants)
+        \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do
+          Log.info $ "Enqueuing matrix job: compiler "
+            <> Version.print solvedCompiler
+            <> ", package "
+            <> PackageName.print solvedPackage
+            <> "@"
+            <> Version.print solvedVersion
+          Db.insertMatrixJob
+            { payload: resolutions
+            , compilerVersion: solvedCompiler
+            , packageName: solvedPackage
+            , packageVersion: solvedVersion
+            }
+  UnpublishJob { payload } -> API.authenticated payload
+  TransferJob { payload } -> API.authenticated payload
+  MatrixJob details@{ packageName, packageVersion } -> do
+    maybeDependencies <- MatrixBuilder.runMatrixJob details
+    -- Unlike the publishing case, after verifying a compilation here we only need
+    -- to follow up by trying to compile the packages that depend on this one.
+    for_ maybeDependencies \dependencies -> do
+      -- TODO: we build the compiler index here, but we should really cache it.
+      compilerIndex <- MatrixBuilder.readCompilerIndex
+      let solverData = { compiler: details.compilerVersion, name: packageName, version: packageVersion, dependencies, compilerIndex }
+      sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData
+      for (Array.fromFoldable sameCompilerAllDependants)
+        \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do
+          Log.info $ "Enqueuing matrix job: compiler "
+            <> Version.print solvedCompiler
+            <> ", package "
+            <> PackageName.print solvedPackage
+            <> "@"
+            <> Version.print solvedVersion
+          Db.insertMatrixJob
+            { payload: resolutions
+            , compilerVersion: solvedCompiler
+            , packageName: solvedPackage
+            , packageVersion: solvedVersion
+            }
+  PackageSetJob payload -> API.packageSetUpdate payload
+
+upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit
+upgradeRegistryToNewCompiler newCompilerVersion = do
+  Log.info $ "New compiler found: " <> Version.print newCompilerVersion
+  Log.info "Starting upgrade of the whole registry to the new compiler..."
+  allManifests <- Registry.readAllManifests
+  for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do
+    -- Note: we enqueue compilation jobs only for packages with no dependencies,
+    -- because from them we should be able to reach the whole of the registry:
+    -- as they complete, new jobs for their dependants will be queued up.
+    when (Map.isEmpty manifest.dependencies) do
+      Log.info $ "Enqueuing matrix job for _new_ compiler "
+        <> Version.print newCompilerVersion
+        <> ", package "
+        <> PackageName.print manifest.name
+        <> "@"
+        <> Version.print manifest.version
+      void $ Db.insertMatrixJob
+        { payload: Map.empty
+        , compilerVersion: newCompilerVersion
+        , packageName: manifest.name
+        , packageVersion: manifest.version
+        }
diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs
new file mode 100644
index 000000000..34aba9ba0
--- /dev/null
+++ b/app/src/App/Server/MatrixBuilder.purs
@@ -0,0 +1,234 @@
+module Registry.App.Server.MatrixBuilder
+  ( checkIfNewCompiler
+  , installBuildPlan
+  , printCompilerFailure
+  , readCompilerIndex
+  , runMatrixJob
+  , solveForAllCompilers
+  , solveDependantsForCompiler
+  ) where
+
+import Registry.App.Prelude
+
+import Data.Array as Array
+import Data.Array.NonEmpty as NonEmptyArray
+import Data.Map as Map
+import Data.Set as Set
+import Data.Set.NonEmpty as NonEmptySet
+import Data.String as String
+import Effect.Aff as Aff
+import Node.FS.Aff as FS.Aff
+import Node.Path as Path
+import Registry.API.V1 (MatrixJobData)
+import Registry.App.CLI.Purs (CompilerFailure(..))
+import Registry.App.CLI.Purs as Purs
+import Registry.App.CLI.PursVersions as PursVersions
+import Registry.App.CLI.Tar as Tar
+import Registry.App.Effect.Log (LOG)
+import Registry.App.Effect.Log as Log
+import Registry.App.Effect.Registry (REGISTRY)
+import Registry.App.Effect.Registry as Registry
+import Registry.App.Effect.Storage (STORAGE)
+import Registry.App.Effect.Storage as Storage
+import Registry.Foreign.FSExtra as FS.Extra
+import Registry.Foreign.Tmp as Tmp
+import Registry.ManifestIndex as ManifestIndex
+import Registry.Metadata as Metadata
+import Registry.PackageName as PackageName
+import Registry.Range as Range
+import Registry.Solver as Solver
+import Registry.Version as Version
+import Run (AFF, EFFECT, Run)
+import Run as Run
+import Run.Except (EXCEPT)
+import Run.Except as Except
+
+runMatrixJob :: forall r. MatrixJobData -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range))
+runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do
+  workdir <- Tmp.mkTmpDir
+  let installed = Path.concat [ workdir, ".registry" ]
+  FS.Extra.ensureDirectory installed
+  installBuildPlan (Map.insert packageName packageVersion buildPlan) installed
+  result <- Run.liftAff $ Purs.callCompiler
+    { command: Purs.Compile { globs: [ Path.concat [ installed, "*/src/**/*.purs" ] ] }
+    , version: Just compilerVersion
+    , cwd: Just workdir
+    }
+  FS.Extra.remove workdir
+  case result of
+    Left err -> do
+      Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion
+        <> ":\n"
+        <> printCompilerFailure compilerVersion err
+      pure Nothing
+    Right _ -> do
+      Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion
+
+      Registry.readMetadata packageName >>= case _ of
+        Nothing -> do
+          Log.error $ "No existing metadata for " <> PackageName.print packageName
+          pure Nothing
+        Just (Metadata metadata) -> do
+          let
+            metadataWithCompilers = metadata
+              { published = Map.update
+                  ( \publishedMetadata@{ compilers } ->
+                      Just $ publishedMetadata { compilers = NonEmptySet.toUnfoldable1 $ NonEmptySet.fromFoldable1 $ NonEmptyArray.cons compilerVersion compilers }
+                  )
+                  packageVersion
+                  metadata.published
+              }
+          Registry.writeMetadata packageName (Metadata metadataWithCompilers)
+          Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers)
+
+          Log.info "Wrote completed metadata to the registry!"
+          Registry.readManifest packageName packageVersion >>= case _ of
+            Just (Manifest manifest) -> pure (Just manifest.dependencies)
+            Nothing -> do
+              Log.error $ "No existing manifest for " <> PackageName.print packageName <> "@" <> Version.print packageVersion
+              pure Nothing
+
+-- TODO: it feels like we should be doing this at startup and use the cache
+-- instead of reading the files all over again.
+readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex
+readCompilerIndex = do
+  metadata <- Registry.readAllMetadata
+  manifests <- Registry.readAllManifests
+  allCompilers <- PursVersions.pursVersions
+  pure $ Solver.buildCompilerIndex allCompilers manifests metadata
+
+-- | Install all dependencies indicated by the build plan to the specified
+-- | directory. Packages will be installed at 'dir/package-name-x.y.z'.
+installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit
+installBuildPlan resolutions dependenciesDir = do
+  Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir
+  -- We fetch every dependency at its resolved version, unpack the tarball, and
+  -- store the resulting source code in a specified directory for dependencies.
+  forWithIndex_ resolutions \name version -> do
+    let
+      -- This filename uses the format the directory name will have once
+      -- unpacked, i.e. package-name-major.minor.patch
+      filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz"
+      filepath = Path.concat [ dependenciesDir, filename ]
+    Storage.download name version filepath
+    Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of
+      Left error -> do
+        Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error
+        Except.throw "Failed to unpack dependency tarball, cannot continue."
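+      -- On success the tarball unpacks to a directory named like the archive,
+      -- e.g. (hypothetical resolutions, for illustration only) prelude@6.0.1
+      -- and effect@4.0.0 would yield:
+      --   dependenciesDir/prelude-6.0.1/
+      --   dependenciesDir/effect-4.0.0/
+      -- after which the downloaded tarball itself is deleted below.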
+      Right _ ->
+        Log.debug $ "Unpacked " <> filename
+    Run.liftAff $ FS.Aff.unlink filepath
+    Log.debug $ "Installed " <> formatPackageVersion name version
+
+printCompilerFailure :: Version -> CompilerFailure -> String
+printCompilerFailure compiler = case _ of
+  MissingCompiler -> Array.fold
+    [ "Compilation failed because the build plan compiler version "
+    , Version.print compiler
+    , " is not supported. Please try again with a different compiler."
+    ]
+  CompilationError errs -> String.joinWith "\n"
+    [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:"
+    , "```"
+    , Purs.printCompilerErrors errs
+    , "```"
+    ]
+  UnknownError err -> String.joinWith "\n"
+    [ "Compilation failed with version " <> Version.print compiler <> " because of an error:"
+    , "```"
+    , err
+    , "```"
+    ]
+
+type MatrixSolverData =
+  { compilerIndex :: Solver.CompilerIndex
+  , compiler :: Version
+  , name :: PackageName
+  , version :: Version
+  , dependencies :: Map PackageName Range
+  }
+
+type MatrixSolverResult =
+  { name :: PackageName
+  , version :: Version
+  , compiler :: Version
+  , resolutions :: Map PackageName Version
+  }
+
+solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Set MatrixSolverResult)
+solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = do
+  -- remove the compiler we tested with from the set of all of them
+  compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions
+  newJobs <- for compilers \target -> do
+    Log.debug $ "Trying compiler " <> Version.print target <> " for package " <> PackageName.print name
+    case Solver.solveWithCompiler (Range.exact target) compilerIndex dependencies of
+      Left _solverErrors -> do
+        Log.info $ "Failed to solve with compiler " <> Version.print target
+        -- Log.debug $ Solver.printSolverError solverErrors
+        pure Nothing
+      Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of
+        true -> do
+          Log.debug $ "Solved with compiler " <> Version.print solvedCompiler
+          pure $ Just { compiler: target, resolutions, name, version }
+        false -> do
+          Log.debug $ Array.fold
+            [ "Produced a compiler-derived build plan that selects a compiler ("
+            , Version.print solvedCompiler
+            , ") that differs from the target compiler ("
+            , Version.print target
+            , ")."
+            ]
+          pure Nothing
+  pure $ Set.fromFoldable $ Array.catMaybes newJobs
+
+solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Set MatrixSolverResult)
+solveDependantsForCompiler { compilerIndex, name, version, compiler } = do
+  manifestIndex <- Registry.readAllManifests
+  let dependentManifests = ManifestIndex.dependants manifestIndex name version
+  newJobs <- for dependentManifests \(Manifest manifest) -> do
+    -- we first check whether we have already attempted this package with this compiler,
+    -- either because it is already recorded in the metadata or because a compilation
+    -- already failed (i.e. 
if we find compilers in the metadata for this version we only check this one + -- if it's newer, because all the previous ones have been tried) + shouldAttemptToCompile <- Registry.readMetadata manifest.name >>= case _ of + Nothing -> pure false + Just metadata -> pure $ case Map.lookup version (un Metadata metadata).published of + Nothing -> false + Just { compilers } -> any (_ > compiler) compilers + case shouldAttemptToCompile of + false -> pure Nothing + true -> do + -- if all good then run the solver + Log.debug $ "Trying compiler " <> Version.print compiler <> " for package " <> PackageName.print manifest.name + case Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print compiler + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of + true -> do + Log.debug $ "Solved with compiler " <> Version.print solvedCompiler + pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version } + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print compiler + , ")." + ] + pure Nothing + pure $ Set.fromFoldable $ Array.catMaybes newJobs + +checkIfNewCompiler :: forall r. Run (EXCEPT String + LOG + REGISTRY + AFF + r) (Maybe Version) +checkIfNewCompiler = do + Log.info "Checking if there's a new compiler in town..." + latestCompiler <- NonEmptyArray.foldr1 max <$> PursVersions.pursVersions + maybeMetadata <- Registry.readMetadata $ unsafeFromRight $ PackageName.parse "prelude" + pure $ maybeMetadata >>= \(Metadata metadata) -> + Map.findMax metadata.published + >>= \{ key: _version, value: { compilers } } -> do + case all (_ < latestCompiler) compilers of + -- all compilers compatible with the latest prelude are older than this one + true -> Just latestCompiler + false -> Nothing diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs new file mode 100644 index 000000000..2553ea4a6 --- /dev/null +++ b/app/src/App/Server/Router.purs @@ -0,0 +1,178 @@ +module Registry.App.Server.Router where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.DateTime as DateTime +import Data.Time.Duration (Hours(..), negateDuration) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import HTTPurple (Method(..), Request, Response) +import HTTPurple as HTTPurple +import HTTPurple.Status as Status +import Registry.API.V1 (Route(..)) +import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Auth as Auth +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env as Env +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) +import Registry.Operation (PackageSetOperation(..)) +import Registry.Operation as Operation +import Run (Run) +import Run as Run +import Run.Except as Run.Except + +runRouter :: ServerEnv -> Effect Unit +runRouter env = do + -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) + port <- liftEffect $ Env.lookupOptional Env.serverPort + void $ HTTPurple.serve + { hostname: "0.0.0.0" + , port + } + { route: V1.routes + , router: runServer + } + where + runServer :: Request Route -> Aff Response + runServer request 
= do + result <- runEffects env (router request) + case result of + Left error -> do + Console.log $ "Bad request: " <> Aff.message error + HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + + jobId <- lift (Db.selectPublishJob publish.name publish.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate publish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertPublishJob { payload: publish } + + jsonOk V1.jobCreatedResponseCodec { jobId } + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload + + jobId <- lift (Db.selectUnpublishJob payload.name payload.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate unpublish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertUnpublishJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + + jobId <- lift (Db.selectTransferJob payload.name) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate transfer job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertTransferJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected transfer operation." 
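+
+  -- For illustration, a hypothetical request to the endpoint below (the values
+  -- and the exact timestamp encoding are assumptions, not taken from the codebase):
+  --
+  --   GET /api/v1/jobs?since=2024-09-14T17:10:30Z&include_completed=true
+  --
+  -- Both query parameters are optional: `since` defaults to one hour ago and
+  -- `include_completed` to false, as handled below.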
+ + Jobs { since, include_completed }, Get -> do + now <- liftEffect nowUTC + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now + lift + ( Run.Except.runExcept $ Db.selectJobs + { includeCompleted: fromMaybe false include_completed + , since: fromMaybe oneHourAgo since + } + ) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching jobs: " <> err + HTTPurple.internalServerError $ "Error while fetching jobs: " <> err + Right jobs -> jsonOk (CJ.array V1.jobCodec) jobs + + Job jobId { level: maybeLogLevel, since }, Get -> do + now <- liftEffect nowUTC + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now + lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe oneHourAgo since }) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.internalServerError $ "Error while fetching job: " <> err + Right Nothing -> do + HTTPurple.notFound + Right (Just job) -> jsonOk V1.jobCodec job + + PackageSets, Post -> do + request <- HTTPurple.fromJson (jsonDecoder Operation.packageSetUpdateRequestCodec) body + lift $ Log.info $ "Received PackageSet request: " <> request.rawPayload + + -- Check if the operation requires authentication (compiler change or package removal) + let + PackageSetUpdate payload = request.payload + didChangeCompiler = isJust payload.compiler + didRemovePackages = any isNothing payload.packages + requiresAuth = didChangeCompiler || didRemovePackages + + -- If restricted operation, verify pacchettibotti signature + authResult <- + if requiresAuth then do + pacchettiBotti <- lift API.getPacchettiBotti + lift $ Run.liftAff $ Auth.verifyPackageSetPayload pacchettiBotti request + else + pure (Right unit) + + case authResult of + Left err -> do + lift $ Log.error $ "Package set authentication failed: " <> err + HTTPurple.badRequest err + Right _ -> do + when requiresAuth do + lift $ Log.info "Package set authentication successful." + + -- Check for duplicate pending job with the same payload + jobId <- lift (Db.selectPackageSetJobByPayload request.payload) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate package set job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertPackageSetJob + { payload: request.payload + , rawPayload: request.rawPayload + , signature: request.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 27ed33cf1..28f17f90e 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -94,16 +94,17 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version: version , resolutions: Nothing } -- First, we publish the package. 
Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs + void $ API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -141,7 +142,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -156,28 +157,30 @@ spec = do -- but did not have documentation make it to Pursuit. let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + void $ API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs -- We can also verify that transitive dependencies are added for legacy -- packages. let transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } transitivePublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + void $ API.publish (Just (toLegacyIndex idx)) transitivePublishArgs -- We should verify the resulting metadata file is correct Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of @@ -188,7 +191,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." 
Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -214,65 +217,6 @@ spec = do Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err Right (Right _) -> pure unit - - Spec.it "Falls back to archive when GitHub repo is inaccessible during legacy import" \{ workdir, index, metadata, storageDir, archiveDir, githubDir } -> do - logs <- liftEffect (Ref.new []) - - let - toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry - toLegacyIndex = - Solver.exploreAllTransitiveDependencies - <<< Solver.initializeRegistry - <<< map (map (_.dependencies <<< un Manifest)) - <<< ManifestIndex.toMap - - testEnv = - { workdir - , logs - , index - , metadata - , pursuitExcludes: Set.empty - , username: "jon" - , storage: storageDir - , archive: archiveDir - , github: githubDir - } - - -- The prelude@6.0.2 package exists in registry-archive but NOT in - -- github-packages or registry-storage. This simulates an archive-backed - -- package whose original GitHub repo is gone. - result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do - let - name = Utils.unsafePackageName "prelude" - version = Utils.unsafeVersion "6.0.2" - ref = "v6.0.2" - publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" - , location: Just $ GitHub { owner: "purescript", repo: "purescript-prelude", subdir: Nothing } - , name - , ref - , resolutions: Nothing - } - - -- Legacy import with archive fallback - Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs - - -- Verify the package was published to storage - Storage.query name >>= \versions -> - unless (Set.member version versions) do - Except.throw $ "Expected " <> formatPackageVersion name version <> " to be published to registry storage." - - case result of - Left exn -> do - recorded <- liftEffect (Ref.read logs) - Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) - Assert.fail $ "Got an Aff exception! " <> Aff.message exn - Right (Left err) -> do - recorded <- liftEffect (Ref.read logs) - Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) - Assert.fail $ "Expected prelude@6.0.2 to be published via archive fallback but got error: " <> err - Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do @@ -307,6 +251,10 @@ spec = do copyFixture "registry-storage" copyFixture "registry-archive" copyFixture "github-packages" + -- FIXME: This is a bit hacky, but we remove effect-4.0.0.tar.gz since the unit test publishes + -- it from scratch and will fail if effect-4.0.0 is already in storage. We have it in storage + -- for the separate integration tests. 
+ FS.Extra.remove $ Path.concat [ testFixtures, "registry-storage", "effect-4.0.0.tar.gz" ] let readFixtures = do diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..d2c6baf18 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Nothing @@ -103,6 +106,7 @@ preludeAdditionString = { "name": "prelude", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" @@ -121,6 +125,7 @@ packageNameTooLongString = { "name": "packagenamewayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyytoolong", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 414b09a57..2d4a7a2dc 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -2,8 +2,6 @@ module Test.Registry.App.Legacy.PackageSet (spec) where import Registry.App.Prelude -import Data.Array.NonEmpty as NonEmptyArray -import Data.DateTime (DateTime(..)) import Data.Either as Either import Data.Map as Map import Data.Set as Set @@ -14,7 +12,6 @@ import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (legacyPackageSetCodec) import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName -import Registry.Sha256 as Sha256 import Registry.Test.Assert as Assert import Registry.Test.Utils as Utils import Registry.Version as Version @@ -93,7 +90,7 @@ packageSet = PackageSet convertedPackageSet :: ConvertedLegacyPackageSet convertedPackageSet = - case Legacy.PackageSet.convertPackageSet index metadata packageSet of + case Legacy.PackageSet.convertPackageSet index packageSet of Left err -> unsafeCrashWith err Right value -> value where @@ -104,13 +101,6 @@ convertedPackageSet = , mkManifest prelude [] ] - metadata = Map.fromFoldable - [ unsafeMetadataEntry assert - , unsafeMetadataEntry console - , unsafeMetadataEntry effect - , unsafeMetadataEntry prelude - ] - legacyPackageSetJson :: String legacyPackageSetJson = """{ @@ -201,23 +191,3 @@ mkManifest (Tuple name version) deps = do (PackageName.print name) (LenientVersion.print version) (map (bimap PackageName.print (LenientVersion.version >>> toRange)) deps) - -unsafeMetadataEntry :: Tuple PackageName LenientVersion -> 
Tuple PackageName Metadata -unsafeMetadataEntry (Tuple name version) = do - let - published = - { ref: LenientVersion.raw version - , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" - , bytes: 0.0 - , compilers: NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2") - , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom - } - - metadata = Metadata - { location: GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print name, subdir: Nothing } - , owners: Nothing - , published: Map.singleton (LenientVersion.version version) published - , unpublished: Map.empty - } - - Tuple name metadata diff --git a/app/test/App/Manifest/SpagoYaml.purs b/app/test/App/Manifest/SpagoYaml.purs index 973af0a99..52174063c 100644 --- a/app/test/App/Manifest/SpagoYaml.purs +++ b/app/test/App/Manifest/SpagoYaml.purs @@ -19,6 +19,6 @@ spec = do config <- SpagoYaml.readSpagoYaml (Path.concat [ fixturesPath, path ]) >>= case _ of Left err -> Aff.throwError $ Aff.error err Right config -> pure config - case SpagoYaml.spagoYamlToManifest config of + case SpagoYaml.spagoYamlToManifest "v1.0.0" config of Left err -> Assert.fail $ path <> " failed: " <> err Right _ -> pure unit diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 008d86cca..a858dc675 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -33,8 +33,6 @@ import Registry.App.Effect.Archive (ARCHIVE) import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB, GITHUB_CACHE, GitHub(..)) @@ -94,7 +92,6 @@ type TEST_EFFECTS = + GITHUB_CACHE + LEGACY_CACHE + COMPILER_CACHE - + COMMENT + LOG + EXCEPT String + AFF @@ -136,7 +133,6 @@ runTestEffects env operation = Aff.attempt do # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects - # Comment.interpret Comment.handleLog # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..cdb137ad4 --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,76 @@ +-- 
migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Publishing jobs +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Unpublishing jobs +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package transfer jobs +CREATE TABLE transfer_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package set jobs +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + rawPayload TEXT NOT NULL, + signature TEXT, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); + +-- migrate:down + +DROP TABLE job_info; +DROP TABLE publish_jobs; +DROP TABLE unpublish_jobs; +DROP TABLE transfer_jobs; +DROP TABLE matrix_jobs; +DROP TABLE package_set_jobs; +DROP TABLE logs; diff --git a/db/schema.sql b/db/schema.sql index 116de1dda..65319293a 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,21 +1,57 @@ CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key); -CREATE TABLE jobs ( - jobId text primary key not null, - jobType text not null, - packageName text not null, - ref text not null, - createdAt text not null, - finishedAt text, - success integer not null default 0 +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE transfer_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- 
queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/flake.nix b/flake.nix index edca743d8..bbec41157 100644 --- a/flake.nix +++ b/flake.nix @@ -198,7 +198,8 @@ # Integration test - exercises the server API integration = import ./nix/test/integration.nix { - inherit pkgs spagoSrc testEnv; + inherit pkgs spagoSrc; + testSupport = testEnv; }; # VM smoke test - verifies deployment without full API testing @@ -211,13 +212,16 @@ devShells.default = pkgs.mkShell { name = "registry-dev"; - # Development defaults from .env.example SERVER_PORT = envDefaults.SERVER_PORT; DATABASE_URL = envDefaults.DATABASE_URL; # Dhall environment variables needed for manifest typechecking inherit DHALL_TYPES DHALL_PRELUDE GIT_TERMINAL_PROMPT; + # NOTE: Test-specific env vars (REGISTRY_API_URL, GITHUB_API_URL, PACCHETTIBOTTI_*) + # are NOT set here to avoid conflicting with .env files used by production scripts + # like legacy-importer. Use `nix run .#test-env` to run E2E tests with mocked services. 
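+      # The server loads secrets itself at startup (readServerEnvVars in
+      # app/src/App/Server/Env.purs reads `.temp/local-server/.env.local` and
+      # then `.env`), so this shell only exports non-secret defaults.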
+ packages = with pkgs; registry-runtime-deps @@ -229,11 +233,19 @@ nodejs jq dbmate + sqlite purs spago purs-tidy-unstable purs-backend-es-unstable process-compose + + # E2E test runner script - uses same fixed test environment as test-env + (writeShellScriptBin "spago-test-e2e" '' + set -euo pipefail + ${testEnv.envToExports testEnv.testEnv} + exec spago run -p registry-app-e2e + '') ]; }; } diff --git a/lib/fixtures/manifests/aff-5.1.2.json b/lib/fixtures/manifests/aff-5.1.2.json index 22684f05c..77bb331dd 100644 --- a/lib/fixtures/manifests/aff-5.1.2.json +++ b/lib/fixtures/manifests/aff-5.1.2.json @@ -6,6 +6,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-aff" }, + "ref": "v5.1.2", "dependencies": { "datetime": ">=4.0.0 <5.0.0", "effect": ">=2.0.0 <3.0.0", diff --git a/lib/fixtures/manifests/mysql-4.1.1.json b/lib/fixtures/manifests/mysql-4.1.1.json index 6f9703b61..e0e8c70fe 100644 --- a/lib/fixtures/manifests/mysql-4.1.1.json +++ b/lib/fixtures/manifests/mysql-4.1.1.json @@ -6,6 +6,7 @@ "githubOwner": "oreshinya", "githubRepo": "purescript-mysql" }, + "ref": "v4.1.1", "dependencies": { "aff": ">=5.0.2 <6.0.0", "js-date": ">=6.0.0 <7.0.0", diff --git a/lib/fixtures/manifests/prelude-4.1.1.json b/lib/fixtures/manifests/prelude-4.1.1.json index 3dd47411c..56ac6db20 100644 --- a/lib/fixtures/manifests/prelude-4.1.1.json +++ b/lib/fixtures/manifests/prelude-4.1.1.json @@ -7,6 +7,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-prelude" }, + "ref": "v4.1.1", "owners": [ { "keytype": "ed-25519", diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..4c399342e 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -1,7 +1,33 @@ -module Registry.API.V1 where +module Registry.API.V1 + ( JobCreatedResponse + , JobId(..) + , JobInfo + , JobType(..) + , Job(..) + , LogLevel(..) + , LogLine + , MatrixJobData + , PackageSetJobData + , PublishJobData + , Route(..) 
+ , TransferJobData + , UnpublishJobData + , jobInfo + , jobCodec + , jobCreatedResponseCodec + , logLevelFromPriority + , logLevelToPriority + , printJobType + , printLogLevel + , routes + ) where import Prelude hiding ((/)) +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Sum as CJ.Sum @@ -10,23 +36,33 @@ import Data.Either (Either(..), hush) import Data.Formatter.DateTime as DateTime import Data.Generic.Rep (class Generic) import Data.Lens.Iso.Newtype (_Newtype) +import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import Data.Symbol (class IsSymbol) +import Data.Symbol as Symbol +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData) +import Registry.Operation as Operation import Registry.PackageName (PackageName) import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG import Routing.Duplex.Generic.Syntax ((/), (?)) +import Type.Proxy (Proxy(..)) data Route = Publish | Unpublish | Transfer - | Jobs + | PackageSets + | Jobs { since :: Maybe DateTime, include_completed :: Maybe Boolean } | Job JobId { level :: Maybe LogLevel, since :: Maybe DateTime } | Status @@ -37,7 +73,11 @@ routes = Routing.root $ Routing.prefix "api" $ Routing.prefix "v1" $ RoutingG.su { "Publish": "publish" / RoutingG.noArgs , "Unpublish": "unpublish" / RoutingG.noArgs , "Transfer": "transfer" / RoutingG.noArgs - , "Jobs": "jobs" / RoutingG.noArgs + , "PackageSets": "package-sets" / RoutingG.noArgs + , "Jobs": "jobs" ? + { since: Routing.optional <<< timestampP <<< Routing.string + , include_completed: Routing.optional <<< Routing.boolean + } , "Job": "jobs" / ( jobIdS ? 
{ level: Routing.optional <<< logLevelP <<< Routing.string @@ -64,55 +104,193 @@ type JobCreatedResponse = { jobId :: JobId } jobCreatedResponseCodec :: CJ.Codec JobCreatedResponse jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { jobId: jobIdCodec } -type Job = +data Job + = PublishJob PublishJobData + | UnpublishJob UnpublishJobData + | TransferJob TransferJobData + | MatrixJob MatrixJobData + | PackageSetJob PackageSetJobData + +type JobInfo r = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , ref :: String , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean , logs :: Array LogLine + | r } +type PublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + , jobType :: Proxy "publish" + ) + +type UnpublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData + , jobType :: Proxy "unpublish" + ) + +type TransferJobData = JobInfo + ( packageName :: PackageName + , payload :: AuthenticatedData + , jobType :: Proxy "transfer" + ) + +type MatrixJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , jobType :: Proxy "matrix" + ) + +type PackageSetJobData = JobInfo + ( payload :: PackageSetOperation + , jobType :: Proxy "packageset" + ) + jobCodec :: CJ.Codec Job -jobCodec = CJ.named "Job" $ CJ.Record.object +jobCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError Job + decode json = + do + map PublishJob (Codec.decode publishJobDataCodec json) + <|> map UnpublishJob (Codec.decode unpublishJobDataCodec json) + <|> map TransferJob (Codec.decode transferJobDataCodec json) + <|> map MatrixJob (Codec.decode matrixJobDataCodec json) + <|> map PackageSetJob (Codec.decode packageSetJobDataCodec json) + + encode :: Job -> JSON + encode = case _ of + PublishJob j -> CJ.encode publishJobDataCodec j + UnpublishJob j -> CJ.encode unpublishJobDataCodec j + TransferJob j -> CJ.encode transferJobDataCodec j + MatrixJob j -> CJ.encode matrixJobDataCodec j + PackageSetJob j -> CJ.encode packageSetJobDataCodec j + +publishJobDataCodec :: CJ.Codec PublishJobData +publishJobDataCodec = CJ.named "PublishJob" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec + , jobType: symbolCodec (Proxy :: _ "publish") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec , packageName: PackageName.codec - , ref: CJ.string + , packageVersion: Version.codec + , payload: Operation.publishCodec + } + +symbolCodec :: forall sym. 
IsSymbol sym => Proxy sym -> CJ.Codec (Proxy sym) +symbolCodec _ = Codec.codec' decode encode + where + decode json = except do + symbol <- CJ.decode CJ.string json + let expected = Symbol.reflectSymbol (Proxy :: _ sym) + case symbol == expected of + false -> Left $ CJ.DecodeError.basic + $ "Tried to decode symbol '" <> symbol <> "' as '" <> expected <> "'" + true -> Right (Proxy :: _ sym) + encode = CJ.encode CJ.string <<< Symbol.reflectSymbol + +unpublishJobDataCodec :: CJ.Codec UnpublishJobData +unpublishJobDataCodec = CJ.named "UnpublishJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "unpublish") , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.authenticatedCodec } +transferJobDataCodec :: CJ.Codec TransferJobData +transferJobDataCodec = CJ.named "TransferJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "transfer") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , payload: Operation.authenticatedCodec + } + +matrixJobDataCodec :: CJ.Codec MatrixJobData +matrixJobDataCodec = CJ.named "MatrixJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "matrix") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , compilerVersion: Version.codec + , payload: Internal.Codec.packageMap Version.codec + } + +packageSetJobDataCodec :: CJ.Codec PackageSetJobData +packageSetJobDataCodec = CJ.named "PackageSetJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "packageset") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , payload: Operation.packageSetOperationCodec + } + +jobInfo :: Job -> JobInfo () +jobInfo = case _ of + PublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + UnpublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + TransferJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + MatrixJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + PackageSetJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + newtype JobId = JobId String derive instance Newtype JobId _ +derive newtype instance Eq JobId jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob +data JobType + = PublishJobType + | 
UnpublishJobType + | TransferJobType + | MatrixJobType + | PackageSetJobType derive instance Eq JobType -parseJobType :: String -> Either String JobType -parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - printJobType :: JobType -> String printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum printJobType (hush <<< parseJobType) + PublishJobType -> "publish" + UnpublishJobType -> "unpublish" + TransferJobType -> "transfer" + MatrixJobType -> "matrix" + PackageSetJobType -> "packageset" type LogLine = { level :: LogLevel @@ -129,7 +307,7 @@ logLineCodec = CJ.named "LogLine" $ CJ.Record.object , timestamp: Internal.Codec.iso8601DateTime } -data LogLevel = Debug | Info | Warn | Error +data LogLevel = Debug | Info | Warn | Notice | Error derive instance Eq LogLevel derive instance Ord LogLevel @@ -139,6 +317,7 @@ printLogLevel = case _ of Debug -> "DEBUG" Info -> "INFO" Warn -> "WARN" + Notice -> "NOTICE" Error -> "ERROR" -- These numbers are not consecutive so that we can insert new log levels if need be @@ -147,6 +326,7 @@ logLevelToPriority = case _ of Debug -> 0 Info -> 10 Warn -> 20 + Notice -> 25 Error -> 30 logLevelFromPriority :: Int -> Either String LogLevel @@ -154,6 +334,7 @@ logLevelFromPriority = case _ of 0 -> Right Debug 10 -> Right Info 20 -> Right Warn + 25 -> Right Notice 30 -> Right Error other -> Left $ "Invalid log level priority: " <> show other @@ -162,5 +343,6 @@ parseLogLevel = case _ of "DEBUG" -> Right Debug "INFO" -> Right Info "WARN" -> Right Warn + "NOTICE" -> Right Notice "ERROR" -> Right Error other -> Left $ "Invalid log level: " <> other diff --git a/lib/src/Manifest.purs b/lib/src/Manifest.purs index d660b459b..49bb62f2c 100644 --- a/lib/src/Manifest.purs +++ b/lib/src/Manifest.purs @@ -48,6 +48,7 @@ newtype Manifest = Manifest , version :: Version , license :: License , location :: Location + , ref :: String , owners :: Maybe (NonEmptyArray Owner) , description :: Maybe String , includeFiles :: Maybe (NonEmptyArray NonEmptyString) @@ -77,6 +78,7 @@ codec = Profunctor.wrapIso Manifest $ CJ.named "Manifest" $ CJ.object $ CJ.recordProp @"license" License.codec $ CJ.recordPropOptional @"description" (Internal.Codec.limitedString 300) $ CJ.recordProp @"location" Location.codec + $ CJ.recordProp @"ref" CJ.string $ CJ.recordPropOptional @"owners" (CJ.Common.nonEmptyArray Owner.codec) $ CJ.recordPropOptional @"includeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString) $ CJ.recordPropOptional @"excludeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString) diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index 4837b49ed..eb3b08480 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -7,11 +7,13 @@ -- | https://github.com/purescript/registry-index module Registry.ManifestIndex ( ManifestIndex + , IncludeRanges(..) + , delete + , dependants , empty , fromSet , insert , insertIntoEntryFile - , delete , lookup , maximalIndex , packageEntryDirectory @@ -20,10 +22,10 @@ module Registry.ManifestIndex , printEntry , readEntryFile , removeFromEntryFile + , toArray , toMap - , toSortedArray , topologicalSort - , IncludeRanges(..) 
+ , toSortedArray , writeEntryFile ) where @@ -87,13 +89,18 @@ empty = ManifestIndex Map.empty toMap :: ManifestIndex -> Map PackageName (Map Version Manifest) toMap (ManifestIndex index) = index --- | Produce an array of manifests topologically sorted by dependencies. -toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest -toSortedArray includeRanges (ManifestIndex index) = topologicalSort includeRanges $ Set.fromFoldable do +-- | Produce an array of all the manifests +toArray :: ManifestIndex -> Array Manifest +toArray (ManifestIndex index) = do Tuple _ versions <- Map.toUnfoldableUnordered index Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] +-- | Produce an array of all the manifests, topologically sorted by dependencies. +toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest +toSortedArray includeRanges index = + topologicalSort includeRanges $ Set.fromFoldable $ toArray index + -- | Look up a package version's manifest in the manifest index. lookup :: PackageName -> Version -> ManifestIndex -> Maybe Manifest lookup name version (ManifestIndex index) = @@ -199,6 +206,13 @@ topologicalSort includeRanges manifests = IgnoreRanges -> versions [ Tuple dependency included ] +dependants :: ManifestIndex -> PackageName -> Version -> Array Manifest +dependants idx packageName version = idx + # toSortedArray ConsiderRanges + # Array.filter \(Manifest { dependencies }) -> case Map.lookup packageName dependencies of + Nothing -> false + Just range -> Range.includes range version + -- | Calculate the directory containing this package in the registry index, -- | using the following format: -- | diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index c54bed31e..3235661de 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -63,17 +63,11 @@ codec = Profunctor.wrapIso Metadata $ CJ.named "Metadata" $ CJ.object $ CJ.record -- | Metadata about a published package version. --- | --- | NOTE: The `ref` field is UNSPECIFIED and WILL BE REMOVED in the future. Do --- | not rely on its presence! type PublishedMetadata = { bytes :: Number , compilers :: NonEmptyArray Version , hash :: Sha256 , publishedTime :: DateTime - - -- UNSPECIFIED: Will be removed in the future. - , ref :: String } publishedMetadataCodec :: CJ.Codec PublishedMetadata @@ -82,7 +76,6 @@ publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object , compilers: CJ.Common.nonEmptyArray Version.codec , hash: Sha256.codec , publishedTime: Internal.Codec.iso8601DateTime - , ref: CJ.string } -- | Metadata about an unpublished package version. diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..7327001e6 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,16 +14,21 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) , PackageOperation(..) , PackageSetOperation(..) 
, PackageSetUpdateData + , PackageSetUpdateRequest , PublishData , TransferData , UnpublishData , authenticatedCodec + , packageName + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec + , packageSetUpdateRequestCodec , publishCodec , transferCodec , unpublishCodec @@ -58,6 +63,25 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -74,6 +98,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -84,6 +109,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } @@ -178,6 +204,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. +packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. -- | -- | For full details, see the registry spec: @@ -197,3 +230,33 @@ packageSetUpdateCodec = CJ.named "PackageSetUpdate" $ CJ.Record.object -- `Compat` version of the `maybe` codec. , packages: Internal.Codec.packageMap (CJ.Common.nullable Version.codec) } + +-- | A package set update request that can be optionally authenticated. +-- | +-- | Non-trustees can submit add/upgrade operations without authentication. +-- | Trustees must sign requests for restricted operations (compiler changes, +-- | package removals) with pacchettibotti's key. +type PackageSetUpdateRequest = + { payload :: PackageSetOperation + , rawPayload :: String + , signature :: Maybe Signature + } + +-- | A codec for encoding and decoding a `PackageSetUpdateRequest` as JSON. 
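+-- |
+-- | On the wire, the payload is kept as a raw JSON string (presumably so a
+-- | signature can be verified against the exact bytes that were signed), with
+-- | the optional signature alongside it. An illustrative request body, with
+-- | made-up values:
+-- |
+-- |   { "payload": "{ \"packages\": { \"console\": \"6.1.0\" } }"
+-- |   , "signature": "c2lnbmF0dXJlLWJ5dGVz"
+-- |   }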
+packageSetUpdateRequestCodec :: CJ.Codec PackageSetUpdateRequest +packageSetUpdateRequestCodec = CJ.named "PackageSetUpdateRequest" $ Codec.codec' decode encode + where + decode json = do + rep <- Codec.decode repCodec json + payloadJson <- except $ lmap JSON.DecodeError.basic $ JSON.parse rep.payload + operation <- Codec.decode packageSetOperationCodec payloadJson + pure { payload: operation, rawPayload: rep.payload, signature: map Signature rep.signature } + + encode { rawPayload, signature } = + CJ.encode repCodec { payload: rawPayload, signature: map (\(Signature s) -> s) signature } + + repCodec :: CJ.Codec { payload :: String, signature :: Maybe String } + repCodec = CJ.named "PackageSetUpdateRequestRep" $ CJ.Record.object + { payload: CJ.string + , signature: CJ.Record.optional CJ.string + } diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index 929894645..d3dcec10c 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -19,6 +19,7 @@ import Data.List.NonEmpty as NEL import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map import Data.Maybe (Maybe(..), fromMaybe, maybe, maybe') +import Data.Maybe as Maybe import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) @@ -81,11 +82,11 @@ buildCompilerIndex pursCompilers index metadata = CompilerIndex do -- | Solve the given dependencies using a dependency index that includes compiler -- | versions, such that the solution prunes results that would fall outside -- | a compiler range accepted by all dependencies. -solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple Version (Map PackageName Version)) solveWithCompiler pursRange (CompilerIndex index) required = do let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } - let pursVersion = Map.lookup purs results + let pursVersion = Maybe.fromMaybe' (\_ -> Partial.unsafeCrashWith "Produced a compiler-derived build plan with no compiler!") $ Map.lookup purs results pure $ Tuple pursVersion $ Map.delete purs results -- | Data from the registry index, listing dependencies for each version of diff --git a/lib/test/Registry/ManifestIndex.purs b/lib/test/Registry/ManifestIndex.purs index 18e0863ef..1fb7e13a6 100644 --- a/lib/test/Registry/ManifestIndex.purs +++ b/lib/test/Registry/ManifestIndex.purs @@ -151,9 +151,9 @@ spec = do contextEntry :: String contextEntry = - """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} -{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} -{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} + """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.1","dependencies":{}} +{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.2","dependencies":{}} 
+{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.3","dependencies":{}} """ testIndex @@ -242,6 +242,7 @@ manifestCodec' = Profunctor.dimap to from $ CJ.named "ManifestRep" $ CJ.Record.o { url: "https://github.com/purescript/purescript-" <> PackageName.print name <> ".git" , subdir: Nothing } + , ref: "v" <> Version.print version , description: Nothing , owners: Nothing , includeFiles: Nothing diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index 02e12c053..8daffc02c 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -29,8 +29,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", - "publishedTime": "2021-03-27T10:03:46.000Z", - "ref": "v0.1.0" + "publishedTime": "2021-03-27T10:03:46.000Z" }, "0.2.1": { "bytes": 3365, @@ -38,8 +37,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", - "publishedTime": "2022-05-15T10:51:57.000Z", - "ref": "v0.2.1" + "publishedTime": "2022-05-15T10:51:57.000Z" }, "1.0.0": { "bytes": 5155, @@ -47,8 +45,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", - "publishedTime": "2022-11-03T17:30:28.000Z", - "ref": "v1.0.0" + "publishedTime": "2022-11-03T17:30:28.000Z" }, "1.0.1": { "bytes": 5635, @@ -57,8 +54,7 @@ recordStudio = "0.13.1" ], "hash": "sha256-Xm9pwDBHW5zYUEzxfVSgjglIcwRI1gcCOmcpyQ/tqeY=", - "publishedTime": "2022-11-04T12:21:09.000Z", - "ref": "v1.0.1" + "publishedTime": "2022-11-04T12:21:09.000Z" } }, "unpublished": { diff --git a/lib/test/Registry/Operation.purs b/lib/test/Registry/Operation.purs index 2ccb4075a..1400e70ee 100644 --- a/lib/test/Registry/Operation.purs +++ b/lib/test/Registry/Operation.purs @@ -54,7 +54,8 @@ minimalPublish = { "compiler": "0.15.6", "name": "my-package", - "ref": "v1.0.0" + "ref": "v1.0.0", + "version": "1.0.0" }""" fullPublish :: String @@ -67,7 +68,8 @@ fullPublish = "subdir": "core" }, "name": "my-package", - "ref": "c23snabhsrib39" + "ref": "c23snabhsrib39", + "version": "1.0.0" }""" unpublish :: String diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs index cf474f103..955b08164 100644 --- a/lib/test/Registry/Operation/Validation.purs +++ b/lib/test/Registry/Operation/Validation.purs @@ -15,7 +15,8 @@ import Registry.Manifest (Manifest(..)) import Registry.Metadata (Metadata(..)) import Registry.Operation.Validation (UnpublishError(..), forbiddenModules, getUnresolvedDependencies, validatePursModule, validateUnpublish) import Registry.Test.Assert as Assert -import Registry.Test.Utils (defaultHash, defaultLocation, fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion) +import Registry.Test.Fixtures (defaultHash, defaultLocation) +import Registry.Test.Utils (fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion) import Test.Spec (Spec) import Test.Spec as Spec @@ -66,7 +67,7 @@ spec = do inRange = unsafeDateTime "2022-12-11T12:00:00.000Z" compilers = NonEmptyArray.singleton (unsafeVersion "0.13.0") - publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers, ref: "" } + publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers } metadata = Metadata { location: defaultLocation diff --git a/nix/overlay.nix b/nix/overlay.nix index 24b36afa1..8ec743a39 100644 --- a/nix/overlay.nix +++ 
b/nix/overlay.nix @@ -185,8 +185,9 @@ in ] ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; - # To update: run `nix build .#server` and copy the hash from the error - npmDepsHash = "sha256-iWHvXmTcWr4A/VerriuewnH0qNIYBtYkQnqv1VO8Jhs="; + # To update: change to prev.lib.fakeHash, run `nix build .#server`, and copy the + # hash from the error + npmDepsHash = "sha256-AQcHoiM7CcBGFR0ZjOwunuq5oWhpWkTI3QGqeE3ASpI="; installPhase = '' mkdir -p $out @@ -239,7 +240,7 @@ in registry-server = prev.callPackage (buildRegistryPackage { name = "registry-server"; - module = "Registry.App.Server"; + module = "Registry.App.Main"; description = "PureScript Registry API server"; src = ../app; spagoLock = app; diff --git a/nix/test/config.nix b/nix/test/config.nix index 66813fe5b..07917444f 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -19,35 +19,51 @@ let ports = { server = serverPort; github = serverPort + 1; - bucket = serverPort + 2; - s3 = serverPort + 3; - pursuit = serverPort + 4; - healthchecks = serverPort + 5; + # Single storage WireMock instance for bucket + s3 + pursuit (merged for stateful scenarios) + storage = serverPort + 2; + healthchecks = serverPort + 3; }; - # Default state directory for tests - defaultStateDir = "/var/lib/registry-server"; + # Fixed state directory for tests - not configurable to avoid mismatch between + # test-env and spago-test-e2e shells. The test-env script cleans this up on start. + stateDir = "/tmp/registry-test-env"; # Mock service URLs for test environment + # All storage-related APIs (s3, bucket, pursuit) now share a single WireMock instance mockUrls = { + registry = "http://localhost:${toString ports.server}/api"; github = "http://localhost:${toString ports.github}"; - s3 = "http://localhost:${toString ports.s3}"; - bucket = "http://localhost:${toString ports.bucket}"; - pursuit = "http://localhost:${toString ports.pursuit}"; + storage = "http://localhost:${toString ports.storage}"; healthchecks = "http://localhost:${toString ports.healthchecks}"; }; + # Valid ED25519 test keypair for pacchettibotti (used for signing authenticated operations). + # These are test-only keys, not used in production. + testKeys = { + # ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHXE9ia5mQG5dPyS6pirU9PSWFP8hPglwChJERBpMoki pacchettibotti@purescript.org + public = "c3NoLWVkMjU1MTkgQUFBQUMzTnphQzFsWkRJMU5URTVBQUFBSUhYRTlpYTVtUUc1ZFB5UzZwaXJVOVBTV0ZQOGhQZ2x3Q2hKRVJCcE1va2kgcGFjY2hldHRpYm90dGlAcHVyZXNjcmlwdC5vcmcK"; + # OpenSSH format private key + private = "LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNCMXhQWW11WmtCdVhUOGt1cVlxMVBUMGxoVC9JVDRKY0FvU1JFUWFUS0pJZ0FBQUtBMVFMT3NOVUN6CnJBQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQjF4UFltdVprQnVYVDhrdXFZcTFQVDBsaFQvSVQ0SmNBb1NSRVFhVEtKSWcKQUFBRUJ1dUErV2NqODlTcjR2RUZnU043ZVF5SGFCWlYvc0F2YVhvVGRKa2lwanlYWEU5aWE1bVFHNWRQeVM2cGlyVTlQUwpXRlA4aFBnbHdDaEpFUkJwTW9raUFBQUFIWEJoWTJOb1pYUjBhV0p2ZEhScFFIQjFjbVZ6WTNKcGNIUXViM0puCi0tLS0tRU5EIE9QRU5TU0ggUFJJVkFURSBLRVktLS0tLQo="; + }; + # Complete test environment - starts with .env.example defaults which include - # mock secrets, then overrides external services with mock URLs. The DATABASE_URL - # and REPO_FIXTURES_DIR vars are derived from STATE_DIR at runtime so those are - # implemented in the script directly. + # mock secrets, then overrides external services with mock URLs. + # All storage-related APIs share a single WireMock instance for stateful scenarios. 
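+  # Note: `//` is right-biased, so each override below wins over the matching
+  # default from envDefaults. A minimal illustration of the operator:
+  #   { a = 1; b = 1; } // { b = 2; }  ==  { a = 1; b = 2; }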
testEnv = envDefaults // { + # State directory and derived paths + STATE_DIR = stateDir; + REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures"; + DATABASE_URL = "sqlite:${stateDir}/db/registry.sqlite3"; # Mock service URLs (override production endpoints) + REGISTRY_API_URL = mockUrls.registry; GITHUB_API_URL = mockUrls.github; - S3_API_URL = mockUrls.s3; - S3_BUCKET_URL = mockUrls.bucket; - PURSUIT_API_URL = mockUrls.pursuit; + # All storage-related APIs share a single base URL for stateful scenarios + S3_API_URL = mockUrls.storage; + S3_BUCKET_URL = mockUrls.storage; + PURSUIT_API_URL = mockUrls.storage; HEALTHCHECKS_URL = mockUrls.healthchecks; + PACCHETTIBOTTI_ED25519_PUB = testKeys.public; + PACCHETTIBOTTI_ED25519 = testKeys.private; }; envToExports = @@ -61,17 +77,22 @@ let exec ${pkgs.nodejs}/bin/node ${./git-mock.mjs} "$@" ''; - # Apply git mock overlay to get registry packages with mocked git. + # Test overlay: mocks git and limits compilers for faster tests. # Using pkgs.extend avoids a second nixpkgs instantiation (more efficient). - # This substitutes gitMock for git in registry-runtime-deps, which causes - # registry-server to be rebuilt with the mock baked into its PATH wrapper. - gitMockOverlay = _: prev: { + testOverlay = _: prev: { + # Substitute gitMock for git in registry-runtime-deps registry-runtime-deps = map ( pkg: if pkg == prev.git then gitMock else pkg ) prev.registry-runtime-deps; + + # Limit to 2 compilers for faster matrix job tests. + # These versions match the compilers referenced in app/fixtures. + registry-supported-compilers = lib.filterAttrs ( + name: _: name == "purs-0_15_10" || name == "purs-0_15_11" + ) prev.registry-supported-compilers; }; - registryPkgs = pkgs.extend gitMockOverlay; + registryPkgs = pkgs.extend testOverlay; # Helper to create GitHub contents API response, as it returns base64-encoded content base64Response = @@ -127,6 +148,30 @@ let }; }; + # Console package helpers (console@6.1.0) + consoleBase64Response = + fileName: + base64Response { + url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0"; + inherit fileName; + filePath = rootPath + "/app/fixtures/github-packages/console-6.1.0/${fileName}"; + }; + + console404Response = fileName: { + request = { + method = "GET"; + url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0"; + }; + response = { + status = 404; + headers."Content-Type" = "application/json"; + jsonBody = { + message = "Not Found"; + documentation_url = "https://docs.github.com/rest/repos/contents#get-repository-content"; + }; + }; + }; + # GitHub API wiremock mappings githubMappings = [ (effectBase64Response "bower.json") @@ -136,6 +181,13 @@ let (effect404Response "spago.dhall") (effect404Response "purs.json") (effect404Response "package.json") + # Console package (console@6.1.0) + (consoleBase64Response "bower.json") + (consoleBase64Response "LICENSE") + (console404Response "spago.yaml") + (console404Response "spago.dhall") + (console404Response "purs.json") + (console404Response "package.json") { request = { method = "GET"; @@ -153,85 +205,503 @@ let }; }; } - ]; - - # S3 API wiremock mappings (serves package tarballs) - s3Mappings = [ + # Accept issue comment creation (used by GitHubIssue workflow) { request = { - method = "GET"; - url = "/prelude/6.0.1.tar.gz"; + method = "POST"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+/comments"; }; response = { - status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "prelude-6.0.1.tar.gz"; + 
status = 201; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + body = "ok"; + }; }; } + # Accept issue closing (used by GitHubIssue workflow) { request = { - method = "GET"; - url = "/type-equality/4.0.1.tar.gz"; + method = "PATCH"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+"; }; response = { status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "type-equality-4.0.1.tar.gz"; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + state = "closed"; + }; }; } - ]; - - s3Files = [ - { - name = "prelude-6.0.1.tar.gz"; - path = rootPath + "/app/fixtures/registry-storage/prelude-6.0.1.tar.gz"; - } - { - name = "type-equality-4.0.1.tar.gz"; - path = rootPath + "/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz"; - } - ]; - - # S3 Bucket API wiremock mappings (handles upload/list operations) - # The AWS SDK uses virtual-hosted style URLs by default, where the bucket name - # is in the hostname (purescript-registry.localhost:9002) and the path contains - # only the key. For example: GET /?prefix=effect/ instead of GET /purescript-registry?prefix=effect/ - bucketMappings = [ - # List objects - virtual-hosted style (bucket in hostname, path is just /?prefix=...) + # GitHub Teams API for trustee verification (used by GitHubIssue workflow) { request = { method = "GET"; - urlPattern = "/\\?.*prefix=.*"; + urlPattern = "/orgs/purescript/teams/packaging/members.*"; }; response = { status = 200; - headers."Content-Type" = "application/xml"; - body = ''prelude/6.0.1.tar.gz16298"abc123"type-equality/4.0.1.tar.gz2184"def456"''; - }; - } - # Upload effect@4.0.0 - virtual-hosted style (path is /effect/4.0.0.tar.gz) - { - request = { - method = "PUT"; - urlPattern = "/effect/4\\.0\\.0\\.tar\\.gz.*"; - }; - response = { - status = 200; - headers."ETag" = ''"abc123"''; - headers."Content-Type" = "application/xml"; - body = ""; - }; - } - # Fail upload for prelude (to test error handling) - { - request = { - method = "PUT"; - urlPattern = "/prelude/6\\.0\\.1\\.tar\\.gz.*"; + headers."Content-Type" = "application/json"; + # Return packaging-team-user as a packaging team member for trustee re-signing tests + jsonBody = [ + { + login = "packaging-team-user"; + id = 1; + } + ]; }; - response.status = 500; } ]; + # Fixture directory for storage (tarballs) + storageFixturesDir = rootPath + "/app/fixtures/registry-storage"; + + # Parse tarball filename into package name and version + # e.g. "effect-4.0.0.tar.gz" -> { name = "effect"; version = "4.0.0"; fileName = "effect-4.0.0.tar.gz"; } + # e.g. "type-equality-4.0.1.tar.gz" -> { name = "type-equality"; version = "4.0.1"; ... 
} + parseTarball = + fileName: + let + base = lib.removeSuffix ".tar.gz" fileName; + parts = lib.splitString "-" base; + # Version is the last part; name is everything before + version = lib.last parts; + name = lib.concatStringsSep "-" (lib.init parts); + in + { + inherit name version fileName; + }; + + # List all .tar.gz files in storage fixtures + storageTarballs = map parseTarball ( + builtins.filter (f: lib.hasSuffix ".tar.gz" f) ( + builtins.attrNames (builtins.readDir storageFixturesDir) + ) + ); + + # Metadata fixtures directory (to determine which packages are "published") + metadataFixturesDir = rootPath + "/app/fixtures/registry/metadata"; + metadataFiles = builtins.attrNames (builtins.readDir metadataFixturesDir); + publishedPackageNames = map (f: lib.removeSuffix ".json" f) metadataFiles; + + # ============================================================================ + # UNIFIED STORAGE MAPPINGS WITH WIREMOCK SCENARIOS + # ============================================================================ + # + # All storage-related APIs (S3 downloads, bucket uploads, Pursuit) are now served + # by a single WireMock instance with stateful scenarios. This enables proper + # read-after-write semantics - when a test publishes a package, subsequent + # downloads will succeed. + # + # Scenario design: + # - One scenario per package-version (e.g., "effect-4.0.0") + # - WireMock scenarios always start at state "Started" + # - Published packages (has metadata): "Started" means Present (tarball available) + # - After DELETE, transitions to "Deleted" state (404 on GET) + # - Unpublished packages (no metadata): "Started" means Absent (tarball 404) + # - After PUT upload, transitions to "Present" state + # - After DELETE, transitions to "Deleted" state (404 on GET) + # + # State machine: + # Published: Started(Present) --DELETE--> Deleted(404) + # Unpublished: Started(404) --PUT--> Present(200) --DELETE--> Deleted(404) + # + # Reset between tests via POST /__admin/scenarios/reset + # ============================================================================ + + # Generate S3 GET mappings with scenario support + s3Mappings = lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + tarPath = "/${pkg.name}/${pkg.version}.tar.gz"; + in + if isPublished then + # Published package: tarball available in Started state, 404 in Deleted state + [ + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = pkg.fileName; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: 404 in Started, 200 in Present, 404 in Deleted + [ + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = pkg.fileName; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + 
requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs; + + # Generate s3Files list from fixtures (tarballs for bodyFileName references) + s3Files = map (pkg: { + name = pkg.fileName; + path = storageFixturesDir + "/${pkg.fileName}"; + }) storageTarballs; + + # Generate bucket PUT/DELETE/listObjects mappings with scenario support + # The AWS SDK uses virtual-hosted style URLs by default, where the bucket name + # is in the hostname (purescript-registry.localhost:9002) and the path contains + # only the key. + bucketMappings = + # Generate per-package listObjects mappings with scenario support + (lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + escapedName = lib.replaceStrings [ "-" ] [ "\\-" ] pkg.name; + listUrlPattern = "/\\?.*prefix=${escapedName}.*"; + presentContents = ''${pkg.name}/${pkg.version}.tar.gz1000"abc123"''; + in + if isPublished then + # Published package: listObjects returns contents in Started, empty in Deleted + [ + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = "${presentContents}"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: listObjects returns empty in Started, contents in Present, empty in Deleted + [ + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = "${presentContents}"; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs) + ++ ( + # Generate PUT/DELETE mappings for all packages with scenario support + lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + escapedVersion = lib.replaceStrings [ "." ] [ "\\." 
] pkg.version; + urlPattern = "/${pkg.name}/${escapedVersion}\\.tar\\.gz.*"; + in + if isPublished then + # Published package: PUT fails (already exists), DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already published"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in Started state (package exists) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: PUT succeeds and transitions to Present, DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 200; + headers."ETag" = ''"abc123"''; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Present"; + } + # PUT in Present state fails (already uploaded) + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already uploaded"; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + # DELETE in Started state fails (doesn't exist yet) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in Present state (after publish) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs + ); + + # Pursuit API mappings with scenario support + pursuitMappings = + (lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + versionsUrl = "/packages/purescript-${pkg.name}/available-versions"; + publishedVersionsBody = ''[["${pkg.version}","https://pursuit.purescript.org/packages/purescript-${pkg.name}/${pkg.version}"]]''; + in + if isPublished then + # Published package: versions available in Started, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: empty in Started, has version in Present, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = 
"Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs) + ++ [ + # Accept documentation uploads (POST /packages) + { + request = { + method = "POST"; + url = "/packages"; + }; + response.status = 201; + } + ]; + # Healthchecks API wiremock mappings (simple ping endpoint) healthchecksMappings = [ { @@ -246,46 +716,9 @@ let } ]; - # Pursuit API wiremock mappings - pursuitMappings = [ - { - request = { - method = "GET"; - url = "/packages/purescript-prelude/available-versions"; - }; - response = { - status = 200; - body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-effect/available-versions"; - }; - response = { - status = 200; - body = ''[]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-type-equality/available-versions"; - }; - response = { - status = 200; - body = ''[["4.0.1","https://pursuit.purescript.org/packages/purescript-type-equality/4.0.1"]]''; - }; - } - { - request = { - method = "POST"; - url = "/packages"; - }; - response.status = 201; - } - ]; + # Combined storage mappings (S3 + bucket + Pursuit) + storageMappings = s3Mappings ++ bucketMappings ++ pursuitMappings; + storageFiles = s3Files; # Wiremock root directory builder mkWiremockRoot = @@ -304,7 +737,9 @@ let ${lib.concatMapStrings (f: "cp ${f.path} $out/__files/${f.name}\n") files} ''; - # All wiremock configurations + # All WireMock configurations. + # Add new WireMock services here; both test-env.nix and integration.nix + # derive their processes from this attribute set automatically. 
wiremockConfigs = { github = { port = ports.github; @@ -313,26 +748,13 @@ let mappings = githubMappings; }; }; - s3 = { - port = ports.s3; - rootDir = mkWiremockRoot { - name = "s3"; - mappings = s3Mappings; - files = s3Files; - }; - }; - bucket = { - port = ports.bucket; + # Single storage WireMock instance with stateful scenarios + storage = { + port = ports.storage; rootDir = mkWiremockRoot { - name = "bucket"; - mappings = bucketMappings; - }; - }; - pursuit = { - port = ports.pursuit; - rootDir = mkWiremockRoot { - name = "pursuit"; - mappings = pursuitMappings; + name = "storage"; + mappings = storageMappings; + files = storageFiles; }; }; healthchecks = { @@ -357,45 +779,50 @@ let ''; # Script to set up git fixtures - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" - - # Remove any existing fixtures (they may have wrong permissions from nix store copy) - rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true - - mkdir -p "$FIXTURES_DIR/purescript" - - # Use env vars instead of --global to avoid polluting user's git config - export GIT_AUTHOR_NAME="pacchettibotti" - export GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" - export GIT_COMMITTER_NAME="pacchettibotti" - export GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" - - # Copy fixtures and make writable (nix store files are read-only) - cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" - cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" - chmod -R u+w "$FIXTURES_DIR/purescript" - - for repo in "$FIXTURES_DIR"/purescript/*/; do - cd "$repo" - git init -b master && git add . - GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ - GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ - git commit -m "Fixture commit" - git config receive.denyCurrentBranch ignore - done - - git -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 - git -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 - ''; + setupGitFixtures = pkgs.writeShellApplication { + name = "setup-git-fixtures"; + runtimeInputs = [ pkgs.git ]; + text = '' + FIXTURES_DIR="''${1:-${stateDir}/repo-fixtures}" + + # Run git as pacchettibotti + gitbot() { + GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ + GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ + git "$@" + } + + # Remove any existing fixtures (they may have wrong permissions from nix store copy) + rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true + mkdir -p "$FIXTURES_DIR/purescript" + + # Copy fixtures and make writable (nix store files are read-only) + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" + cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" + cp -r ${rootPath}/app/fixtures/github-packages/console-6.1.0 "$FIXTURES_DIR/purescript/purescript-console" + chmod -R u+w "$FIXTURES_DIR/purescript" + + for repo in "$FIXTURES_DIR"/purescript/*/; do + cd "$repo" + git init -b master && git add . 
+ gitbot commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + # Tag the initial commit so we can reset to it for test isolation + gitbot tag -m "initial-fixture" initial-fixture + done + + gitbot -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 + gitbot -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 + gitbot -C "$FIXTURES_DIR/purescript/purescript-console" tag -m "v6.1.0" v6.1.0 + ''; + }; # Publish payload for testing publishPayload = pkgs.writeText "publish-effect.json" ( builtins.toJSON { name = "effect"; ref = "v4.0.0"; - compiler = "0.15.9"; + compiler = "0.15.10"; location = { githubOwner = "purescript"; githubRepo = "purescript-effect"; @@ -473,12 +900,12 @@ in { inherit ports - defaultStateDir + stateDir mockUrls testEnv envToExports gitMock - gitMockOverlay + testOverlay wiremockConfigs combinedWiremockRoot setupGitFixtures @@ -487,10 +914,8 @@ in serverStartScript # For custom wiremock setups githubMappings - s3Mappings - s3Files - bucketMappings - pursuitMappings + storageMappings + storageFiles mkWiremockRoot ; } diff --git a/nix/test/integration.nix b/nix/test/integration.nix index 5f323a3f8..75b6e6487 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -1,7 +1,9 @@ { pkgs, spagoSrc, - testEnv, + # Test support module from test-env.nix. Named 'testSupport' to avoid confusion + # with testSupport.testEnv (the environment variables attribute set). + testSupport, }: if pkgs.stdenv.isDarwin then pkgs.runCommand "integration-skip" { } '' @@ -29,7 +31,7 @@ else ''; }; - ports = testEnv.ports; + ports = testSupport.ports; in pkgs.runCommand "e2e-integration" { @@ -38,10 +40,11 @@ else pkgs.curl pkgs.jq pkgs.git + pkgs.sqlite pkgs.nss_wrapper - testEnv.wiremockStartScript - testEnv.serverStartScript - testEnv.setupGitFixtures + testSupport.wiremockStartScript + testSupport.serverStartScript + testSupport.setupGitFixtures ]; NODE_PATH = "${pkgs.registry-package-lock}/node_modules"; # Use nss_wrapper to resolve S3 bucket subdomain in the Nix sandbox. @@ -57,7 +60,11 @@ else set -e export HOME=$TMPDIR export STATE_DIR=$TMPDIR/state - export SERVER_PORT=${toString ports.server} + export REPO_FIXTURES_DIR="$STATE_DIR/repo-fixtures" + + # Export test environment variables for E2E test runners + ${testSupport.envToExports testSupport.testEnv} + mkdir -p $STATE_DIR # Start wiremock services @@ -65,8 +72,8 @@ else start-wiremock & WIREMOCK_PID=$! 
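+    # WireMock exposes an admin API under /__admin on each port; the loop
+    # below polls it for readiness. The same API resets scenario state
+    # between test cases via POST /__admin/scenarios/reset.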
- # Wait for wiremock (github, bucket, s3, pursuit) - for port in ${toString ports.github} ${toString ports.bucket} ${toString ports.s3} ${toString ports.pursuit}; do + # Wait for wiremock (github, storage, healthchecks) + for port in ${toString ports.github} ${toString ports.storage} ${toString ports.healthchecks}; do until curl -s "http://localhost:$port/__admin" > /dev/null 2>&1; do sleep 0.5 done diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index 53addca88..d754f36b6 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -9,6 +9,7 @@ # - systemd services start and stay running # - The server responds to basic HTTP requests # - Database migrations run successfully +# - The job executor starts without errors { pkgs, lib, @@ -25,11 +26,14 @@ else testConfig = import ./config.nix { inherit pkgs lib rootPath; }; envVars = testConfig.testEnv; stateDir = "/var/lib/registry-server"; + repoFixturesDir = "${stateDir}/repo-fixtures"; in pkgs.testers.nixosTest { name = "registry-smoke"; testScript = '' + import time + # Start the registry VM registry.start() @@ -42,11 +46,14 @@ else timeout=30 ) - # Verify we get a valid JSON response (empty array for jobs) + # Verify we get a valid JSON response (the jobs endpoint responds) result = registry.succeed( "curl -s http://localhost:${envVars.SERVER_PORT}/api/v1/jobs" ) - assert result.strip() == "[]", f"Expected empty jobs array, got: {result}" + + # The server may create matrix jobs on startup for new compilers, so we just verify + # the response is valid JSON (starts with '[') + assert result.strip().startswith("["), f"Expected JSON array, got: {result}" # Verify the database was created and migrations ran registry.succeed("test -f ${stateDir}/db/registry.sqlite3") @@ -54,6 +61,14 @@ else # Check that the service is still running (didn't crash) registry.succeed("systemctl is-active server.service") + # Give the job executor a moment to start and potentially fail + time.sleep(2) + + # Check that the job executor started successfully and didn't fail + logs = registry.succeed("journalctl -u server.service --no-pager") + assert "Job executor failed:" not in logs, f"Job executor failed on startup. Logs:\n{logs}" + assert "Starting Job Executor" in logs, f"Job executor did not start. 
Logs:\n{logs}" + print("✓ Smoke test passed: server deployed and responding") ''; @@ -62,7 +77,8 @@ else (rootPath + "/nix/registry-server.nix") ]; - nixpkgs.overlays = overlays; + # Apply the git mock overlay on top of the standard overlays + nixpkgs.overlays = overlays ++ [ testConfig.testOverlay ]; virtualisation = { graphics = false; @@ -70,12 +86,29 @@ else memorySize = 2048; }; + # Set up git fixtures before the server starts + systemd.services.setup-git-fixtures = { + description = "Set up git fixtures for smoke test"; + wantedBy = [ "server.service" ]; + before = [ "server.service" ]; + serviceConfig = { + Type = "oneshot"; + RemainAfterExit = true; + }; + script = '' + ${testConfig.setupGitFixtures}/bin/setup-git-fixtures ${repoFixturesDir} + ''; + }; + services.registry-server = { enable = true; host = "localhost"; port = lib.toInt envVars.SERVER_PORT; enableCerts = false; - inherit stateDir envVars; + inherit stateDir; + envVars = envVars // { + REPO_FIXTURES_DIR = repoFixturesDir; + }; }; }; } diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index 424f71364..764d01c47 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -59,18 +59,15 @@ let version = "0.5"; processes = { wiremock-github = mkWiremockProcess "github" ports.github; - wiremock-s3 = mkWiremockProcess "s3" ports.s3; - wiremock-bucket = mkWiremockProcess "bucket" ports.bucket; - wiremock-pursuit = mkWiremockProcess "pursuit" ports.pursuit; + # Unified storage WireMock instance for S3 + bucket + Pursuit with stateful scenarios + wiremock-storage = mkWiremockProcess "storage" ports.storage; wiremock-healthchecks = mkWiremockProcess "healthchecks" ports.healthchecks; registry-server = { command = "${serverStartScript}/bin/start-server"; depends_on = { wiremock-github.condition = "process_healthy"; - wiremock-s3.condition = "process_healthy"; - wiremock-bucket.condition = "process_healthy"; - wiremock-pursuit.condition = "process_healthy"; + wiremock-storage.condition = "process_healthy"; wiremock-healthchecks.condition = "process_healthy"; }; readiness_probe = { @@ -92,21 +89,21 @@ let processComposeYaml = pkgs.writeText "process-compose.yaml" (builtins.toJSON processComposeConfig); + testEnvExports = testConfig.envToExports testConfig.testEnv; + + # The state directory is fixed (not configurable) to avoid mismatch between + # the test-env and spago-test-e2e shells. 
+ stateDir = testConfig.testEnv.STATE_DIR; + testEnvScript = pkgs.writeShellScriptBin "test-env" '' set -e - export SERVER_PORT="${toString ports.server}" - - if [ -z "''${STATE_DIR:-}" ]; then - STATE_DIR="$(mktemp -d)" - export STATE_DIR - echo "Using temporary directory: $STATE_DIR" - trap 'echo "Cleaning up $STATE_DIR..."; rm -rf "$STATE_DIR"' EXIT - else - export STATE_DIR - fi + # Clean up previous test state and create fresh directory + rm -rf ${stateDir} + mkdir -p ${stateDir} - mkdir -p "$STATE_DIR" + # Export all test environment variables + ${testEnvExports} exec ${pkgs.process-compose}/bin/process-compose up \ -f ${processComposeYaml} \ @@ -130,8 +127,8 @@ in wiremockStartScript serverStartScript setupGitFixtures - envVars - envFile + testEnv + envToExports ; # Full testConfig still available for less common access patterns diff --git a/package-lock.json b/package-lock.json index 3e868b0c6..5c5c89ccd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -253,65 +253,65 @@ } }, "node_modules/@aws-sdk/client-s3": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.948.0.tgz", - "integrity": "sha512-uvEjds8aYA9SzhBS8RKDtsDUhNV9VhqKiHTcmvhM7gJO92q0WTn8/QeFTdNyLc6RxpiDyz+uBxS7PcdNiZzqfA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.955.0.tgz", + "integrity": "sha512-bFvSM6UB0R5hpWfXzHI3BlKwT2qYHto9JoDtzSr5FxVguTMzJyr+an11VT1Hi5wgO03luXEeXeloURFvaMs6TQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-node": "3.948.0", - "@aws-sdk/middleware-bucket-endpoint": "3.936.0", - "@aws-sdk/middleware-expect-continue": "3.936.0", - "@aws-sdk/middleware-flexible-checksums": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-location-constraint": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/middleware-ssec": "3.936.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/signature-v4-multi-region": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/eventstream-serde-browser": "^4.2.5", - "@smithy/eventstream-serde-config-resolver": "^4.3.5", - "@smithy/eventstream-serde-node": "^4.2.5", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-blob-browser": "^4.2.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/hash-stream-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/md5-js": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-node": "3.955.0", + "@aws-sdk/middleware-bucket-endpoint": "3.953.0", + "@aws-sdk/middleware-expect-continue": "3.953.0", 
+ "@aws-sdk/middleware-flexible-checksums": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-location-constraint": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/middleware-ssec": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/signature-v4-multi-region": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/eventstream-serde-browser": "^4.2.6", + "@smithy/eventstream-serde-config-resolver": "^4.3.6", + "@smithy/eventstream-serde-node": "^4.2.6", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-blob-browser": "^4.2.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/hash-stream-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/md5-js": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", - "@smithy/util-waiter": "^4.2.5", + "@smithy/util-waiter": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -319,47 +319,47 @@ } }, "node_modules/@aws-sdk/client-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.948.0.tgz", - "integrity": "sha512-iWjchXy8bIAVBUsKnbfKYXRwhLgRg3EqCQ5FTr3JbR+QR75rZm4ZOYXlvHGztVTmtAZ+PQVA1Y4zO7v7N87C0A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.955.0.tgz", + "integrity": "sha512-+nym5boDFt2ksba0fElocMKxCFJbJcd31PI3502hoI1N5VK7HyxkQeBtQJ64JYomvw8eARjWWC13hkB0LtZILw==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - 
"@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -368,22 +368,22 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.947.0.tgz", - "integrity": "sha512-Khq4zHhuAkvCFuFbgcy3GrZTzfSX7ZIjIcW1zRDxXRLZKRtuhnZdonqTUfaWi5K42/4OmxkYNpsO7X7trQOeHw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/xml-builder": "3.930.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.954.0.tgz", + "integrity": "sha512-5oYO5RP+mvCNXNj8XnF9jZo0EP0LTseYOJVNQYcii1D9DJqzHL3HJWurYh7cXxz7G7eDyvVYA01O9Xpt34TdoA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.953.0", + "@aws-sdk/xml-builder": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -392,15 +392,15 @@ } }, 
"node_modules/@aws-sdk/credential-provider-env": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.947.0.tgz", - "integrity": "sha512-VR2V6dRELmzwAsCpK4GqxUi6UW5WNhAXS9F9AzWi5jvijwJo3nH92YNJUP4quMpgFZxJHEWyXLWgPjh9u0zYOA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.954.0.tgz", + "integrity": "sha512-2HNkqBjfsvyoRuPAiFh86JBFMFyaCNhL4VyH6XqwTGKZffjG7hdBmzXPy7AT7G3oFh1k/1Zc27v0qxaKoK7mBA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -408,20 +408,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.947.0.tgz", - "integrity": "sha512-inF09lh9SlHj63Vmr5d+LmwPXZc2IbK8lAruhOr3KLsZAIHEgHgGPXWDC2ukTEMzg0pkexQ6FOhXXad6klK4RA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.954.0.tgz", + "integrity": "sha512-CrWD5300+NE1OYRnSVDxoG7G0b5cLIZb7yp+rNQ5Jq/kqnTmyJXpVAsivq+bQIDaGzPXhadzpAMIoo7K/aHaag==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/util-stream": "^4.5.7", "tslib": "^2.6.2" }, "engines": { @@ -429,24 +429,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.948.0.tgz", - "integrity": "sha512-Cl//Qh88e8HBL7yYkJNpF5eq76IO6rq8GsatKcfVBm7RFVxCqYEPSSBtkHdbtNwQdRQqAMXc6E/lEB/CZUDxnA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.955.0.tgz", + "integrity": "sha512-90isLovxsPzaaSx3IIUZuxym6VXrsRetnQ3AuHr2kiTFk2pIzyIwmi+gDcUaLXQ5nNBoSj1Z/4+i1vhxa1n2DQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-login": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + 
"@aws-sdk/credential-provider-login": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -454,18 +454,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.948.0.tgz", - "integrity": "sha512-gcKO2b6eeTuZGp3Vvgr/9OxajMrD3W+FZ2FCyJox363ZgMoYJsyNid1vuZrEuAGkx0jvveLXfwiVS0UXyPkgtw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.955.0.tgz", + "integrity": "sha512-xlkmSvg8oDN5LIxLAq3N1QWK8F8gUAsBWZlp1IX8Lr5XhcKI3GVarIIUcZrvCy1NjzCd/LDXYdNL6MRlNP4bAw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -473,22 +473,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.948.0.tgz", - "integrity": "sha512-ep5vRLnrRdcsP17Ef31sNN4g8Nqk/4JBydcUJuFRbGuyQtrZZrVT81UeH2xhz6d0BK6ejafDB9+ZpBjXuWT5/Q==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.955.0.tgz", + "integrity": "sha512-XIL4QB+dPOJA6DRTmYZL52wFcLTslb7V1ydS4FCNT2DVLhkO4ExkPP+pe5YmIpzt/Our1ugS+XxAs3e6BtyFjA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-ini": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-ini": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -496,16 +496,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.947.0.tgz", - "integrity": "sha512-WpanFbHe08SP1hAJNeDdBDVz9SGgMu/gc0XJ9u3uNpW99nKZjDpvPRAdW7WLA4K6essMjxWkguIGNOpij6Do2Q==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.954.0.tgz", + "integrity": "sha512-Y1/0O2LgbKM8iIgcVj/GNEQW6p90LVTCOzF2CI1pouoKqxmZ/1F7F66WHoa6XUOfKaCRj/R6nuMR3om9ThaM5A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -513,18 +513,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.948.0.tgz", - "integrity": "sha512-gqLhX1L+zb/ZDnnYbILQqJ46j735StfWV5PbDjxRzBKS7GzsiYoaf6MyHseEopmWrez5zl5l6aWzig7UpzSeQQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.955.0.tgz", + "integrity": "sha512-Y99KI73Fn8JnB4RY5Ls6j7rd5jmFFwnY9WLHIWeJdc+vfwL6Bb1uWKW3+m/B9+RC4Xoz2nQgtefBcdWq5Xx8iw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/client-sso": "3.948.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/token-providers": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/client-sso": "3.955.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/token-providers": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -532,17 +532,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.948.0.tgz", - "integrity": "sha512-MvYQlXVoJyfF3/SmnNzOVEtANRAiJIObEUYYyjTqKZTmcRIVVky0tPuG26XnB8LmTYgtESwJIZJj/Eyyc9WURQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.955.0.tgz", + "integrity": "sha512-+lFxkZ2Vz3qp/T68ZONKzWVTQvomTu7E6tts1dfAbEcDt62Y/nPCByq/C2hQj+TiN05HrUx+yTJaGHBklhkbqA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -550,16 +550,16 @@ } }, "node_modules/@aws-sdk/middleware-bucket-endpoint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.936.0.tgz", - "integrity": 
"sha512-XLSVVfAorUxZh6dzF+HTOp4R1B5EQcdpGcPliWr0KUj2jukgjZEcqbBmjyMF/p9bmyQsONX80iURF1HLAlW0qg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.953.0.tgz", + "integrity": "sha512-YHVRIOowtGIl/L2WuS83FgRlm31tU0aL1yryWaFtF+AFjA5BIeiFkxIZqaRGxJpJvFEBdohsyq6Ipv5mgWfezg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" }, @@ -568,14 +568,14 @@ } }, "node_modules/@aws-sdk/middleware-expect-continue": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.936.0.tgz", - "integrity": "sha512-Eb4ELAC23bEQLJmUMYnPWcjD3FZIsmz2svDiXEcxRkQU9r7NRID7pM7C5NPH94wOfiCk0b2Y8rVyFXW0lGQwbA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.953.0.tgz", + "integrity": "sha512-BQTVXrypQ0rbb7au/Hk4IS5GaJZlwk6O44Rjk6Kxb0IvGQhSurNTuesFiJx1sLbf+w+T31saPtODcfQQERqhCQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -583,22 +583,22 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.947.0.tgz", - "integrity": "sha512-kXXxS2raNESNO+zR0L4YInVjhcGGNI2Mx0AE1ThRhDkAt2se3a+rGf9equ9YvOqA1m8Jl/GSI8cXYvSxXmS9Ag==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.954.0.tgz", + "integrity": "sha512-hHOPDJyxucNodkgapLhA0VdwDBwVYN9DX20aA6j+3nwutAlZ5skaV7Bw0W3YC7Fh/ieDKKhcSZulONd4lVTwMg==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", "@smithy/is-array-buffer": "^4.2.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -607,14 +607,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.936.0.tgz", - "integrity": "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.953.0.tgz", + "integrity": 
"sha512-jTGhfkONav+r4E6HLOrl5SzBqDmPByUYCkyB/c/3TVb8jX3wAZx8/q9bphKpCh+G5ARi3IdbSisgkZrJYqQ19Q==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -622,13 +622,13 @@ } }, "node_modules/@aws-sdk/middleware-location-constraint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.936.0.tgz", - "integrity": "sha512-SCMPenDtQMd9o5da9JzkHz838w3327iqXk3cbNnXWqnNRx6unyW8FL0DZ84gIY12kAyVHz5WEqlWuekc15ehfw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.953.0.tgz", + "integrity": "sha512-h0urrbteIQEybyIISaJfQLZ/+/lJPRzPWAQT4epvzfgv/4MKZI7K83dK7SfTwAooVKFBHiCMok2Cf0iHDt07Kw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -636,13 +636,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.936.0.tgz", - "integrity": "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.953.0.tgz", + "integrity": "sha512-PlWdVYgcuptkIC0ZKqVUhWNtSHXJSx7U9V8J7dJjRmsXC40X7zpEycvrkzDMJjeTDGcCceYbyYAg/4X1lkcIMw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -650,15 +650,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.948.0.tgz", - "integrity": "sha512-Qa8Zj+EAqA0VlAVvxpRnpBpIWJI9KUwaioY1vkeNVwXPlNaz9y9zCKVM9iU9OZ5HXpoUg6TnhATAHXHAE8+QsQ==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.953.0.tgz", + "integrity": "sha512-cmIJx0gWeesUKK4YwgE+VQL3mpACr3/J24fbwnc1Z5tntC86b+HQFzU5vsBDw6lLwyD46dBgWdsXFh1jL+ZaFw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", + "@aws-sdk/types": "3.953.0", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -666,23 +666,23 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.947.0.tgz", - "integrity": "sha512-DS2tm5YBKhPW2PthrRBDr6eufChbwXe0NjtTZcYDfUCXf0OR+W6cIqyKguwHMJ+IyYdey30AfVw9/Lb5KB8U8A==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + 
"version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.954.0.tgz", + "integrity": "sha512-274CNmnRjknmfFb2o0Azxic54fnujaA8AYSeRUOho3lN48TVzx85eAFWj2kLgvUJO88pE3jBDPWboKQiQdXeUQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -691,13 +691,13 @@ } }, "node_modules/@aws-sdk/middleware-ssec": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.936.0.tgz", - "integrity": "sha512-/GLC9lZdVp05ozRik5KsuODR/N7j+W+2TbfdFL3iS+7un+gnP6hC8RDOZd6WhpZp7drXQ9guKiTAxkZQwzS8DA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.953.0.tgz", + "integrity": "sha512-OrhG1kcQ9zZh3NS3RovR028N0+UndQ957zF1k5HPLeFLwFwQN1uPOufzzPzAyXIIKtR69ARFsQI4mstZS4DMvw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -705,17 +705,17 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.947.0.tgz", - "integrity": "sha512-7rpKV8YNgCP2R4F9RjWZFcD2R+SO/0R4VHIbY9iZJdH2MzzJ8ZG7h8dZ2m8QkQd1fjx4wrFJGGPJUTYXPV3baA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.954.0.tgz", + "integrity": "sha512-5PX8JDe3dB2+MqXeGIhmgFnm2rbVsSxhz+Xyuu1oxLtbOn+a9UDA+sNBufEBjt3UxWy5qwEEY1fxdbXXayjlGg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@smithy/core": "^3.18.7", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -723,47 +723,47 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.948.0.tgz", - "integrity": "sha512-zcbJfBsB6h254o3NuoEkf0+UY1GpE9ioiQdENWv7odo69s8iaGBEQ4BDpsIMqcuiiUXw1uKIVNxCB1gUGYz8lw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.955.0.tgz", + "integrity": "sha512-RBi6CQHbPF09kqXAoiEOOPkVnSoU5YppKoOt/cgsWfoMHwC+7itIrEv+yRD62h14jIjF3KngVIQIrBRbX3o3/Q==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": 
"3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -772,15 +772,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.936.0.tgz", - "integrity": "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.953.0.tgz", + "integrity": "sha512-5MJgnsc+HLO+le0EK1cy92yrC7kyhGZSpaq8PcQvKs9qtXCXT5Tb6tMdkr5Y07JxYsYOV1omWBynvL6PWh08tQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -788,16 +788,16 @@ } }, "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.947.0.tgz", - "integrity": "sha512-UaYmzoxf9q3mabIA2hc4T6x5YSFUG2BpNjAZ207EA1bnQMiK+d6vZvb83t7dIWL/U1de1sGV19c1C81Jf14rrA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.954.0.tgz", + "integrity": "sha512-GJJbUaSlGrMSRWui3Oz8ByygpQlzDGm195yTKirgGyu4tfYrFr/QWrWT42EUktY/L4Irev1pdHTuLS+AGHO1gw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -805,17 +805,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.948.0.tgz", - "integrity": "sha512-V487/kM4Teq5dcr1t5K6eoUKuqlGr9FRWL3MIMukMERJXHZvio6kox60FZ/YtciRHRI75u14YUqm2Dzddcu3+A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.955.0.tgz", + "integrity": "sha512-LVpWkxXvMPgZofP2Gc8XBfQhsyecBMVARDHWMvks6vPbCLSTM7dw6H1HI9qbGNCurYcyc2xBRAkEDhChQlbPPg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -823,12 +823,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.936.0.tgz", - "integrity": "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.953.0.tgz", + "integrity": "sha512-M9Iwg9kTyqTErI0vOTVVpcnTHWzS3VplQppy8MuL02EE+mJ0BIwpWfsaAPQW+/XnVpdNpWZTsHcNE29f1+hR8g==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -836,9 +836,9 @@ } }, "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.893.0.tgz", - "integrity": "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.953.0.tgz", + "integrity": "sha512-9hqdKkn4OvYzzaLryq2xnwcrPc8ziY34i9szUdgBfSqEC6pBxbY9/lLXmrgzfwMSL2Z7/v2go4Od0p5eukKLMQ==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -848,15 +848,15 @@ } }, "node_modules/@aws-sdk/util-endpoints": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.936.0.tgz", - "integrity": "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w==", + "version": "3.953.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.953.0.tgz", + "integrity": "sha512-rjaS6jrFksopXvNg6YeN+D1lYwhcByORNlFuYesFvaQNtPOufbE5tJL4GJ3TMXyaY0uFR28N5BHHITPyWWfH/g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-endpoints": "^3.2.5", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", + "@smithy/util-endpoints": "^3.2.6", "tslib": "^2.6.2" }, "engines": { @@ -864,9 +864,9 @@ } }, "node_modules/@aws-sdk/util-locate-window": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.893.0.tgz", - "integrity": "sha512-T89pFfgat6c8nMmpI8eKjBcDcgJq36+m9oiXbcUzeU55MP9ZuGgBomGjGnHaEyF36jenW9gmg3NfZDm0AO2XPg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.953.0.tgz", + "integrity": "sha512-mPxK+I1LcrgC/RSa3G5AMAn8eN2Ay0VOgw8lSRmV1jCtO+iYvNeCqOdxoJUjOW6I5BA4niIRWqVORuRP07776Q==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -876,27 +876,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.936.0.tgz", - "integrity": "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.953.0.tgz", + "integrity": "sha512-UF5NeqYesWuFao+u7LJvpV1SJCaLml5BtFZKUdTnNNMeN6jvV+dW/eQoFGpXF94RCqguX0XESmRuRRPQp+/rzQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.947.0.tgz", - "integrity": "sha512-+vhHoDrdbb+zerV4noQk1DHaUMNzWFWPpPYjVTwW2186k5BEJIecAMChYkghRrBVJ3KPWP1+JnZwOd72F3d4rQ==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.954.0.tgz", + "integrity": "sha512-fB5S5VOu7OFkeNzcblQlez4AjO5hgDFaa7phYt7716YWisY3RjAaQPlxgv+G3GltHHDJIfzEC5aRxdf62B9zMg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -912,12 +912,12 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.930.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.930.0.tgz", - "integrity": "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.953.0.tgz", + "integrity": "sha512-Zmrj21jQ2OeOJGr9spPiN00aQvXa/WUqRXcTVENhrMt+OFoSOfDFpYhUj9NQ09QmQ8KMWFoWuWW6iKurNqLvAA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" }, @@ 
-1169,12 +1169,12 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.5.tgz", - "integrity": "sha512-j7HwVkBw68YW8UmFRcjZOmssE77Rvk0GWAIN1oFBhsaovQmZWYCIcGa9/pwRB0ExI8Sk9MWNALTjftjHZea7VA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.7.tgz", + "integrity": "sha512-rzMY6CaKx2qxrbYbqjXWS0plqEy7LOdKHS0bg4ixJ6aoGDPNUcLWk/FRNuCILh7GKLG9TFUXYYeQQldMBBwuyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1207,16 +1207,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.3.tgz", - "integrity": "sha512-ezHLe1tKLUxDJo2LHtDuEDyWXolw8WGOR92qb4bQdWq/zKenO5BvctZGrVJBK08zjezSk7bmbKFOXIVyChvDLw==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.5.tgz", + "integrity": "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-endpoints": "^3.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1224,18 +1224,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.18.7", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.18.7.tgz", - "integrity": "sha512-axG9MvKhMWOhFbvf5y2DuyTxQueO0dkedY9QC3mAfndLosRI/9LJv8WaL0mw7ubNhsO4IuXX9/9dYGPFvHrqlw==", + "version": "3.20.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.20.0.tgz", + "integrity": "sha512-WsSHCPq/neD5G/MkK4csLI5Y5Pkd9c1NMfpYEKeghSGaD4Ja1qLIohRQf2D5c1Uy5aXp76DeKHkzWZ9KAlHroQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": "^4.2.6", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" @@ -1245,15 +1245,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.5.tgz", - "integrity": "sha512-BZwotjoZWn9+36nimwm/OLIcVe+KYRwzMjfhd4QT7QxPm9WY0HiOV8t/Wlh+HVUif0SBVV7ksq8//hPaBC/okQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.7.tgz", + "integrity": "sha512-CmduWdCiILCRNbQWFR0OcZlUPVtyE49Sr8yYL0rZQ4D/wKxiNzBNS/YHemvnbkIWj623fplgkexUd/c9CAKdoA==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/types": 
"^4.11.0", + "@smithy/url-parser": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1261,13 +1261,13 @@ } }, "node_modules/@smithy/eventstream-codec": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.5.tgz", - "integrity": "sha512-Ogt4Zi9hEbIP17oQMd68qYOHUzmH47UkK7q7Gl55iIm9oKt27MUGrC5JfpMroeHjdkOliOA4Qt3NQ1xMq/nrlA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.7.tgz", + "integrity": "sha512-DrpkEoM3j9cBBWhufqBwnbbn+3nf1N9FP6xuVJ+e220jbactKuQgaZwjwP5CP1t+O94brm2JgVMD2atMGX3xIQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" }, @@ -1276,13 +1276,13 @@ } }, "node_modules/@smithy/eventstream-serde-browser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.5.tgz", - "integrity": "sha512-HohfmCQZjppVnKX2PnXlf47CW3j92Ki6T/vkAT2DhBR47e89pen3s4fIa7otGTtrVxmj7q+IhH0RnC5kpR8wtw==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.7.tgz", + "integrity": "sha512-ujzPk8seYoDBmABDE5YqlhQZAXLOrtxtJLrbhHMKjBoG5b4dK4i6/mEU+6/7yXIAkqOO8sJ6YxZl+h0QQ1IJ7g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1290,12 +1290,12 @@ } }, "node_modules/@smithy/eventstream-serde-config-resolver": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.5.tgz", - "integrity": "sha512-ibjQjM7wEXtECiT6my1xfiMH9IcEczMOS6xiCQXoUIYSj5b1CpBbJ3VYbdwDy8Vcg5JHN7eFpOCGk8nyZAltNQ==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.7.tgz", + "integrity": "sha512-x7BtAiIPSaNaWuzm24Q/mtSkv+BrISO/fmheiJ39PKRNH3RmH2Hph/bUKSOBOBC9unqfIYDhKTHwpyZycLGPVQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1303,13 +1303,13 @@ } }, "node_modules/@smithy/eventstream-serde-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.5.tgz", - "integrity": "sha512-+elOuaYx6F2H6x1/5BQP5ugv12nfJl66GhxON8+dWVUEDJ9jah/A0tayVdkLRP0AeSac0inYkDz5qBFKfVp2Gg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.7.tgz", + "integrity": "sha512-roySCtHC5+pQq5lK4be1fZ/WR6s/AxnPaLfCODIPArtN2du8s5Ot4mKVK3pPtijL/L654ws592JHJ1PbZFF6+A==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1317,13 +1317,13 @@ } }, "node_modules/@smithy/eventstream-serde-universal": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.5.tgz", - "integrity": "sha512-G9WSqbST45bmIFaeNuP/EnC19Rhp54CcVdX9PDL1zyEB514WsDVXhlyihKlGXnRycmHNmVv88Bvvt4EYxWef/Q==", + 
"version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.7.tgz", + "integrity": "sha512-QVD+g3+icFkThoy4r8wVFZMsIP08taHVKjE6Jpmz8h5CgX/kk6pTODq5cht0OMtcapUx+xrPzUTQdA+TmO0m1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-codec": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-codec": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1331,14 +1331,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.6.tgz", - "integrity": "sha512-3+RG3EA6BBJ/ofZUeTFJA7mHfSYrZtQIrDP9dI8Lf7X6Jbos2jptuLrAAteDiFVrmbEmLSuRG/bUKzfAXk7dhg==", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.8.tgz", + "integrity": "sha512-h/Fi+o7mti4n8wx1SR6UHWLaakwHRx29sizvp8OOm7iqwKGFneT06GCSFhml6Bha5BT6ot5pj3CYZnCHhGC2Rg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" }, @@ -1347,14 +1347,14 @@ } }, "node_modules/@smithy/hash-blob-browser": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.6.tgz", - "integrity": "sha512-8P//tA8DVPk+3XURk2rwcKgYwFvwGwmJH/wJqQiSKwXZtf/LiZK+hbUZmPj/9KzM+OVSwe4o85KTp5x9DUZTjw==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.8.tgz", + "integrity": "sha512-07InZontqsM1ggTCPSRgI7d8DirqRrnpL7nIACT4PW0AWrgDiHhjGZzbAE5UtRSiU0NISGUYe7/rri9ZeWyDpw==", "license": "Apache-2.0", "dependencies": { "@smithy/chunked-blob-reader": "^5.2.0", "@smithy/chunked-blob-reader-native": "^4.2.1", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1362,12 +1362,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.5.tgz", - "integrity": "sha512-DpYX914YOfA3UDT9CN1BM787PcHfWRBB43fFGCYrZFUH0Jv+5t8yYl+Pd5PW4+QzoGEDvn5d5QIO4j2HyYZQSA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.7.tgz", + "integrity": "sha512-PU/JWLTBCV1c8FtB8tEFnY4eV1tSfBc7bDBADHfn1K+uRbPgSJ9jnJp0hyjiFN2PMdPzxsf1Fdu0eo9fJ760Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1377,12 +1377,12 @@ } }, "node_modules/@smithy/hash-stream-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.5.tgz", - "integrity": "sha512-6+do24VnEyvWcGdHXomlpd0m8bfZePpUKBy7m311n+JuRwug8J4dCanJdTymx//8mi0nlkflZBvJe+dEO/O12Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.7.tgz", + "integrity": "sha512-ZQVoAwNYnFMIbd4DUc517HuwNelJUY6YOzwqrbcAgCnVn+79/OK7UjwA93SPpdTOpKDVkLIzavWm/Ck7SmnDPQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1391,12 +1391,12 @@ } }, 
"node_modules/@smithy/invalid-dependency": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.5.tgz", - "integrity": "sha512-2L2erASEro1WC5nV+plwIMxrTXpvpfzl4e+Nre6vBVRR2HKeGGcvpJyyL3/PpiSg+cJG2KpTmZmq934Olb6e5A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.7.tgz", + "integrity": "sha512-ncvgCr9a15nPlkhIUx3CU4d7E7WEuVJOV7fS7nnK2hLtPK9tYRBkMHQbhXU1VvvKeBm/O0x26OEoBq+ngFpOEQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1416,12 +1416,12 @@ } }, "node_modules/@smithy/md5-js": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.5.tgz", - "integrity": "sha512-Bt6jpSTMWfjCtC0s79gZ/WZ1w90grfmopVOWqkI2ovhjpD5Q2XRXuecIPB9689L2+cCySMbaXDhBPU56FKNDNg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.7.tgz", + "integrity": "sha512-Wv6JcUxtOLTnxvNjDnAiATUsk8gvA6EeS8zzHig07dotpByYsLot+m0AaQEniUBjx97AC41MQR4hW0baraD1Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1430,13 +1430,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.5.tgz", - "integrity": "sha512-Y/RabVa5vbl5FuHYV2vUCwvh/dqzrEY/K2yWPSqvhFUwIY0atLqO4TienjBXakoy4zrKAMCZwg+YEqmH7jaN7A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.7.tgz", + "integrity": "sha512-GszfBfCcvt7kIbJ41LuNa5f0wvQCHhnGx/aDaZJCCT05Ld6x6U2s0xsc/0mBFONBZjQJp2U/0uSJ178OXOwbhg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1444,18 +1444,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.3.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.3.14.tgz", - "integrity": "sha512-v0q4uTKgBM8dsqGjqsabZQyH85nFaTnFcgpWU1uydKFsdyyMzfvOkNum9G7VK+dOP01vUnoZxIeRiJ6uD0kjIg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.1.tgz", + "integrity": "sha512-gpLspUAoe6f1M6H0u4cVuFzxZBrsGZmjx2O9SigurTx4PbntYa4AJ+o0G0oGm1L2oSX6oBhcGHwrfJHup2JnJg==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/core": "^3.20.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1463,18 +1463,18 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.14.tgz", - "integrity": 
"sha512-Z2DG8Ej7FyWG1UA+7HceINtSLzswUgs2np3sZX0YBBxCt+CXG4QUxv88ZDS3+2/1ldW7LqtSY1UO/6VQ1pND8Q==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/service-error-classification": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "version": "4.4.17", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.17.tgz", + "integrity": "sha512-MqbXK6Y9uq17h+4r0ogu/sBT6V/rdV+5NvYL7ZV444BKfQygYe8wAhDrVXagVebN6w2RE0Fm245l69mOsPGZzg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-retry": "^4.2.7", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" }, @@ -1483,13 +1483,13 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.6.tgz", - "integrity": "sha512-VkLoE/z7e2g8pirwisLz8XJWedUSY8my/qrp81VmAdyrhi94T+riBfwP+AOEEFR9rFTSonC/5D2eWNmFabHyGQ==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.8.tgz", + "integrity": "sha512-8rDGYen5m5+NV9eHv9ry0sqm2gI6W7mc1VSFMtn6Igo25S507/HaOX9LTHAS2/J32VXD0xSzrY0H5FJtOMS4/w==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1497,12 +1497,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.5.tgz", - "integrity": "sha512-bYrutc+neOyWxtZdbB2USbQttZN0mXaOyYLIsaTbJhFsfpXyGWUxJpEuO1rJ8IIJm2qH4+xJT0mxUSsEDTYwdQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.7.tgz", + "integrity": "sha512-bsOT0rJ+HHlZd9crHoS37mt8qRRN/h9jRve1SXUhVbkRzu0QaNYZp1i1jha4n098tsvROjcwfLlfvcFuJSXEsw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1510,14 +1510,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.5.tgz", - "integrity": "sha512-UTurh1C4qkVCtqggI36DGbLB2Kv8UlcFdMXDcWMbqVY2uRg0XmT9Pb4Vj6oSQ34eizO1fvR0RnFV4Axw4IrrAg==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.7.tgz", + "integrity": "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1525,15 +1525,15 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.5.tgz", - "integrity": 
"sha512-CMnzM9R2WqlqXQGtIlsHMEZfXKJVTIrqCNoSd/QpAyp+Dw0a1Vps13l6ma1fH8g7zSPNsA59B/kWgeylFuA/lw==", + "version": "4.4.7", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.7.tgz", + "integrity": "sha512-NELpdmBOO6EpZtWgQiHjoShs1kmweaiNuETUpuup+cmm/xJYjT4eUjfhrXRP4jCOaAsS3c3yPsP3B+K+/fyPCQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1541,12 +1541,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.5.tgz", - "integrity": "sha512-8iLN1XSE1rl4MuxvQ+5OSk/Zb5El7NJZ1td6Tn+8dQQHIjp59Lwl6bd0+nzw6SKm2wSSriH2v/I9LPzUic7EOg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.7.tgz", + "integrity": "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1554,12 +1554,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.5.tgz", - "integrity": "sha512-RlaL+sA0LNMp03bf7XPbFmT5gN+w3besXSWMkA8rcmxLSVfiEXElQi4O2IWwPfxzcHkxqrwBFMbngB8yx/RvaQ==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.7.tgz", + "integrity": "sha512-1r07pb994I20dD/c2seaZhoCuNYm0rWrvBxhCQ70brNh11M5Ml2ew6qJVo0lclB3jMIXirD4s2XRXRe7QEi0xA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1567,12 +1567,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.5.tgz", - "integrity": "sha512-y98otMI1saoajeik2kLfGyRp11e5U/iJYH/wLCh3aTV/XutbGT9nziKGkgCaMD1ghK7p6htHMm6b6scl9JRUWg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.7.tgz", + "integrity": "sha512-eKONSywHZxK4tBxe2lXEysh8wbBdvDWiA+RIuaxZSgCMmA0zMgoDpGLJhnyj+c0leOQprVnXOmcB4m+W9Rw7sg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" }, @@ -1581,12 +1581,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.5.tgz", - "integrity": "sha512-031WCTdPYgiQRYNPXznHXof2YM0GwL6SeaSyTH/P72M1Vz73TvCNH2Nq8Iu2IEPq9QP2yx0/nrw5YmSeAi/AjQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.7.tgz", + "integrity": "sha512-3X5ZvzUHmlSTHAXFlswrS6EGt8fMSIxX/c3Rm1Pni3+wYWB6cjGocmRIoqcQF9nU5OgGmL0u7l9m44tSUpfj9w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1594,24 +1594,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": 
"4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.5.tgz", - "integrity": "sha512-8fEvK+WPE3wUAcDvqDQG1Vk3ANLR8Px979te96m84CbKAjBVf25rPYSzb4xU4hlTyho7VhOGnh5i62D/JVF0JQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.7.tgz", + "integrity": "sha512-YB7oCbukqEb2Dlh3340/8g8vNGbs/QsNNRms+gv3N2AtZz9/1vSBx6/6tpwQpZMEJFs7Uq8h4mmOn48ZZ72MkA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0" + "@smithy/types": "^4.11.0" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.0.tgz", - "integrity": "sha512-5WmZ5+kJgJDjwXXIzr1vDTG+RhF9wzSODQBfkrQ2VVkYALKGvZX1lgVSxEkgicSAFnFhPj5rudJV0zoinqS0bA==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.2.tgz", + "integrity": "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1619,16 +1619,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.5.tgz", - "integrity": "sha512-xSUfMu1FT7ccfSXkoLl/QRQBi2rOvi3tiBZU2Tdy3I6cgvZ6SEi9QNey+lqps/sJRnogIS+lq+B1gxxbra2a/w==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.7.tgz", + "integrity": "sha512-9oNUlqBlFZFOSdxgImA6X5GFuzE7V2H7VG/7E70cdLhidFbdtvxxt81EHgykGK5vq5D3FafH//X+Oy31j3CKOg==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.7", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1638,17 +1638,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.9.10", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.9.10.tgz", - "integrity": "sha512-Jaoz4Jw1QYHc1EFww/E6gVtNjhoDU+gwRKqXP6C3LKYqqH2UQhP8tMP3+t/ePrhaze7fhLE8vS2q6vVxBANFTQ==", + "version": "4.10.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.2.tgz", + "integrity": "sha512-D5z79xQWpgrGpAHb054Fn2CCTQZpog7JELbVQ6XAvXs5MNKWf28U9gzSBlJkOyMl9LA1TZEjRtwvGXfP0Sl90g==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@smithy/core": "^3.20.0", + "@smithy/middleware-endpoint": "^4.4.1", + "@smithy/middleware-stack": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", + "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" }, "engines": { @@ -1656,9 +1656,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.9.0.tgz", - "integrity": 
"sha512-MvUbdnXDTwykR8cB1WZvNNwqoWVaTRA0RLlLmf/cIFNMM2cKWz01X4Ly6SMC4Kks30r8tT3Cty0jmeWfiuyHTA==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.11.0.tgz", + "integrity": "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -1668,13 +1668,13 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.5.tgz", - "integrity": "sha512-VaxMGsilqFnK1CeBX+LXnSuaMx4sTL/6znSZh2829txWieazdVxr54HmiyTsIbpOTLcf5nYpq9lpzmwRdxj6rQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.7.tgz", + "integrity": "sha512-/RLtVsRV4uY3qPWhBDsjwahAtt3x2IsMGnP5W1b2VZIe+qgCqkLxI1UOHDZp1Q1QSOrdOR32MF3Ph2JfWT1VHg==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/querystring-parser": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1745,14 +1745,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.13", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.13.tgz", - "integrity": "sha512-hlVLdAGrVfyNei+pKIgqDTxfu/ZI2NSyqj4IDxKd5bIsIqwR/dSlkxlPaYxFiIaDVrBy0he8orsFy+Cz119XvA==", + "version": "4.3.16", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.16.tgz", + "integrity": "sha512-/eiSP3mzY3TsvUOYMeL4EqUX6fgUOj2eUOU4rMMgVbq67TiRLyxT7Xsjxq0bW3OwuzK009qOwF0L2OgJqperAQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1760,17 +1760,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.16", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.16.tgz", - "integrity": "sha512-F1t22IUiJLHrxW9W1CQ6B9PN+skZ9cqSuzB18Eh06HrJPbjsyZ7ZHecAKw80DQtyGTRcVfeukKaCRYebFwclbg==", + "version": "4.2.19", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.19.tgz", + "integrity": "sha512-3a4+4mhf6VycEJyHIQLypRbiwG6aJvbQAeRAVXydMmfweEPnLLabRbdyo/Pjw8Rew9vjsh5WCdhmDaHkQnhhhA==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.3", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/config-resolver": "^4.4.5", + "@smithy/credential-provider-imds": "^4.2.7", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1778,13 +1778,13 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.5.tgz", - "integrity": "sha512-3O63AAWu2cSNQZp+ayl9I3NapW1p1rR5mlVHcF6hAB1dPZUQFfRPYtplWX/3xrzWthPGj5FqB12taJJCfH6s8A==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.7.tgz", + 
"integrity": "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1804,12 +1804,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.5.tgz", - "integrity": "sha512-6Y3+rvBF7+PZOc40ybeZMcGln6xJGVeY60E7jy9Mv5iKpMJpHgRE6dKy9ScsVxvfAYuEX4Q9a65DQX90KaQ3bA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.7.tgz", + "integrity": "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1817,13 +1817,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.5.tgz", - "integrity": "sha512-GBj3+EZBbN4NAqJ/7pAhsXdfzdlznOh8PydUijy6FpNIMnHPSMO2/rP4HKu+UFeikJxShERk528oy7GT79YiJg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.7.tgz", + "integrity": "sha512-SvDdsQyF5CIASa4EYVT02LukPHVzAgUA4kMAuZ97QJc2BpAqZfA4PINB8/KOoCXEw9tsuv/jQjMeaHFvxdLNGg==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1831,14 +1831,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.6.tgz", - "integrity": "sha512-qWw/UM59TiaFrPevefOZ8CNBKbYEP6wBAIlLqxn3VAIo9rgnTNc4ASbVrqDmhuwI87usnjhdQrxodzAGFFzbRQ==", + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.8.tgz", + "integrity": "sha512-ZnnBhTapjM0YPGUSmOs0Mcg/Gg87k503qG4zU2v/+Js2Gu+daKOJMeqcQns8ajepY8tgzzfYxl6kQyZKml6O2w==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/types": "^4.9.0", + "@smithy/fetch-http-handler": "^5.3.8", + "@smithy/node-http-handler": "^4.4.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", @@ -1875,13 +1875,13 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.5.tgz", - "integrity": "sha512-Dbun99A3InifQdIrsXZ+QLcC0PGBPAdrl4cj1mTgJvyc9N2zf7QSxg8TBkzsCmGJdE3TLbO9ycwpY0EkWahQ/g==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.7.tgz", + "integrity": "sha512-vHJFXi9b7kUEpHWUCY3Twl+9NPOZvQ0SAi+Ewtn48mbiJk4JY9MZmKQjGB4SCvVb9WPiSphZJYY6RIbs+grrzw==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -2193,9 +2193,9 @@ "license": "MIT" }, "node_modules/fs-extra": { - "version": "11.3.2", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", - 
"integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", diff --git a/scripts/src/ArchiveSeeder.purs b/scripts/src/ArchiveSeeder.purs index a474876d5..fe0ae805f 100644 --- a/scripts/src/ArchiveSeeder.purs +++ b/scripts/src/ArchiveSeeder.purs @@ -32,7 +32,6 @@ import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -119,7 +118,6 @@ main = launchAff_ do hasErrors <- runArchiveSeeder parsedArgs logPath # runAppEffects # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index ee9423d68..05e73ae84 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -106,7 +106,6 @@ import Registry.App.CLI.Tar as Tar import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -124,6 +123,7 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants as Constants import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) @@ -241,7 +241,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' @@ -358,7 +357,7 @@ runLegacyImport logs = do Just ref -> pure ref Log.debug "Building dependency index with compiler versions..." - compilerIndex <- API.readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex Log.debug $ "Solving dependencies for " <> formatted eitherResolutions <- do @@ -470,7 +469,7 @@ runLegacyImport logs = do Log.debug "Downloading dependencies..." 
diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs
index ee9423d68..05e73ae84 100644
--- a/scripts/src/LegacyImporter.purs
+++ b/scripts/src/LegacyImporter.purs
@@ -106,7 +106,6 @@ import Registry.App.CLI.Tar as Tar
 import Registry.App.Effect.Archive as Archive
 import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..))
 import Registry.App.Effect.Cache as Cache
-import Registry.App.Effect.Comment as Comment
 import Registry.App.Effect.Env as Env
 import Registry.App.Effect.GitHub (GITHUB)
 import Registry.App.Effect.GitHub as GitHub
@@ -124,6 +123,7 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali
 import Registry.App.Legacy.Manifest as Legacy.Manifest
 import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec)
 import Registry.App.Manifest.SpagoYaml as SpagoYaml
+import Registry.App.Server.MatrixBuilder as MatrixBuilder
 import Registry.Constants as Constants
 import Registry.Foreign.FSExtra as FS.Extra
 import Registry.Foreign.Octokit (Address, Tag)
@@ -241,7 +241,6 @@ main = launchAff_ do
     # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef })
     # Cache.interpret API._compilerCache (Cache.handleFs cache)
     # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1))
-    # Comment.interpret Comment.handleLog
     # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log)
     # Env.runResourceEnv resourceEnv
     # Run.runBaseAff'
@@ -358,7 +357,7 @@ runLegacyImport logs = do
       Just ref -> pure ref

   Log.debug "Building dependency index with compiler versions..."
-  compilerIndex <- API.readCompilerIndex
+  compilerIndex <- MatrixBuilder.readCompilerIndex

   Log.debug $ "Solving dependencies for " <> formatted
   eitherResolutions <- do
@@ -470,7 +469,7 @@ runLegacyImport logs = do
       Log.debug "Downloading dependencies..."
       let installDir = Path.concat [ tmp, ".registry" ]
       FS.Extra.ensureDirectory installDir
-      API.installBuildPlan resolutions installDir
+      MatrixBuilder.installBuildPlan resolutions installDir
       Log.debug $ "Installed to " <> installDir
       Log.debug "Trying compilers one-by-one..."
       selected <- findFirstCompiler
@@ -536,6 +535,7 @@ runLegacyImport logs = do
         { name: manifest.name
         , location: Just manifest.location
         , ref
+        , version: manifest.version
         , compiler
         , resolutions: Just resolutions
         }
@@ -761,7 +761,7 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa
           Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of
             Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." }
             Right result -> pure result
-        pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest
+        pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location tag.name legacyManifest
       case manifest of
         Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err
         Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val
@@ -1463,7 +1463,7 @@ fetchSpagoYaml address ref = do
       | location /= GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } -> do
          Log.warn "spago.yaml file does not use the same location it was fetched from, this is disallowed..."
          pure Nothing
-      Right config -> case SpagoYaml.spagoYamlToManifest config of
+      Right config -> case SpagoYaml.spagoYamlToManifest (un RawVersion ref) config of
        Left err -> do
          Log.warn $ "Failed to convert parsed spago.yaml file to purs.json " <> contents <> "\nwith errors:\n" <> err
          pure Nothing
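Publish payloads now carry an explicit `version` alongside the Git `ref`. A minimal sketch of the resulting record, with the field set inferred from the publish calls in this diff; the `unsafePackageName`/`unsafeVersion` helpers from `Registry.Test.Utils` are assumptions, not part of this change:

```purescript
import Data.Maybe (Maybe(..))
import Registry.Test.Utils (unsafePackageName, unsafeVersion)

-- Sketch only: fields taken from the publish calls in this diff,
-- not the full definition of the publish payload type.
examplePublish =
  { name: unsafePackageName "effect"
  , location: Nothing -- optional; PackageDeleter passes `Just oldMetadata.location`
  , ref: "v4.0.0" -- the Git tag to fetch
  , version: unsafeVersion "4.0.0" -- now explicit rather than derived from the ref
  , compiler: unsafeVersion "0.15.4"
  , resolutions: Nothing
  }
```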
diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs
index 950871d0b..257a7b1a2 100644
--- a/scripts/src/PackageDeleter.purs
+++ b/scripts/src/PackageDeleter.purs
@@ -21,7 +21,6 @@ import Registry.App.API as API
 import Registry.App.CLI.Git as Git
 import Registry.App.Effect.Archive as Archive
 import Registry.App.Effect.Cache as Cache
-import Registry.App.Effect.Comment as Comment
 import Registry.App.Effect.Env as Env
 import Registry.App.Effect.GitHub as GitHub
 import Registry.App.Effect.Log as Log
@@ -160,7 +159,6 @@ main = launchAff_ do
        >>> Pursuit.interpret Pursuit.handlePure
        >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache })
        >>> Cache.interpret _compilerCache (Cache.handleFs cache)
-       >>> Comment.interpret Comment.handleLog
        >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log)
        >>> Env.runResourceEnv resourceEnv
        >>> Run.runBaseAff'
@@ -230,21 +228,25 @@ deleteVersion arguments name version = do
     Just published, Nothing -> pure (Just (Right published))
     Nothing, Just unpublished -> pure (Just (Left unpublished))
     Nothing, Nothing -> pure Nothing
+  -- Read manifest before deleting it (needed for reimport)
+  maybeManifest <- Registry.readManifest name version
   let newMetadata = Metadata $ oldMetadata { published = Map.delete version oldMetadata.published, unpublished = Map.delete version oldMetadata.unpublished }
   Registry.writeMetadata name newMetadata
   Registry.deleteManifest name version
   -- --reimport
   when arguments.reimport do
-    case publishment of
-      Nothing -> Log.error "Cannot reimport a version that was not published"
-      Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished"
-      Just (Right specificPackageMetadata) -> do
+    case publishment, maybeManifest of
+      Nothing, _ -> Log.error "Cannot reimport a version that was not published"
+      Just (Left _), _ -> Log.error "Cannot reimport a version that was specifically unpublished"
+      Just (Right _), Nothing -> Log.error $ "Cannot reimport: manifest not found for " <> formatted
+      Just (Right _), Just (Manifest manifest) -> do
         -- Obtains `newMetadata` via cache
-        API.publish Nothing
+        void $ API.publish Nothing
           { location: Just oldMetadata.location
           , name: name
-          , ref: specificPackageMetadata.ref
+          , ref: manifest.ref
+          , version: version
          , compiler: unsafeFromRight $ Version.parse "0.15.4"
          , resolutions: Nothing
          }

diff --git a/scripts/src/PackageSetUpdater.purs b/scripts/src/PackageSetUpdater.purs
index 95053eed1..29423cf7b 100644
--- a/scripts/src/PackageSetUpdater.purs
+++ b/scripts/src/PackageSetUpdater.purs
@@ -19,7 +19,6 @@ import Node.Path as Path
 import Node.Process as Process
 import Registry.App.CLI.Git as Git
 import Registry.App.Effect.Cache as Cache
-import Registry.App.Effect.Comment as Comment
 import Registry.App.Effect.Env as Env
 import Registry.App.Effect.GitHub as GitHub
 import Registry.App.Effect.Log (LOG)
@@ -114,7 +113,6 @@ main = Aff.launchAff_ do
     # Storage.interpret (Storage.handleReadOnly cache)
     # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef })
     # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1))
-    # Comment.interpret Comment.handleLog
     # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log)
     # Env.runResourceEnv resourceEnv
     # Run.runBaseAff'
diff --git a/scripts/src/PackageTransferrer.purs b/scripts/src/PackageTransferrer.purs
index d203c66de..31e859197 100644
--- a/scripts/src/PackageTransferrer.purs
+++ b/scripts/src/PackageTransferrer.purs
@@ -16,7 +16,6 @@ import Registry.App.API as API
 import Registry.App.Auth as Auth
 import Registry.App.CLI.Git as Git
 import Registry.App.Effect.Cache as Cache
-import Registry.App.Effect.Comment as Comment
 import Registry.App.Effect.Env as Env
 import Registry.App.Effect.GitHub (GITHUB)
 import Registry.App.Effect.GitHub as GitHub
@@ -87,7 +86,6 @@ main = launchAff_ do
     # Storage.interpret (Storage.handleReadOnly cache)
     # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef })
     # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1))
-    # Comment.interpret Comment.handleLog
     # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log)
     # Env.runPacchettiBottiEnv { privateKey, publicKey }
     # Env.runResourceEnv resourceEnv
diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs
index 13280a0af..ce615b5a9 100644
--- a/scripts/src/Solver.purs
+++ b/scripts/src/Solver.purs
@@ -17,7 +17,6 @@ import Data.DateTime.Instant as Instant
 import Data.Foldable (foldMap)
 import Data.Formatter.DateTime as Formatter.DateTime
 import Data.Map as Map
-import Data.Newtype (unwrap)
 import Data.String as String
 import Data.Time.Duration (Milliseconds(..))
 import Effect.Class.Console as Aff
@@ -33,7 +32,6 @@ import Registry.App.API as API
 import Registry.App.CLI.Git as Git
 import Registry.App.Effect.Archive as Archive
 import Registry.App.Effect.Cache as Cache
-import Registry.App.Effect.Comment as Comment
 import Registry.App.Effect.Env as Env
 import Registry.App.Effect.GitHub as GitHub
 import Registry.App.Effect.Log as Log
@@ -153,7 +151,6 @@ main = launchAff_ do
     # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef })
     # Cache.interpret _compilerCache (Cache.handleFs cache)
     # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1))
-    # Comment.interpret Comment.handleLog
     # Env.runResourceEnv resourceEnv
     # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log)
     # Run.runBaseAff'
diff --git a/spago.lock b/spago.lock
index 83d2afb8d..ea939ddeb 100644
--- a/spago.lock
+++ b/spago.lock
@@ -227,7 +227,6 @@
          "exceptions",
          "exists",
          "exitcodes",
-         "fetch",
          "fixed-points",
          "foldable-traversable",
          "foreign",
@@ -239,14 +238,10 @@
          "functors",
          "gen",
          "graphs",
-         "http-methods",
          "identity",
          "integers",
          "invariant",
          "js-date",
-         "js-fetch",
-         "js-promise",
-         "js-promise-aff",
          "js-uri",
          "json",
          "language-cst-parser",
@@ -254,7 +249,6 @@
          "lcg",
          "lists",
          "maybe",
-         "media-types",
          "mmorph",
          "newtype",
          "node-buffer",
@@ -299,11 +293,7 @@
          "unfoldable",
          "unicode",
          "unsafe-coerce",
-         "variant",
-         "web-dom",
-         "web-events",
-         "web-file",
-         "web-streams"
+         "variant"
        ]
      }
    },
@@ -313,20 +303,32 @@
      "dependencies": [
        "aff",
        "arrays",
+       "codec-json",
        "console",
        "datetime",
-       "effect",
-       "either",
-       "maybe",
-       "prelude",
+       "exceptions",
+       "fetch",
+       "integers",
+       "json",
+       "node-child-process",
+       "node-execa",
+       "node-fs",
+       "node-path",
+       "node-process",
+       "ordered-collections",
+       "registry-app",
+       "registry-foreign",
        "registry-lib",
        "registry-test-utils",
+       "routing-duplex",
        "spec",
        "spec-node",
-       "strings"
+       "strings",
+       "transformers"
      ],
      "build_plan": [
        "aff",
+       "aff-promise",
        "ansi",
        "argonaut-codecs",
        "argonaut-core",
@@ -334,6 +336,7 @@
        "arrays",
        "assert",
        "avar",
+       "b64",
        "bifunctors",
        "catenable-lists",
        "codec",
@@ -342,15 +345,21 @@
        "const",
        "contravariant",
        "control",
+       "convertable-options",
        "datetime",
+       "debug",
        "distributive",
+       "dodo-printer",
+       "dotenv",
        "effect",
        "either",
+       "encoding",
        "enums",
        "exceptions",
        "exists",
        "exitcodes",
        "fetch",
+       "filterable",
        "fixed-points",
        "foldable-traversable",
        "foreign",
@@ -362,7 +371,9 @@
        "functors",
        "gen",
        "graphs",
+       "heterogeneous",
        "http-methods",
+       "httpurple",
        "identity",
        "integers",
        "invariant",
@@ -370,27 +381,39 @@
        "js-fetch",
        "js-promise",
        "js-promise-aff",
+       "js-timers",
        "js-uri",
        "json",
+       "justifill",
        "language-cst-parser",
        "lazy",
        "lcg",
        "lists",
+       "literals",
        "maybe",
        "media-types",
        "mmorph",
        "newtype",
        "node-buffer",
+       "node-child-process",
        "node-event-emitter",
+       "node-execa",
        "node-fs",
+       "node-http",
+       "node-human-signals",
+       "node-net",
+       "node-os",
        "node-path",
        "node-process",
        "node-streams",
+       "node-tls",
+       "node-url",
        "nonempty",
        "now",
        "nullable",
        "numbers",
        "open-memoize",
+       "options",
        "optparse",
        "ordered-collections",
        "orders",
@@ -402,19 +425,26 @@
        "prelude",
        "profunctor",
        "profunctor-lenses",
+       "psci-support",
        "quickcheck",
+       "quickcheck-laws",
        "random",
        "record",
+       "record-studio",
        "refs",
+       "registry-app",
+       "registry-foreign",
        "registry-lib",
        "registry-test-utils",
        "routing-duplex",
+       "run",
        "safe-coerce",
        "spec",
        "spec-node",
        "st",
        "strings",
        "tailrec",
+       "these",
        "transformers",
        "tuples",
        "type-equality",
@@ -422,6 +452,9 @@
        "unfoldable",
        "unicode",
        "unsafe-coerce",
+       "unsafe-reference",
+       "untagged-union",
+       "uuidv4",
        "variant",
        "web-dom",
        "web-events",
@@ -604,7 +637,6 @@
          "exceptions",
          "exists",
          "exitcodes",
-         "fetch",
          "fixed-points",
          "foldable-traversable",
          "foreign",
@@ -616,14 +648,10 @@
          "functors",
          "gen",
          "graphs",
-         "http-methods",
          "identity",
          "integers",
          "invariant",
          "js-date",
-         "js-fetch",
"js-promise", - "js-promise-aff", "js-timers", "js-uri", "json", @@ -632,7 +660,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -682,11 +709,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -846,7 +869,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -858,14 +880,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-timers", "js-uri", "json", @@ -874,7 +892,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -924,11 +941,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -1104,22 +1117,16 @@ "path": "test-utils", "core": { "dependencies": [ - "aff", "arrays", "bifunctors", "codec-json", "datetime", - "effect", "either", "exceptions", - "fetch", "foldable-traversable", "formatters", - "integers", "json", "maybe", - "newtype", - "node-process", "ordered-collections", "partial", "prelude", @@ -1152,7 +1159,6 @@ "enums", "exceptions", "exists", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -1164,14 +1170,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-uri", "json", "language-cst-parser", @@ -1179,14 +1181,12 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", "node-event-emitter", "node-fs", "node-path", - "node-process", "node-streams", "nonempty", "now", @@ -1198,7 +1198,6 @@ "parsing", "partial", "pipes", - "posix-types", "prelude", "profunctor", "profunctor-lenses", @@ -1220,11 +1219,7 @@ "unfoldable", "unicode", "unsafe-coerce", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] }, "test": { diff --git a/test-utils/spago.yaml b/test-utils/spago.yaml index d85190964..4362f8e77 100644 --- a/test-utils/spago.yaml +++ b/test-utils/spago.yaml @@ -3,22 +3,16 @@ package: build: pedanticPackages: true dependencies: - - aff - arrays - bifunctors - codec-json - datetime - - effect - either - exceptions - - fetch - foldable-traversable - formatters - - integers - json - maybe - - newtype - - node-process - ordered-collections - partial - prelude diff --git a/test-utils/src/Registry/Test/Assert.purs b/test-utils/src/Registry/Test/Assert.purs index 55c0f2277..2d15e7a74 100644 --- a/test-utils/src/Registry/Test/Assert.purs +++ b/test-utils/src/Registry/Test/Assert.purs @@ -38,6 +38,18 @@ shouldNotContain container elem = when (elem `Foldable.elem` container) do fail (Utils.unsafeStringify elem <> "\n\nshould not be a member of\n\n" <> Utils.unsafeStringify container) +-- | Assert that all elements in `expected` are present in `actual`. +-- | This is a subset check, not an equality check - `actual` may contain +-- | additional elements. +-- | +-- | Useful for E2E tests where a shared database means we can't predict +-- | exact contents, only that certain expected items are present. +shouldContainAll :: forall m a. 
diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs
deleted file mode 100644
index 960484609..000000000
--- a/test-utils/src/Registry/Test/E2E/Client.purs
+++ /dev/null
@@ -1,180 +0,0 @@
--- | HTTP client for making requests to the registry server during E2E tests.
--- | This module provides typed helpers for interacting with the Registry API.
-module Registry.Test.E2E.Client
-  ( Config
-  , ClientError(..)
-  , defaultConfig
-  , configFromEnv
-  , getJobs
-  , getJob
-  , getStatus
-  , publish
-  , pollJob
-  , printClientError
-  ) where
-
-import Prelude
-
-import Codec.JSON.DecodeError as CJ.DecodeError
-import Control.Monad.Error.Class (class MonadThrow, throwError)
-import Control.Monad.Except (runExceptT)
-import Control.Monad.Trans.Class (lift)
-import Data.Array as Array
-import Data.Bifunctor (lmap)
-import Data.Codec.JSON as CJ
-import Data.DateTime (DateTime)
-import Data.Either (Either(..))
-import Data.Formatter.DateTime as Formatter.DateTime
-import Data.Int as Int
-import Data.Maybe (Maybe(..))
-import Data.Newtype (unwrap)
-import Effect (Effect)
-import Effect.Aff (Aff, Milliseconds(..), delay)
-import Effect.Aff.Class (class MonadAff, liftAff)
-import Effect.Exception (Error, error)
-import Effect.Exception as Effect.Exception
-import Fetch (Method(..))
-import Fetch as Fetch
-import JSON as JSON
-import Node.Process as Process
-import Registry.API.V1 (Job, JobId(..), LogLevel)
-import Registry.API.V1 as V1
-import Registry.Internal.Format as Internal.Format
-import Registry.Operation (PublishData)
-import Registry.Operation as Operation
-
--- | Configuration for the E2E test client
-type Config =
-  { baseUrl :: String
-  , timeout :: Milliseconds
-  , pollInterval :: Milliseconds
-  , maxPollAttempts :: Int
-  }
-
--- | Default configuration for production use (port 8080 matches HTTPurple default)
-defaultConfig :: Config
-defaultConfig =
-  { baseUrl: "http://localhost:8080"
-  , timeout: Milliseconds 30000.0
-  , pollInterval: Milliseconds 2000.0
-  , maxPollAttempts: 30
-  }
-
--- | Create config from environment, reading SERVER_PORT.
--- |
--- | SERVER_PORT is required and must be set by the test environment.
--- | See `nix/lib/env.nix` for the centralized environment configuration.
-configFromEnv :: Effect Config
-configFromEnv = do
-  maybePort <- Process.lookupEnv "SERVER_PORT"
-  case maybePort of
-    Nothing -> Effect.Exception.throw "SERVER_PORT environment variable is not set. Run tests via 'nix run .#test-env' or 'nix build .#checks.x86_64-linux.integration'."
-    Just port -> pure $ defaultConfig { baseUrl = "http://localhost:" <> port }
-
--- | Errors that can occur during client operations
-data ClientError
-  = HttpError { status :: Int, body :: String }
-  | ParseError String
-  | Timeout String
-  | NetworkError String
-
-printClientError :: ClientError -> String
-printClientError = case _ of
-  HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body
-  ParseError msg -> "Parse Error: " <> msg
-  Timeout msg -> "Timeout: " <> msg
-  NetworkError msg -> "Network Error: " <> msg
-
--- | Convert a ClientError to an Effect Error for throwing
-toError :: ClientError -> Error
-toError = error <<< printClientError
-
--- | Parse JSON response body using a codec
-parseResponse :: forall a. CJ.Codec a -> String -> Either String a
-parseResponse codec body = do
-  json <- lmap (append "JSON parse error: ") $ JSON.parse body
-  lmap CJ.DecodeError.print $ CJ.decode codec json
-
--- | Make a GET request and decode the response
-get :: forall a. CJ.Codec a -> Config -> String -> Aff (Either ClientError a)
-get codec config path = runExceptT do
-  response <- lift $ Fetch.fetch (config.baseUrl <> path) { method: GET }
-  body <- lift response.text
-  if response.status >= 200 && response.status < 300 then
-    case parseResponse codec body of
-      Left err -> throwError $ ParseError err
-      Right a -> pure a
-  else
-    throwError $ HttpError { status: response.status, body }
-
--- | Make a POST request with JSON body and decode the response
-post :: forall req res. CJ.Codec req -> CJ.Codec res -> Config -> String -> req -> Aff (Either ClientError res)
-post reqCodec resCodec config path reqBody = runExceptT do
-  let jsonBody = JSON.print $ CJ.encode reqCodec reqBody
-  response <- lift $ Fetch.fetch (config.baseUrl <> path)
-    { method: POST
-    , headers: { "Content-Type": "application/json" }
-    , body: jsonBody
-    }
-  responseBody <- lift response.text
-  if response.status >= 200 && response.status < 300 then
-    case parseResponse resCodec responseBody of
-      Left err -> throwError $ ParseError err
-      Right a -> pure a
-  else
-    throwError $ HttpError { status: response.status, body: responseBody }
-
--- | Get the list of jobs
-getJobs :: Config -> Aff (Either ClientError (Array Job))
-getJobs config = get (CJ.array V1.jobCodec) config "/api/v1/jobs"
-
--- | Get a specific job by ID, with optional log filtering
-getJob :: Config -> JobId -> Maybe LogLevel -> Maybe DateTime -> Aff (Either ClientError Job)
-getJob config (JobId jobId) level since = do
-  let
-    params = Array.catMaybes
-      [ level <#> \l -> "level=" <> V1.printLogLevel l
-      , since <#> \s -> "since=" <> Formatter.DateTime.format Internal.Format.iso8601DateTime s
-      ]
-    query = case params of
-      [] -> ""
-      ps -> "?" <> Array.intercalate "&" ps
-  get V1.jobCodec config ("/api/v1/jobs/" <> jobId <> query)
-
--- | Check if the server is healthy
-getStatus :: Config -> Aff (Either ClientError Unit)
-getStatus config = runExceptT do
-  response <- lift $ Fetch.fetch (config.baseUrl <> "/api/v1/status") { method: GET }
-  if response.status == 200 then
-    pure unit
-  else do
-    body <- lift response.text
-    throwError $ HttpError { status: response.status, body }
-
--- | Publish a package
-publish :: Config -> PublishData -> Aff (Either ClientError V1.JobCreatedResponse)
-publish config publishData =
-  post Operation.publishCodec V1.jobCreatedResponseCodec config "/api/v1/publish" publishData
-
--- | Poll a job until it completes or times out
-pollJob
-  :: forall m
-   . MonadAff m
-  => MonadThrow Error m
-  => Config
-  -> JobId
-  -> m Job
-pollJob config jobId = go 1
-  where
-  go attempt
-    | attempt > config.maxPollAttempts =
-        throwError $ toError $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts"
-    | otherwise = do
-        liftAff $ delay config.pollInterval
-        result <- liftAff $ getJob config jobId (Just V1.Debug) Nothing
-        case result of
-          Left err -> throwError $ toError err
-          Right job ->
-            case job.finishedAt of
-              Just _ -> pure job
-              Nothing -> go (attempt + 1)
diff --git a/test-utils/src/Registry/Test/Fixtures.purs b/test-utils/src/Registry/Test/Fixtures.purs
new file mode 100644
index 000000000..28692c13c
--- /dev/null
+++ b/test-utils/src/Registry/Test/Fixtures.purs
@@ -0,0 +1,18 @@
+module Registry.Test.Fixtures where
+
+import Prelude
+
+import Data.Either as Either
+import Data.Maybe (Maybe(..))
+import Partial.Unsafe as Partial
+import Registry.Location (Location(..))
+import Registry.Sha256 (Sha256)
+import Registry.Sha256 as Sha256
+
+-- | A Location for use within tests.
+defaultLocation :: Location
+defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing }
+
+-- | A Sha256 for use within tests.
+defaultHash :: Sha256
+defaultHash = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Failed to parse Sha256") $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04="
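These fixtures previously lived in `Registry.Test.Utils` (they are removed from it in the next hunk), so consuming code only needs the import site updated; a sketch:

```purescript
-- The values are unchanged; only the defining module moved.
import Registry.Test.Fixtures as Fixtures

testLocation = Fixtures.defaultLocation -- was Registry.Test.Utils.defaultLocation
testHash = Fixtures.defaultHash -- was Registry.Test.Utils.defaultHash
```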
diff --git a/test-utils/src/Registry/Test/Utils.purs b/test-utils/src/Registry/Test/Utils.purs
index 2db7280e5..57f177890 100644
--- a/test-utils/src/Registry/Test/Utils.purs
+++ b/test-utils/src/Registry/Test/Utils.purs
@@ -25,7 +25,6 @@ import Registry.PackageName (PackageName)
 import Registry.PackageName as PackageName
 import Registry.Range as Range
 import Registry.SSH as SSH
-import Registry.Sha256 (Sha256)
 import Registry.Sha256 as Sha256
 import Registry.Version (Version)
 import Registry.Version as Version
@@ -149,6 +148,7 @@ unsafeManifest name version dependencies = Manifest
      { url: "https://github.com/purescript/purescript-" <> name <> ".git"
      , subdir: Nothing
      }
+  , ref: "v" <> version
  , description: Nothing
  , owners: Nothing
  , includeFiles: Nothing
@@ -158,11 +158,3 @@ unsafeManifest name version dependencies = Manifest
 -- | Format a package version as a string in the form 'name@X.Y.Z'
 formatPackageVersion :: PackageName -> Version -> String
 formatPackageVersion name version = PackageName.print name <> "@" <> Version.print version
-
--- | A Location for use within tests.
-defaultLocation :: Location
-defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing }
-
--- | A Sha256 for use within tests.
-defaultHash :: Sha256
-defaultHash = fromRight "Failed to parse Sha256" $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04="
diff --git a/types/v1/Manifest.dhall b/types/v1/Manifest.dhall
index e9fe88850..2f1a6fa5b 100644
--- a/types/v1/Manifest.dhall
+++ b/types/v1/Manifest.dhall
@@ -13,6 +13,7 @@ let Manifest =
       , license : License
       , version : Version
       , location : ./Location.dhall
+      , ref : Text
       , owners : Optional (List ./Owner.dhall)
       , description : Optional Text
       , includeFiles : Optional (List Text)
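The `ref : Text` field in the Dhall type mirrors the PureScript change above: manifests now record the Git ref they were published from. Per the `unsafeManifest` hunk, test manifests default it to a `v`-prefixed tag; a sketch of what that yields:

```purescript
import Registry.Test.Utils (unsafeManifest)

-- unsafeManifest now fills `ref` as "v" <> version, so this manifest
-- carries ref = "v1.2.3" alongside version 1.2.3 and the default
-- GitHub location for purescript-effect.
exampleManifest = unsafeManifest "effect" "1.2.3" []
```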