diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index f7bd1d63e..051d1931b 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -56,6 +56,7 @@ spec = do , ref: "v4.0.0" , compiler: Utils.unsafeVersion "0.15.9" , resolutions: Nothing + , version: Utils.unsafeVersion "4.0.0" } -- Submit publish request @@ -79,6 +80,6 @@ spec = do Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages Assert.shouldSatisfy job.finishedAt isJust - Assert.shouldEqual job.jobType V1.PublishJob - Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") - Assert.shouldEqual job.ref "v4.0.0" +-- Assert.shouldEqual job.jobType JobType.PublishJob +-- Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") +-- Assert.shouldEqual job.ref "v4.0.0" diff --git a/app/fixtures/addition_issue_created.json b/app/fixtures/addition_issue_created.json index d0b205555..b0aa93e6c 100644 --- a/app/fixtures/addition_issue_created.json +++ b/app/fixtures/addition_issue_created.json @@ -5,7 +5,7 @@ "assignee": null, "assignees": [], "author_association": "CONTRIBUTOR", - "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", + "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"version\": \"5.0.0\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", "closed_at": null, "comments": 0, "comments_url": "https://api.github.com/repos/purescript/registry/issues/149/comments", diff --git a/app/fixtures/update_issue_comment.json b/app/fixtures/update_issue_comment.json index 5400a7c2e..c5673c4da 100644 --- a/app/fixtures/update_issue_comment.json +++ b/app/fixtures/update_issue_comment.json @@ -2,7 +2,7 @@ "action": "created", "comment": { "author_association": "MEMBER", - "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", + "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"version\": \"1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", "created_at": "2021-03-09T02:03:56Z", "html_url": "https://github.com/purescript/registry/issues/43#issuecomment-793265839", "id": 793265839, diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 89322d52b..1e69a129e 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -9,11 +9,10 @@ module Registry.App.API , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions - , installBuildPlan , packageSetUpdate + , packageSetUpdate2 , packagingTeam , publish - , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -31,7 +30,7 @@ import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Newtype (over, unwrap) +import Data.Newtype (over) import Data.Number.Format as Number.Format import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet @@ -83,6 +82,8 @@ import Registry.App.Legacy.Manifest 
(LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.SQLite (PackageSetJobDetails) +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, ignoredFiles, ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob @@ -116,6 +117,11 @@ import Safe.Coerce as Safe.Coerce type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +packageSetUpdate2 :: forall r. PackageSetJobDetails -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate2 {} = do + -- TODO: have github call into this + pure unit + -- | Process a package set update. Package set updates are only processed via -- | GitHub and not the HTTP API, so they require access to the GitHub env. packageSetUpdate :: forall r. PackageSetUpdateData -> Run (PackageSetUpdateEffects + r) Unit @@ -338,7 +344,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe (Map PackageName Range)) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -556,16 +562,17 @@ publish maybeLegacyIndex payload = do , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish , ". Please try with a later compiler." ] + pure Nothing Nothing -> do Comment.comment $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan verifiedResolutions installedResolutions + MatrixBuilder.installBuildPlan verifiedResolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -573,7 +580,7 @@ publish maybeLegacyIndex payload = do } case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." Right _ -> do @@ -590,12 +597,13 @@ publish maybeLegacyIndex payload = do Right _ -> do FS.Extra.remove tmp Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure Nothing -- In this case the package version has not been published, so we proceed -- with ordinary publishing. 
Nothing -> do
          Log.info "Verifying the package build plan..."
-          compilerIndex <- readCompilerIndex
+          compilerIndex <- MatrixBuilder.readCompilerIndex
           validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions
 
           Comment.comment "Verifying unused and/or missing dependencies..."
@@ -604,7 +612,7 @@
           -- manifest as needed, but we defer compilation until after this check
           -- in case the package manifest and resolutions are adjusted.
           let installedResolutions = Path.concat [ tmp, ".registry" ]
-          installBuildPlan validatedResolutions installedResolutions
+          MatrixBuilder.installBuildPlan validatedResolutions installedResolutions
 
           let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ]
           let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ]
@@ -699,7 +707,7 @@
           -- We clear the installation directory so that no old installed resolutions
           -- stick around.
           Run.liftAff $ FS.Extra.remove installedResolutions
-          installBuildPlan resolutions installedResolutions
+          MatrixBuilder.installBuildPlan resolutions installedResolutions
           compilationResult <- Run.liftAff $ Purs.callCompiler
             { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] }
             , version: Just payload.compiler
@@ -708,7 +716,7 @@
 
           case compilationResult of
             Left compileFailure -> do
-              let error = printCompilerFailure payload.compiler compileFailure
+              let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure
               Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error
             Right _ -> pure unit
 
@@ -770,28 +778,35 @@
             , "). If you want to publish documentation, please try again with a later compiler."
             ]
 
-      Comment.comment "Determining all valid compiler versions for this package..."
-      allCompilers <- PursVersions.pursVersions
-      { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of
-        Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler }
-        Just try -> do
-          found <- findAllCompilers
-            { source: packageSource
-            , manifest
-            , compilers: try
-            }
-          pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded }
+      -- Note: this only runs for the Legacy Importer. In normal operation (i.e.
+      -- when running the server) this is taken care of by follow-up jobs that
+      -- invoke the MatrixBuilder for each compiler version.
+      for_ maybeLegacyIndex \_idx -> do
+        Comment.comment "Determining all valid compiler versions for this package..."
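+        -- We already know the package builds with the publish compiler, so we
+        -- only probe the remaining compilers here and add the publish compiler
+        -- back to the set of successes afterwards.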
+ allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - unless (Map.isEmpty invalidCompilers) do - Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) - Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Comment.comment "Wrote completed metadata to the registry!" - Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp + pure $ Just (un Manifest manifest).dependencies -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -876,32 +891,30 @@ findAllCompilers . { source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult findAllCompilers { source, manifest, compilers } = do - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of Left solverErrors -> do Log.info $ "Failed to solve with compiler " <> Version.print target pure $ Left $ Tuple target (Left solverErrors) - Right (Tuple mbCompiler resolutions) -> do + Right (Tuple compiler resolutions) -> do Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions - case mbCompiler of - Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" 
- Just selected | selected /= target -> Except.throw $ Array.fold + when (compiler /= target) do + Except.throw $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" - , Version.print selected + , Version.print compiler , ") that differs from the target compiler (" , Version.print target , ")." ] - Just _ -> pure unit Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of Nothing -> do Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] FS.Extra.ensureDirectory installed - installBuildPlan resolutions installed + MatrixBuilder.installBuildPlan resolutions installed result <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } , version: Just target @@ -910,7 +923,7 @@ findAllCompilers { source, manifest, compilers } = do FS.Extra.remove workdir case result of Left err -> do - Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> MatrixBuilder.printCompilerFailure target err Right _ -> do Log.debug $ "Compilation succeeded with compiler " <> Version.print target Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } @@ -921,49 +934,6 @@ findAllCompilers { source, manifest, compilers } = do let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } -printCompilerFailure :: Version -> CompilerFailure -> String -printCompilerFailure compiler = case _ of - MissingCompiler -> Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - CompilationError errs -> String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - UnknownError err -> String.joinWith "\n" - [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" - , "```" - , err - , "```" - ] - --- | Install all dependencies indicated by the build plan to the specified --- | directory. Packages will be installed at 'dir/package-name-x.y.z'. -installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit -installBuildPlan resolutions dependenciesDir = do - Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir - -- We fetch every dependency at its resolved version, unpack the tarball, and - -- store the resulting source code in a specified directory for dependencies. - forWithIndex_ resolutions \name version -> do - let - -- This filename uses the format the directory name will have once - -- unpacked, ie. 
package-name-major.minor.patch - filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" - filepath = Path.concat [ dependenciesDir, filename ] - Storage.download name version filepath - Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of - Left error -> do - Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error - Except.throw "Failed to unpack dependency tarball, cannot continue." - Right _ -> - Log.debug $ "Unpacked " <> filename - Run.liftAff $ FS.Aff.unlink filepath - Log.debug $ "Installed " <> formatPackageVersion name version - -- | Parse the name and version from a path to a module installed in the standard -- | form: '-...' parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version } @@ -1034,7 +1004,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep publishJson <- case compilerOutput of Left error -> - Except.throw $ printCompilerFailure compiler error + Except.throw $ MatrixBuilder.printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1181,13 +1151,6 @@ getPacchettiBotti = do packagingTeam :: Team packagingTeam = { org: "purescript", team: "packaging" } -readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex -readCompilerIndex = do - metadata <- Registry.readAllMetadata - manifests <- Registry.readAllManifests - allCompilers <- PursVersions.pursVersions - pure $ Solver.buildCompilerIndex allCompilers manifests metadata - type AdjustManifest = { source :: FilePath , compiler :: Version diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..1e90a8163 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -8,10 +8,12 @@ import Data.String as String import Registry.API.V1 (JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageJob, InsertPackageSetJob, JobInfo, MatrixJobDetails, PackageJobDetails, PackageSetJobDetails, SQLite, StartJob) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +23,20 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. 
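+--
+-- Each constructor below represents a single database query; the final type
+-- argument is the continuation, in the usual encoding for Run effects.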
+
 data Db a
-  = InsertLog LogLine a
+  = InsertPackageJob InsertPackageJob (JobId -> a)
+  | InsertMatrixJob InsertMatrixJob (JobId -> a)
+  | InsertPackageSetJob InsertPackageSetJob (JobId -> a)
+  | FinishJob FinishJob a
+  | StartJob StartJob a
+  | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a)
+  | SelectNextPackageJob (Either String (Maybe PackageJobDetails) -> a)
+  | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a)
+  | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a)
+  | InsertLogLine LogLine a
   | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a)
-  | CreateJob NewJob a
-  | FinishJob JobResult a
-  | SelectJob JobId (Either String SQLite.Job -> a)
-  | RunningJobForPackage PackageName (Either String SQLite.Job -> a)
+  | ResetIncompleteJobs a
 
 derive instance Functor Db
 
@@ -39,28 +48,51 @@ _db = Proxy
 
 -- | Insert a new log line into the database.
 insertLog :: forall r. LogLine -> Run (DB + r) Unit
-insertLog log = Run.lift _db (InsertLog log unit)
+insertLog log = Run.lift _db (InsertLogLine log unit)
 
--- | Select all logs for a given job, filtered by loglevel and a time cutoff.
+-- | Select all logs for a given job, filtered by log level and an optional time cutoff.
 selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine)
 selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity)
 
--- | Create a new job in the database.
-createJob :: forall r. NewJob -> Run (DB + r) Unit
-createJob newJob = Run.lift _db (CreateJob newJob unit)
-
 -- | Set a job in the database to the 'finished' state.
-finishJob :: forall r. JobResult -> Run (DB + r) Unit
-finishJob jobResult = Run.lift _db (FinishJob jobResult unit)
+finishJob :: forall r. FinishJob -> Run (DB + r) Unit
+finishJob job = Run.lift _db (FinishJob job unit)
 
 -- | Select a job by ID from the database.
-selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job)
-selectJob jobId = Run.lift _db (SelectJob jobId identity)
+selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo)
+selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow
+
+-- | Insert a new package job into the database.
+insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) JobId
+insertPackageJob job = Run.lift _db (InsertPackageJob job identity)
+
+-- | Insert a new matrix job into the database.
+insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId
+insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity)
+
+-- | Insert a new package set job into the database.
+insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) JobId
+insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity)
 
--- | Select a job by package name from the database, failing if there is no
--- | current job available for that package name.
-runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job)
-runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity)
+-- | Start a job in the database.
+startJob :: forall r. StartJob -> Run (DB + r) Unit
+startJob job = Run.lift _db (StartJob job unit)
+
+-- | Select the next package job from the database.
+selectNextPackageJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageJobDetails)
+selectNextPackageJob = Run.lift _db (SelectNextPackageJob identity) >>= Except.rethrow
+
+-- | Select the next matrix job from the database.
+selectNextMatrixJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails)
+selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow
+
+-- | Select the next package set job from the database.
+selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails)
+selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow
+
+-- | Reset all incomplete jobs in the database so they can be picked up again.
+resetIncompleteJobs :: forall r. Run (DB + r) Unit
+resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit)
 
 interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a
 interpret handler = Run.interpret (Run.on _db handler Run.send)
@@ -70,28 +102,52 @@ type SQLiteEnv = { db :: SQLite }
 
 -- | Interpret DB by interacting with the SQLite database on disk.
 handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a
 handleSQLite env = case _ of
-  InsertLog log next -> do
-    Run.liftEffect $ SQLite.insertLog env.db log
-    pure next
+  InsertPackageJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertPackageJob env.db job
+    pure $ reply result
 
-  SelectLogsByJob jobId logLevel since reply -> do
-    logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since
-    unless (Array.null logs.fail) do
-      Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail
-    pure $ reply logs.success
+  InsertMatrixJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job
+    pure $ reply result
+
+  InsertPackageSetJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertPackageSetJob env.db job
+    pure $ reply result
 
-  CreateJob newJob next -> do
-    Run.liftEffect $ SQLite.createJob env.db newJob
+  FinishJob job next -> do
+    Run.liftEffect $ SQLite.finishJob env.db job
     pure next
 
-  FinishJob jobResult next -> do
-    Run.liftEffect $ SQLite.finishJob env.db jobResult
+  StartJob job next -> do
+    Run.liftEffect $ SQLite.startJob env.db job
+    pure next
+
+  SelectJobInfo jobId reply -> do
+    result <- Run.liftEffect $ SQLite.selectJobInfo env.db jobId
+    pure $ reply result
+
+  SelectNextPackageJob reply -> do
+    result <- Run.liftEffect $ SQLite.selectNextPackageJob env.db
+    pure $ reply result
+
+  SelectNextMatrixJob reply -> do
+    result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db
+    pure $ reply result
+
+  SelectNextPackageSetJob reply -> do
+    result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db
+    pure $ reply result
+
+  InsertLogLine log next -> do
+    Run.liftEffect $ SQLite.insertLogLine env.db log
     pure next
 
-  SelectJob jobId reply -> do
-    job <- Run.liftEffect $ SQLite.selectJob env.db jobId
-    pure $ reply job
+  SelectLogsByJob jobId logLevel since reply -> do
+    { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since
+    unless (Array.null fail) do
+      Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail
+    pure $ reply success
 
-  RunningJobForPackage name reply -> do
-    job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name
-    pure $ reply job
+  ResetIncompleteJobs next -> do
+    Run.liftEffect $ SQLite.resetIncompleteJobs env.db
+    pure next
diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs
index 6fc4b31b6..a1cb72c0a 100644
--- a/app/src/App/Effect/Log.purs
+++ b/app/src/App/Effect/Log.purs
@@ -134,5 +134,5 @@ handleDb env = case _ of
       let
         msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message)
         row = { timestamp, level, jobId: env.job, message: msg }
-      Run.liftEffect $ SQLite.insertLog env.db row
+      
Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 56422ab64..3764398cf 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -58,7 +58,7 @@ main = launchAff_ $ do Right packageOperation -> case packageOperation of Publish payload -> - API.publish Nothing payload + void $ API.publish Nothing payload Authenticated payload -> do -- If we receive an authenticated operation via GitHub, then we -- re-sign it with pacchettibotti credentials if and only if the diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..e638cc684 --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,90 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.DateTime (diff) +import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import Node.Process as Process +import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor +import Registry.App.Server.Router as Router + +main :: Effect Unit +main = do + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + case env.vars.resourceEnv.healthchecksUrl of + Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" + Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + Aff.launchAff_ $ jobExecutor env + Router.runRouter env + where + healthcheck :: String -> Aff Unit + healthcheck healthchecksUrl = loop limit + where + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + + loop n = do + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> do + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." + + jobExecutor :: ServerEnv -> Aff Unit + jobExecutor env = do + loop initialRestartDelay + where + initialRestartDelay = Milliseconds 100.0 + + loop restartDelay = do + start <- nowUTC + result <- JobExecutor.runJobExecutor env + end <- nowUTC + + Console.error case result of + Left error -> "Job executor failed: " <> Aff.message error + Right _ -> "Job executor exited for no reason." + + -- This is a heuristic: if the executor keeps crashing immediately, we + -- restart with an exponentially increasing delay, but once the executor + -- had a run longer than a minute, we start over with a small delay. 
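+      -- For example: with the 100ms initial delay, repeated quick crashes back
+      -- off at 100ms, 200ms, 400ms, and so on, until a run survives for longer
+      -- than a minute.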
+      let nextRestartDelay
+            | end `diff` start > Seconds 60.0 = initialRestartDelay
+            | otherwise = restartDelay <> restartDelay
+
+      Aff.delay nextRestartDelay
+      loop nextRestartDelay
diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs
index 7a046414d..5e586ebae 100644
--- a/app/src/App/Prelude.purs
+++ b/app/src/App/Prelude.purs
@@ -60,7 +60,7 @@ import Data.List (List) as Extra
 import Data.Map (Map) as Extra
 import Data.Map as Map
 import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe
-import Data.Newtype (class Newtype, un) as Extra
+import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra
 import Data.Newtype as Newtype
 import Data.Nullable (Nullable, toMaybe, toNullable) as Extra
 import Data.Set (Set) as Extra
diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js
index 8158695fc..9fbbeeec9 100644
--- a/app/src/App/SQLite.js
+++ b/app/src/App/SQLite.js
@@ -1,5 +1,11 @@
 import Database from "better-sqlite3";
 
+const JOB_INFO_TABLE = 'job_info';
+const LOGS_TABLE = 'logs';
+const PACKAGE_JOBS_TABLE = 'package_jobs';
+const MATRIX_JOBS_TABLE = 'matrix_jobs';
+const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs';
+
 export const connectImpl = (path, logger) => {
   logger("Connecting to database at " + path);
   let db = new Database(path, {
@@ -11,49 +17,162 @@ export const connectImpl = (path, logger) => {
   return db;
 };
 
-export const insertLogImpl = (db, logLine) => {
-  db.prepare(
-    "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)"
-  ).run(logLine);
+export const selectJobInfoImpl = (db, jobId) => {
+  const stmt = db.prepare(`
+    SELECT * FROM ${JOB_INFO_TABLE}
+    WHERE jobId = ? LIMIT 1
+  `);
+  return stmt.get(jobId);
+};
+
+// A generic helper function for inserting a new package, matrix, or package set
+// job. Not exported because this should always be done as part of a more general
+// job insertion. A job is expected to always include 'jobId' and 'createdAt'
+// fields, though other fields will be required depending on the job.
+const _insertJob = (db, table, columns, job) => {
+  const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns]));
+  const missingFields = requiredFields.filter(field => !(field in job));
+  const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field));
+
+  if (missingFields.length > 0) {
+    throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`);
+  }
+
+  if (extraFields.length > 0) {
+    throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`);
+  }
+
+  const insertInfo = db.prepare(`
+    INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success)
+    VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success)
+  `);
+
+  const insertJob = db.prepare(`
+    INSERT INTO ${table} (${columns.join(', ')})
+    VALUES (${columns.map(col => `@${col}`).join(', ')})
+  `);
+
+  const insert = db.transaction((job) => {
+    insertInfo.run({
+      jobId: job.jobId,
+      createdAt: job.createdAt,
+      startedAt: null,
+      finishedAt: null,
+      success: 0
+    });
+    insertJob.run(job);
+  });
+
+  return insert(job);
+};
+
+export const insertPackageJobImpl = (db, job) => {
+  const columns = ['jobId', 'jobType', 'packageName', 'payload'];
+  return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job);
 };
 
-export const selectLogsByJobImpl = (db, jobId, logLevel) => {
-  const row = db
-    .prepare(
-      "SELECT * FROM logs WHERE jobId = ? AND level >= ? 
ORDER BY timestamp ASC"
-    )
-    .all(jobId, logLevel);
-  return row;
+export const insertMatrixJobImpl = (db, job) => {
+  const columns = ['jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload'];
+  return _insertJob(db, MATRIX_JOBS_TABLE, columns, job);
 };
 
-export const createJobImpl = (db, job) => {
-  db.prepare(
-    "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)"
-  ).run(job);
+export const insertPackageSetJobImpl = (db, job) => {
+  const columns = ['jobId', 'payload'];
+  return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job);
 };
 
-export const finishJobImpl = (db, result) => {
-  db.prepare(
-    "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId"
-  ).run(result);
+export const selectNextPackageJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${PACKAGE_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    AND info.startedAt IS NULL
+    ORDER BY info.createdAt ASC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const selectJobImpl = (db, jobId) => {
-  const row = db
-    .prepare("SELECT * FROM jobs WHERE jobId = ? LIMIT 1")
-    .get(jobId);
-  return row;
+export const selectNextMatrixJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${MATRIX_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    AND info.startedAt IS NULL
+    ORDER BY info.createdAt ASC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const runningJobForPackageImpl = (db, packageName) => {
-  const row = db
-    .prepare(
-      "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? ORDER BY createdAt ASC LIMIT 1"
-    )
-    .get(packageName);
-  return row;
+export const selectNextPackageSetJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${PACKAGE_SET_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    AND info.startedAt IS NULL
+    ORDER BY info.createdAt ASC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const deleteIncompleteJobsImpl = (db) => {
-  db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run();
+export const startJobImpl = (db, args) => {
+  const stmt = db.prepare(`
+    UPDATE ${JOB_INFO_TABLE}
+    SET startedAt = @startedAt
+    WHERE jobId = @jobId
+  `);
+  return stmt.run(args);
+};
+
+export const finishJobImpl = (db, args) => {
+  const stmt = db.prepare(`
+    UPDATE ${JOB_INFO_TABLE}
+    SET success = @success, finishedAt = @finishedAt
+    WHERE jobId = @jobId
+  `);
+  return stmt.run(args);
+};
+
+// TODO: we should keep track of retries somehow. Either we record how many
+// times a job has been retried and give up at some point, notifying the
+// trustees, or we notify right away on every retry so we can look into them.
+export const resetIncompleteJobsImpl = (db) => {
+  const stmt = db.prepare(`
+    UPDATE ${JOB_INFO_TABLE}
+    SET startedAt = NULL
+    WHERE finishedAt IS NULL
+    AND startedAt IS NOT NULL`);
+  return stmt.run();
+};
+
+export const insertLogLineImpl = (db, logLine) => {
+  const stmt = db.prepare(`
+    INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp)
+    VALUES (@jobId, @level, @message, @timestamp)
+  `);
+  return stmt.run(logLine);
+};
+
+export const selectLogsByJobImpl = (db, jobId, logLevel, since) => {
+  let query = `
+    SELECT * FROM ${LOGS_TABLE}
+    WHERE jobId = ?
+    
AND level >= ? + `; + + const params = [jobId, logLevel]; + + if (since !== null) { + query += ' AND timestamp >= ?'; + params.push(since); + } + + query += ' ORDER BY timestamp ASC'; + + const stmt = db.prepare(query); + return stmt.all(...params); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..09f91f612 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,434 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob - , SQLite + ( SQLite + , ConnectOptions , connect - , createJob - , deleteIncompleteJobs + , JobInfo + , selectJobInfo + , InsertPackageJob + , insertPackageJob + , InsertMatrixJob + , insertMatrixJob + , InsertPackageSetJob + , insertPackageSetJob + , FinishJob , finishJob - , insertLog - , runningJobForPackage - , selectJob + , StartJob + , startJob + , resetIncompleteJobs + , insertLogLine , selectLogsByJob + , PackageJobDetails + , selectNextPackageJob + , MatrixJobDetails + , selectNextMatrixJob + , PackageSetJobDetails + , selectNextPackageSetJob ) where import Registry.App.Prelude -import Data.Array as Array +import Codec.JSON.DecodeError as JSON.DecodeError import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Nullable as Nullable +import Data.UUID.Random as UUID +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Registry.API.V1 (JobId(..), LogLevel, LogLine) import Registry.API.V1 as API.V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType +import Registry.Operation (PackageOperation, PackageSetOperation) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.Version as Version +-- | An active database connection acquired with `connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-------------------------------------------------------------------------------- +-- job_info table -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. 
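+-- | A row is created with neither `startedAt` nor `finishedAt` set; the job
+-- | executor fills in `startedAt` when it picks the job up, and `finishedAt`
+-- | together with `success` once the job has completed.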
+type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + isSuccess <- case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: isSuccess + } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +selectJobInfo db (JobId jobId) = do + maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId + pure $ traverse jobInfoFromJSRep maybeJobInfo + +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSStartJob = + { jobId :: String + , startedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: if success then 1 else 0 + , finishedAt: DateTime.format 
Internal.Format.iso8601DateTime finishedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit + +foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +resetIncompleteJobs :: SQLite -> Effect Unit +resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +newJobId :: forall m. MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id -type NewJob = +-------------------------------------------------------------------------------- +-- package_jobs table + +type PackageJobDetails = { jobId :: JobId - , jobType :: JobType - , createdAt :: DateTime , packageName :: PackageName - , ref :: String + , payload :: PackageOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime } -type JSNewJob = +type JSPackageJobDetails = { jobId :: String - , jobType :: String - , createdAt :: String , packageName :: String - , ref :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId - , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt - , packageName: PackageName.print packageName - , ref +packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails +packageJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + pure + { jobId: JobId jobId + , packageName: name + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) + +selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) +selectNextPackageJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db + pure $ traverse packageJobDetailsFromJSRep maybeJobDetails + +type InsertPackageJob = + { payload :: PackageOperation } -type JobResult = - { jobId :: JobId - , finishedAt :: DateTime - , success :: Boolean +type JSInsertPackageJob = + { jobId :: String + , jobType :: String + , packageName :: String + , payload :: String + , createdAt :: String + } + +insertPackageJobToJSRep :: JobId -> DateTime -> InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , jobType: JobType.print jobType + , packageName: PackageName.print name + , payload: stringifyJson Operation.packageOperationCodec payload + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + where + { jobType, 
name } = case payload of + Operation.Publish { name } -> { jobType: JobType.PublishJob, name } + Operation.Authenticated { payload: Operation.Unpublish { name } } -> { jobType: JobType.UnpublishJob, name } + Operation.Authenticated { payload: Operation.Transfer { name } } -> { jobType: JobType.TransferJob, name } + +foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertPackageJob :: SQLite -> InsertPackageJob -> Effect JobId +insertPackageJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- matrix_jobs table + +type InsertMatrixJob = + { packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version } -type JSJobResult = +type JSInsertMatrixJob = { jobId :: String - , finishedAt :: String - , success :: Int + , createdAt :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +insertMatrixJobToJSRep :: JobId -> DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep jobId now { packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } -type Job = +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect JobId +insertMatrixJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep jobId now job + pure jobId + +type MatrixJobDetails = { jobId :: JobId - , jobType :: JobType , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version , createdAt :: DateTime - , finishedAt :: Maybe DateTime - , success :: Boolean + , startedAt :: Maybe DateTime } -type JSJob = +type JSMatrixJobDetails = { jobId :: String - , jobType :: String , packageName :: String - , ref :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String , createdAt :: String - , finishedAt :: Nullable String - , success :: Int + , startedAt :: Nullable String } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, 
ref: raw.ref } +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextMatrixJobImpl :: EffectFn1 SQLite (Nullable JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +-------------------------------------------------------------------------------- +-- package_set_jobs table + +type PackageSetJobDetails = + { jobId :: JobId + , payload :: PackageSetOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSPackageSetJobDetails = + { jobId :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } + +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt } = do + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + pure + { jobId: JobId jobId + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageSetJobImpl :: EffectFn1 SQLite (Nullable JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails + +type InsertPackageSetJob = + { payload :: PackageSetOperation + } + +type JSInsertPackageSetJob = + { jobId :: String + , createdAt :: String + , payload :: String + } -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob +insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , payload: stringifyJson Operation.packageSetOperationCodec payload + } + +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit -finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect JobId +insertPackageSetJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep jobId now job + pure jobId -selectJob :: SQLite -> 
JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob +-------------------------------------------------------------------------------- +-- logs table -runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } + +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp + } -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int (Nullable String) (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. 
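+-- | When a `since` timestamp is provided, only logs recorded at or after that
+-- | time are returned.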
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = map (DateTime.format Internal.Format.iso8601DateTime) since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + (Nullable.toNullable timestamp) + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index b9aa35b1c..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,343 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Data.Codec.JSON as CJ -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Newtype (unwrap) -import Data.String as String -import Data.UUID.Random as UUID -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API (COMPILER_CACHE, _compilerCache) -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv, serverPort) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except - -newJobId :: forall m. 
MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish Nothing publish - - Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." - - Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , 
resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. - -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - -- Start healthcheck ping loop if URL is configured - case env.vars.resourceEnv.healthchecksUrl of - Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." 
- Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - pure unit - - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional serverPort - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env router - } - pure unit - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret (Source.handle Source.Recent) - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 000000000..07baa935c --- /dev/null +++ b/app/src/App/Server/Env.purs @@ -0,0 
+1,188 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Comment (COMMENT) +import Registry.App.Effect.Comment as Comment +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. 
Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we reset all the jobs that are not completed, + -- because they are stale runs from previous startups of the server; + -- resetting them lets the job executor pick them up again once it starts. + liftEffect $ SQLite.resetIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Comment.interpret Comment.handleLog + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/JobExecutor.purs
b/app/src/App/Server/JobExecutor.purs new file mode 100644 index 000000000..63a5cbddd --- /dev/null +++ b/app/src/App/Server/JobExecutor.purs @@ -0,0 +1,184 @@ +module Registry.App.Server.JobExecutor + ( runJobExecutor + ) where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) + +import Control.Parallel as Parallel +import Data.Array as Array +import Data.DateTime (DateTime) +import Data.Map as Map +import Data.Set as Set +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Registry.App.API as API +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex +import Registry.Operation as Operation +import Registry.PackageName as PackageName +import Registry.Version as Version +import Run (Run) +import Run.Except (EXCEPT) + +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = runEffects env do + Log.info "Starting Job Executor" + -- Before starting the executor we check if we need to run a whole-registry + -- compiler update: whenever a new compiler is published we need to see which + -- packages are compatible with it; this is a responsibility of the MatrixBuilder, + -- but it needs to be triggered to know there's a new version out. + -- To do that, we ask PursVersions what the compilers are, then we look for + -- the compatibility list of the latest `prelude` version. If the new compiler + -- is missing, then we know that we have not attempted to check compatibility + -- with it (since the latest `prelude` has to be compatible by definition), + -- and we can enqueue a "compile everything" job here, which will be the first + -- thing that the JobExecutor picks up. + void $ MatrixBuilder.checkIfNewCompiler + >>= traverse upgradeRegistryToNewCompiler + Db.resetIncompleteJobs + loop + where + loop = do + maybeJob <- findNextAvailableJob + case maybeJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 1000.0) + loop + + Just job -> do + now <- nowUTC + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + Db.startJob { jobId, startedAt: now } + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + jobResult <- liftAff do + let execute = Just <$> (runEffects env $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> unwrap jobId <> " timed out." + pure false + + Just (Left err) -> do + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err + pure false + + Just (Right _) -> do + Log.info $ "Job " <> unwrap jobId <> " succeeded."
+ pure true + + finishedAt <- nowUTC + Db.finishJob { jobId, finishedAt, success } + loop + +-- TODO: here we only get a single package for each operation, but really we should +-- have all of them and toposort them. There is something in ManifestIndex but not +-- sure that's what we need +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = runMaybeT + $ (PackageJob <$> MaybeT Db.selectNextPackageJob) + <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) + <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) + +executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob _ = case _ of + PackageJob { payload: Operation.Publish payload@{ name, version } } -> do + maybeDependencies <- API.publish Nothing payload + -- The above operation will throw if not successful, and return a map of + -- dependencies of the package only if it has not been published before. + for_ maybeDependencies \dependencies -> do + -- At this point this package has been verified with one compiler only. + -- So we need to enqueue compilation jobs for (1) same package, all the other + -- compilers, and (2) same compiler, all packages that depend on this one + -- TODO here we are building the compiler index, but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler: payload.compiler, name, version, dependencies, compilerIndex } + samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (Array.fromFoldable $ Set.union samePackageAllCompilers sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ "Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + Db.insertMatrixJob + { payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } + PackageJob { payload: Operation.Authenticated auth } -> + API.authenticated auth + MatrixJob details@{ packageName, packageVersion } -> do + maybeDependencies <- MatrixBuilder.runMatrixJob details + -- Unlike the publishing case, after verifying a compilation here we only need + -- to followup with trying to compile the packages that depend on this one + for_ maybeDependencies \dependencies -> do + -- TODO here we are building the compiler index, but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler: details.compilerVersion, name: packageName, version: packageVersion, dependencies, compilerIndex } + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (Array.fromFoldable sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ "Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + Db.insertMatrixJob + { payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } + PackageSetJob _details -> + -- TODO: need to pass in the package_sets effect + -- API.packageSetUpdate2 details + pure unit + +upgradeRegistryToNewCompiler :: forall r. 
Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit +upgradeRegistryToNewCompiler newCompilerVersion = do + allManifests <- Registry.readAllManifests + for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do + -- Note: we enqueue compilation jobs only for packages with no dependencies, + -- because from them we should be able to reach the whole of the registry: + -- as they complete, new jobs for their dependants will be queued up. + when (Map.isEmpty manifest.dependencies) do + Log.info $ "Enqueuing matrix job for _new_ compiler " + <> Version.print newCompilerVersion + <> ", package " + <> PackageName.print manifest.name + <> "@" + <> Version.print manifest.version + void $ Db.insertMatrixJob + { payload: Map.empty + , compilerVersion: newCompilerVersion + , packageName: manifest.name + , packageVersion: manifest.version + } diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs new file mode 100644 index 000000000..7ae98d972 --- /dev/null +++ b/app/src/App/Server/MatrixBuilder.purs @@ -0,0 +1,230 @@ +module Registry.App.Server.MatrixBuilder + ( checkIfNewCompiler + , installBuildPlan + , printCompilerFailure + , readCompilerIndex + , runMatrixJob + , solveDependantsForCompiler + , solveForAllCompilers + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray +import Data.Map as Map +import Data.Set as Set +import Data.Set.NonEmpty as NonEmptySet +import Data.String as String +import Effect.Aff as Aff +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.SQLite (MatrixJobDetails) +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Tmp as Tmp +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata as Metadata +import Registry.PackageName as PackageName +import Registry.Range as Range +import Registry.Solver as Solver +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +runMatrixJob :: forall r.
MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) +runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do + workdir <- Tmp.mkTmpDir + let installed = Path.concat [ workdir, ".registry" ] + FS.Extra.ensureDirectory installed + installBuildPlan (Map.insert packageName packageVersion buildPlan) installed + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just compilerVersion + , cwd: Just workdir + } + FS.Extra.remove workdir + case result of + Left err -> do + Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion + <> ":\n" + <> printCompilerFailure compilerVersion err + pure Nothing + Right _ -> do + Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion + + Registry.readMetadata packageName >>= case _ of + Nothing -> do + Log.error $ "No existing metadata for " <> PackageName.print packageName + pure Nothing + Just (Metadata metadata) -> do + let + metadataWithCompilers = metadata + { published = Map.update + ( \publishedMetadata@{ compilers } -> + Just $ publishedMetadata { compilers = NonEmptySet.toUnfoldable1 $ NonEmptySet.fromFoldable1 $ NonEmptyArray.cons compilerVersion compilers } + ) + packageVersion + metadata.published + } + Registry.writeMetadata packageName (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.info "Wrote completed metadata to the registry!" + Registry.readManifest packageName packageVersion >>= case _ of + Just (Manifest manifest) -> pure (Just manifest.dependencies) + Nothing -> do + Log.error $ "No existing manifest for " <> PackageName.print packageName <> "@" <> Version.print packageVersion + pure Nothing + +-- TODO: feels like we should be doing this at startup and use the cache instead +-- of reading files all over again +readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex +readCompilerIndex = do + metadata <- Registry.readAllMetadata + manifests <- Registry.readAllManifests + allCompilers <- PursVersions.pursVersions + pure $ Solver.buildCompilerIndex allCompilers manifests metadata + +-- | Install all dependencies indicated by the build plan to the specified +-- | directory. Packages will be installed at 'dir/package-name-x.y.z'. +installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit +installBuildPlan resolutions dependenciesDir = do + Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir + -- We fetch every dependency at its resolved version, unpack the tarball, and + -- store the resulting source code in a specified directory for dependencies. + forWithIndex_ resolutions \name version -> do + let + -- This filename uses the format the directory name will have once + -- unpacked, i.e. package-name-major.minor.patch + filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" + filepath = Path.concat [ dependenciesDir, filename ] + Storage.download name version filepath + Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of + Left error -> do + Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error + Except.throw "Failed to unpack dependency tarball, cannot continue."
+ Right _ -> + Log.debug $ "Unpacked " <> filename + Run.liftAff $ FS.Aff.unlink filepath + Log.debug $ "Installed " <> formatPackageVersion name version + +printCompilerFailure :: Version -> CompilerFailure -> String +printCompilerFailure compiler = case _ of + MissingCompiler -> Array.fold + [ "Compilation failed because the build plan compiler version " + , Version.print compiler + , " is not supported. Please try again with a different compiler." + ] + CompilationError errs -> String.joinWith "\n" + [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" + , "```" + , Purs.printCompilerErrors errs + , "```" + ] + UnknownError err -> String.joinWith "\n" + [ "Compilation failed with version " <> Version.print compiler <> " because of an error:" + , "```" + , err + , "```" + ] + +type MatrixSolverData = + { compilerIndex :: Solver.CompilerIndex + , compiler :: Version + , name :: PackageName + , version :: Version + , dependencies :: Map PackageName Range + } + +type MatrixSolverResult = + { name :: PackageName + , version :: Version + , compiler :: Version + , resolutions :: Map PackageName Version + } + +solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Set MatrixSolverResult) +solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = do + -- remove the compiler we tested with from the set of all of them + compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions + newJobs <- for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target <> " for package " <> PackageName.print name + case Solver.solveWithCompiler (Range.exact target) compilerIndex dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print target + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of + true -> pure $ Just { compiler: target, resolutions, name, version } + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print target + , ")." + ] + pure Nothing + pure $ Set.fromFoldable $ Array.catMaybes newJobs + +solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Set MatrixSolverResult) +solveDependantsForCompiler { compilerIndex, name, version, compiler } = do + manifestIndex <- Registry.readAllManifests + let dependentManifests = ManifestIndex.dependants manifestIndex name version + newJobs <- for dependentManifests \(Manifest manifest) -> do + -- we first check whether we have already attempted this package with this compiler, + -- either in the form of having it in the metadata already, or as a failed compilation + -- (i.e.
if we find compilers in the metadata for this version, we only try this compiler + -- if it is newer than all of them, because the previous ones have already been attempted) + shouldAttemptToCompile <- Registry.readMetadata manifest.name >>= case _ of + Nothing -> pure false + Just metadata -> pure $ case Map.lookup manifest.version (un Metadata metadata).published of + Nothing -> false + Just { compilers } -> all (_ < compiler) compilers + case shouldAttemptToCompile of + false -> pure Nothing + true -> do + -- if all good then run the solver + Log.debug $ "Trying compiler " <> Version.print compiler <> " for package " <> PackageName.print manifest.name + case Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print compiler + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of + true -> pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version } + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print compiler + , ")." + ] + pure Nothing + pure $ Set.fromFoldable $ Array.catMaybes newJobs + +checkIfNewCompiler :: forall r. Run (EXCEPT String + LOG + REGISTRY + AFF + r) (Maybe Version) +checkIfNewCompiler = do + Log.info "Checking if there's a new compiler in town..." + latestCompiler <- NonEmptyArray.foldr1 max <$> PursVersions.pursVersions + maybeMetadata <- Registry.readMetadata $ unsafeFromRight $ PackageName.parse "prelude" + pure $ maybeMetadata >>= \(Metadata metadata) -> + Map.findMax metadata.published + >>= \{ key: _version, value: { compilers } } -> do + case all (_ < latestCompiler) compilers of + -- all compilers compatible with the latest prelude are older than this one + true -> Just latestCompiler + false -> Nothing diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs new file mode 100644 index 000000000..9a3f08074 --- /dev/null +++ b/app/src/App/Server/Router.purs @@ -0,0 +1,104 @@ +module Registry.App.Server.Router where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Cont (ContT) + +import Data.Codec.JSON as CJ +import Effect.Aff as Aff +import HTTPurple (Method(..), Request, Response) +import HTTPurple as HTTPurple +import HTTPurple.Status as Status +import Registry.API.V1 (LogLevel(..), Route(..)) +import Registry.API.V1 as V1 +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env as Env +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) +import Registry.Operation (PackageOperation) +import Registry.Operation as Operation +import Registry.PackageName as PackageName +import Run (Run) +import Run.Except as Run.Except + +runRouter :: ServerEnv -> Effect Unit +runRouter env = do + -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) + port <- liftEffect $ Env.lookupOptional Env.serverPort + void $ HTTPurple.serve + { hostname: "0.0.0.0" + , port + } + { route: V1.routes + , router: runServer + } + where + runServer :: Request Route -> Aff Response + runServer request = do + result <- runEffects env (router request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run
ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + insertPackageJob $ Operation.Publish publish + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload + insertPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + insertPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected transfer operation." + + -- TODO return jobs + Jobs, Get -> do + now <- liftEffect nowUTC + jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: now, finishedAt: Nothing, success: true, logs: [] } ] + + Job jobId { level: maybeLogLevel, since }, Get -> do + let logLevel = fromMaybe Error maybeLogLevel + logs <- lift $ Db.selectLogsByJob jobId logLevel since + lift (Run.Except.runExcept $ Db.selectJobInfo jobId) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.notFound + Right Nothing -> do + HTTPurple.notFound + Right (Just job) -> + jsonOk V1.jobCodec + { jobId + , createdAt: job.createdAt + , finishedAt: job.finishedAt + , success: job.success + , logs + } + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound + where + insertPackageJob :: PackageOperation -> ContT Response (Run _) Response + insertPackageJob operation = do + lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) + jobId <- lift $ Db.insertPackageJob { payload: operation } + jsonOk V1.jobCreatedResponseCodec { jobId } + diff --git a/app/test/App/API.purs b/app/test/App/API.purs index caaf6c215..63dcccc3d 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -96,12 +96,13 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version , resolutions: Nothing } -- First, we publish the package. Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs + void $ API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -158,10 +159,11 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + void $ API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs -- We can also verify that transitive dependencies are added for legacy -- packages. 
@@ -172,10 +174,11 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + void $ API.publish (Just (toLegacyIndex idx)) transitivePublishArgs -- We should verify the resulting metadata file is correct Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..d2c6baf18 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Nothing @@ -103,6 +106,7 @@ preludeAdditionString = { "name": "prelude", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" @@ -121,6 +125,7 @@ packageNameTooLongString = { "name": "packagenamewayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyytoolong", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..f4f1e68f3 --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,55 @@ +-- migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + 
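-- startedAt is set when the job executor picks the job up; finishedAt and + -- success are recorded when it completes (success stays 0 until then). +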
startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Package-oriented jobs (publish/unpublish/transfer) +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs (one compiler, all packages) +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package set jobs +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); + +-- migrate:down + +DROP TABLE job_info; +DROP TABLE package_jobs; +DROP TABLE matrix_jobs; +DROP TABLE package_set_jobs; +DROP TABLE logs; diff --git a/db/schema.sql b/db/schema.sql index 116de1dda..1baf6403f 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,21 +1,44 @@ CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key); -CREATE TABLE jobs ( - jobId text primary key not null, - jobType text not null, - packageName text not null, - ref text not null, - createdAt text not null, - finishedAt text, - success integer not null default 0 +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
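+ -- The payload is expected to be a JSON object mapping package names to the + -- exact resolved versions in the build plan.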
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..4bae692f5 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,8 +15,6 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.PackageName (PackageName) -import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG @@ -66,9 +64,6 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , ref :: String , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -78,9 +73,6 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec - , packageName: PackageName.codec - , ref: CJ.string , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean @@ -94,26 +86,6 @@ derive instance Newtype JobId _ jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob - -derive instance Eq JobType - -parseJobType :: String -> Either String JobType -parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - -printJobType :: JobType -> String -printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum printJobType (hush <<< parseJobType) - type LogLine = { level :: LogLevel , message :: String diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs new file mode 100644 index 000000000..dbc4eaf01 --- /dev/null +++ b/lib/src/JobType.purs @@ -0,0 +1,27 @@ +module Registry.JobType where + +import Prelude + +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Sum as CJ.Sum +import Data.Either (Either(..), hush) + +data JobType = PublishJob | UnpublishJob | TransferJob + +derive instance Eq JobType + +parse :: String -> Either String JobType +parse = case _ of + "publish" -> Right PublishJob + "unpublish" -> Right UnpublishJob + "transfer" -> Right TransferJob + j -> Left $ "Invalid job type " <> show j + +print :: JobType -> String +print = case _ of + PublishJob -> "publish" + UnpublishJob -> "unpublish" + TransferJob -> "transfer" + +codec :: CJ.Codec JobType +codec = CJ.Sum.enumSum print (hush <<< parse) diff --git 
a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index 4837b49ed..eb3b08480 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -7,11 +7,13 @@ -- | https://github.com/purescript/registry-index module Registry.ManifestIndex ( ManifestIndex + , IncludeRanges(..) + , delete + , dependants , empty , fromSet , insert , insertIntoEntryFile - , delete , lookup , maximalIndex , packageEntryDirectory @@ -20,10 +22,10 @@ module Registry.ManifestIndex , printEntry , readEntryFile , removeFromEntryFile + , toArray , toMap - , toSortedArray , topologicalSort - , IncludeRanges(..) + , toSortedArray , writeEntryFile ) where @@ -87,13 +89,18 @@ empty = ManifestIndex Map.empty toMap :: ManifestIndex -> Map PackageName (Map Version Manifest) toMap (ManifestIndex index) = index --- | Produce an array of manifests topologically sorted by dependencies. -toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest -toSortedArray includeRanges (ManifestIndex index) = topologicalSort includeRanges $ Set.fromFoldable do +-- | Produce an array of all the manifests +toArray :: ManifestIndex -> Array Manifest +toArray (ManifestIndex index) = do Tuple _ versions <- Map.toUnfoldableUnordered index Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] +-- | Produce an array of all the manifests, topologically sorted by dependencies. +toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest +toSortedArray includeRanges index = + topologicalSort includeRanges $ Set.fromFoldable $ toArray index + -- | Look up a package version's manifest in the manifest index. lookup :: PackageName -> Version -> ManifestIndex -> Maybe Manifest lookup name version (ManifestIndex index) = @@ -199,6 +206,13 @@ topologicalSort includeRanges manifests = IgnoreRanges -> versions [ Tuple dependency included ] +dependants :: ManifestIndex -> PackageName -> Version -> Array Manifest +dependants idx packageName version = idx + # toSortedArray ConsiderRanges + # Array.filter \(Manifest { dependencies }) -> case Map.lookup packageName dependencies of + Nothing -> false + Just range -> Range.includes range version + -- | Calculate the directory containing this package in the registry index, -- | using the following format: -- | diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index ddc39b48b..c54bed31e 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,20 +20,15 @@ module Registry.Metadata import Prelude -import Control.Alt ((<|>)) -import Control.Monad.Except (Except, except) import Data.Array.NonEmpty (NonEmptyArray) -import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.DateTime (DateTime) -import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor -import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Location (Location) import Registry.Location as Location diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..262ceb3db 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,8 +14,8 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) 
, PackageOperation(..) , PackageSetOperation(..) , PackageSetUpdateData @@ -23,6 +23,9 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageName + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec @@ -58,6 +61,25 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -74,6 +96,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -84,6 +107,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } @@ -178,6 +202,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. +packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. -- | -- | For full details, see the registry spec: diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index 929894645..d3dcec10c 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -19,6 +19,7 @@ import Data.List.NonEmpty as NEL import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map import Data.Maybe (Maybe(..), fromMaybe, maybe, maybe') +import Data.Maybe as Maybe import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) @@ -81,11 +82,11 @@ buildCompilerIndex pursCompilers index metadata = CompilerIndex do -- | Solve the given dependencies using a dependency index that includes compiler -- | versions, such that the solution prunes results that would fall outside -- | a compiler range accepted by all dependencies. 
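-- | -- | A sketch of the updated calling convention (bindings are illustrative); the -- | selected compiler is now returned directly rather than as a `Maybe`: -- | -- | ```purescript -- | case solveWithCompiler (Range.exact target) compilerIndex dependencies of -- |   Left errors -> ... -- no build plan solves for this compiler -- |   Right (Tuple selected resolutions) -> ... -- selected :: Version -- | ```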
-solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version))
+solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple Version (Map PackageName Version))
 solveWithCompiler pursRange (CompilerIndex index) required = do
   let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs")
   results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) }
-  let pursVersion = Map.lookup purs results
+  let pursVersion = Maybe.fromMaybe' (\_ -> Partial.unsafeCrashWith "Produced a compiler-derived build plan with no compiler!") $ Map.lookup purs results
   pure $ Tuple pursVersion $ Map.delete purs results

 -- | Data from the registry index, listing dependencies for each version of
diff --git a/lib/test/Registry/Operation.purs b/lib/test/Registry/Operation.purs
index 2ccb4075a..1400e70ee 100644
--- a/lib/test/Registry/Operation.purs
+++ b/lib/test/Registry/Operation.purs
@@ -54,7 +54,8 @@ minimalPublish =
 {
   "compiler": "0.15.6",
   "name": "my-package",
-  "ref": "v1.0.0"
+  "ref": "v1.0.0",
+  "version": "1.0.0"
 }"""

 fullPublish :: String
@@ -67,7 +68,8 @@ fullPublish =
     "subdir": "core"
   },
   "name": "my-package",
-  "ref": "c23snabhsrib39"
+  "ref": "c23snabhsrib39",
+  "version": "1.0.0"
 }"""

 unpublish :: String
diff --git a/nix/overlay.nix b/nix/overlay.nix
index 2ac1705fb..6c8d5b848 100644
--- a/nix/overlay.nix
+++ b/nix/overlay.nix
@@ -183,7 +183,7 @@ in
         ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ];

       # To update: run `nix build .#server` and copy the hash from the error
-      npmDepsHash = "sha256-vm6k4DUDWUgPcPeym3YhA1hIg1LbHCDRBSH+7Zs52Uw=";
+      npmDepsHash = "sha256-Ju7R6Sa+NIHD8fkxLxicqToPLxLD4RM4wvl6bktE/7Y=";

       installPhase = ''
         mkdir -p $out
@@ -236,7 +236,7 @@ in
     registry-server = prev.callPackage
       (buildRegistryPackage {
         name = "registry-server";
-        module = "Registry.App.Server";
+        module = "Registry.App.Main";
         description = "PureScript Registry API server";
         src = ../app;
         spagoLock = app;
diff --git a/package-lock.json b/package-lock.json
index 93959c062..e22731749 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -10,7 +10,10 @@
         "app",
         "foreign",
         "lib"
-      ]
+      ],
+      "dependencies": {
+        "spago": "^0.93.19"
+      }
     },
     "app": {
       "name": "registry-app",
@@ -1598,6 +1601,12 @@
         "node": ">=14.0.0"
       }
     },
+    "node_modules/argparse": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+      "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
+      "license": "Python-2.0"
+    },
     "node_modules/asn1": {
       "version": "0.2.6",
       "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
@@ -1606,6 +1615,12 @@
         "safer-buffer": "~2.1.0"
       }
     },
+    "node_modules/balanced-match": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+      "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+      "license": "MIT"
+    },
     "node_modules/base64-js": {
       "version": "1.5.1",
       "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
@@ -1648,6 +1663,15 @@
         "prebuild-install": "^7.1.1"
       }
     },
+    "node_modules/big-integer": {
+      "version": "1.6.52",
+      "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz",
+      "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==",
+      "license": "Unlicense",
+      "engines": {
+        "node": ">=0.6"
+      }
+    },
     "node_modules/bindings": {
       "version": "1.5.0",
       "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz",
@@ -1676,6 +1700,27 @@
       "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz",
       "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA=="
     },
+    "node_modules/bplist-parser": {
+      "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz",
+      "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==",
+      "license": "MIT",
+      "dependencies": {
+        "big-integer": "^1.6.44"
+      },
+      "engines": {
+        "node": ">= 5.10.0"
+      }
+    },
+    "node_modules/brace-expansion": {
+      "version": "1.1.11",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+      "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+      "dependencies": {
+        "balanced-match": "^1.0.0",
+        "concat-map": "0.0.1"
+      }
+    },
     "node_modules/braces": {
       "version": "3.0.3",
       "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
@@ -1719,6 +1764,21 @@
         "node": ">=10.0.0"
       }
     },
+    "node_modules/bundle-name": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz",
+      "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==",
+      "license": "MIT",
+      "dependencies": {
+        "run-applescript": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/chownr": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
@@ -1727,6 +1787,12 @@
         "node": ">=10"
       }
     },
+    "node_modules/concat-map": {
+      "version": "0.0.1",
+      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+      "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
+      "license": "MIT"
+    },
     "node_modules/cpu-features": {
       "version": "0.0.9",
       "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz",
@@ -1741,6 +1807,20 @@
         "node": ">=10.0.0"
       }
     },
+    "node_modules/cross-spawn": {
+      "version": "7.0.6",
+      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
+      "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
+      "license": "MIT",
+      "dependencies": {
+        "path-key": "^3.1.0",
+        "shebang-command": "^2.0.0",
+        "which": "^2.0.1"
+      },
+      "engines": {
+        "node": ">= 8"
+      }
+    },
     "node_modules/decompress-response": {
       "version": "6.0.0",
       "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
@@ -1763,6 +1843,52 @@
         "node": ">=4.0.0"
       }
     },
+    "node_modules/default-browser": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz",
+      "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==",
+      "license": "MIT",
+      "dependencies": {
+        "bundle-name": "^3.0.0",
+        "default-browser-id": "^3.0.0",
+        "execa": "^7.1.1",
+        "titleize": "^3.0.0"
+      },
+      "engines": {
+        "node": ">=14.16"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/default-browser-id": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz",
+      "integrity": "sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==",
+      "license": "MIT",
+      "dependencies": {
+        "bplist-parser": "^0.2.0",
+        "untildify": "^4.0.0"
+      },
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/define-lazy-prop": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz",
+      "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/deprecation": {
       "version": "2.3.1",
       "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
@@ -1784,6 +1910,50 @@
         "once": "^1.4.0"
       }
     },
+    "node_modules/entities": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz",
+      "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==",
+      "license": "BSD-2-Clause",
+      "funding": {
+        "url": "https://github.com/fb55/entities?sponsor=1"
+      }
+    },
+    "node_modules/env-paths": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz",
+      "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==",
+      "license": "MIT",
+      "engines": {
+        "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/execa": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz",
+      "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==",
+      "license": "MIT",
+      "dependencies": {
+        "cross-spawn": "^7.0.3",
+        "get-stream": "^6.0.1",
+        "human-signals": "^4.3.0",
+        "is-stream": "^3.0.0",
+        "merge-stream": "^2.0.0",
+        "npm-run-path": "^5.1.0",
+        "onetime": "^6.0.0",
+        "signal-exit": "^3.0.7",
+        "strip-final-newline": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.18.0 || ^16.14.0 || >=18.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sindresorhus/execa?sponsor=1"
+      }
+    },
     "node_modules/expand-template": {
       "version": "2.0.3",
       "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz",
@@ -1892,6 +2062,12 @@
         "node": ">=8"
       }
     },
+    "node_modules/fs.realpath": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+      "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
+      "license": "ISC"
+    },
     "node_modules/fuse.js": {
       "version": "6.6.2",
       "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-6.6.2.tgz",
@@ -1900,11 +2076,44 @@
         "node": ">=10"
       }
     },
+    "node_modules/get-stream": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
+      "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/github-from-package": {
       "version": "0.0.0",
       "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz",
       "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="
     },
+    "node_modules/glob": {
+      "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
+      "license": "ISC",
+      "dependencies": {
+        "fs.realpath": "^1.0.0",
+        "inflight": "^1.0.4",
+        "inherits": "2",
+        "minimatch": "^3.1.1",
+        "once": "^1.3.0",
+        "path-is-absolute": "^1.0.0"
+      },
+      "engines": {
+        "node": "*"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/glob-parent": {
       "version": "5.1.2",
       "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
@@ -1921,6 +2130,15 @@
       "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
       "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
     },
+    "node_modules/human-signals": {
+      "version": "4.3.1",
+      "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz",
+      "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==",
+      "license": "Apache-2.0",
+      "engines": {
+        "node": ">=14.18.0"
+      }
+    },
     "node_modules/ieee754": {
       "version": "1.2.1",
       "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
@@ -1940,6 +2158,17 @@
         }
       ]
     },
+    "node_modules/inflight": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+      "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+      "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
+      "license": "ISC",
+      "dependencies": {
+        "once": "^1.3.0",
+        "wrappy": "1"
+      }
+    },
     "node_modules/inherits": {
       "version": "2.0.4",
       "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
@@ -1950,6 +2179,21 @@
       "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
       "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
     },
+    "node_modules/is-docker": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz",
+      "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==",
+      "license": "MIT",
+      "bin": {
+        "is-docker": "cli.js"
+      },
+      "engines": {
+        "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/is-extglob": {
       "version": "2.1.1",
       "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
@@ -1969,6 +2213,24 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/is-inside-container": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz",
+      "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==",
+      "license": "MIT",
+      "dependencies": {
+        "is-docker": "^3.0.0"
+      },
+      "bin": {
+        "is-inside-container": "cli.js"
+      },
+      "engines": {
+        "node": ">=14.16"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/is-number": {
       "version": "7.0.0",
       "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
@@ -1985,6 +2247,51 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/is-stream": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz",
+      "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==",
+      "license": "MIT",
+      "engines": {
+        "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/is-wsl": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz",
+      "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==",
+      "license": "MIT",
+      "dependencies": {
+        "is-docker": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/is-wsl/node_modules/is-docker": {
+      "version": "2.2.1",
+      "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz",
+      "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==",
+      "license": "MIT",
+      "bin": {
+        "is-docker": "cli.js"
+      },
+      "engines": {
+        "node": ">=8"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/isexe": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+      "license": "ISC"
+    },
     "node_modules/jsonfile": {
       "version": "6.1.0",
       "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz",
@@ -2004,6 +2311,15 @@
         "jsonrepair": "bin/cli.js"
       }
     },
+    "node_modules/linkify-it": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz",
+      "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==",
+      "license": "MIT",
+      "dependencies": {
+        "uc.micro": "^1.0.1"
+      }
+    },
     "node_modules/lru-cache": {
       "version": "6.0.0",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
@@ -2015,6 +2331,34 @@
         "node": ">=10"
       }
     },
+    "node_modules/markdown-it": {
+      "version": "12.3.2",
+      "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz",
+      "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==",
+      "license": "MIT",
+      "dependencies": {
+        "argparse": "^2.0.1",
+        "entities": "~2.1.0",
+        "linkify-it": "^3.0.1",
+        "mdurl": "^1.0.1",
+        "uc.micro": "^1.0.5"
+      },
+      "bin": {
+        "markdown-it": "bin/markdown-it.js"
+      }
+    },
+    "node_modules/mdurl": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz",
+      "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==",
+      "license": "MIT"
+    },
+    "node_modules/merge-stream": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz",
+      "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==",
+      "license": "MIT"
+    },
     "node_modules/merge2": {
       "version": "1.4.1",
       "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
@@ -2035,6 +2379,18 @@
         "node": ">=8.6"
       }
     },
+    "node_modules/mimic-fn": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz",
+      "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/mimic-response": {
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz",
@@ -2046,6 +2402,18 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
+    "node_modules/minimatch": {
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
+      "license": "ISC",
+      "dependencies": {
+        "brace-expansion": "^1.1.7"
+      },
+      "engines": {
+        "node": "*"
+      }
+    },
     "node_modules/minimist": {
       "version": "1.2.8",
       "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
@@ -2142,6 +2510,33 @@
         }
       }
     },
+    "node_modules/npm-run-path": {
+      "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz",
+      "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==",
+      "license": "MIT",
+      "dependencies": {
+        "path-key": "^4.0.0"
+      },
+      "engines": {
+        "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/npm-run-path/node_modules/path-key": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz",
+      "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/once": {
       "version": "1.4.0",
       "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
@@ -2150,6 +2545,56 @@
         "wrappy": "1"
       }
     },
+    "node_modules/onetime": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz",
+      "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==",
+      "license": "MIT",
+      "dependencies": {
+        "mimic-fn": "^4.0.0"
+      },
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/open": {
+      "version": "9.1.0",
+      "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz",
+      "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==",
+      "license": "MIT",
+      "dependencies": {
+        "default-browser": "^4.0.0",
+        "define-lazy-prop": "^3.0.0",
+        "is-inside-container": "^1.0.0",
+        "is-wsl": "^2.2.0"
+      },
+      "engines": {
+        "node": ">=14.16"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/path-is-absolute": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+      "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
+      "engines": {
+        "node": ">=0.10.0"
+      }
+    },
+    "node_modules/path-key": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
+      "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=8"
+      }
+    },
     "node_modules/picomatch": {
       "version": "2.3.1",
       "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
@@ -2195,6 +2640,15 @@
         "once": "^1.3.1"
       }
     },
+    "node_modules/punycode": {
+      "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
+      "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=6"
+      }
+    },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
       "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
@@ -2262,6 +2716,110 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/run-applescript": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz",
+      "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==",
+      "license": "MIT",
+      "dependencies": {
+        "execa": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/run-applescript/node_modules/execa": {
+      "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz",
+      "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==",
+      "license": "MIT",
+      "dependencies": {
+        "cross-spawn": "^7.0.3",
+        "get-stream": "^6.0.0",
+        "human-signals": "^2.1.0",
+        "is-stream": "^2.0.0",
+        "merge-stream": "^2.0.0",
+        "npm-run-path": "^4.0.1",
+        "onetime": "^5.1.2",
+        "signal-exit": "^3.0.3",
+        "strip-final-newline": "^2.0.0"
+      },
+      "engines": {
+        "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sindresorhus/execa?sponsor=1"
+      }
+    },
+    "node_modules/run-applescript/node_modules/human-signals": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
+      "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==",
+      "license": "Apache-2.0",
+      "engines": {
+        "node": ">=10.17.0"
+      }
+    },
+    "node_modules/run-applescript/node_modules/is-stream": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
+      "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=8"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/run-applescript/node_modules/mimic-fn": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz",
+      "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=6"
+      }
+    },
+    "node_modules/run-applescript/node_modules/npm-run-path": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz",
+      "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==",
+      "license": "MIT",
+      "dependencies": {
+        "path-key": "^3.0.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/run-applescript/node_modules/onetime": {
+      "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
+      "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
+      "license": "MIT",
+      "dependencies": {
+        "mimic-fn": "^2.1.0"
+      },
+      "engines": {
+        "node": ">=6"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/run-applescript/node_modules/strip-final-newline": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
+      "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=6"
+      }
+    },
     "node_modules/run-parallel": {
       "version": "1.2.0",
       "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
@@ -2322,6 +2880,33 @@
         "node": ">=10"
       }
     },
+    "node_modules/shebang-command": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
+      "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
+      "license": "MIT",
+      "dependencies": {
+        "shebang-regex": "^3.0.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/shebang-regex": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
+      "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=8"
+      }
+    },
+    "node_modules/signal-exit": {
+      "version": "3.0.7",
+      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
+      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
+      "license": "ISC"
+    },
     "node_modules/simple-concat": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz",
@@ -2365,6 +2950,34 @@
         "simple-concat": "^1.0.0"
       }
     },
+    "node_modules/spago": {
+      "version": "0.93.19",
+      "resolved": "https://registry.npmjs.org/spago/-/spago-0.93.19.tgz",
+      "integrity": "sha512-BOSwPQSbULxlFmTjf5YXrvQtvQjRsqHdcbHo60ENbj4W1N8yPlyWKHzgRiayi7VE4av+d0v6x1OBGGL5lO+vsQ==",
+      "license": "BSD-3-Clause",
+      "dependencies": {
+        "better-sqlite3": "^8.6.0",
+        "env-paths": "^3.0.0",
+        "fast-glob": "^3.2.11",
+        "fs-extra": "^10.0.0",
+        "fuse.js": "^6.5.3",
+        "glob": "^7.1.6",
+        "markdown-it": "^12.0.4",
+        "open": "^9.1.0",
+        "punycode": "^2.3.0",
+        "semver": "^7.3.5",
+        "spdx-expression-parse": "^3.0.1",
+        "ssh2": "^1.14.0",
+        "supports-color": "^9.2.3",
+        "tar": "^6.1.11",
+        "tmp": "^0.2.1",
+        "xhr2": "^0.2.1",
+        "yaml": "^2.1.1"
+      },
+      "bin": {
+        "spago": "bin/bundle.js"
+      }
+    },
     "node_modules/spdx-exceptions": {
       "version": "2.3.0",
       "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz",
@@ -2409,6 +3022,18 @@
         "safe-buffer": "~5.2.0"
       }
     },
+    "node_modules/strip-final-newline": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz",
+      "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/strip-json-comments": {
       "version": "2.0.1",
       "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
@@ -2422,6 +3047,18 @@
       "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz",
       "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA=="
     },
+    "node_modules/supports-color": {
+      "version": "9.4.0",
+      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz",
+      "integrity": "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/chalk/supports-color?sponsor=1"
+      }
+    },
     "node_modules/tar": {
       "version": "6.2.1",
       "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
@@ -2469,6 +3106,18 @@
         "node": ">=6"
       }
     },
+    "node_modules/titleize": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz",
+      "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/tmp": {
       "version": "0.2.4",
       "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz",
@@ -2515,6 +3164,12 @@
       "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
       "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
     },
+    "node_modules/uc.micro": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz",
+      "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==",
+      "license": "MIT"
+    },
     "node_modules/universal-user-agent": {
       "version": "6.0.1",
       "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
@@ -2528,6 +3183,15 @@
         "node": ">= 10.0.0"
       }
     },
+    "node_modules/untildify": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz",
+      "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=8"
+      }
+    },
     "node_modules/util-deprecate": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@@ -2555,11 +3219,35 @@
         "webidl-conversions": "^3.0.0"
       }
     },
+    "node_modules/which": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
+      "license": "ISC",
+      "dependencies": {
+        "isexe": "^2.0.0"
+      },
+      "bin": {
+        "node-which": "bin/node-which"
+      },
+      "engines": {
+        "node": ">= 8"
+      }
+    },
     "node_modules/wrappy": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
       "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
     },
+    "node_modules/xhr2": {
+      "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/xhr2/-/xhr2-0.2.1.tgz",
+      "integrity": "sha512-sID0rrVCqkVNUn8t6xuv9+6FViXjUVXq8H5rWOH2rz9fDNQEd4g0EA2XlcEdJXRz5BMEn4O1pJFdT+z4YHhoWw==",
+      "license": "MIT",
+      "engines": {
+        "node": ">= 6"
+      }
+    },
     "node_modules/yallist": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
diff --git a/package.json b/package.json
index 76bc4e96e..5066e42c0 100644
--- a/package.json
+++ b/package.json
@@ -6,5 +6,8 @@
     "app",
     "foreign",
     "lib"
-  ]
+  ],
+  "dependencies": {
+    "spago": "^0.93.19"
+  }
 }
diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs
index d642d41dc..783ee353c 100644
--- a/scripts/src/LegacyImporter.purs
+++ b/scripts/src/LegacyImporter.purs
@@ -76,6 +76,7 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali
 import Registry.App.Legacy.Manifest as Legacy.Manifest
 import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec)
 import Registry.App.Manifest.SpagoYaml as SpagoYaml
+import Registry.App.Server.MatrixBuilder as MatrixBuilder
 import Registry.Foreign.FSExtra as FS.Extra
 import Registry.Foreign.Octokit (Address, Tag)
 import Registry.Foreign.Octokit as Octokit
@@ -300,7 +301,7 @@ runLegacyImport logs = do
       Just ref -> pure ref

   Log.debug "Building dependency index with compiler versions..."
-  compilerIndex <- API.readCompilerIndex
+  compilerIndex <- MatrixBuilder.readCompilerIndex

   Log.debug $ "Solving dependencies for " <> formatted
   eitherResolutions <- do
@@ -405,7 +406,7 @@ runLegacyImport logs = do
           Log.debug "Downloading dependencies..."
           let installDir = Path.concat [ tmp, ".registry" ]
           FS.Extra.ensureDirectory installDir
-          API.installBuildPlan resolutions installDir
+          MatrixBuilder.installBuildPlan resolutions installDir
           Log.debug $ "Installed to " <> installDir
           Log.debug "Trying compilers one-by-one..."
           selected <- findFirstCompiler
@@ -471,6 +472,7 @@ runLegacyImport logs = do
             { name: manifest.name
             , location: Just manifest.location
             , ref
+            , version: manifest.version
             , compiler
             , resolutions: Just resolutions
             }
diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs
index f0cb1c63f..e0de363ca 100644
--- a/scripts/src/PackageDeleter.purs
+++ b/scripts/src/PackageDeleter.purs
@@ -239,10 +239,11 @@ deleteVersion arguments name version = do
    Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished"
    Just (Right specificPackageMetadata) -> do
      -- Obtains `newMetadata` via cache
-      API.publish Nothing
+      void $ API.publish Nothing
        { location: Just oldMetadata.location
        , name: name
        , ref: specificPackageMetadata.ref
+        , version
        , compiler: unsafeFromRight $ Version.parse "0.15.4"
        , resolutions: Nothing
        }
diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs
index 8fa9a7070..aa2820e16 100644
--- a/scripts/src/Solver.purs
+++ b/scripts/src/Solver.purs
@@ -17,7 +17,6 @@ import Data.DateTime.Instant as Instant
 import Data.Foldable (foldMap)
 import Data.Formatter.DateTime as Formatter.DateTime
 import Data.Map as Map
-import Data.Newtype (unwrap)
 import Data.String as String
 import Data.Time.Duration (Milliseconds(..))
 import Effect.Class.Console as Aff
diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs
index 960484609..ff34107df 100644
--- a/test-utils/src/Registry/Test/E2E/Client.purs
+++ b/test-utils/src/Registry/Test/E2E/Client.purs
@@ -74,14 +74,14 @@ configFromEnv = do

 -- | Errors that can occur during client operations
 data ClientError
   = HttpError { status :: Int, body :: String }
-  | ParseError String
+  | ParseError { msg :: String, raw :: String }
   | Timeout String
   | NetworkError String

 printClientError :: ClientError -> String
 printClientError = case _ of
   HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body
-  ParseError msg -> "Parse Error: " <> msg
+  ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw
   Timeout msg -> "Timeout: " <> msg
   NetworkError msg -> "Network Error: " <> msg
@@ -102,7 +102,7 @@ get codec config path = runExceptT do
   body <- lift response.text
   if response.status >= 200 && response.status < 300 then
     case parseResponse codec body of
-      Left err -> throwError $ ParseError err
+      Left err -> throwError $ ParseError { msg: err, raw: body }
       Right a -> pure a
   else
     throwError $ HttpError { status: response.status, body }
@@ -119,7 +119,7 @@ post reqCodec resCodec config path reqBody = runExceptT do
   responseBody <- lift response.text
   if response.status >= 200 && response.status < 300 then
     case parseResponse resCodec responseBody of
-      Left err -> throwError $ ParseError err
+      Left err -> throwError $ ParseError { msg: err, raw: responseBody }
       Right a -> pure a
   else
     throwError $ HttpError { status: response.status, body: responseBody }