diff --git a/doc/src/concepts.md b/doc/src/concepts.md index 7c5be0ba..2761f255 100644 --- a/doc/src/concepts.md +++ b/doc/src/concepts.md @@ -24,9 +24,9 @@ are four actions a project can define. of your repository. These flakes must expose a `typhonJobs` attribute, which in turn declares jobs for your project. -- The `begin` and `end` actions are run at the beginning and end of all jobs of - your project. They are typically used to set statuses on your repository, but - can also be used for deployment. +- The `pre` and `post` actions are run before and after all jobs of your + project. They are typically used to set statuses on your repository, but can + also be used for deployment. - The `webhook` action is triggered by calls to a specific endpoint of the API. It outputs commands for Typhon to update jobsets or evaluate a jobset. It is @@ -51,5 +51,5 @@ is evaluated and the corresponding derivations are built. ## Jobs -Jobs are derivations defined in `typhonJobs`. A job run consists of the `begin` -action, the derivation build, and the `end` action. +Jobs are derivations defined in `typhonJobs`. A job run consists of the `pre` +action, the derivation build, and the `post` action. diff --git a/flake.lock b/flake.lock index 605f8c7d..b0ac8adf 100644 --- a/flake.lock +++ b/flake.lock @@ -45,6 +45,22 @@ "type": "github" } }, + "flake-compat_2": { + "flake": false, + "locked": { + "lastModified": 1673956053, + "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, "flake-utils": { "inputs": { "systems": "systems" }, @@ -62,7 +78,76 @@ "type": "indirect" } }, + "lowdown-src": { + "flake": false, + "locked": { + "lastModified": 1633514407, + "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", + "owner": "kristapsdz", + "repo": "lowdown", + "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", + "type": "github" + }, + "original": { + "owner": "kristapsdz", + "repo": "lowdown", + "type": "github" + } + }, + "nix": { + "inputs": { + "flake-compat": "flake-compat_2", + "lowdown-src": "lowdown-src", + "nixpkgs": "nixpkgs", + "nixpkgs-regression": "nixpkgs-regression" + }, + "locked": { + "lastModified": 1695928433, + "narHash": "sha256-0LqIW6V/khHyZf4GlXcZZUzFs8Ot22eZej+pwn++4qw=", + "owner": "nixos", + "repo": "nix", + "rev": "b7e712f9fd3b35e7b75180fcfbe90dce6c9b06a4", + "type": "github" + }, + "original": { + "owner": "nixos", + "repo": "nix", + "type": "github" + } + }, "nixpkgs": { + "locked": { + "lastModified": 1695283060, + "narHash": "sha256-CJz71xhCLlRkdFUSQEL0pIAAfcnWFXMzd9vXhPrnrEg=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "31ed632c692e6a36cfc18083b88ece892f863ed4", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-23.05-small", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-regression": { + "locked": { + "lastModified": 1643052045, + "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + } + }, + "nixpkgs_2": { "locked": { "lastModified": 1695145219, "narHash": "sha256-Eoe9IHbvmo5wEDeJXKFOpKUwxYJIOxKUesounVccNYk=", @@ -82,7 +167,8 @@ "crane": "crane",
"flake-compat": "flake-compat", "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs", + "nix": "nix", + "nixpkgs": "nixpkgs_2", "rust-overlay": "rust-overlay" } }, diff --git a/flake.nix b/flake.nix index 19526416..575f2484 100644 --- a/flake.nix +++ b/flake.nix @@ -26,6 +26,8 @@ flake-utils.follows = "flake-utils"; }; }; + + nix.url = "github:nixos/nix"; }; outputs = inputs: import ./nix/outputs.nix {sources = inputs;}; diff --git a/nix/devshells.nix b/nix/devshells.nix index 1aefeb4b..ccfdb29c 100644 --- a/nix/devshells.nix +++ b/nix/devshells.nix @@ -25,6 +25,7 @@ in rec { ; }; DATABASE_URL = "sqlite:typhon.sqlite"; + TYPHON_FLAKE = ../typhon-flake; }; webapp = pkgs.mkShell { diff --git a/nix/jobs.nix b/nix/jobs.nix new file mode 100644 index 00000000..5c7c7eba --- /dev/null +++ b/nix/jobs.nix @@ -0,0 +1,6 @@ +{ + sources ? import ./sources.nix, + systems ? ["x86_64-linux"], + lib ? sources.nixpkgs.lib, +}: +lib.genAttrs systems (system: import ./checks {inherit sources system;}) diff --git a/nix/lib/github/mkGithubProject.nix b/nix/lib/github/mkGithubProject.nix index d6fd55cb..70f10123 100644 --- a/nix/lib/github/mkGithubProject.nix +++ b/nix/lib/github/mkGithubProject.nix @@ -22,8 +22,8 @@ in { meta = {inherit title description homepage;}; actions = { jobsets = mkGithubJobsets {inherit owner repo;}; - begin = mkGithubStatus {inherit owner repo;}; - end = mkGithubStatus {inherit owner repo;}; + pre = mkGithubStatus {inherit owner repo;}; + post = mkGithubStatus {inherit owner repo;}; webhook = githubWebhook; }; inherit secrets; diff --git a/nix/lib/mkProject.nix b/nix/lib/mkProject.nix index 228d7dcf..8640c2bc 100644 --- a/nix/lib/mkProject.nix +++ b/nix/lib/mkProject.nix @@ -26,8 +26,8 @@ in { mkdir $out cd $out ${linkAction "jobsets"} - ${linkAction "begin"} - ${linkAction "end"} + ${linkAction "pre"} + ${linkAction "post"} ${linkAction "webhook"} ${linkSecrets} '' diff --git a/nix/nixpkgs.nix b/nix/nixpkgs.nix index ac68c61e..0bc7a3ba 100644 --- a/nix/nixpkgs.nix +++ b/nix/nixpkgs.nix @@ -4,5 +4,8 @@ }: import sources.nixpkgs { inherit system; - overlays = [(import sources.rust-overlay)]; + overlays = [ + (import sources.rust-overlay) + sources.nix.overlays.default + ]; } diff --git a/nix/packages/server.nix b/nix/packages/server.nix index 4b83ea1d..8b16be41 100644 --- a/nix/packages/server.nix +++ b/nix/packages/server.nix @@ -25,4 +25,5 @@ in cargoArtifacts ; buildInputs = [pkgs.sqlite.dev]; + TYPHON_FLAKE = ../../typhon-flake; } diff --git a/typhon-flake/flake.nix b/typhon-flake/flake.nix new file mode 100644 index 00000000..d433ffec --- /dev/null +++ b/typhon-flake/flake.nix @@ -0,0 +1,7 @@ +{ + inputs.x.flake = false; + outputs = {x, ...}: { + typhonJobs = if builtins.pathExists "${x}/nix/jobs.nix" then import "${x}/nix/jobs.nix" {} else null; + typhonProject = if builtins.pathExists "${x}/nix/typhon.nix" then import "${x}/nix/typhon.nix" {} else null; + }; +} diff --git a/typhon-types/src/lib.rs b/typhon-types/src/lib.rs index 29afe290..5524ca23 100644 --- a/typhon-types/src/lib.rs +++ b/typhon-types/src/lib.rs @@ -31,14 +31,10 @@ pub mod handles { pub job: String, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] - pub struct Build { - pub build_hash: String, - } - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum Log { Evaluation(Evaluation), - JobBegin(Job), - JobEnd(Job), + JobPre(Job), + JobPost(Job), } macro_rules! 
impl_display { @@ -75,12 +71,6 @@ pub mod handles { [x.evaluation.into(), vec![x.job]].concat() } } - impl_display!(Build); - impl From<Build> for Vec<String> { - fn from(x: Build) -> Self { - vec![x.build_hash] - } - } impl_display!(Log); impl From<Log> for Vec<String> { fn from(x: Log) -> Self { @@ -88,14 +78,14 @@ pub mod handles { vec![ match x { Evaluation(_) => "evaluation", - JobBegin(_) => "job_begin", - JobEnd(_) => "job_end", + JobPost(_) => "job_post", + JobPre(_) => "job_pre", } .into(), match x { Evaluation(h) => h.to_string(), - JobBegin(h) => h.to_string(), - JobEnd(h) => h.to_string(), + JobPost(h) => h.to_string(), + JobPre(h) => h.to_string(), }, ] } @@ -123,9 +113,6 @@ pub mod handles { job, } } - pub fn build(build_hash: String) -> Build { - Build { build_hash } - } #[macro_export] macro_rules! pattern { @@ -159,12 +146,18 @@ pub mod requests { use crate::handles; use serde::{Deserialize, Serialize}; + #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] + pub struct ProjectDecl { + pub flake_url: String, + pub legacy: bool, + } + #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum Project { Delete, Info, Refresh, - SetDecl(String), + SetDecl(ProjectDecl), SetPrivateKey(String), UpdateJobsets, } @@ -186,15 +179,8 @@ pub mod requests { pub enum Job { Cancel, Info, - LogBegin, - LogEnd, - } - - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] - pub enum Build { - Cancel, - Info, - NixLog, + LogPost, + LogPre, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] @@ -208,7 +194,6 @@ pub mod requests { Jobset(handles::Jobset, Jobset), Evaluation(handles::Evaluation, Evaluation), Job(handles::Job, Job), - Build(handles::Build, Build), Login(String), } @@ -223,7 +208,6 @@ pub mod requests { Request::Jobset(h, req) => write!(f, "{:?} for jobset {}", req, h), Request::Evaluation(h, req) => write!(f, "{:?} for evaluation {}", req, h), Request::Job(h, req) => write!(f, "{:?} for job {}", req, h), - Request::Build(h, req) => write!(f, "{:?} for build {}", req, h), Request::Login(_) => write!(f, "Log in"), } } @@ -246,9 +230,10 @@ pub mod responses { #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct ProjectInfo { pub actions_path: Option<String>, - pub decl: String, - pub decl_locked: String, + pub flake_url: String, + pub flake_url_locked: String, pub jobsets: Vec<String>, + pub legacy: bool, pub metadata: ProjectMetadata, pub public_key: String, } @@ -256,32 +241,36 @@ pub mod responses { #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct JobsetInfo { pub evaluations: Vec<(i32, i64)>, - pub flake: String, + pub flake_url: String, + pub legacy: bool, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct EvaluationInfo { pub actions_path: Option<String>, - pub flake_locked: String, + pub flake_url_locked: String, pub jobs: Vec<String>, pub status: String, pub time_created: i64, + pub time_finished: Option<i64>, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct JobInfo { - pub build_handle: super::handles::Build, - pub build_infos: BuildInfo, + pub build_drv: String, + pub build_out: String, + pub build_status: String, + pub build_time_finished: Option<i64>, + pub build_time_started: Option<i64>, pub dist: bool, - pub status: String, + pub post_status: String, + pub post_time_finished: Option<i64>, + pub post_time_started: Option<i64>, + pub pre_status: String, + pub pre_time_finished: Option<i64>, + pub pre_time_started: Option<i64>, pub system: String, - } - - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -
pub struct BuildInfo { - pub drv: String, - pub out: String, - pub status: String, + pub time_created: i64, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] @@ -294,7 +283,6 @@ pub mod responses { JobsetInfo(JobsetInfo), EvaluationInfo(EvaluationInfo), JobInfo(JobInfo), - BuildInfo(BuildInfo), Log(String), Login { token: String }, } @@ -326,6 +314,4 @@ pub enum Event { EvaluationNew(handles::Evaluation), EvaluationFinished(handles::Evaluation), JobUpdated(handles::Job), - BuildNew(handles::Build), - BuildFinished(handles::Build), } diff --git a/typhon-webapp/src/build.rs b/typhon-webapp/src/build.rs deleted file mode 100644 index b776f262..00000000 --- a/typhon-webapp/src/build.rs +++ /dev/null @@ -1,129 +0,0 @@ -use crate::{appurl::AppUrl, perform_request, view_error, view_log}; -use seed::{prelude::*, *}; -use typhon_types::*; - -pub struct Model { - error: Option, - handle: handles::Build, - info: Option, - nix_log: Option, -} - -impl Model { - pub fn app_url(&self) -> AppUrl { - Vec::::from(self.handle.clone()).into() - } -} - -#[derive(Clone, Debug)] -pub enum Msg { - Cancel, - Error(responses::ResponseError), - ErrorIgnored, - Event(Event), - FetchInfo, - FetchNixLog, - GetInfo(responses::BuildInfo), - GetNixLog(String), - Noop, -} - -pub fn init(orders: &mut impl Orders, handle: handles::Build) -> Model { - orders.send_msg(Msg::FetchInfo); - Model { - error: None, - handle: handle.clone(), - info: None, - nix_log: None, - } -} - -pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders) { - match msg { - Msg::Cancel => { - let handle = model.handle.clone(); - let req = requests::Request::Build(handle, requests::Build::Cancel); - perform_request!( - orders, - req, - responses::Response::Ok => Msg::Noop, - Msg::Error, - ); - } - Msg::Error(err) => { - model.error = Some(err); - } - Msg::ErrorIgnored => { - model.error = None; - } - Msg::Event(_) => { - orders.send_msg(Msg::FetchInfo); - } - Msg::FetchInfo => { - let handle = model.handle.clone(); - let req = requests::Request::Build(handle, requests::Build::Info); - perform_request!( - orders, - req, - responses::Response::BuildInfo(info) => Msg::GetInfo(info), - Msg::Error, - ); - } - Msg::FetchNixLog => { - let handle = model.handle.clone(); - let req = requests::Request::Build(handle, requests::Build::NixLog); - perform_request!( - orders, - req, - responses::Response::Log(log) => Msg::GetNixLog(log), - Msg::Error, - ); - } - Msg::GetInfo(info) => { - if info.status == "error" || info.status == "success" { - orders.send_msg(Msg::FetchNixLog); - } - model.info = Some(info); - } - Msg::GetNixLog(log) => { - model.nix_log = Some(log); - } - Msg::Noop => (), - } -} - -fn view_build(model: &Model) -> Node { - div![ - h2![format!("Build {}", model.handle),], - match &model.info { - None => div!["loading..."], - Some(info) => div![ - p![format!("Status: {}", info.status)], - p![format!("Derivation: {}", info.drv)], - p![format!("Output: {}", info.out)], - ], - }, - match &model.nix_log { - None => empty![], - Some(log) => div![h3!["Nix log"], view_log(log.clone()),], - }, - ] -} - -fn view_admin() -> Node { - div![ - h2!["Administration"], - button!["Cancel", ev(Ev::Click, |_| Msg::Cancel),] - ] -} - -pub fn view(model: &Model, admin: bool) -> Node { - model - .error - .as_ref() - .map(|err| view_error(err, Msg::ErrorIgnored)) - .unwrap_or(div![ - view_build(model), - if admin { view_admin() } else { empty![] }, - ]) -} diff --git a/typhon-webapp/src/evaluation.rs b/typhon-webapp/src/evaluation.rs index 
41798654..507dc8ce 100644 --- a/typhon-webapp/src/evaluation.rs +++ b/typhon-webapp/src/evaluation.rs @@ -117,7 +117,7 @@ fn view_evaluation(model: &Model) -> Node { None => div!["loading..."], Some(info) => div![ p![format!("Status: {}", info.status)], - p![format!("Flake locked: {}", info.flake_locked)], + p![format!("Flake locked: {}", info.flake_url_locked)], p![format!( "Actions path: {}", info.actions_path.clone().unwrap_or("".into()) diff --git a/typhon-webapp/src/home.rs b/typhon-webapp/src/home.rs index 318168ce..acdaf959 100644 --- a/typhon-webapp/src/home.rs +++ b/typhon-webapp/src/home.rs @@ -6,7 +6,7 @@ use typhon_types::*; pub struct Model { error: Option, projects: Vec<(String, responses::ProjectMetadata)>, - new_project: (String, String), + new_project: (String, String, bool), } impl From for AppUrl { fn from(_: Model) -> AppUrl { @@ -24,7 +24,8 @@ pub enum Msg { Noop, SetProjects(Vec<(String, responses::ProjectMetadata)>), UpdateNewProjectName(String), - UpdateNewProjectExpr(String), + UpdateNewProjectFlakeUrl(String), + UpdateNewProjectLegacy(String), } pub fn init(orders: &mut impl Orders) -> Model { @@ -72,8 +73,11 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders) { Msg::UpdateNewProjectName(name) => { model.new_project.0 = name; } - Msg::UpdateNewProjectExpr(expr) => { - model.new_project.1 = expr; + Msg::UpdateNewProjectFlakeUrl(flake_url) => { + model.new_project.1 = flake_url; + } + Msg::UpdateNewProjectLegacy(legacy) => { + model.new_project.2 = legacy == "on"; } } } @@ -121,9 +125,16 @@ fn view_home(model: &Model, admin: bool) -> Node { At::Value => model.new_project.1, At::Placeholder => "github:org/repo", }, - input_ev(Ev::Input, Msg::UpdateNewProjectExpr), + input_ev(Ev::Input, Msg::UpdateNewProjectFlakeUrl), enter ], + input![ + attrs! 
{ + At::Value => model.new_project.2, + At::Type => "checkbox", + }, + input_ev(Ev::Input, Msg::UpdateNewProjectLegacy), + ], div![], button![ "Add project", diff --git a/typhon-webapp/src/job.rs b/typhon-webapp/src/job.rs index 8a74aa4c..9a94333a 100644 --- a/typhon-webapp/src/job.rs +++ b/typhon-webapp/src/job.rs @@ -27,11 +27,11 @@ pub enum Msg { ErrorIgnored, Event(Event), FetchInfo, - FetchLogBegin, - FetchLogEnd, + FetchLogPost, + FetchLogPre, GetInfo(responses::JobInfo), - GetLogBegin(String), - GetLogEnd(String), + GetLogPost(String), + GetLogPre(String), Noop, LogLine(String), } @@ -131,45 +131,45 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders) { Msg::Error, ); } - Msg::FetchLogBegin => { + Msg::FetchLogPost => { let handle = model.handle.clone(); - let req = requests::Request::Job(handle, requests::Job::LogBegin); + let req = requests::Request::Job(handle, requests::Job::LogPost); perform_request!( orders, req, - responses::Response::Log(log) => Msg::GetLogBegin(log), + responses::Response::Log(log) => Msg::GetLogPost(log), Msg::Error, ); } - Msg::FetchLogEnd => { + Msg::FetchLogPre => { let handle = model.handle.clone(); - let req = requests::Request::Job(handle, requests::Job::LogEnd); + let req = requests::Request::Job(handle, requests::Job::LogPre); perform_request!( orders, req, - responses::Response::Log(log) => Msg::GetLogEnd(log), + responses::Response::Log(log) => Msg::GetLogPre(log), Msg::Error, ); } Msg::GetInfo(info) => { - if info.status == "waiting" || info.status == "end" || info.status == "success" { - orders.send_msg(Msg::FetchLogBegin); + if info.pre_status != "pending" { + orders.send_msg(Msg::FetchLogPre); } - if info.status == "success" { - orders.send_msg(Msg::FetchLogEnd); + if info.post_status != "pending" { + orders.send_msg(Msg::FetchLogPost); } - let drv = info.build_infos.drv.clone(); + let drv = info.build_drv.clone(); orders .proxy(Msg::LogLine) .stream(fetch_logs_as_stream(drv.into())); model.info = Some(info); } - Msg::GetLogBegin(log) => { - model.log_begin = Some(log); - } - Msg::GetLogEnd(log) => { + Msg::GetLogPost(log) => { model.log_end = Some(log); } + Msg::GetLogPre(log) => { + model.log_begin = Some(log); + } Msg::Noop => (), Msg::LogLine(line) => model.log.push(line), } @@ -206,16 +206,10 @@ fn view_job(model: &Model) -> Node { match &model.info { None => div!["loading..."], Some(info) => div![ - p![ - "Build: ", - a![ - format!("{}", info.build_handle), - attrs! 
{ - At::Href => crate::Urls::build(&info.build_handle) - }, - ] - ], - p![format!("Status: {}", info.status)], + p![format!("Drv: {}", info.build_drv)], + p![format!("Status (pre): {}", info.pre_status)], + p![format!("Status (build): {}", info.build_status)], + p![format!("Status (post): {}", info.post_status)], if info.dist { let api_url = SETTINGS.get().unwrap().api_server.url(false); let job = &model.handle.job; diff --git a/typhon-webapp/src/jobset.rs b/typhon-webapp/src/jobset.rs index 94d87881..862e453f 100644 --- a/typhon-webapp/src/jobset.rs +++ b/typhon-webapp/src/jobset.rs @@ -122,7 +122,7 @@ fn view_jobset(model: &Model) -> Node { match &model.info { None => div!["loading..."], Some(info) => div![div![ - format!("Flake: {}", info.flake), + format!("Flake: {}", info.flake_url), h3!["Evaluations"], ul![model.evaluations.iter().map(|(id, time)| li![a![ timestamp::view(time).map_msg({ diff --git a/typhon-webapp/src/lib.rs b/typhon-webapp/src/lib.rs index 8bf1a400..65be678b 100644 --- a/typhon-webapp/src/lib.rs +++ b/typhon-webapp/src/lib.rs @@ -1,5 +1,4 @@ mod appurl; -mod build; mod editable_text; mod evaluation; mod home; @@ -157,11 +156,6 @@ impl<'a> Urls<'a> { pub fn job(handle: &handles::Job) -> Url { Urls::evaluation(&handle.evaluation).add_path_part(&handle.job) } - pub fn build(handle: &handles::Build) -> Url { - Urls::webroot() - .add_path_part("builds") - .add_path_part(&handle.build_hash) - } } pub enum Page { @@ -171,7 +165,6 @@ pub enum Page { Jobset(jobset::Model), Evaluation(evaluation::Model), Job(job::Model), - Build(build::Model), NotFound, } @@ -184,7 +177,6 @@ impl Page { Page::Jobset(m) => AppUrl::from("projects") + m.app_url(), Page::Evaluation(m) => AppUrl::from("projects") + m.app_url(), Page::Job(m) => AppUrl::from("projects") + m.app_url(), - Page::Build(m) => AppUrl::from("builds") + m.app_url(), Page::NotFound => AppUrl::from("404"), } } @@ -224,10 +216,6 @@ impl Page { )) }) .unwrap_or(Page::NotFound), - ["builds", build_hash] => Page::Build(build::init( - &mut orders.proxy(Msg::BuildMsg), - handles::build((*build_hash).into()), - )), _ => Page::NotFound, } } @@ -261,7 +249,6 @@ enum Msg { JobsetMsg(jobset::Msg), EvaluationMsg(evaluation::Msg), JobMsg(job::Msg), - BuildMsg(build::Msg), UrlChanged(subs::UrlChanged), WsMessageReceived(WebSocketMessage), } @@ -358,13 +345,6 @@ fn update_aux(msg: Msg, model: &mut Model, orders: &mut impl Orders) { .. }, ) => job::update(msg, job_model, &mut orders.proxy(Msg::JobMsg)), - ( - Msg::BuildMsg(msg), - Model { - page: Page::Build(build_model), - .. 
- }, - ) => build::update(msg, build_model, &mut orders.proxy(Msg::BuildMsg)), (Msg::WsMessageReceived(msg), _) => { let event: Event = msg.json().expect("failed to deserialize event"); log!(event); @@ -394,11 +374,6 @@ fn update_aux(msg: Msg, model: &mut Model, orders: &mut impl Orders) { model, &mut orders.proxy(Msg::JobMsg), ), - Page::Build(model) => build::update( - build::Msg::Event(event), - model, - &mut orders.proxy(Msg::BuildMsg), - ), _ => (), } } @@ -474,9 +449,6 @@ fn view(model: &Model) -> impl IntoNodes { evaluation::view(&evaluation_model, model.admin).map_msg(Msg::EvaluationMsg) } Page::Job(job_model) => job::view(&job_model, model.admin).map_msg(Msg::JobMsg), - Page::Build(build_model) => { - build::view(&build_model, model.admin).map_msg(Msg::BuildMsg) - } }, C![ match &model.page { Page::NotFound => "not-found", @@ -486,7 +458,6 @@ fn view(model: &Model) -> impl IntoNodes { Page::Jobset(_) => "jobset", Page::Evaluation(_) => "evaluation", Page::Job(_) => "job", - Page::Build(_) => "build" } ]], ] diff --git a/typhon-webapp/src/project.rs b/typhon-webapp/src/project.rs index f56a92fe..3fc3406b 100644 --- a/typhon-webapp/src/project.rs +++ b/typhon-webapp/src/project.rs @@ -9,7 +9,8 @@ pub struct Model { error: Option, handle: handles::Project, info: Option, - declaration: editable_text::Model, + declaration_flake_url: editable_text::Model, + declaration_legacy: bool, } impl Model { @@ -30,7 +31,7 @@ pub enum Msg { Noop, Refresh, UpdateJobsets, - MsgDeclaration(editable_text::Msg), + MsgDeclarationFlakeUrl(editable_text::Msg), } pub fn init(orders: &mut impl Orders, handle: handles::Project) -> Model { @@ -40,7 +41,8 @@ pub fn init(orders: &mut impl Orders, handle: handles::Project) -> Model { error: None, handle: handle.clone(), info: None, - declaration: editable_text::init("".to_string()), + declaration_flake_url: editable_text::init("".to_string()), + declaration_legacy: false, } } @@ -65,10 +67,18 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders) { } } match msg { - Msg::MsgDeclaration(m) => { - update_text_comp!(m, &mut model.declaration, Msg::MsgDeclaration, |decl| { - requests::Project::SetDecl(decl) - }) + Msg::MsgDeclarationFlakeUrl(m) => { + update_text_comp!( + m, + &mut model.declaration_flake_url, + Msg::MsgDeclarationFlakeUrl, + |flake_url| { + requests::Project::SetDecl(requests::ProjectDecl { + flake_url, + legacy: model.declaration_legacy, + }) + } + ) } //Msg::Delete => { // let handle = model.handle.clone(); @@ -104,7 +114,7 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders) { } Msg::GetInfo(info) => { model.info = Some(info.clone()); - model.declaration = editable_text::init(info.decl.clone()); + model.declaration_flake_url = editable_text::init(info.flake_url.clone()); } Msg::Noop => (), Msg::Refresh => { @@ -223,17 +233,20 @@ fn view_project(model: &Model, is_admin: bool) -> Node { show_info_block( "Flake URI", "desclaration", - editable_text_view(&model.declaration, Box::new(|s| code![s.clone()])) - .map_msg(Msg::MsgDeclaration), + editable_text_view( + &model.declaration_flake_url, + Box::new(|s| code![s.clone()]) + ) + .map_msg(Msg::MsgDeclarationFlakeUrl), Some(div![]) ), show_info_block( "Locked flake URI", "locked-declaration", - code![if info.decl_locked.clone() == "" { + code![if info.flake_url_locked.clone() == "" { "-".into() } else { - info.decl_locked.clone() + info.flake_url_locked.clone() }], Some(i![C!["ri-refresh-line"], ev(Ev::Click, |_| Msg::Refresh)]) ), diff --git 
a/typhon/migrations/00000000000000_typhon/down.sql b/typhon/migrations/00000000000000_typhon/down.sql index 411ad333..0adcc214 100644 --- a/typhon/migrations/00000000000000_typhon/down.sql +++ b/typhon/migrations/00000000000000_typhon/down.sql @@ -2,5 +2,4 @@ DROP TABLE projects; DROP TABLE jobsets; DROP TABLE evaluations; DROP TABLE jobs; -DROP TABLE builds; DROP TABLE logs; diff --git a/typhon/migrations/00000000000000_typhon/up.sql b/typhon/migrations/00000000000000_typhon/up.sql index 02c5e6f8..a4659650 100644 --- a/typhon/migrations/00000000000000_typhon/up.sql +++ b/typhon/migrations/00000000000000_typhon/up.sql @@ -1,11 +1,12 @@ CREATE TABLE projects ( project_id INTEGER NOT NULL PRIMARY KEY, project_actions_path TEXT, - project_decl TEXT DEFAULT "" NOT NULL, - project_decl_locked TEXT DEFAULT "" NOT NULL, project_description TEXT DEFAULT "" NOT NULL, + project_flake_url TEXT DEFAULT "" NOT NULL, + project_flake_url_locked TEXT DEFAULT "" NOT NULL, project_homepage TEXT DEFAULT "" NOT NULL, project_key TEXT NOT NULL, + project_legacy BOOL NOT NULL, project_name TEXT NOT NULL, project_title TEXT DEFAULT "" NOT NULL, UNIQUE(project_name) @@ -13,7 +14,8 @@ CREATE TABLE projects ( CREATE TABLE jobsets ( jobset_id INTEGER NOT NULL PRIMARY KEY, - jobset_flake TEXT NOT NULL, + jobset_flake_url TEXT NOT NULL, + jobset_legacy BOOL NOT NULL, jobset_name TEXT NOT NULL, jobset_project INTEGER NOT NULL REFERENCES projects(project_id) ON DELETE CASCADE, UNIQUE(jobset_project, jobset_name) @@ -22,38 +24,41 @@ CREATE TABLE evaluations ( evaluation_id INTEGER NOT NULL PRIMARY KEY, evaluation_actions_path TEXT, - evaluation_flake_locked TEXT NOT NULL, + evaluation_flake_url_locked TEXT NOT NULL, evaluation_jobset INTEGER NOT NULL REFERENCES jobsets(jobset_id) ON DELETE CASCADE, evaluation_num INTEGER NOT NULL, evaluation_status TEXT NOT NULL CHECK(evaluation_status in ('pending', 'success', 'error', 'canceled')), evaluation_time_created BIGINT NOT NULL, + evaluation_time_finished BIGINT, UNIQUE(evaluation_jobset, evaluation_num) ); CREATE TABLE jobs ( job_id INTEGER NOT NULL PRIMARY KEY, - job_build INTEGER NOT NULL REFERENCES builds(build_id) ON DELETE CASCADE, + job_build_drv TEXT NOT NULL, + job_build_out TEXT NOT NULL, + job_build_status TEXT CHECK(job_build_status in ('pending', 'success', 'error', 'canceled')) NOT NULL, + job_build_time_finished BIGINT, + job_build_time_started BIGINT, job_dist BOOLEAN NOT NULL, job_evaluation INTEGER NOT NULL REFERENCES evaluations(evaluation_id) ON DELETE CASCADE, job_name TEXT NOT NULL, - job_status TEXT CHECK(job_status in ('begin', 'waiting', 'end', 'success', 'error', 'canceled')) NOT NULL, + job_post_status TEXT CHECK(job_post_status in ('pending', 'success', 'error', 'canceled')) NOT NULL, + job_post_time_finished BIGINT, + job_post_time_started BIGINT, + job_pre_status TEXT CHECK(job_pre_status in ('pending', 'success', 'error', 'canceled')) NOT NULL, + job_pre_time_finished BIGINT, + job_pre_time_started BIGINT, job_system TEXT NOT NULL, + job_time_created BIGINT NOT NULL, UNIQUE(job_evaluation, job_system, job_name) ); -CREATE TABLE builds ( - build_id INTEGER NOT NULL PRIMARY KEY, - build_drv TEXT NOT NULL UNIQUE, - build_hash TEXT NOT NULL UNIQUE, - build_out TEXT NOT NULL UNIQUE, - build_status TEXT NOT NULL CHECK(build_status in ('pending', 'success', 'error', 'canceled')) -); - CREATE TABLE logs ( log_id INTEGER NOT NULL PRIMARY KEY, log_evaluation INTEGER REFERENCES evaluations(evaluation_id) ON DELETE CASCADE,
log_job INTEGER REFERENCES jobs(job_id) ON DELETE CASCADE, log_stderr TEXT NOT NULL, - log_type TEXT NOT NULL CHECK(log_type in ('build', 'evaluation', 'job_begin', 'job_end')), + log_type TEXT NOT NULL CHECK(log_type in ('evaluation', 'job_pre', 'job_post')), UNIQUE(log_evaluation, log_job, log_type) ); diff --git a/typhon/src/api.rs b/typhon/src/api.rs index 31e9f74a..da6a7a4e 100644 --- a/typhon/src/api.rs +++ b/typhon/src/api.rs @@ -34,7 +34,6 @@ impl Responder for ResponseWrapper { JobsetEvaluate(payload) => web::Json(payload).respond_to(req), EvaluationInfo(payload) => web::Json(payload).respond_to(req), JobInfo(payload) => web::Json(payload).respond_to(req), - BuildInfo(payload) => web::Json(payload).respond_to(req), Log(payload) => web::Json(payload).respond_to(req), Login { token } => web::Json(token).respond_to(req), } @@ -90,7 +89,7 @@ r!( Project::Refresh, ); - project_set_decl(path: web::Path<String>, body: web::Json<String>) => + project_set_decl(path: web::Path<String>, body: web::Json<requests::ProjectDecl>) => Request::Project( handles::project(path.into_inner()), Project::SetDecl(body.into_inner()), ); @@ -150,34 +149,16 @@ r!( Job::Info, ); - job_log_begin(path: web::Path<(String,String,i32,String)>) => + job_log_pre(path: web::Path<(String,String,i32,String)>) => Request::Job( handles::job(path.into_inner()), - Job::LogBegin, + Job::LogPre, ); - job_log_end(path: web::Path<(String,String,i32,String)>) => + job_log_post(path: web::Path<(String,String,i32,String)>) => Request::Job( handles::job(path.into_inner()), - Job::LogEnd, - ); - - build_cancel(path: web::Path<String>) => - Request::Build( - handles::build(path.into_inner()), - Build::Cancel, - ); - - build_info(path: web::Path<String>) => - Request::Build( - handles::build(path.into_inner()), - Build::Info, - ); - - build_nix_log(path: web::Path<String>) => - Request::Build( - handles::build(path.into_inner()), - Build::NixLog, + Job::LogPost, ); login(body: web::Json<String>) => @@ -199,7 +180,7 @@ async fn dist( _ => Err(ResponseErrorWrapper(ResponseError::InternalError)), }?; if info.dist { - Ok(NamedFile::open_async(format!("{}/{}", info.build_infos.out, path)).await) + Ok(NamedFile::open_async(format!("{}/{}", info.build_out, path)).await) } else { Err(ResponseErrorWrapper(ResponseError::BadRequest( "typhonDist is not set".into(), @@ -321,19 +302,13 @@ pub fn config(cfg: &mut web::ServiceConfig) { web::scope("/jobs/{job}") .route("", web::get().to(job_info)) .route("/cancel", web::post().to(job_cancel)) - .route("/logs/begin", web::get().to(job_log_begin)) - .route("/logs/end", web::get().to(job_log_end)) + .route("/logs/pre", web::get().to(job_log_pre)) + .route("/logs/post", web::get().to(job_log_post)) .route("/dist/{path:.*}", web::get().to(dist)), ), ), ), ) - .service( - web::scope("/builds/{build}") - .route("", web::get().to(build_info)) - .route("/cancel", web::post().to(build_cancel)) - .route("/nixlog", web::get().to(build_nix_log)), - ) .route("/login", web::post().to(login)) .route( "{anything:.*}", diff --git a/typhon/src/builds.rs b/typhon/src/builds.rs deleted file mode 100644 index 32bde0cf..00000000 --- a/typhon/src/builds.rs +++ /dev/null @@ -1,81 +0,0 @@ -use crate::connection; -use crate::error::Error; -use crate::models::*; -use crate::nix; -use crate::schema::builds::dsl::*; -use crate::BUILDS; -use crate::{handles, responses}; -use crate::{log_event, Event}; -use diesel::prelude::*; - -impl From<Build> for responses::BuildInfo { - fn from(build: Build) -> responses::BuildInfo { - responses::BuildInfo { - drv: build.build_drv.clone(), - out: build.build_out.clone(), -
status: build.build_status.clone(), - } - } -} - -impl Build { - pub async fn cancel(&self) -> Result<(), Error> { - let r = BUILDS.cancel(self.build_id).await; - if r { - Ok(()) - } else { - Err(Error::BuildNotRunning(self.handle())) - } - } - - pub async fn get(build_handle: &handles::Build) -> Result { - let build_hash_ = &build_handle.build_hash; - let mut conn = connection().await; - Ok(builds - .filter(build_hash.eq(build_hash_)) - .first::(&mut *conn) - .map_err(|_| { - Error::BuildNotFound(handles::Build { - build_hash: build_hash_.to_string(), - }) - })?) - } - - pub fn handle(&self) -> handles::Build { - handles::Build { - build_hash: self.build_hash.clone(), - } - } - - pub fn info(&self) -> Result { - Ok(self.clone().into()) - } - - pub async fn nixlog(&self) -> Result { - let log = nix::log(self.build_drv.clone()).await?; - Ok(log) - } - - pub async fn run(self) -> () { - let handle = self.handle(); - let id = self.build_id; - let drv = self.build_drv.clone(); - let task = async move { - nix::build(&nix::DrvPath::new(&drv)).await?; - Ok::<(), Error>(()) - }; - let f = move |r| async move { - let status = match r { - Some(Ok(())) => "success", - Some(Err(_)) => "error", // TODO: log error - None => "canceled", - }; - let conn = &mut *connection().await; - let _ = diesel::update(builds.find(id)) - .set(build_status.eq(status)) - .execute(conn); - log_event(Event::BuildFinished(handle)); - }; - BUILDS.run(id, task, f).await; - } -} diff --git a/typhon/src/error.rs b/typhon/src/error.rs index 84f4e12a..428e35ce 100644 --- a/typhon/src/error.rs +++ b/typhon/src/error.rs @@ -7,8 +7,6 @@ pub enum Error { AccessDenied, ActionError(actions::Error), BadJobsetDecl(String), - BuildNotFound(handles::Build), - BuildNotRunning(handles::Build), EvaluationNotFound(handles::Evaluation), EvaluationNotRunning(handles::Evaluation), IllegalProjectHandle(handles::Project), @@ -41,8 +39,6 @@ impl std::fmt::Display for Error { AccessDenied => write!(f, "Access denied"), ActionError(e) => write!(f, "Action error: {}", e), BadJobsetDecl(s) => write!(f, "Bad jobset declaration: {}", s), - BuildNotFound(build_handle) => write!(f, "Build {} not found", build_handle), - BuildNotRunning(build_handle) => write!(f, "Build {} is not running", build_handle), IllegalProjectHandle(handle) => { write!(f, "The project name [{}] is illegal. 
Legal project names are sequences of alphanumerical characters that may contain dashes [-] or underscores [_].", handle.project) } @@ -101,15 +97,12 @@ impl Into<ResponseError> for Error { ActionError(actions::Error::Unexpected) | UnexpectedDatabaseError(_) | Todo => { InternalError } - BuildNotFound(_) - | EvaluationNotFound(_) - | JobNotFound(_) - | JobsetNotFound(_) - | ProjectNotFound(_) => ResourceNotFound(format!("{}", self)), + EvaluationNotFound(_) | JobNotFound(_) | JobsetNotFound(_) | ProjectNotFound(_) => { + ResourceNotFound(format!("{}", self)) + } AccessDenied | ActionError(_) | BadJobsetDecl(_) - | BuildNotRunning(_) | EvaluationNotRunning(_) | JobNotRunning(_) | IllegalProjectHandle(_) diff --git a/typhon/src/evaluations.rs b/typhon/src/evaluations.rs index be1a812e..22a247a3 100644 --- a/typhon/src/evaluations.rs +++ b/typhon/src/evaluations.rs @@ -3,71 +3,47 @@ use crate::error::Error; use crate::gcroots; use crate::models::*; use crate::nix; -use crate::schema::builds::dsl::*; use crate::schema::evaluations::dsl::*; use crate::schema::jobs::dsl::*; use crate::schema::jobsets::dsl::*; -use crate::CURRENT_SYSTEM; use crate::EVALUATIONS; use crate::{handles, responses}; use crate::{log_event, Event}; use diesel::prelude::*; -use std::collections::HashMap; -type JobName = String; -type JobDrvMap = HashMap<JobName, (nix::Derivation, bool)>; - -async fn evaluate_aux(id: i32, new_jobs: JobDrvMap) -> Result<(), Error> { +async fn evaluate_aux(id: i32, new_jobs: nix::NewJobs) -> Result<(), Error> { let mut conn = connection().await; - let created_jobs = conn.transaction::<Vec<(Build, Job)>, Error, _>(|conn| { + let now = crate::time::now(); + let created_jobs = conn.transaction::<Vec<Job>, Error, _>(|conn| { new_jobs .iter() - .map(|(name, (drv, dist))| { - let hash = &drv.path.hash(); - let build = builds - .filter(build_hash.eq(hash)) - .load::<Build>(conn)? - .last() - .cloned() - .map(Ok::<_, Error>) - .unwrap_or_else(|| { - let build: Build = diesel::insert_into(builds) - .values(&NewBuild { - build_drv: &String::from(drv.path.clone()).as_str(), - build_hash: hash, - build_out: drv - .outputs - .iter() - .last() - .expect("TODO: derivations can have multiple outputs") - .1, - build_status: "pending", - }) - .get_result(conn)?; - log_event(Event::BuildNew(build.handle())); - Ok(build) - })?; - - // Create job - let job: Job = diesel::insert_into(jobs) + .map(|((system, name), (drv, dist))| { + Ok(diesel::insert_into(jobs) .values(&NewJob { - job_build: build.build_id, + job_build_drv: &String::from(drv.path.clone()).as_str(), + job_build_out: drv + .outputs + .iter() + .last() + .expect("TODO: derivations can have multiple outputs") + .1, + job_build_status: "pending", job_dist: *dist, job_evaluation: id, - job_name: &name, - job_status: "begin", - job_system: &*CURRENT_SYSTEM, + job_name: name, + job_post_status: "pending", + job_pre_status: "pending", + job_system: system, + job_time_created: now, }) - .get_result(conn)?; - Ok((build, job)) + .get_result(conn)?)
}) .collect() })?; drop(conn); - for (build, job) in created_jobs.into_iter() { - build.run().await; + for job in created_jobs.into_iter() { job.run().await; } @@ -119,10 +95,11 @@ impl Evaluation { drop(conn); Ok(responses::EvaluationInfo { actions_path: self.evaluation_actions_path.clone(), - flake_locked: self.evaluation_flake_locked.clone(), + flake_url_locked: self.evaluation_flake_url_locked.clone(), jobs: jobs_names, status: self.evaluation_status.clone(), time_created: self.evaluation_time_created, + time_finished: self.evaluation_time_finished, }) } @@ -134,46 +111,28 @@ impl Evaluation { } pub async fn run(self) -> () { - let handle = self.handle().await.unwrap(); // TODO + // TODO: error management + let handle = self.handle().await.unwrap(); + let jobset = self.jobset().await.unwrap(); let id = self.evaluation_id; let task = async move { - let expr = format!( - "{}#typhonJobs.{}", - self.evaluation_flake_locked, *CURRENT_SYSTEM, - ); - let mut jobs_ = JobDrvMap::new(); - for job in nix::eval(expr.clone()) - .await? - .as_object() - .expect("unexpected Nix output") - .keys() - .cloned() - { - jobs_.insert( - job.clone(), - ( - nix::derivation(&format!("{expr}.{job}")).await?, - nix::eval(format!("{expr}.{job}.passthru.typhonDist")) - .await - .map(|json| json.as_bool().unwrap_or(false)) - .unwrap_or(false), - ), - ); - } - Ok(jobs_) + nix::eval_jobs(&self.evaluation_flake_url_locked, jobset.jobset_legacy).await }; - let f = move |r: Option>| async move { + let f = move |r: Option>| async move { // TODO: when logging, hide internal error messages? let status = match r { Some(Ok(new_jobs)) => match evaluate_aux(id, new_jobs).await { Ok(()) => "success", Err(e) => { - let _ = Log::new(handles::Log::Evaluation(handle.clone()), e.to_string()); + // TODO: handle errors + let _ = + Log::new(handles::Log::Evaluation(handle.clone()), e.to_string()).await; "error" } }, Some(Err(e)) => { - let _ = Log::new(handles::Log::Evaluation(handle.clone()), e.to_string()); + // TODO: handle errors + let _ = Log::new(handles::Log::Evaluation(handle.clone()), e.to_string()).await; "error" } None => "canceled", diff --git a/typhon/src/gcroots.rs b/typhon/src/gcroots.rs index f636db19..b054c433 100644 --- a/typhon/src/gcroots.rs +++ b/typhon/src/gcroots.rs @@ -1,5 +1,4 @@ use crate::models::*; -use crate::schema::builds::dsl::*; use crate::schema::evaluations::dsl::*; use crate::schema::jobs::dsl::*; use crate::schema::jobsets::dsl::*; @@ -51,11 +50,8 @@ fn update_aux(conn: &mut SqliteConnection) -> Result<(), Error> { .filter(job_evaluation.eq(evaluation.evaluation_id)) .load::(conn)? 
{ - let build = builds - .filter(build_id.eq(job.job_build)) - .first::(conn)?; - gcroots.insert(build.build_drv.clone()); - gcroots.insert(build.build_out.clone()); + gcroots.insert(job.job_build_drv.clone()); + gcroots.insert(job.job_build_out.clone()); } } } diff --git a/typhon/src/jobs.rs b/typhon/src/jobs.rs index f5b38589..b9200949 100644 --- a/typhon/src/jobs.rs +++ b/typhon/src/jobs.rs @@ -3,26 +3,23 @@ use crate::connection; use crate::error::Error; use crate::handles; use crate::models::*; +use crate::nix; use crate::responses; -use crate::schema::builds::dsl::*; use crate::schema::evaluations::dsl::*; use crate::schema::jobs::dsl::*; use crate::CURRENT_SYSTEM; use crate::{log_event, Event}; -use crate::{BUILDS, JOBS, SETTINGS}; +use crate::{JOBS_BUILD, JOBS_POST, JOBS_PRE, SETTINGS}; use diesel::prelude::*; use serde_json::{json, Value}; use std::path::Path; impl Job { - pub async fn build(&self) -> Result { - let mut conn = connection().await; - Ok(builds.find(self.job_build).first::(&mut *conn)?) - } - pub async fn cancel(&self) -> Result<(), Error> { - let r = JOBS.cancel(self.job_id).await; - if r { + let a = JOBS_PRE.cancel(self.job_id).await; + let b = JOBS_BUILD.cancel(self.job_id).await; + let c = JOBS_POST.cancel(self.job_id).await; + if a || b || c { Ok(()) } else { Err(Error::JobNotRunning(self.handle().await?)) @@ -62,47 +59,68 @@ impl Job { }) } - pub async fn info(&self) -> Result { - let mut conn = connection().await; - let build = builds.find(self.job_build).first::(&mut *conn)?; - Ok(responses::JobInfo { - build_handle: handles::build(build.build_hash.clone()), - build_infos: build.into(), + pub fn info(&self) -> responses::JobInfo { + responses::JobInfo { + build_drv: self.job_build_drv.clone(), + build_out: self.job_build_out.clone(), + build_status: self.job_build_status.clone(), + build_time_finished: self.job_build_time_finished, + build_time_started: self.job_build_time_started, dist: self.job_dist, - status: self.job_status.clone(), + post_status: self.job_post_status.clone(), + post_time_finished: self.job_post_time_finished, + post_time_started: self.job_post_time_started, + pre_status: self.job_pre_status.clone(), + pre_time_finished: self.job_pre_time_finished, + pre_time_started: self.job_pre_time_started, system: self.job_system.clone(), - }) + time_created: self.job_time_created, + } } - async fn mk_input(&self) -> Result { + async fn mk_input(&self, build_status: &str) -> Result { let evaluation = self.evaluation().await?; let jobset = evaluation.jobset().await?; let project = jobset.project().await?; - let build = self.build().await?; Ok(json!({ - "build": build.build_hash, "data": SETTINGS.json, + "drv": self.job_build_drv, "evaluation": evaluation.evaluation_num, - "flake": jobset.jobset_flake, - "flake_locked": evaluation.evaluation_flake_locked, + "flake": jobset.jobset_flake_url, + "flake_locked": evaluation.evaluation_flake_url_locked, "job": self.job_name, "jobset": jobset.jobset_name, - "out": build.build_out, + "legacy": jobset.jobset_legacy, + "out": self.job_build_out, "project": project.project_name, - "status": build.build_status, + "status": build_status, "system": self.job_system, })) } pub async fn run(self) -> () { + use crate::time::now; let id = self.job_id; + let drv = nix::DrvPath::new(&self.job_build_drv); - let handle = self.handle().await.unwrap(); // TODO - let handle_bis = handle.clone(); + // FIXME? 
+ let handle_1 = self.handle().await.unwrap(); + let handle_2 = handle_1.clone(); + let handle_3 = handle_1.clone(); + let handle_4 = handle_1.clone(); + let handle_5 = handle_1.clone(); + let job_1 = self; + let job_2 = job_1.clone(); + + let task_pre = async move { + let mut conn = connection().await; + let _ = diesel::update(jobs.find(id)) + .set(job_pre_time_started.eq(now())) + .execute(&mut *conn); + drop(conn); - let task = async move { // abort if actions are not defined - let evaluation = self.evaluation().await?; + let evaluation = job_1.evaluation().await?; let path = match &evaluation.evaluation_actions_path { None => return Ok(()), Some(path) => path, @@ -111,78 +129,118 @@ impl Job { let jobset = evaluation.jobset().await?; let project = jobset.project().await?; - { - // run action `begin` - let mut conn = connection().await; - let _ = diesel::update(jobs.find(id)) - .set(job_status.eq("begin")) - .execute(&mut *conn); - drop(conn); - - log_event(Event::JobUpdated(handle_bis.clone())); - - let input = self.mk_input().await?; - - let log = if Path::new(&format!("{}/begin", path)).exists() { - let (_, log) = actions::run( - &project.project_key, - &format!("{}/begin", path), - &format!("{}/secrets", path), - &input, - ) - .await?; - log - } else { - serde_json::to_string_pretty(&input).unwrap() // TODO - }; - - // save the log - let _ = Log::new(handles::Log::JobBegin(handle_bis.clone()), log).await?; - } - - // wait for build + let input = job_1.mk_input(&"pending".to_string()).await?; + + let log = if Path::new(&format!("{}/pre", path)).exists() { + let (_, log) = actions::run( + &project.project_key, + &format!("{}/pre", path), + &format!("{}/secrets", path), + &input, + ) + .await?; + log + } else { + serde_json::to_string_pretty(&input).unwrap() // TODO + }; + + // save the log + let _ = Log::new(handles::Log::JobPre(handle_1.clone()), log).await?; + + Ok::<(), Error>(()) + }; + let finish_pre = move |r| async move { + let status = match r { + Some(Ok(())) => "success", + Some(Err(_)) => "error", // TODO: log error + None => "canceled", + }; + let mut conn = connection().await; + let _ = diesel::update(jobs.find(id)) + .set((job_pre_status.eq(status), job_pre_time_finished.eq(now()))) + .execute(&mut *conn); + drop(conn); + log_event(Event::JobUpdated(handle_2)); + }; + JOBS_PRE.run(id, task_pre, finish_pre).await; + + // FIXME: + // if two jobs are running the same build, + // canceling the one that has the lock on the build will + // start the build over + let (sender, receiver) = tokio::sync::oneshot::channel::(); + let task_build = async move { + let mut conn = connection().await; + let _ = diesel::update(jobs.find(id)) + .set(job_build_time_started.eq(now())) + .execute(&mut *conn); + drop(conn); + nix::build(&drv).await?; + Ok::<(), Error>(()) + }; + let finish_build = move |r| async move { + let status = match r { + Some(Ok(())) => "success", + Some(Err(_)) => "error", // TODO: log error + None => "canceled", + }; + sender.send(status.to_string()).unwrap_or_else(|_| panic!()); let mut conn = connection().await; let _ = diesel::update(jobs.find(id)) - .set(job_status.eq("waiting")) + .set(( + job_build_status.eq(status), + job_build_time_finished.eq(now()), + )) .execute(&mut *conn); drop(conn); + log_event(Event::JobUpdated(handle_3)); + }; + JOBS_BUILD.run(id, task_build, finish_build).await; - log_event(Event::JobUpdated(handle_bis.clone())); + let task_post = async move { + // wait for `pre` to finish + JOBS_PRE.wait(&id).await; + // wait for the build to finish 
+ JOBS_BUILD.wait(&id).await; + let build_status = receiver.await.unwrap_or_else(|_| panic!()); - BUILDS.wait(&self.job_build).await; + let mut conn = connection().await; + let _ = diesel::update(jobs.find(id)) + .set(job_post_time_started.eq(now())) + .execute(&mut *conn); + drop(conn); - { - // run action `end` - let mut conn = connection().await; - let _ = diesel::update(jobs.find(id)) - .set(job_status.eq("end")) - .execute(&mut *conn); - drop(conn); + // abort if actions are not defined + let evaluation = job_2.evaluation().await?; + let path = match &evaluation.evaluation_actions_path { + None => return Ok(()), + Some(path) => path, + }; - log_event(Event::JobUpdated(handle_bis.clone())); + let jobset = evaluation.jobset().await?; + let project = jobset.project().await?; - let input = self.mk_input().await?; + let input = job_2.mk_input(&build_status).await?; - let log = if Path::new(&format!("{}/end", path)).exists() { - let (_, log) = actions::run( - &project.project_key, - &format!("{}/end", path), - &format!("{}/secrets", path), - &input, - ) - .await?; - log - } else { - serde_json::to_string_pretty(&input).unwrap() // TODO - }; + let log = if Path::new(&format!("{}/post", path)).exists() { + let (_, log) = actions::run( + &project.project_key, + &format!("{}/post", path), + &format!("{}/secrets", path), + &input, + ) + .await?; + log + } else { + serde_json::to_string_pretty(&input).unwrap() // TODO + }; - // save the log - let _ = Log::new(handles::Log::JobEnd(handle_bis), log).await?; - } + // save the log + let _ = Log::new(handles::Log::JobPost(handle_4), log).await?; Ok::<(), Error>(()) }; - let f = move |r| async move { + let finish_post = move |r| async move { let status = match r { Some(Ok(())) => "success", Some(Err(_)) => "error", // TODO: log error @@ -190,11 +248,11 @@ impl Job { }; let mut conn = connection().await; let _ = diesel::update(jobs.find(id)) - .set(job_status.eq(status)) + .set((job_post_status.eq(status), job_post_time_finished.eq(now()))) .execute(&mut *conn); drop(conn); - log_event(Event::JobUpdated(handle)); + log_event(Event::JobUpdated(handle_5)); }; - JOBS.run(id, task, f).await; + JOBS_POST.run(id, task_post, finish_post).await; } } diff --git a/typhon/src/jobsets.rs b/typhon/src/jobsets.rs index 0eef1082..ffca480c 100644 --- a/typhon/src/jobsets.rs +++ b/typhon/src/jobsets.rs @@ -6,7 +6,6 @@ use crate::nix; use crate::schema::evaluations::dsl::*; use crate::schema::jobsets::dsl::*; use crate::schema::projects::dsl::*; -use crate::time; use crate::{handles, responses}; use crate::{log_event, Event}; @@ -15,14 +14,15 @@ use serde::Deserialize; #[derive(Deserialize)] pub struct JobsetDecl { - pub flake: String, + pub flake_url: String, + pub legacy: bool, } impl Jobset { pub async fn evaluate(&self, force: bool) -> Result { let project = self.project().await?; - let flake_locked = nix::lock(&self.jobset_flake).await?; + let flake_url_locked = nix::lock(&self.jobset_flake_url).await?; let mut conn = connection().await; let evaluation = conn.transaction::(|conn| { @@ -32,7 +32,7 @@ impl Jobset { if !force { match old_evaluations.last() { Some(eval) => { - if eval.evaluation_flake_locked == flake_locked { + if eval.evaluation_flake_url_locked == flake_url_locked { return Ok(eval.clone()); } } @@ -41,10 +41,10 @@ impl Jobset { } let n = old_evaluations.len() as i32 + 1; let status = "pending".to_string(); - let time = time::timestamp(); + let time = crate::time::now(); let new_evaluation = NewEvaluation { evaluation_actions_path: 
project.project_actions_path.as_ref().map(|s| s.as_str()), - evaluation_flake_locked: &flake_locked, + evaluation_flake_url_locked: &flake_url_locked, evaluation_jobset: self.jobset_id, evaluation_num: n, evaluation_status: &status, @@ -103,8 +103,9 @@ impl Jobset { .collect(); drop(conn); Ok(responses::JobsetInfo { - flake: self.jobset_flake.clone(), + flake_url: self.jobset_flake_url.clone(), evaluations: evals, + legacy: self.jobset_legacy, }) } diff --git a/typhon/src/lib.rs b/typhon/src/lib.rs index c2143712..cecf341c 100644 --- a/typhon/src/lib.rs +++ b/typhon/src/lib.rs @@ -1,5 +1,4 @@ mod actions; -mod builds; mod error; mod evaluations; mod gcroots; @@ -99,8 +98,9 @@ pub static SETTINGS: Lazy = Lazy::new(|| { } }); pub static EVALUATIONS: Lazy> = Lazy::new(tasks::Tasks::new); -pub static BUILDS: Lazy> = Lazy::new(tasks::Tasks::new); -pub static JOBS: Lazy> = Lazy::new(tasks::Tasks::new); +pub static JOBS_PRE: Lazy> = Lazy::new(tasks::Tasks::new); +pub static JOBS_BUILD: Lazy> = Lazy::new(tasks::Tasks::new); +pub static JOBS_POST: Lazy> = Lazy::new(tasks::Tasks::new); pub static CONNECTION: Lazy = Lazy::new(|| { use diesel::Connection as _; let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set"); @@ -160,10 +160,8 @@ pub fn authorize_request(user: &User, req: &requests::Request) -> bool { | requests::Request::Evaluation(_, requests::Evaluation::Info) | requests::Request::Evaluation(_, requests::Evaluation::Log) | requests::Request::Job(_, requests::Job::Info) - | requests::Request::Job(_, requests::Job::LogBegin) - | requests::Request::Job(_, requests::Job::LogEnd) - | requests::Request::Build(_, requests::Build::Info) - | requests::Request::Build(_, requests::Build::NixLog) + | requests::Request::Job(_, requests::Job::LogPre) + | requests::Request::Job(_, requests::Job::LogPost) | requests::Request::Login(_) => true, _ => user.is_admin(), } @@ -189,8 +187,8 @@ pub async fn handle_request_aux(user: &User, req: &requests::Request) -> Result< project.refresh().await?; Response::Ok } - requests::Project::SetDecl(flake) => { - project.set_decl(&flake).await?; + requests::Project::SetDecl(decl) => { + project.set_decl(&decl.flake_url, decl.legacy).await?; Response::Ok } requests::Project::SetPrivateKey(key) => { @@ -237,28 +235,17 @@ pub async fn handle_request_aux(user: &User, req: &requests::Request) -> Result< job.cancel().await?; Response::Ok } - requests::Job::Info => Response::JobInfo(job.info().await?), - requests::Job::LogBegin => { - let log = Log::get(handles::Log::JobBegin(job_handle.clone())).await?; + requests::Job::Info => Response::JobInfo(job.info()), + requests::Job::LogPre => { + let log = Log::get(handles::Log::JobPre(job_handle.clone())).await?; Response::Log(log.log_stderr) } - requests::Job::LogEnd => { - let log = Log::get(handles::Log::JobEnd(job_handle.clone())).await?; + requests::Job::LogPost => { + let log = Log::get(handles::Log::JobPost(job_handle.clone())).await?; Response::Log(log.log_stderr) } } } - requests::Request::Build(build_handle, req) => { - let build = Build::get(&build_handle).await?; - match req { - requests::Build::Cancel => { - build.cancel().await?; - Response::Ok - } - requests::Build::Info => Response::BuildInfo(build.info()?), - requests::Build::NixLog => Response::Log(build.nixlog().await?), - } - } requests::Request::Login(password) => { let hash = digest(password.as_bytes()); if hash == SETTINGS.hashed_password { diff --git a/typhon/src/logs.rs b/typhon/src/logs.rs index 51642a48..2244c960 100644 --- 
a/typhon/src/logs.rs +++ b/typhon/src/logs.rs @@ -8,8 +8,8 @@ use typhon_types::*; fn get_log_type(log: &handles::Log) -> &'static str { match log { handles::Log::Evaluation(_) => "evaluation", - handles::Log::JobBegin(_) => "job_begin", - handles::Log::JobEnd(_) => "job_end", + handles::Log::JobPre(_) => "job_pre", + handles::Log::JobPost(_) => "job_post", } } @@ -27,7 +27,7 @@ impl Log { let evaluation = Evaluation::get(&h).await?; new_log.log_evaluation = Some(evaluation.evaluation_id); } - handles::Log::JobBegin(h) | handles::Log::JobEnd(h) => { + handles::Log::JobPre(h) | handles::Log::JobPost(h) => { let job = Job::get(&h).await?; new_log.log_job = Some(job.job_id); } @@ -48,7 +48,7 @@ impl Log { req.filter(log_evaluation.eq(Some(evaluation.evaluation_id))) .first::(&mut *conn) } - handles::Log::JobBegin(h) | handles::Log::JobEnd(h) => { + handles::Log::JobPre(h) | handles::Log::JobPost(h) => { let job = Job::get(&h).await?; let mut conn = connection().await; req.filter(log_job.eq(Some(job.job_id))) diff --git a/typhon/src/models.rs b/typhon/src/models.rs index bb264e39..a1c627ba 100644 --- a/typhon/src/models.rs +++ b/typhon/src/models.rs @@ -1,15 +1,16 @@ -use crate::schema::{builds, evaluations, jobs, jobsets, logs, projects}; +use crate::schema::{evaluations, jobs, jobsets, logs, projects}; use diesel::prelude::*; #[derive(Queryable, Clone)] pub struct Project { pub project_id: i32, pub project_actions_path: Option, - pub project_decl: String, - pub project_decl_locked: String, pub project_description: String, + pub project_flake_url: String, + pub project_flake_url_locked: String, pub project_homepage: String, pub project_key: String, + pub project_legacy: bool, pub project_name: String, pub project_title: String, } @@ -17,15 +18,17 @@ pub struct Project { #[derive(Insertable)] #[diesel(table_name = projects)] pub struct NewProject<'a> { + pub project_flake_url: &'a str, pub project_key: &'a str, + pub project_legacy: bool, pub project_name: &'a str, - pub project_decl: &'a str, } #[derive(Queryable, Clone)] pub struct Jobset { pub jobset_id: i32, - pub jobset_flake: String, + pub jobset_flake_url: String, + pub jobset_legacy: bool, pub jobset_name: String, pub jobset_project: i32, } @@ -33,7 +36,8 @@ pub struct Jobset { #[derive(Insertable)] #[diesel(table_name = jobsets)] pub struct NewJobset<'a> { - pub jobset_flake: &'a str, + pub jobset_flake_url: &'a str, + pub jobset_legacy: bool, pub jobset_name: &'a str, pub jobset_project: i32, } @@ -42,62 +46,66 @@ pub struct NewJobset<'a> { pub struct Evaluation { pub evaluation_id: i32, pub evaluation_actions_path: Option, - pub evaluation_flake_locked: String, + pub evaluation_flake_url_locked: String, pub evaluation_jobset: i32, pub evaluation_num: i32, pub evaluation_status: String, pub evaluation_time_created: i64, + pub evaluation_time_finished: Option, } #[derive(Insertable)] #[diesel(table_name = evaluations)] pub struct NewEvaluation<'a> { pub evaluation_actions_path: Option<&'a str>, - pub evaluation_flake_locked: &'a str, + pub evaluation_flake_url_locked: &'a str, pub evaluation_jobset: i32, pub evaluation_num: i32, pub evaluation_status: &'a str, pub evaluation_time_created: i64, + // pub evaluation_time_finished: Option, } #[derive(Queryable, Clone)] pub struct Job { pub job_id: i32, - pub job_build: i32, + pub job_build_drv: String, + pub job_build_out: String, + pub job_build_status: String, + pub job_build_time_finished: Option, + pub job_build_time_started: Option, pub job_dist: bool, pub job_evaluation: i32, pub 
diff --git a/typhon/src/nix.rs b/typhon/src/nix.rs
index a5112d01..41495ae7 100644
--- a/typhon/src/nix.rs
+++ b/typhon/src/nix.rs
@@ -7,6 +7,16 @@ use tokio::io::AsyncReadExt;
 use tokio::io::BufReader;
 use tokio::process::Command;

+#[derive(Debug)]
+pub enum Expr {
+    Flake {
+        flake_url: String,
+        path: String,
+        legacy: bool,
+    },
+    Path(String),
+}
+
 #[derive(Debug)]
 pub enum Error {
     SerdeJson(serde_json::Error),
@@ -19,9 +29,7 @@ pub enum Error {
         stdout: String,
         stderr: String,
     },
-    ExpectedDrvGotAttrset {
-        expr: String,
-    },
+    ExpectedDrvGotAttrset(Expr),
     BuildFailed,
 }
@@ -174,21 +182,35 @@ pub async fn build(path: &DrvPath) -> Result {
     }
 }

-const JSON_PARSE_ERROR: &str = "nix: failed to parse JSON";
-
 /// Runs `nix show-derivation [expr]` and parse its stdout as JSON.
 /// Note that [expr] can evaluates to one unique derivation or to an
 /// attrset of [n] derivations. The resulting JSON will be an object
 /// with one or [n] keys. The keys are `.drv` paths, the values are
 /// the derivation themselves.
-pub async fn derivation_json(expr: &str) -> Result<serde_json::Value, Error> {
-    Ok(serde_json::from_str(
-        &Command::nix(["show-derivation"])
-            .arg(expr)
-            .sync_stdout()
-            .await?,
-    )
-    .expect(JSON_PARSE_ERROR))
+pub async fn derivation_json(expr: &Expr) -> Result<serde_json::Value, Error> {
+    let mut cmd = match expr {
+        Expr::Flake {
+            flake_url,
+            path,
+            legacy,
+        } => {
+            if *legacy {
+                Command::nix([
+                    "derivation",
+                    "show",
+                    "--no-write-lock-file",
+                    "--override-input",
+                    "x",
+                    flake_url,
+                    &format!("{}#{}", env!("TYPHON_FLAKE"), path),
+                ])
+            } else {
+                Command::nix(["derivation", "show", &format!("{}#{}", flake_url, path)])
+            }
+        }
+        Expr::Path(path) => Command::nix(["derivation", "show", path]),
+    };
+    Ok(serde_json::from_str(&cmd.sync_stdout().await?).unwrap())
 }

 #[derive(Clone, Debug, PartialEq, Hash, Eq)]
@@ -257,12 +279,7 @@ impl Derivation {
                 ),
             })?
             .iter()
-            .map(|(name, path)| {
-                (
-                    name.clone(),
-                    path["path"].as_str().expect(JSON_PARSE_ERROR).into(),
-                )
-            }),
+            .map(|(name, path)| (name.clone(), path["path"].as_str().unwrap().into())),
         ),
     })
 }

@@ -270,31 +287,73 @@
 /// Here, we assume [expr] evaluates to a derivation, not an attrset
 /// of derivations.
-pub async fn derivation(expr: &str) -> Result<Derivation, Error> {
-    let json = derivation_json(expr).await?;
+pub async fn derivation(expr: Expr) -> Result<Derivation, Error> {
+    let json = derivation_json(&expr).await?;
     if let [(path, derivation)] = *json
         .as_object()
-        .expect(JSON_PARSE_ERROR)
+        .unwrap()
         .iter()
         .collect::<Vec<_>>()
         .as_slice()
     {
         Derivation::parse(path, derivation)
     } else {
-        Err(Error::ExpectedDrvGotAttrset { expr: expr.into() })
+        Err(Error::ExpectedDrvGotAttrset(expr))
     }
 }

-pub async fn eval(expr: String) -> Result<serde_json::Value, Error> {
+pub async fn eval(flake_url: &str, path: &str, legacy: bool) -> Result<serde_json::Value, Error> {
     Ok(serde_json::from_str(
-        &Command::nix(["eval", "--json"])
-            .arg(expr)
-            .sync_stdout()
-            .await?
-            .to_string(),
+        &(if legacy {
+            Command::nix([
+                "eval",
+                "--json",
+                "--no-write-lock-file",
+                "--override-input",
+                "x",
+                flake_url,
+                &format!("{}#{}", env!("TYPHON_FLAKE"), path),
+            ])
+        } else {
+            Command::nix(["eval", "--json", &format!("{}#{}", flake_url, path)])
+        })
+        .sync_stdout()
+        .await?
+        .to_string(),
     )?)
 }

+pub type NewJobs = HashMap<(String, String), (Derivation, bool)>;
+
+pub async fn eval_jobs(flake_url: &str, legacy: bool) -> Result<NewJobs, Error> {
+    let json = eval(flake_url, "typhonJobs", legacy).await?;
+    let mut jobs: HashMap<(String, String), (Derivation, bool)> = HashMap::new();
+    for system in json.as_object().unwrap().keys() {
+        for name in json[system].as_object().unwrap().keys() {
+            jobs.insert(
+                (system.clone(), name.clone()),
+                (
+                    derivation(Expr::Flake {
+                        flake_url: flake_url.to_string(),
+                        legacy,
+                        path: format!("typhonJobs.{system}.{name}"),
+                    })
+                    .await?,
+                    eval(
+                        flake_url,
+                        &format!("typhonJobs.{system}.{name}.passthru.typhonDist"),
+                        legacy,
+                    )
+                    .await
+                    .map(|json| json.as_bool().unwrap_or(false))
+                    .unwrap_or(false),
+                ),
+            );
+        }
+    }
+    Ok(jobs)
+}
+
 pub fn current_system() -> String {
     String::from_utf8(
         std::process::Command::new("nix")
@@ -313,16 +372,31 @@ pub fn current_system() -> String {
 }

 pub async fn lock(flake_url: &String) -> Result<String, Error> {
-    let output = Command::nix(["flake", "metadata", "--refresh", "--json"])
-        .arg(flake_url.clone())
-        .sync_stdout()
-        .await?;
-    Ok(
-        serde_json::from_str::<serde_json::Value>(&output).expect(JSON_PARSE_ERROR)["url"]
-            .as_str()
-            .expect(JSON_PARSE_ERROR)
-            .into(),
-    )
+    let output = Command::nix([
+        "flake",
+        "lock",
+        "--output-lock-file",
+        "/dev/stdout",
+        "--override-input",
+        "x",
+        flake_url,
+        env!("TYPHON_FLAKE"),
+    ])
+    .sync_stdout()
+    .await?;
+    let locked_info = &serde_json::from_str::<serde_json::Value>(&output).unwrap()["nodes"]["x"]["locked"];
+    let output = Command::nix([
+        "eval",
+        "--raw",
+        "--expr",
+        &format!(
+            "builtins.flakeRefToString (builtins.fromJSON ''{}'')",
+            locked_info
+        ),
+    ])
+    .sync_stdout()
+    .await?;
+    Ok(output)
 }

 pub async fn log(drv: String) -> Result<String, Error> {
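`nix::Expr` is the pivot of this file: either a flake output plus a `legacy` switch, or a raw store path. Roughly how a caller is expected to use it; the flake URL and job path below are only examples:

```rust
// Sketch: resolving one job derivation through the new Expr type.
// With legacy = false this runs `nix derivation show <url>#<path>`;
// with legacy = true the URL is injected into the TYPHON_FLAKE shim
// via `--override-input x <url>`, as in derivation_json above.
use crate::nix::{self, Expr};

async fn job_drv() -> Result<nix::Derivation, nix::Error> {
    nix::derivation(Expr::Flake {
        flake_url: "github:org/repo".to_string(), // illustrative URL
        path: "typhonJobs.x86_64-linux.hello".to_string(),
        legacy: false,
    })
    .await
}
```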
diff --git a/typhon/src/projects.rs b/typhon/src/projects.rs
index 12879a38..80cb65ac 100644
--- a/typhon/src/projects.rs
+++ b/typhon/src/projects.rs
@@ -20,7 +20,10 @@ use std::str::FromStr;
 use typhon_types::responses::ProjectMetadata;

 impl Project {
-    pub async fn create(project_handle: &handles::Project, decl: &String) -> Result<(), Error> {
+    pub async fn create(
+        project_handle: &handles::Project,
+        flake_url: &String,
+    ) -> Result<(), Error> {
         if !project_handle.legal() {
             return Err(Error::IllegalProjectHandle(project_handle.clone()));
         }
@@ -32,9 +35,10 @@ impl Project {
             .expose_secret()
             .clone();
         let new_project = NewProject {
-            project_name: &project_handle.project,
+            project_flake_url: flake_url,
             project_key: &key,
-            project_decl: decl,
+            project_legacy: false,
+            project_name: &project_handle.project,
         };
         let mut conn = connection().await;
         diesel::insert_into(projects)
@@ -51,7 +55,8 @@ impl Project {
         HashMap::from([(
             "main".to_string(),
             JobsetDecl {
-                flake: self.project_decl.clone(),
+                flake_url: self.project_flake_url.clone(),
+                legacy: false,
             },
         )])
     }
@@ -96,16 +101,17 @@ impl Project {
             .to_public()
             .to_string();
         Ok(responses::ProjectInfo {
+            actions_path: self.project_actions_path.clone(),
+            flake_url: self.project_flake_url.clone(),
+            flake_url_locked: self.project_flake_url_locked.clone(),
+            jobsets: jobsets_names,
+            legacy: self.project_legacy,
             metadata: responses::ProjectMetadata {
                 title: self.project_title.clone(),
                 description: self.project_description.clone(),
                 homepage: self.project_homepage.clone(),
             },
-            jobsets: jobsets_names,
             public_key,
-            decl: self.project_decl.clone(),
-            decl_locked: self.project_decl_locked.clone(),
-            actions_path: self.project_actions_path.clone(),
         })
     }
@@ -129,8 +135,7 @@ impl Project {
     }

     pub async fn refresh(&self) -> Result<(), Error> {
-        let flake_locked = nix::lock(&self.project_decl).await?;
-        let expr = format!("{}#typhonProject", flake_locked);
+        let flake_url_locked = nix::lock(&self.project_flake_url).await?;

         #[derive(Deserialize)]
         struct TyphonProject {
             actions: Option<HashMap<String, String>>,
             metadata: ProjectMetadata,
         }

-        let TyphonProject { actions, metadata }: TyphonProject =
-            serde_json::from_value(nix::eval(expr).await?).expect("TODO");
+        let TyphonProject { actions, metadata } = serde_json::from_value(
+            nix::eval(&flake_url_locked, &"typhonProject", self.project_legacy).await?,
+        )
+        .expect("TODO");

         let actions: Option<&String> = actions.as_ref().map(|m| m.get(&*CURRENT_SYSTEM)).flatten();

         let actions_path = if let Some(v) = actions {
-            let drv = nix::derivation(&v).await?;
-            nix::build(&drv.path).await?["out"].clone()
+            let drv = nix::derivation(nix::Expr::Path(v.clone())).await?;
+            Some(nix::build(&drv.path).await?["out"].clone())
             // TODO: check public key used to encrypt secrets
         } else {
-            String::new()
+            None
         };

         let mut conn = connection().await;
@@ -159,7 +166,7 @@ impl Project {
                 project_description.eq(metadata.description),
                 project_homepage.eq(metadata.homepage),
                 project_actions_path.eq(actions_path),
-                project_decl_locked.eq(flake_locked),
+                project_flake_url_locked.eq(flake_url_locked),
             ))
             .execute(&mut *conn)?;
         gcroots::update(&mut *conn);
@@ -169,10 +176,10 @@ impl Project {
         Ok(())
     }

-    pub async fn set_decl(&self, flake: &String) -> Result<(), Error> {
+    pub async fn set_decl(&self, flake: &String, legacy: bool) -> Result<(), Error> {
         let mut conn = connection().await;
         diesel::update(projects.find(self.project_id))
-            .set(project_decl.eq(flake))
+            .set((project_flake_url.eq(flake), project_legacy.eq(legacy)))
             .execute(&mut *conn)?;
         drop(conn);
         log_event(Event::ProjectUpdated(self.handle()));
@@ -232,7 +239,7 @@ impl Project {
             .find(|&jobset| jobset.jobset_name == *name)
             .map(|jobset| {
                 diesel::update(jobsets.find(jobset.jobset_id))
-                    .set(jobset_flake_url.eq(decl.flake_url.clone()))
-                    .set(jobset_flake.eq(decl.flake.clone()))
+                    .set(jobset_flake_url.eq(decl.flake_url.clone()))
                     .execute(conn)?;
                 Ok::<(), Error>(())
             })
@@ -240,7 +247,8 @@ impl Project {
             let new_jobset = NewJobset {
                 jobset_project: self.project_id,
                 jobset_name: name,
-                jobset_flake: &decl.flake,
+                jobset_flake_url: &decl.flake_url,
+                jobset_legacy: decl.legacy,
             };
             diesel::insert_into(jobsets)
                 .values(&new_jobset)
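`JobsetDecl` now pairs a `flake_url` with a `legacy` flag, and `set_decl` persists both. A stand-alone sketch of the shape; the struct here is a local stand-in for the `typhon_types` one so the snippet compiles on its own:

```rust
use std::collections::HashMap;

// Local stand-in for typhon_types::JobsetDecl, mirroring the fields used
// in default_jobsets and update_jobsets above.
struct JobsetDecl {
    flake_url: String,
    legacy: bool,
}

// Mirrors Project::default_jobsets: one "main" jobset pointing at the
// project's own flake, declared as non-legacy.
fn default_jobsets(project_flake_url: &str) -> HashMap<String, JobsetDecl> {
    HashMap::from([(
        "main".to_string(),
        JobsetDecl {
            flake_url: project_flake_url.to_string(),
            legacy: false,
        },
    )])
}

fn main() {
    let decls = default_jobsets("github:org/repo"); // illustrative URL
    assert_eq!(decls["main"].flake_url, "github:org/repo");
    assert!(!decls["main"].legacy);
}
```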
diff --git a/typhon/src/schema.rs b/typhon/src/schema.rs
index ba8bafe5..d1c3bd24 100644
--- a/typhon/src/schema.rs
+++ b/typhon/src/schema.rs
@@ -1,43 +1,45 @@
 // @generated automatically by Diesel CLI.

-diesel::table! {
-    builds (build_id) {
-        build_id -> Integer,
-        build_drv -> Text,
-        build_hash -> Text,
-        build_out -> Text,
-        build_status -> Text,
-    }
-}
-
 diesel::table! {
     evaluations (evaluation_id) {
         evaluation_id -> Integer,
         evaluation_actions_path -> Nullable<Text>,
-        evaluation_flake_locked -> Text,
+        evaluation_flake_url_locked -> Text,
         evaluation_jobset -> Integer,
         evaluation_num -> Integer,
         evaluation_status -> Text,
         evaluation_time_created -> BigInt,
+        evaluation_time_finished -> Nullable<BigInt>,
     }
 }

 diesel::table! {
     jobs (job_id) {
         job_id -> Integer,
-        job_build -> Integer,
+        job_build_drv -> Text,
+        job_build_out -> Text,
+        job_build_status -> Text,
+        job_build_time_finished -> Nullable<BigInt>,
+        job_build_time_started -> Nullable<BigInt>,
         job_dist -> Bool,
         job_evaluation -> Integer,
         job_name -> Text,
-        job_status -> Text,
+        job_post_status -> Text,
+        job_post_time_finished -> Nullable<BigInt>,
+        job_post_time_started -> Nullable<BigInt>,
+        job_pre_status -> Text,
+        job_pre_time_finished -> Nullable<BigInt>,
+        job_pre_time_started -> Nullable<BigInt>,
         job_system -> Text,
+        job_time_created -> BigInt,
     }
 }

 diesel::table! {
     jobsets (jobset_id) {
         jobset_id -> Integer,
-        jobset_flake -> Text,
+        jobset_flake_url -> Text,
+        jobset_legacy -> Bool,
         jobset_name -> Text,
         jobset_project -> Integer,
     }
@@ -57,21 +59,21 @@ diesel::table! {
     projects (project_id) {
         project_id -> Integer,
         project_actions_path -> Nullable<Text>,
-        project_decl -> Text,
-        project_decl_locked -> Text,
         project_description -> Text,
+        project_flake_url -> Text,
+        project_flake_url_locked -> Text,
         project_homepage -> Text,
         project_key -> Text,
+        project_legacy -> Bool,
         project_name -> Text,
         project_title -> Text,
     }
 }

 diesel::joinable!(evaluations -> jobsets (evaluation_jobset));
-diesel::joinable!(jobs -> builds (job_build));
 diesel::joinable!(jobs -> evaluations (job_evaluation));
 diesel::joinable!(jobsets -> projects (jobset_project));
 diesel::joinable!(logs -> evaluations (log_evaluation));
 diesel::joinable!(logs -> jobs (log_job));

-diesel::allow_tables_to_appear_in_same_query!(builds, evaluations, jobs, jobsets, logs, projects,);
+diesel::allow_tables_to_appear_in_same_query!(evaluations, jobs, jobsets, logs, projects,);
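Since the build columns now live on `jobs`, queries that previously joined through `builds` become plain column selections. A hedged Diesel sketch against the schema above; the filter itself is illustrative:

```rust
// Sketch: selecting unfinished builds straight from the jobs table,
// where the old schema would have required a join on `builds`.
use crate::schema::jobs::dsl::*;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

fn unfinished_builds(conn: &mut SqliteConnection) -> QueryResult<Vec<(i32, String)>> {
    jobs.filter(job_build_time_finished.is_null())
        .select((job_id, job_build_drv))
        .load::<(i32, String)>(conn)
}
```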
diff --git a/typhon/src/time.rs b/typhon/src/time.rs
index c7d65334..fa51b798 100644
--- a/typhon/src/time.rs
+++ b/typhon/src/time.rs
@@ -1,6 +1,6 @@
 use std::time::{SystemTime, UNIX_EPOCH};

-pub fn timestamp() -> i64 {
+pub fn now() -> i64 {
     SystemTime::now()
         .duration_since(UNIX_EPOCH)
         .expect("Time went backwards")