From 32a6582481b2faadcc98f82a6bb9b422fd1ad6ec Mon Sep 17 00:00:00 2001 From: Paul-Nicolas Madelaine Date: Wed, 27 Sep 2023 18:24:41 +0200 Subject: [PATCH] (wip) refactor --- doc/src/concepts.md | 10 +- flake.lock | 88 ++++++- flake.nix | 2 + nix/devshells.nix | 1 + nix/lib/github/mkGithubProject.nix | 4 +- nix/lib/mkProject.nix | 4 +- nix/nixpkgs.nix | 5 +- nix/packages/server.nix | 1 + typhon-flake/flake.nix | 12 + typhon-types/src/lib.rs | 65 ++--- typhon-webapp/src/job.rs | 10 +- .../migrations/00000000000000_typhon/down.sql | 1 - .../migrations/00000000000000_typhon/up.sql | 26 +- typhon/src/api.rs | 39 +-- typhon/src/builds.rs | 81 ------ typhon/src/error.rs | 13 +- typhon/src/evaluations.rs | 53 ++-- typhon/src/gcroots.rs | 8 +- typhon/src/jobs.rs | 231 +++++++++++------- typhon/src/jobsets.rs | 4 +- typhon/src/lib.rs | 35 +-- typhon/src/logs.rs | 8 +- typhon/src/models.rs | 52 ++-- typhon/src/nix.rs | 35 ++- typhon/src/schema.rs | 29 +-- typhon/src/time.rs | 2 +- 26 files changed, 420 insertions(+), 399 deletions(-) create mode 100644 typhon-flake/flake.nix delete mode 100644 typhon/src/builds.rs diff --git a/doc/src/concepts.md b/doc/src/concepts.md index 7c5be0ba..2761f255 100644 --- a/doc/src/concepts.md +++ b/doc/src/concepts.md @@ -24,9 +24,9 @@ are four actions a project can define. of your repository. These flakes must expose a `typhonJobs` attribute, that in turn declares jobs for you project. -- The `begin` and `end` actions are run at the beginning and end of all jobs of - your project. They are typically used to set statuses on your repository, but - can also be used for deployment. +- The `pre` and `post` actions are run before and after all jobs of your + project. They are typically used to set statuses on your repository, but can + also be used for deployment. - The `webhook` action is triggered by calls to a specific endpoint of the API. It outputs commands for Typhon to update jobsets or evaluate a jobset. 
It is @@ -51,5 +51,5 @@ is evaluated and the corresponding derivations are built. ## Jobs -Jobs are derivations defined in `typhonJobs`. A job run consists of the `begin` -action, the derivation build, and the `end` action. +Jobs are derivations defined in `typhonJobs`. A job run consists of the `pre` +action, the derivation build, and the `post` action. diff --git a/flake.lock b/flake.lock index 605f8c7d..b0ac8adf 100644 --- a/flake.lock +++ b/flake.lock @@ -45,6 +45,22 @@ "type": "github" } }, + "flake-compat_2": { + "flake": false, + "locked": { + "lastModified": 1673956053, + "narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, "flake-utils": { "inputs": { "systems": "systems" @@ -62,7 +78,76 @@ "type": "indirect" } }, + "lowdown-src": { + "flake": false, + "locked": { + "lastModified": 1633514407, + "narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=", + "owner": "kristapsdz", + "repo": "lowdown", + "rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8", + "type": "github" + }, + "original": { + "owner": "kristapsdz", + "repo": "lowdown", + "type": "github" + } + }, + "nix": { + "inputs": { + "flake-compat": "flake-compat_2", + "lowdown-src": "lowdown-src", + "nixpkgs": "nixpkgs", + "nixpkgs-regression": "nixpkgs-regression" + }, + "locked": { + "lastModified": 1695928433, + "narHash": "sha256-0LqIW6V/khHyZf4GlXcZZUzFs8Ot22eZej+pwn++4qw=", + "owner": "nixos", + "repo": "nix", + "rev": "b7e712f9fd3b35e7b75180fcfbe90dce6c9b06a4", + "type": "github" + }, + "original": { + "owner": "nixos", + "repo": "nix", + "type": "github" + } + }, "nixpkgs": { + "locked": { + "lastModified": 1695283060, + "narHash": "sha256-CJz71xhCLlRkdFUSQEL0pIAAfcnWFXMzd9vXhPrnrEg=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": 
"31ed632c692e6a36cfc18083b88ece892f863ed4", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-23.05-small", + "repo": "nixpkgs", + "type": "github" + } + }, + "nixpkgs-regression": { + "locked": { + "lastModified": 1643052045, + "narHash": "sha256-uGJ0VXIhWKGXxkeNnq4TvV3CIOkUJ3PAoLZ3HMzNVMw=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + }, + "original": { + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "215d4d0fd80ca5163643b03a33fde804a29cc1e2", + "type": "github" + } + }, + "nixpkgs_2": { "locked": { "lastModified": 1695145219, "narHash": "sha256-Eoe9IHbvmo5wEDeJXKFOpKUwxYJIOxKUesounVccNYk=", @@ -82,7 +167,8 @@ "crane": "crane", "flake-compat": "flake-compat", "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs", + "nix": "nix", + "nixpkgs": "nixpkgs_2", "rust-overlay": "rust-overlay" } }, diff --git a/flake.nix b/flake.nix index 19526416..575f2484 100644 --- a/flake.nix +++ b/flake.nix @@ -26,6 +26,8 @@ flake-utils.follows = "flake-utils"; }; }; + + nix.url = "github:nixos/nix"; }; outputs = inputs: import ./nix/outputs.nix {sources = inputs;}; diff --git a/nix/devshells.nix b/nix/devshells.nix index 1aefeb4b..ccfdb29c 100644 --- a/nix/devshells.nix +++ b/nix/devshells.nix @@ -25,6 +25,7 @@ in rec { ; }; DATABASE_URL = "sqlite:typhon.sqlite"; + TYPHON_FLAKE = ../typhon-flake; }; webapp = pkgs.mkShell { diff --git a/nix/lib/github/mkGithubProject.nix b/nix/lib/github/mkGithubProject.nix index d6fd55cb..70f10123 100644 --- a/nix/lib/github/mkGithubProject.nix +++ b/nix/lib/github/mkGithubProject.nix @@ -22,8 +22,8 @@ in { meta = {inherit title description homepage;}; actions = { jobsets = mkGithubJobsets {inherit owner repo;}; - begin = mkGithubStatus {inherit owner repo;}; - end = mkGithubStatus {inherit owner repo;}; + pre = mkGithubStatus {inherit owner repo;}; + post = mkGithubStatus {inherit owner repo;}; webhook = githubWebhook; }; inherit secrets; diff --git 
a/nix/lib/mkProject.nix b/nix/lib/mkProject.nix index 228d7dcf..8640c2bc 100644 --- a/nix/lib/mkProject.nix +++ b/nix/lib/mkProject.nix @@ -26,8 +26,8 @@ in { mkdir $out cd $out ${linkAction "jobsets"} - ${linkAction "begin"} - ${linkAction "end"} + ${linkAction "pre"} + ${linkAction "post"} ${linkAction "webhook"} ${linkSecrets} '' diff --git a/nix/nixpkgs.nix b/nix/nixpkgs.nix index ac68c61e..0bc7a3ba 100644 --- a/nix/nixpkgs.nix +++ b/nix/nixpkgs.nix @@ -4,5 +4,8 @@ }: import sources.nixpkgs { inherit system; - overlays = [(import sources.rust-overlay)]; + overlays = [ + (import sources.rust-overlay) + sources.nix.overlays.default + ]; } diff --git a/nix/packages/server.nix b/nix/packages/server.nix index 4b83ea1d..8b16be41 100644 --- a/nix/packages/server.nix +++ b/nix/packages/server.nix @@ -25,4 +25,5 @@ in cargoArtifacts ; buildInputs = [pkgs.sqlite.dev]; + TYPHON_FLAKE = ../../typhon-flake; } diff --git a/typhon-flake/flake.nix b/typhon-flake/flake.nix new file mode 100644 index 00000000..a2b5f986 --- /dev/null +++ b/typhon-flake/flake.nix @@ -0,0 +1,12 @@ +{ + inputs = { + input = { + url = ""; + flake = false; + }; + }; + outputs = { + self, + input, + }: {}; +} diff --git a/typhon-types/src/lib.rs b/typhon-types/src/lib.rs index 29afe290..dc78c430 100644 --- a/typhon-types/src/lib.rs +++ b/typhon-types/src/lib.rs @@ -31,14 +31,10 @@ pub mod handles { pub job: String, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] - pub struct Build { - pub build_hash: String, - } - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub enum Log { Evaluation(Evaluation), - JobBegin(Job), - JobEnd(Job), + JobPre(Job), + JobPost(Job), } macro_rules! 
impl_display { @@ -75,12 +71,6 @@ pub mod handles { [x.evaluation.into(), vec![x.job]].concat() } } - impl_display!(Build); - impl From for Vec { - fn from(x: Build) -> Self { - vec![x.build_hash] - } - } impl_display!(Log); impl From for Vec { fn from(x: Log) -> Self { @@ -88,14 +78,14 @@ pub mod handles { vec![ match x { Evaluation(_) => "evaluation", - JobBegin(_) => "job_begin", - JobEnd(_) => "job_end", + JobPre(_) => "job_begin", + JobPost(_) => "job_end", } .into(), match x { Evaluation(h) => h.to_string(), - JobBegin(h) => h.to_string(), - JobEnd(h) => h.to_string(), + JobPre(h) => h.to_string(), + JobPost(h) => h.to_string(), }, ] } @@ -123,9 +113,6 @@ pub mod handles { job, } } - pub fn build(build_hash: String) -> Build { - Build { build_hash } - } #[macro_export] macro_rules! pattern { @@ -186,15 +173,8 @@ pub mod requests { pub enum Job { Cancel, Info, - LogBegin, - LogEnd, - } - - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] - pub enum Build { - Cancel, - Info, - NixLog, + LogPre, + LogPost, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] @@ -208,7 +188,6 @@ pub mod requests { Jobset(handles::Jobset, Jobset), Evaluation(handles::Evaluation, Evaluation), Job(handles::Job, Job), - Build(handles::Build, Build), Login(String), } @@ -223,7 +202,6 @@ pub mod requests { Request::Jobset(h, req) => write!(f, "{:?} for jobset {}", req, h), Request::Evaluation(h, req) => write!(f, "{:?} for evaluation {}", req, h), Request::Job(h, req) => write!(f, "{:?} for job {}", req, h), - Request::Build(h, req) => write!(f, "{:?} for build {}", req, h), Request::Login(_) => write!(f, "Log in"), } } @@ -264,24 +242,28 @@ pub mod responses { pub actions_path: Option, pub flake_locked: String, pub jobs: Vec, + pub legacy: bool, pub status: String, pub time_created: i64, + pub time_finished: Option, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct JobInfo { - pub build_handle: super::handles::Build, - pub 
build_infos: BuildInfo, + pub build_drv: String, + pub build_out: String, + pub build_status: String, + pub build_time_finished: Option, + pub build_time_started: Option, pub dist: bool, - pub status: String, + pub post_status: String, + pub post_time_finished: Option, + pub post_time_started: Option, + pub pre_status: String, + pub pre_time_finished: Option, + pub pre_time_started: Option, pub system: String, - } - - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] - pub struct BuildInfo { - pub drv: String, - pub out: String, - pub status: String, + pub time_created: i64, } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] @@ -294,7 +276,6 @@ pub mod responses { JobsetInfo(JobsetInfo), EvaluationInfo(EvaluationInfo), JobInfo(JobInfo), - BuildInfo(BuildInfo), Log(String), Login { token: String }, } @@ -326,6 +307,4 @@ pub enum Event { EvaluationNew(handles::Evaluation), EvaluationFinished(handles::Evaluation), JobUpdated(handles::Job), - BuildNew(handles::Build), - BuildFinished(handles::Build), } diff --git a/typhon-webapp/src/job.rs b/typhon-webapp/src/job.rs index 8a74aa4c..587f78a9 100644 --- a/typhon-webapp/src/job.rs +++ b/typhon-webapp/src/job.rs @@ -152,13 +152,13 @@ pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders) { ); } Msg::GetInfo(info) => { - if info.status == "waiting" || info.status == "end" || info.status == "success" { + if info.pre_status != "pending" { orders.send_msg(Msg::FetchLogBegin); } - if info.status == "success" { + if info.post_status != "pending" { orders.send_msg(Msg::FetchLogEnd); } - let drv = info.build_infos.drv.clone(); + let drv = info.build_drv.clone(); orders .proxy(Msg::LogLine) .stream(fetch_logs_as_stream(drv.into())); @@ -215,7 +215,9 @@ fn view_job(model: &Model) -> Node { }, ] ], - p![format!("Status: {}", info.status)], + p![format!("Status (pre): {}", info.pre_status)], + p![format!("Status (build): {}", info.build_status)], + p![format!("Status 
(post): {}", info.post_status)], if info.dist { let api_url = SETTINGS.get().unwrap().api_server.url(false); let job = &model.handle.job; diff --git a/typhon/migrations/00000000000000_typhon/down.sql b/typhon/migrations/00000000000000_typhon/down.sql index 411ad333..0adcc214 100644 --- a/typhon/migrations/00000000000000_typhon/down.sql +++ b/typhon/migrations/00000000000000_typhon/down.sql @@ -2,5 +2,4 @@ DROP TABLE projects; DROP TABLE jobsets; DROP TABLE evaluations; DROP TABLE jobs; -DROP TABLE builds; DROP TABLE logs; diff --git a/typhon/migrations/00000000000000_typhon/up.sql b/typhon/migrations/00000000000000_typhon/up.sql index 02c5e6f8..f4fdf5a5 100644 --- a/typhon/migrations/00000000000000_typhon/up.sql +++ b/typhon/migrations/00000000000000_typhon/up.sql @@ -24,36 +24,40 @@ CREATE TABLE evaluations ( evaluation_actions_path TEXT, evaluation_flake_locked TEXT NOT NULL, evaluation_jobset INTEGER NOT NULL REFERENCES jobsets(jobset_id) ON DELETE CASCADE, + evaluation_legacy BOOL NOT NULL, evaluation_num INTEGER NOT NULL, evaluation_status TEXT NOT NULL CHECK(evaluation_status in ('pending', 'success', 'error', 'canceled')), evaluation_time_created BIGINT NOT NULL, + evaluation_time_finished BIGINT, UNIQUE(evaluation_jobset, evaluation_num) ); CREATE TABLE jobs ( job_id INTEGER NOT NULL PRIMARY KEY, - job_build INTEGER NOT NULL REFERENCES builds(build_id) ON DELETE CASCADE, + job_build_drv TEXT NOT NULL, + job_build_out TEXT NOT NULL, + job_build_status TEXT CHECK(job_build_status in ('pending', 'success', 'error', 'canceled')) NOT NULL, + job_build_time_finished BIGINT, + job_build_time_started BIGINT, job_dist BOOLEAN NOT NULL, job_evaluation INTEGER NOT NULL REFERENCES evaluations(evaluation_id) ON DELETE CASCADE, job_name TEXT NOT NULL, - job_status TEXT CHECK(job_status in ('begin', 'waiting', 'end', 'success', 'error', 'canceled')) NOT NULL, + job_post_status TEXT CHECK(job_post_status in ('waiting', 'pending', 'success', 'error', 'canceled')) NOT 
NULL, + job_post_time_finished BIGINT, + job_post_time_started BIGINT, + job_pre_status TEXT CHECK(job_pre_status in ('pending', 'success', 'error', 'canceled')) NOT NULL, + job_pre_time_finished BIGINT, + job_pre_time_started BIGINT, job_system TEXT NOT NULL, + job_time_created BIGINT NOT NULL, UNIQUE(job_evaluation, job_system, job_name) ); -CREATE TABLE builds ( - build_id INTEGER NOT NULL PRIMARY KEY, - build_drv TEXT NOT NULL UNIQUE, - build_hash TEXT NOT NULL UNIQUE, - build_out TEXT NOT NULL UNIQUE, - build_status TEXT NOT NULL CHECK(build_status in ('pending', 'success', 'error', 'canceled')) -); - CREATE TABLE logs ( log_id INTEGER NOT NULL PRIMARY KEY, log_evaluation INTEGER REFERENCES evaluations(evaluation_id) ON DELETE CASCADE, log_job INTEGER REFERENCES jobs(job_id) ON DELETE CASCADE, log_stderr TEXT NOT NULL, - log_type TEXT NOT NULL CHECK(log_type in ('build', 'evaluation', 'job_begin', 'job_end')), + log_type TEXT NOT NULL CHECK(log_type in ('build', 'evaluation', 'pre', 'post')), UNIQUE(log_evaluation, log_job, log_type) ); diff --git a/typhon/src/api.rs b/typhon/src/api.rs index 31e9f74a..a655436c 100644 --- a/typhon/src/api.rs +++ b/typhon/src/api.rs @@ -34,7 +34,6 @@ impl Responder for ResponseWrapper { JobsetEvaluate(payload) => web::Json(payload).respond_to(req), EvaluationInfo(payload) => web::Json(payload).respond_to(req), JobInfo(payload) => web::Json(payload).respond_to(req), - BuildInfo(payload) => web::Json(payload).respond_to(req), Log(payload) => web::Json(payload).respond_to(req), Login { token } => web::Json(token).respond_to(req), } @@ -150,34 +149,16 @@ r!( Job::Info, ); - job_log_begin(path: web::Path<(String,String,i32,String)>) => + job_log_pre(path: web::Path<(String,String,i32,String)>) => Request::Job( handles::job(path.into_inner()), - Job::LogBegin, + Job::LogPre, ); - job_log_end(path: web::Path<(String,String,i32,String)>) => + job_log_post(path: web::Path<(String,String,i32,String)>) => Request::Job( 
handles::job(path.into_inner()), - Job::LogEnd, - ); - - build_cancel(path: web::Path) => - Request::Build( - handles::build(path.into_inner()), - Build::Cancel, - ); - - build_info(path: web::Path) => - Request::Build( - handles::build(path.into_inner()), - Build::Info, - ); - - build_nix_log(path: web::Path) => - Request::Build( - handles::build(path.into_inner()), - Build::NixLog, + Job::LogPost, ); login(body: web::Json) => @@ -199,7 +180,7 @@ async fn dist( _ => Err(ResponseErrorWrapper(ResponseError::InternalError)), }?; if info.dist { - Ok(NamedFile::open_async(format!("{}/{}", info.build_infos.out, path)).await) + Ok(NamedFile::open_async(format!("{}/{}", info.build_out, path)).await) } else { Err(ResponseErrorWrapper(ResponseError::BadRequest( "typhonDist is not set".into(), @@ -321,19 +302,13 @@ pub fn config(cfg: &mut web::ServiceConfig) { web::scope("/jobs/{job}") .route("", web::get().to(job_info)) .route("/cancel", web::post().to(job_cancel)) - .route("/logs/begin", web::get().to(job_log_begin)) - .route("/logs/end", web::get().to(job_log_end)) + .route("/logs/pre", web::get().to(job_log_pre)) + .route("/logs/post", web::get().to(job_log_post)) .route("/dist/{path:.*}", web::get().to(dist)), ), ), ), ) - .service( - web::scope("/builds/{build}") - .route("", web::get().to(build_info)) - .route("/cancel", web::post().to(build_cancel)) - .route("/nixlog", web::get().to(build_nix_log)), - ) .route("/login", web::post().to(login)) .route( "{anything:.*}", diff --git a/typhon/src/builds.rs b/typhon/src/builds.rs deleted file mode 100644 index 32bde0cf..00000000 --- a/typhon/src/builds.rs +++ /dev/null @@ -1,81 +0,0 @@ -use crate::connection; -use crate::error::Error; -use crate::models::*; -use crate::nix; -use crate::schema::builds::dsl::*; -use crate::BUILDS; -use crate::{handles, responses}; -use crate::{log_event, Event}; -use diesel::prelude::*; - -impl From for responses::BuildInfo { - fn from(build: Build) -> responses::BuildInfo { - 
responses::BuildInfo { - drv: build.build_drv.clone(), - out: build.build_out.clone(), - status: build.build_status.clone(), - } - } -} - -impl Build { - pub async fn cancel(&self) -> Result<(), Error> { - let r = BUILDS.cancel(self.build_id).await; - if r { - Ok(()) - } else { - Err(Error::BuildNotRunning(self.handle())) - } - } - - pub async fn get(build_handle: &handles::Build) -> Result { - let build_hash_ = &build_handle.build_hash; - let mut conn = connection().await; - Ok(builds - .filter(build_hash.eq(build_hash_)) - .first::(&mut *conn) - .map_err(|_| { - Error::BuildNotFound(handles::Build { - build_hash: build_hash_.to_string(), - }) - })?) - } - - pub fn handle(&self) -> handles::Build { - handles::Build { - build_hash: self.build_hash.clone(), - } - } - - pub fn info(&self) -> Result { - Ok(self.clone().into()) - } - - pub async fn nixlog(&self) -> Result { - let log = nix::log(self.build_drv.clone()).await?; - Ok(log) - } - - pub async fn run(self) -> () { - let handle = self.handle(); - let id = self.build_id; - let drv = self.build_drv.clone(); - let task = async move { - nix::build(&nix::DrvPath::new(&drv)).await?; - Ok::<(), Error>(()) - }; - let f = move |r| async move { - let status = match r { - Some(Ok(())) => "success", - Some(Err(_)) => "error", // TODO: log error - None => "canceled", - }; - let conn = &mut *connection().await; - let _ = diesel::update(builds.find(id)) - .set(build_status.eq(status)) - .execute(conn); - log_event(Event::BuildFinished(handle)); - }; - BUILDS.run(id, task, f).await; - } -} diff --git a/typhon/src/error.rs b/typhon/src/error.rs index 84f4e12a..428e35ce 100644 --- a/typhon/src/error.rs +++ b/typhon/src/error.rs @@ -7,8 +7,6 @@ pub enum Error { AccessDenied, ActionError(actions::Error), BadJobsetDecl(String), - BuildNotFound(handles::Build), - BuildNotRunning(handles::Build), EvaluationNotFound(handles::Evaluation), EvaluationNotRunning(handles::Evaluation), IllegalProjectHandle(handles::Project), @@ -41,8 +39,6 
@@ impl std::fmt::Display for Error { AccessDenied => write!(f, "Access denied"), ActionError(e) => write!(f, "Action error: {}", e), BadJobsetDecl(s) => write!(f, "Bad jobset declaration: {}", s), - BuildNotFound(build_handle) => write!(f, "Build {} not found", build_handle), - BuildNotRunning(build_handle) => write!(f, "Build {} is not running", build_handle), IllegalProjectHandle(handle) => { write!(f, "The project name [{}] is illegal. Legal project names are sequences of alphanumerical characters that may contains dashes [-] or underscores [_].", handle.project) } @@ -101,15 +97,12 @@ impl Into for Error { ActionError(actions::Error::Unexpected) | UnexpectedDatabaseError(_) | Todo => { InternalError } - BuildNotFound(_) - | EvaluationNotFound(_) - | JobNotFound(_) - | JobsetNotFound(_) - | ProjectNotFound(_) => ResourceNotFound(format!("{}", self)), + EvaluationNotFound(_) | JobNotFound(_) | JobsetNotFound(_) | ProjectNotFound(_) => { + ResourceNotFound(format!("{}", self)) + } AccessDenied | ActionError(_) | BadJobsetDecl(_) - | BuildNotRunning(_) | EvaluationNotRunning(_) | JobNotRunning(_) | IllegalProjectHandle(_) diff --git a/typhon/src/evaluations.rs b/typhon/src/evaluations.rs index be1a812e..5046804c 100644 --- a/typhon/src/evaluations.rs +++ b/typhon/src/evaluations.rs @@ -3,7 +3,6 @@ use crate::error::Error; use crate::gcroots; use crate::models::*; use crate::nix; -use crate::schema::builds::dsl::*; use crate::schema::evaluations::dsl::*; use crate::schema::jobs::dsl::*; use crate::schema::jobsets::dsl::*; @@ -20,54 +19,36 @@ type JobDrvMap = HashMap; async fn evaluate_aux(id: i32, new_jobs: JobDrvMap) -> Result<(), Error> { let mut conn = connection().await; - let created_jobs = conn.transaction::, Error, _>(|conn| { + let now = crate::time::now(); + let created_jobs = conn.transaction::, Error, _>(|conn| { new_jobs .iter() .map(|(name, (drv, dist))| { - let hash = &drv.path.hash(); - let build = builds - .filter(build_hash.eq(hash)) - 
.load::(conn)? - .last() - .cloned() - .map(Ok::<_, Error>) - .unwrap_or_else(|| { - let build: Build = diesel::insert_into(builds) - .values(&NewBuild { - build_drv: &String::from(drv.path.clone()).as_str(), - build_hash: hash, - build_out: drv - .outputs - .iter() - .last() - .expect("TODO: derivations can have multiple outputs") - .1, - build_status: "pending", - }) - .get_result(conn)?; - log_event(Event::BuildNew(build.handle())); - Ok(build) - })?; - - // Create job - let job: Job = diesel::insert_into(jobs) + Ok(diesel::insert_into(jobs) .values(&NewJob { - job_build: build.build_id, + job_build_drv: &String::from(drv.path.clone()).as_str(), + job_build_out: drv + .outputs + .iter() + .last() + .expect("TODO: derivations can have multiple outputs") + .1, + job_build_status: "pending", job_dist: *dist, job_evaluation: id, job_name: &name, - job_status: "begin", + job_post_status: "pending", + job_pre_status: "pending", job_system: &*CURRENT_SYSTEM, + job_time_created: now, }) - .get_result(conn)?; - Ok((build, job)) + .get_result(conn)?) 
}) .collect() })?; drop(conn); - for (build, job) in created_jobs.into_iter() { - build.run().await; + for job in created_jobs.into_iter() { job.run().await; } @@ -121,8 +102,10 @@ impl Evaluation { actions_path: self.evaluation_actions_path.clone(), flake_locked: self.evaluation_flake_locked.clone(), jobs: jobs_names, + legacy: self.evaluation_legacy, status: self.evaluation_status.clone(), time_created: self.evaluation_time_created, + time_finished: self.evaluation_time_finished, }) } diff --git a/typhon/src/gcroots.rs b/typhon/src/gcroots.rs index f636db19..b054c433 100644 --- a/typhon/src/gcroots.rs +++ b/typhon/src/gcroots.rs @@ -1,5 +1,4 @@ use crate::models::*; -use crate::schema::builds::dsl::*; use crate::schema::evaluations::dsl::*; use crate::schema::jobs::dsl::*; use crate::schema::jobsets::dsl::*; @@ -51,11 +50,8 @@ fn update_aux(conn: &mut SqliteConnection) -> Result<(), Error> { .filter(job_evaluation.eq(evaluation.evaluation_id)) .load::(conn)? { - let build = builds - .filter(build_id.eq(job.job_build)) - .first::(conn)?; - gcroots.insert(build.build_drv.clone()); - gcroots.insert(build.build_out.clone()); + gcroots.insert(job.job_build_drv.clone()); + gcroots.insert(job.job_build_out.clone()); } } } diff --git a/typhon/src/jobs.rs b/typhon/src/jobs.rs index f5b38589..b0bfe9d0 100644 --- a/typhon/src/jobs.rs +++ b/typhon/src/jobs.rs @@ -3,26 +3,23 @@ use crate::connection; use crate::error::Error; use crate::handles; use crate::models::*; +use crate::nix; use crate::responses; -use crate::schema::builds::dsl::*; use crate::schema::evaluations::dsl::*; use crate::schema::jobs::dsl::*; use crate::CURRENT_SYSTEM; use crate::{log_event, Event}; -use crate::{BUILDS, JOBS, SETTINGS}; +use crate::{JOBS_BUILD, JOBS_POST, JOBS_PRE, SETTINGS}; use diesel::prelude::*; use serde_json::{json, Value}; use std::path::Path; impl Job { - pub async fn build(&self) -> Result { - let mut conn = connection().await; - Ok(builds.find(self.job_build).first::(&mut *conn)?) 
- } - pub async fn cancel(&self) -> Result<(), Error> { - let r = JOBS.cancel(self.job_id).await; - if r { + let a = JOBS_PRE.cancel(self.job_id).await; + let b = JOBS_BUILD.cancel(self.job_id).await; + let c = JOBS_POST.cancel(self.job_id).await; + if a || b || c { Ok(()) } else { Err(Error::JobNotRunning(self.handle().await?)) @@ -62,47 +59,67 @@ impl Job { }) } - pub async fn info(&self) -> Result { - let mut conn = connection().await; - let build = builds.find(self.job_build).first::(&mut *conn)?; - Ok(responses::JobInfo { - build_handle: handles::build(build.build_hash.clone()), - build_infos: build.into(), + pub fn info(&self) -> responses::JobInfo { + responses::JobInfo { + build_drv: self.job_build_drv.clone(), + build_out: self.job_build_out.clone(), + build_status: self.job_build_status.clone(), + build_time_finished: self.job_build_time_finished, + build_time_started: self.job_build_time_started, dist: self.job_dist, - status: self.job_status.clone(), + post_status: self.job_post_status.clone(), + post_time_finished: self.job_post_time_finished, + post_time_started: self.job_post_time_started, + pre_status: self.job_pre_status.clone(), + pre_time_finished: self.job_pre_time_finished, + pre_time_started: self.job_pre_time_started, system: self.job_system.clone(), - }) + time_created: self.job_time_created, + } } - async fn mk_input(&self) -> Result { + async fn mk_input(&self, build_status: &str) -> Result { let evaluation = self.evaluation().await?; let jobset = evaluation.jobset().await?; let project = jobset.project().await?; - let build = self.build().await?; Ok(json!({ - "build": build.build_hash, "data": SETTINGS.json, + "drv": self.job_build_drv, "evaluation": evaluation.evaluation_num, "flake": jobset.jobset_flake, "flake_locked": evaluation.evaluation_flake_locked, "job": self.job_name, "jobset": jobset.jobset_name, - "out": build.build_out, + "out": self.job_build_out, "project": project.project_name, - "status": build.build_status, + "status": 
build_status, "system": self.job_system, })) } pub async fn run(self) -> () { + use crate::time::now; let id = self.job_id; + let drv = nix::DrvPath::new(&self.job_build_drv); - let handle = self.handle().await.unwrap(); // TODO - let handle_bis = handle.clone(); + // FIXME? + let handle_1 = self.handle().await.unwrap(); + let handle_2 = handle_1.clone(); + let handle_3 = handle_1.clone(); + let handle_4 = handle_1.clone(); + let handle_5 = handle_1.clone(); + let job_1 = self; + let job_2 = job_1.clone(); + + let task_pre = async move { + let mut conn = connection().await; + let _ = diesel::update(jobs.find(id)) + .set(job_pre_time_started.eq(now())) + .execute(&mut *conn); + drop(conn); - let task = async move { // abort if actions are not defined - let evaluation = self.evaluation().await?; + let evaluation = job_1.evaluation().await?; let path = match &evaluation.evaluation_actions_path { None => return Ok(()), Some(path) => path, @@ -111,78 +128,118 @@ impl Job { let jobset = evaluation.jobset().await?; let project = jobset.project().await?; - { - // run action `begin` - let mut conn = connection().await; - let _ = diesel::update(jobs.find(id)) - .set(job_status.eq("begin")) - .execute(&mut *conn); - drop(conn); - - log_event(Event::JobUpdated(handle_bis.clone())); - - let input = self.mk_input().await?; - - let log = if Path::new(&format!("{}/begin", path)).exists() { - let (_, log) = actions::run( - &project.project_key, - &format!("{}/begin", path), - &format!("{}/secrets", path), - &input, - ) - .await?; - log - } else { - serde_json::to_string_pretty(&input).unwrap() // TODO - }; - - // save the log - let _ = Log::new(handles::Log::JobBegin(handle_bis.clone()), log).await?; - } - - // wait for build + let input = job_1.mk_input(&"pending".to_string()).await?; + + let log = if Path::new(&format!("{}/pre", path)).exists() { + let (_, log) = actions::run( + &project.project_key, + &format!("{}/pre", path), + &format!("{}/secrets", path), + &input, + ) + 
.await?; + log + } else { + serde_json::to_string_pretty(&input).unwrap() // TODO + }; + + // save the log + let _ = Log::new(handles::Log::JobPre(handle_1.clone()), log).await?; + + Ok::<(), Error>(()) + }; + let finish_pre = move |r| async move { + let status = match r { + Some(Ok(())) => "success", + Some(Err(_)) => "error", // TODO: log error + None => "canceled", + }; + let mut conn = connection().await; + let _ = diesel::update(jobs.find(id)) + .set((job_pre_status.eq(status), job_pre_time_finished.eq(now()))) + .execute(&mut *conn); + drop(conn); + log_event(Event::JobUpdated(handle_2)); + }; + JOBS_PRE.run(id, task_pre, finish_pre).await; + + // FIXME: + // if two jobs are running the same build, + // canceling the one that has the lock on the build will + // start the build over + let (sender, receiver) = tokio::sync::oneshot::channel::(); + let task_build = async move { + let mut conn = connection().await; + let _ = diesel::update(jobs.find(id)) + .set(job_build_time_started.eq(now())) + .execute(&mut *conn); + drop(conn); + nix::build(&drv).await?; + Ok::<(), Error>(()) + }; + let finish_build = move |r| async move { + let status = match r { + Some(Ok(())) => "success", + Some(Err(_)) => "error", // TODO: log error + None => "canceled", + }; + sender.send(status.to_string()).unwrap_or_else(|_| panic!()); let mut conn = connection().await; let _ = diesel::update(jobs.find(id)) - .set(job_status.eq("waiting")) + .set(( + job_build_status.eq(status), + job_build_time_finished.eq(now()), + )) .execute(&mut *conn); drop(conn); + log_event(Event::JobUpdated(handle_3)); + }; + JOBS_BUILD.run(id, task_build, finish_build).await; - log_event(Event::JobUpdated(handle_bis.clone())); + let task_post = async move { + // wait for `pre` to finish + JOBS_PRE.wait(&id).await; + // wait for the build to finish + JOBS_BUILD.wait(&id).await; + let build_status = receiver.await.unwrap_or_else(|_| panic!()); - BUILDS.wait(&self.job_build).await; + let mut conn = 
connection().await; + let _ = diesel::update(jobs.find(id)) + .set(job_post_time_started.eq(now())) + .execute(&mut *conn); + drop(conn); - { - // run action `end` - let mut conn = connection().await; - let _ = diesel::update(jobs.find(id)) - .set(job_status.eq("end")) - .execute(&mut *conn); - drop(conn); + // abort if actions are not defined + let evaluation = job_2.evaluation().await?; + let path = match &evaluation.evaluation_actions_path { + None => return Ok(()), + Some(path) => path, + }; - log_event(Event::JobUpdated(handle_bis.clone())); + let jobset = evaluation.jobset().await?; + let project = jobset.project().await?; - let input = self.mk_input().await?; + let input = job_2.mk_input(&build_status).await?; - let log = if Path::new(&format!("{}/end", path)).exists() { - let (_, log) = actions::run( - &project.project_key, - &format!("{}/end", path), - &format!("{}/secrets", path), - &input, - ) - .await?; - log - } else { - serde_json::to_string_pretty(&input).unwrap() // TODO - }; + let log = if Path::new(&format!("{}/post", path)).exists() { + let (_, log) = actions::run( + &project.project_key, + &format!("{}/post", path), + &format!("{}/secrets", path), + &input, + ) + .await?; + log + } else { + serde_json::to_string_pretty(&input).unwrap() // TODO + }; - // save the log - let _ = Log::new(handles::Log::JobEnd(handle_bis), log).await?; - } + // save the log + let _ = Log::new(handles::Log::JobPost(handle_4), log).await?; Ok::<(), Error>(()) }; - let f = move |r| async move { + let finish_post = move |r| async move { let status = match r { Some(Ok(())) => "success", Some(Err(_)) => "error", // TODO: log error @@ -190,11 +247,11 @@ impl Job { }; let mut conn = connection().await; let _ = diesel::update(jobs.find(id)) - .set(job_status.eq(status)) + .set((job_post_status.eq(status), job_post_time_finished.eq(now()))) .execute(&mut *conn); drop(conn); - log_event(Event::JobUpdated(handle)); + log_event(Event::JobUpdated(handle_5)); }; - JOBS.run(id, 
task, f).await; + JOBS_POST.run(id, task_post, finish_post).await; } } diff --git a/typhon/src/jobsets.rs b/typhon/src/jobsets.rs index 0eef1082..02d5ca0e 100644 --- a/typhon/src/jobsets.rs +++ b/typhon/src/jobsets.rs @@ -6,7 +6,6 @@ use crate::nix; use crate::schema::evaluations::dsl::*; use crate::schema::jobsets::dsl::*; use crate::schema::projects::dsl::*; -use crate::time; use crate::{handles, responses}; use crate::{log_event, Event}; @@ -41,11 +40,12 @@ impl Jobset { } let n = old_evaluations.len() as i32 + 1; let status = "pending".to_string(); - let time = time::timestamp(); + let time = crate::time::now(); let new_evaluation = NewEvaluation { evaluation_actions_path: project.project_actions_path.as_ref().map(|s| s.as_str()), evaluation_flake_locked: &flake_locked, evaluation_jobset: self.jobset_id, + evaluation_legacy: false, // TODO (refactor) evaluation_num: n, evaluation_status: &status, evaluation_time_created: time, diff --git a/typhon/src/lib.rs b/typhon/src/lib.rs index c2143712..f90513eb 100644 --- a/typhon/src/lib.rs +++ b/typhon/src/lib.rs @@ -1,12 +1,11 @@ mod actions; -mod builds; mod error; mod evaluations; mod gcroots; mod jobs; mod jobsets; mod models; -mod nix; +pub mod nix; mod projects; mod schema; mod time; @@ -99,8 +98,9 @@ pub static SETTINGS: Lazy = Lazy::new(|| { } }); pub static EVALUATIONS: Lazy> = Lazy::new(tasks::Tasks::new); -pub static BUILDS: Lazy> = Lazy::new(tasks::Tasks::new); -pub static JOBS: Lazy> = Lazy::new(tasks::Tasks::new); +pub static JOBS_PRE: Lazy> = Lazy::new(tasks::Tasks::new); +pub static JOBS_BUILD: Lazy> = Lazy::new(tasks::Tasks::new); +pub static JOBS_POST: Lazy> = Lazy::new(tasks::Tasks::new); pub static CONNECTION: Lazy = Lazy::new(|| { use diesel::Connection as _; let database_url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set"); @@ -160,10 +160,8 @@ pub fn authorize_request(user: &User, req: &requests::Request) -> bool { | requests::Request::Evaluation(_, requests::Evaluation::Info) | 
requests::Request::Evaluation(_, requests::Evaluation::Log) | requests::Request::Job(_, requests::Job::Info) - | requests::Request::Job(_, requests::Job::LogBegin) - | requests::Request::Job(_, requests::Job::LogEnd) - | requests::Request::Build(_, requests::Build::Info) - | requests::Request::Build(_, requests::Build::NixLog) + | requests::Request::Job(_, requests::Job::LogPre) + | requests::Request::Job(_, requests::Job::LogPost) | requests::Request::Login(_) => true, _ => user.is_admin(), } @@ -237,28 +235,17 @@ pub async fn handle_request_aux(user: &User, req: &requests::Request) -> Result< job.cancel().await?; Response::Ok } - requests::Job::Info => Response::JobInfo(job.info().await?), - requests::Job::LogBegin => { - let log = Log::get(handles::Log::JobBegin(job_handle.clone())).await?; + requests::Job::Info => Response::JobInfo(job.info()), + requests::Job::LogPre => { + let log = Log::get(handles::Log::JobPre(job_handle.clone())).await?; Response::Log(log.log_stderr) } - requests::Job::LogEnd => { - let log = Log::get(handles::Log::JobEnd(job_handle.clone())).await?; + requests::Job::LogPost => { + let log = Log::get(handles::Log::JobPost(job_handle.clone())).await?; Response::Log(log.log_stderr) } } } - requests::Request::Build(build_handle, req) => { - let build = Build::get(&build_handle).await?; - match req { - requests::Build::Cancel => { - build.cancel().await?; - Response::Ok - } - requests::Build::Info => Response::BuildInfo(build.info()?), - requests::Build::NixLog => Response::Log(build.nixlog().await?), - } - } requests::Request::Login(password) => { let hash = digest(password.as_bytes()); if hash == SETTINGS.hashed_password { diff --git a/typhon/src/logs.rs b/typhon/src/logs.rs index 51642a48..2244c960 100644 --- a/typhon/src/logs.rs +++ b/typhon/src/logs.rs @@ -8,8 +8,8 @@ use typhon_types::*; fn get_log_type(log: &handles::Log) -> &'static str { match log { handles::Log::Evaluation(_) => "evaluation", - handles::Log::JobBegin(_) => 
"job_begin", - handles::Log::JobEnd(_) => "job_end", + handles::Log::JobPre(_) => "job_pre", + handles::Log::JobPost(_) => "job_post", } } @@ -27,7 +27,7 @@ impl Log { let evaluation = Evaluation::get(&h).await?; new_log.log_evaluation = Some(evaluation.evaluation_id); } - handles::Log::JobBegin(h) | handles::Log::JobEnd(h) => { + handles::Log::JobPre(h) | handles::Log::JobPost(h) => { let job = Job::get(&h).await?; new_log.log_job = Some(job.job_id); } @@ -48,7 +48,7 @@ impl Log { req.filter(log_evaluation.eq(Some(evaluation.evaluation_id))) .first::(&mut *conn) } - handles::Log::JobBegin(h) | handles::Log::JobEnd(h) => { + handles::Log::JobPre(h) | handles::Log::JobPost(h) => { let job = Job::get(&h).await?; let mut conn = connection().await; req.filter(log_job.eq(Some(job.job_id))) diff --git a/typhon/src/models.rs b/typhon/src/models.rs index bb264e39..77eaae90 100644 --- a/typhon/src/models.rs +++ b/typhon/src/models.rs @@ -1,4 +1,4 @@ -use crate::schema::{builds, evaluations, jobs, jobsets, logs, projects}; +use crate::schema::{evaluations, jobs, jobsets, logs, projects}; use diesel::prelude::*; #[derive(Queryable, Clone)] @@ -44,9 +44,11 @@ pub struct Evaluation { pub evaluation_actions_path: Option, pub evaluation_flake_locked: String, pub evaluation_jobset: i32, + pub evaluation_legacy: bool, pub evaluation_num: i32, pub evaluation_status: String, pub evaluation_time_created: i64, + pub evaluation_time_finished: Option, } #[derive(Insertable)] @@ -55,49 +57,53 @@ pub struct NewEvaluation<'a> { pub evaluation_actions_path: Option<&'a str>, pub evaluation_flake_locked: &'a str, pub evaluation_jobset: i32, + pub evaluation_legacy: bool, pub evaluation_num: i32, pub evaluation_status: &'a str, pub evaluation_time_created: i64, + // pub evaluation_time_finished: Option, } #[derive(Queryable, Clone)] pub struct Job { pub job_id: i32, - pub job_build: i32, + pub job_build_drv: String, + pub job_build_out: String, + pub job_build_status: String, + pub 
job_build_time_finished: Option, + pub job_build_time_started: Option, pub job_dist: bool, pub job_evaluation: i32, pub job_name: String, - pub job_status: String, + pub job_post_status: String, + pub job_post_time_finished: Option, + pub job_post_time_started: Option, + pub job_pre_status: String, + pub job_pre_time_finished: Option, + pub job_pre_time_started: Option, pub job_system: String, + pub job_time_created: i64, } #[derive(Insertable)] #[diesel(table_name = jobs)] pub struct NewJob<'a> { - pub job_build: i32, + pub job_build_drv: &'a str, + pub job_build_out: &'a str, + pub job_build_status: &'a str, + //pub job_build_time_started: Option, + //pub job_build_time_finished: Option, pub job_dist: bool, pub job_evaluation: i32, pub job_name: &'a str, - pub job_status: &'a str, + pub job_post_status: &'a str, + //pub job_post_time_finished: Option, + //pub job_post_time_started: Option, + pub job_pre_status: &'a str, + //pub job_pre_time_finished: Option, + //pub job_pre_time_started: Option, pub job_system: &'a str, -} - -#[derive(Queryable, Clone)] -pub struct Build { - pub build_id: i32, - pub build_drv: String, - pub build_hash: String, - pub build_out: String, - pub build_status: String, -} - -#[derive(Insertable)] -#[diesel(table_name = builds)] -pub struct NewBuild<'a> { - pub build_drv: &'a str, - pub build_hash: &'a str, - pub build_out: &'a str, - pub build_status: &'a str, + pub job_time_created: i64, } #[derive(Queryable, Clone)] diff --git a/typhon/src/nix.rs b/typhon/src/nix.rs index a5112d01..67f93fee 100644 --- a/typhon/src/nix.rs +++ b/typhon/src/nix.rs @@ -313,16 +313,31 @@ pub fn current_system() -> String { } pub async fn lock(flake_url: &String) -> Result { - let output = Command::nix(["flake", "metadata", "--refresh", "--json"]) - .arg(flake_url.clone()) - .sync_stdout() - .await?; - Ok( - serde_json::from_str::(&output).expect(JSON_PARSE_ERROR)["url"] - .as_str() - .expect(JSON_PARSE_ERROR) - .into(), - ) + let output = Command::nix([ + 
"flake", + "lock", + "--output-lock-file", + "/dev/stdout", + "--override-input", + "input", + flake_url, + env!("TYPHON_FLAKE"), + ]) + .sync_stdout() + .await?; + let locked_info = &serde_json::from_str::(&output).unwrap()["nodes"]["input"]["locked"]; + let output = Command::nix([ + "eval", + "--raw", + "--expr", + &format!( + "builtins.flakeRefToString (builtins.fromJSON ''{}'')", + locked_info + ), + ]) + .sync_stdout() + .await?; + Ok(output) } pub async fn log(drv: String) -> Result { diff --git a/typhon/src/schema.rs b/typhon/src/schema.rs index ba8bafe5..69e2907e 100644 --- a/typhon/src/schema.rs +++ b/typhon/src/schema.rs @@ -1,36 +1,38 @@ // @generated automatically by Diesel CLI. -diesel::table! { - builds (build_id) { - build_id -> Integer, - build_drv -> Text, - build_hash -> Text, - build_out -> Text, - build_status -> Text, - } -} - diesel::table! { evaluations (evaluation_id) { evaluation_id -> Integer, evaluation_actions_path -> Nullable, evaluation_flake_locked -> Text, evaluation_jobset -> Integer, + evaluation_legacy -> Bool, evaluation_num -> Integer, evaluation_status -> Text, evaluation_time_created -> BigInt, + evaluation_time_finished -> Nullable, } } diesel::table! { jobs (job_id) { job_id -> Integer, - job_build -> Integer, + job_build_drv -> Text, + job_build_out -> Text, + job_build_status -> Text, + job_build_time_finished -> Nullable, + job_build_time_started -> Nullable, job_dist -> Bool, job_evaluation -> Integer, job_name -> Text, - job_status -> Text, + job_post_status -> Text, + job_post_time_finished -> Nullable, + job_post_time_started -> Nullable, + job_pre_status -> Text, + job_pre_time_finished -> Nullable, + job_pre_time_started -> Nullable, job_system -> Text, + job_time_created -> BigInt, } } @@ -68,10 +70,9 @@ diesel::table! 
{ } diesel::joinable!(evaluations -> jobsets (evaluation_jobset)); -diesel::joinable!(jobs -> builds (job_build)); diesel::joinable!(jobs -> evaluations (job_evaluation)); diesel::joinable!(jobsets -> projects (jobset_project)); diesel::joinable!(logs -> evaluations (log_evaluation)); diesel::joinable!(logs -> jobs (log_job)); -diesel::allow_tables_to_appear_in_same_query!(builds, evaluations, jobs, jobsets, logs, projects,); +diesel::allow_tables_to_appear_in_same_query!(evaluations, jobs, jobsets, logs, projects,); diff --git a/typhon/src/time.rs b/typhon/src/time.rs index c7d65334..fa51b798 100644 --- a/typhon/src/time.rs +++ b/typhon/src/time.rs @@ -1,6 +1,6 @@ use std::time::{SystemTime, UNIX_EPOCH}; -pub fn timestamp() -> i64 { +pub fn now() -> i64 { SystemTime::now() .duration_since(UNIX_EPOCH) .expect("Time went backwards")