From 4b97c24d1a130db1c8dcdbf044a5e58513279d33 Mon Sep 17 00:00:00 2001 From: seliayeu Date: Fri, 3 Nov 2023 20:17:05 -0600 Subject: [PATCH 01/11] add console fetching backend endpoint --- core/src/events.rs | 3 +- core/src/handlers/console.rs | 53 ++++++++++++++++++++++++++++++++++++ core/src/handlers/mod.rs | 1 + core/src/lib.rs | 7 +++-- core/src/types.rs | 7 +++++ 5 files changed, 68 insertions(+), 3 deletions(-) create mode 100644 core/src/handlers/console.rs diff --git a/core/src/events.rs b/core/src/events.rs index 4ea0f634..5f2532a3 100644 --- a/core/src/events.rs +++ b/core/src/events.rs @@ -10,7 +10,7 @@ use crate::{ macro_executor::MacroPID, output_types::ClientEvent, traits::{t_macro::ExitStatus, t_player::Player, t_server::State, InstanceInfo}, - types::{InstanceUuid, Snowflake, TimeRange}, + types::{InstanceUuid, Snowflake, TimeRange, EventCount}, }; pub trait EventFilter { @@ -28,6 +28,7 @@ pub struct EventQuery { pub event_instance_ids: Option>, pub bearer_token: Option, pub time_range: Option, + pub event_count: Option, } impl EventQuery { diff --git a/core/src/handlers/console.rs b/core/src/handlers/console.rs new file mode 100644 index 00000000..c3d26beb --- /dev/null +++ b/core/src/handlers/console.rs @@ -0,0 +1,53 @@ +use axum::{extract::{Path, Query}, routing::{put, get}, Json, Router}; +use axum_macros::debug_handler; +use serde::{Deserialize, Serialize}; +use crate::AppState; +use crate::error::Error; +use ts_rs::TS; + +#[derive(Debug, Serialize, Deserialize, TS)] +#[ts(export)] +pub struct ConsoleQuery { + instance_uuid : String, + start_snowflake_id : i64, + count : i64, +} + +#[derive(Debug, Serialize, Deserialize, TS)] +pub struct ConsoleQueryParams { + start_snowflake_id : i64, + count : i64, +} + +#[derive(Debug, Serialize, Deserialize, TS)] +#[ts(export)] +pub struct ConsoleEvent { + timestamp: i64, + snowflake: i64, + detail: String, + uuid: String, + name: String, + message: String, +} + +async fn get_console_messages( + axum::extract::State(state): axum::extract::State, + Path(uuid): Path, + Query(query_params): Query, +) -> Result>, Error> { + let console_query = ConsoleQuery { + instance_uuid: uuid, + start_snowflake_id: query_params.start_snowflake_id, + count: query_params.count, + }; + + + return Ok(Json(vec![])) +} + + +pub fn get_console_routes(state: AppState) -> Router { + Router::new() + .route("/console/:uuid", get(get_console_messages)) + .with_state(state) +} diff --git a/core/src/handlers/mod.rs b/core/src/handlers/mod.rs index b8efeac8..691b3a1b 100644 --- a/core/src/handlers/mod.rs +++ b/core/src/handlers/mod.rs @@ -2,6 +2,7 @@ // pub mod instance; // pub mod users; pub mod checks; +pub mod console; pub mod core_info; pub mod events; pub mod gateway; diff --git a/core/src/lib.rs b/core/src/lib.rs index c676184c..0bcad879 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -1,6 +1,7 @@ #![allow(clippy::comparison_chain, clippy::type_complexity)] use crate::event_broadcaster::EventBroadcaster; +use crate::handlers::console::get_console_routes; use crate::migration::migrate; use crate::prelude::{ init_app_state, init_paths, lodestone_path, path_to_global_settings, path_to_stores, @@ -43,6 +44,7 @@ use prelude::GameInstance; use reqwest::{header, Method}; use ringbuffer::{AllocRingBuffer, RingBufferWrite}; +use fs3::FileExt; use semver::Version; use sqlx::{sqlite::SqliteConnectOptions, Pool}; use std::{ @@ -68,7 +70,6 @@ use tracing_subscriber::{prelude::__tracing_subscriber_SubscriberExt, EnvFilter} use traits::{t_configurable::TConfigurable, 
t_server::MonitorReport, t_server::TServer}; use types::{DotLodestoneConfig, InstanceUuid}; use uuid::Uuid; -use fs3::FileExt; pub mod auth; pub mod db; @@ -613,6 +614,7 @@ pub async fn run( .merge(get_global_fs_routes(shared_state.clone())) .merge(get_global_settings_routes(shared_state.clone())) .merge(get_gateway_routes(shared_state.clone())) + .merge(get_console_routes(shared_state.clone())) .layer(cors) .layer(trace); let app = Router::new().nest("/api/v1", api_routes); @@ -724,4 +726,5 @@ pub async fn run( guard, shutdown_tx, ) -} \ No newline at end of file +} + diff --git a/core/src/types.rs b/core/src/types.rs index f116894c..8df70227 100644 --- a/core/src/types.rs +++ b/core/src/types.rs @@ -27,6 +27,13 @@ pub struct TimeRange { pub end: i64, } +#[derive(Deserialize, Clone, Debug, TS)] +#[ts(export)] +pub struct EventCount { + pub start: i64, + pub count: i64, +} + impl From for String { fn from(snowflake: Snowflake) -> Self { snowflake.to_string() From 1892d419873a12ecb689ac1546b71eb7519e27bd Mon Sep 17 00:00:00 2001 From: seliayeu Date: Fri, 10 Nov 2023 23:05:26 -0700 Subject: [PATCH 02/11] work on backend pagination --- core/bindings/ConsoleQueryParams.ts | 3 + core/src/db/read.rs | 60 ++++++++++++++++- core/src/events.rs | 11 +-- core/src/handlers/console.rs | 67 ++++++++++--------- core/src/lib.rs | 1 - core/test.db | Bin 12288 -> 12288 bytes dashboard/src/bindings/ConsoleQueryParams.ts | 3 + dashboard/src/data/ConsoleEvent.ts | 56 ++++++++++++++++ dashboard/src/data/ConsoleStream.ts | 51 +------------- dashboard/src/utils/apis.ts | 21 +++++- 10 files changed, 178 insertions(+), 95 deletions(-) create mode 100644 core/bindings/ConsoleQueryParams.ts create mode 100644 dashboard/src/bindings/ConsoleQueryParams.ts create mode 100644 dashboard/src/data/ConsoleEvent.ts diff --git a/core/bindings/ConsoleQueryParams.ts b/core/bindings/ConsoleQueryParams.ts new file mode 100644 index 00000000..968114d8 --- /dev/null +++ b/core/bindings/ConsoleQueryParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export interface ConsoleQueryParams { start_snowflake_id: bigint, count: number, } \ No newline at end of file diff --git a/core/src/db/read.rs b/core/src/db/read.rs index 4b786280..f7b18268 100644 --- a/core/src/db/read.rs +++ b/core/src/db/read.rs @@ -1,9 +1,11 @@ use crate::{ - error::Error, output_types::ClientEvent, - prelude::LODESTONE_EPOCH_MIL, events::EventQuery, + error::{Error, ErrorKind}, + events::EventQuery, + output_types::ClientEvent, + prelude::LODESTONE_EPOCH_MIL, }; -use color_eyre::eyre::Context; +use color_eyre::eyre::{eyre, Context}; use sqlx::sqlite::SqlitePool; use tracing::error; @@ -69,6 +71,58 @@ FROM ClientEvents"# Ok(filtered) } +pub async fn search_events_limited( + pool: &SqlitePool, + event_query: EventQuery, + limit: u32, +) -> Result, Error> { + // TODO do not return sqlx::Error + let mut connection = pool + .acquire() + .await + .context("Failed to aquire connection to db")?; + let parsed_client_events = if let Some(time_range) = &event_query.time_range { + let start = time_range.start; + // let end = (time_range.end + 1 - LODESTONE_EPOCH_MIL.with(|p| *p)) << 22; + let rows = sqlx::query!( + r#" +SELECT +event_value, details, snowflake, level, caused_by_user_id, instance_id +FROM ClientEvents +WHERE snowflake >= ($1) +LIMIT $2"#, + start, + limit, + ) // TODO bit shift + .fetch_all(&mut connection) + .await + .context("Failed to fetch events")?; + + let mut parsed_client_events: Vec = Vec::new(); + for row in rows { + if let Ok(client_event) = serde_json::from_str(&row.event_value) { + parsed_client_events.push(client_event); + } else { + error!("Failed to parse client event: {}", row.event_value); + } + } + parsed_client_events + } else { + return Err(Error { + kind: ErrorKind::BadRequest, + source: eyre!("Queries without time_range are unsupported"), + }); + }; + + println!("{:?}", parsed_client_events); + + let filtered = parsed_client_events + .into_iter() + .filter(|client_event| event_query.filter(client_event)) + .collect(); + println!("{:?}", filtered); + Ok(filtered) +} #[cfg(test)] #[allow(unused_imports)] mod tests { diff --git a/core/src/events.rs b/core/src/events.rs index 5f2532a3..9b7725fd 100644 --- a/core/src/events.rs +++ b/core/src/events.rs @@ -10,7 +10,7 @@ use crate::{ macro_executor::MacroPID, output_types::ClientEvent, traits::{t_macro::ExitStatus, t_player::Player, t_server::State, InstanceInfo}, - types::{InstanceUuid, Snowflake, TimeRange, EventCount}, + types::{EventCount, InstanceUuid, Snowflake, TimeRange}, }; pub trait EventFilter { @@ -28,7 +28,6 @@ pub struct EventQuery { pub event_instance_ids: Option>, pub bearer_token: Option, pub time_range: Option, - pub event_count: Option, } impl EventQuery { @@ -218,12 +217,8 @@ pub enum ProgressionEndValue { #[ts(export)] #[serde(tag = "type")] pub enum ProgressionStartValue { - InstanceCreation { - instance_uuid: InstanceUuid, - }, - InstanceDelete { - instance_uuid: InstanceUuid, - }, + InstanceCreation { instance_uuid: InstanceUuid }, + InstanceDelete { instance_uuid: InstanceUuid }, } // the backend will keep exactly 1 copy of ProgressionStart, and 1 copy of ProgressionUpdate OR ProgressionEnd diff --git a/core/src/handlers/console.rs b/core/src/handlers/console.rs index c3d26beb..45e08a4b 100644 --- a/core/src/handlers/console.rs +++ b/core/src/handlers/console.rs @@ -1,53 +1,56 @@ -use axum::{extract::{Path, Query}, routing::{put, get}, Json, Router}; -use axum_macros::debug_handler; -use serde::{Deserialize, Serialize}; -use crate::AppState; use crate::error::Error; +use 
crate::{ + db::read::search_events_limited, + events::{EventQuery, EventType, InstanceEventKind}, + output_types::ClientEvent, + types::{InstanceUuid, TimeRange}, + AppState, +}; +use axum::{ + extract::{Path, Query}, + routing::get, + Json, Router, +}; +use serde::{Deserialize, Serialize}; use ts_rs::TS; #[derive(Debug, Serialize, Deserialize, TS)] #[ts(export)] -pub struct ConsoleQuery { - instance_uuid : String, - start_snowflake_id : i64, - count : i64, -} - -#[derive(Debug, Serialize, Deserialize, TS)] pub struct ConsoleQueryParams { - start_snowflake_id : i64, - count : i64, -} - -#[derive(Debug, Serialize, Deserialize, TS)] -#[ts(export)] -pub struct ConsoleEvent { - timestamp: i64, - snowflake: i64, - detail: String, - uuid: String, - name: String, - message: String, + start_snowflake_id: i64, + count: u32, } async fn get_console_messages( axum::extract::State(state): axum::extract::State, Path(uuid): Path, Query(query_params): Query, -) -> Result>, Error> { - let console_query = ConsoleQuery { - instance_uuid: uuid, - start_snowflake_id: query_params.start_snowflake_id, - count: query_params.count, +) -> Result>, Error> { + let event_instance_ids = vec![InstanceUuid::from(uuid)]; + let time_range = TimeRange { + start: query_params.start_snowflake_id, + end: i64::MAX, }; + let event_query = EventQuery { + event_levels: None, + event_types: Some(vec![EventType::InstanceEvent]), + instance_event_types: Some(vec![InstanceEventKind::InstanceOutput]), + user_event_types: None, + event_user_ids: None, + event_instance_ids: Some(event_instance_ids), + bearer_token: None, + time_range: Some(time_range), + }; - return Ok(Json(vec![])) -} + let client_events = + dbg!(search_events_limited(&state.sqlite_pool, event_query, query_params.count).await)?; + return Ok(Json(client_events)); +} pub fn get_console_routes(state: AppState) -> Router { Router::new() - .route("/console/:uuid", get(get_console_messages)) + .route("/instance/:uuid/console", get(get_console_messages)) .with_state(state) } diff --git a/core/src/lib.rs b/core/src/lib.rs index 0bcad879..4e02e93b 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -727,4 +727,3 @@ pub async fn run( shutdown_tx, ) } - diff --git a/core/test.db b/core/test.db index e69a8a1f58737600e62a92e4bf861a230aa7bd91..c79f935a94a2a2c89e8bfd20d6c94e7c7aa085e5 100644 GIT binary patch delta 125 zcmZojXh@hKEy%~fz`zW|Fu*-g$C#0CW5N=CHU(*+y_ delta 127 zcmZojXh@hKEy%*az`zW|Fu*)f$C#02W5N=CHb#DF27c+yf(nBC?2So`oD9u^lNIIL z1ucz?EX<7zO)QKJ3@puzEleiwlD8Ihzqfm#lK_KfURpjc&@e{+_YC~+ftnxkPYhrY U { + const event_inner: InstanceEvent = match( + event.event_inner, + otherwise( + { + InstanceEvent: (instanceEvent) => instanceEvent, + }, + () => { + throw new Error('Expected InstanceEvent'); + } + ) + ); + + const message = match( + event_inner.instance_event_inner, + otherwise( + { + InstanceOutput: (instanceOutput) => instanceOutput.message, + }, + () => { + throw new Error('Expected InstanceOutput'); + } + ) + ); + + console.log(event.snowflake); + getConsoleEvents(event_inner.instance_uuid, { start_snowflake_id: event.snowflake as unknown as bigint, count: 1}) + .then((e) => console.log(e)); + + return { + timestamp: getSnowflakeTimestamp(event.snowflake), + snowflake: event.snowflake, + detail: event.details, + uuid: event_inner.instance_uuid, + name: event_inner.instance_name, + message: message, + }; +}; diff --git a/dashboard/src/data/ConsoleStream.ts b/dashboard/src/data/ConsoleStream.ts index 36c123f1..8ab2bb9b 100644 --- a/dashboard/src/data/ConsoleStream.ts +++ 
b/dashboard/src/data/ConsoleStream.ts @@ -1,11 +1,10 @@ -import { getSnowflakeTimestamp, LODESTONE_PORT } from './../utils/util'; -import { InstanceEvent } from './../bindings/InstanceEvent'; -import { match, otherwise } from 'variant'; +import { LODESTONE_PORT } from './../utils/util'; import { useUserAuthorized } from 'data/UserInfo'; import axios from 'axios'; import { useContext, useEffect, useRef, useState } from 'react'; import { LodestoneContext } from './LodestoneContext'; import { ClientEvent } from 'bindings/ClientEvent'; +import { ConsoleEvent, toConsoleEvent} from 'data/ConsoleEvent'; export type ConsoleStreamStatus = | 'no-permission' @@ -16,52 +15,6 @@ export type ConsoleStreamStatus = | 'closed' | 'error'; -// simplified version of a ClientEvent with just InstanceOutput -export type ConsoleEvent = { - timestamp: number; - snowflake: string; - detail: string; - uuid: string; - name: string; - message: string; -}; - -// function to convert a ClientEvent to a ConsoleEvent -const toConsoleEvent = (event: ClientEvent): ConsoleEvent => { - const event_inner: InstanceEvent = match( - event.event_inner, - otherwise( - { - InstanceEvent: (instanceEvent) => instanceEvent, - }, - () => { - throw new Error('Expected InstanceEvent'); - } - ) - ); - - const message = match( - event_inner.instance_event_inner, - otherwise( - { - InstanceOutput: (instanceOutput) => instanceOutput.message, - }, - () => { - throw new Error('Expected InstanceOutput'); - } - ) - ); - - return { - timestamp: getSnowflakeTimestamp(event.snowflake), - snowflake: event.snowflake, - detail: event.details, - uuid: event_inner.instance_uuid, - name: event_inner.instance_name, - message: message, - }; -}; - /** * Does two things: * 1. calls useEffect to fetch the console stream diff --git a/dashboard/src/utils/apis.ts b/dashboard/src/utils/apis.ts index a68c2f77..c38fec76 100644 --- a/dashboard/src/utils/apis.ts +++ b/dashboard/src/utils/apis.ts @@ -21,6 +21,9 @@ import { CopyInstanceFileRequest } from 'bindings/CopyInstanceFileRequest'; import { ZipRequest } from 'bindings/ZipRequest'; import { TaskEntry } from 'bindings/TaskEntry'; import { HistoryEntry } from 'bindings/HistoryEntry'; +import { ConsoleQueryParams } from 'bindings/ConsoleQueryParams'; +import { ClientEvent } from 'bindings/ClientEvent'; +import { ConsoleEvent, toConsoleEvent } from 'data/ConsoleEvent'; /*********************** * Start Files API @@ -280,8 +283,7 @@ export const zipInstanceFiles = async ( return; } else { toast.info( - `Zipping ${zipRequest.target_relative_paths.length} item${ - zipRequest.target_relative_paths.length > 1 ? 's' : '' + `Zipping ${zipRequest.target_relative_paths.length} item${zipRequest.target_relative_paths.length > 1 ? 
's' : '' }...` ); } @@ -491,3 +493,18 @@ export const killTask = async ( /*********************** * End Tasks/Macro API ***********************/ + +export const getConsoleEvents = async (uuid: string, query_params: ConsoleQueryParams) => { + const clientEventList = await axiosWrapper<[ClientEvent]>({ + method: 'get', + url: `/instance/${uuid}/console?start_snowflake_id=${query_params.start_snowflake_id}&count=${query_params.count}`, + }); + + const consoleEventList = clientEventList.map((clientEvent) => toConsoleEvent(clientEvent)); + + return consoleEventList; +}; + +/*********************** + * End Console API + ***********************/ From b61c5ba3db100a5649896daa8fb3b810fab4cfeb Mon Sep 17 00:00:00 2001 From: seliayeu Date: Sun, 12 Nov 2023 14:59:44 -0700 Subject: [PATCH 03/11] finish pagination backend (?) --- core/src/db/read.rs | 52 ------------------------------ core/src/events.rs | 2 +- core/src/handlers/console.rs | 49 +++++++++++++++++++++++----- core/src/types.rs | 7 ---- dashboard/src/data/ConsoleEvent.ts | 4 --- 5 files changed, 42 insertions(+), 72 deletions(-) diff --git a/core/src/db/read.rs b/core/src/db/read.rs index f7b18268..eb898bb3 100644 --- a/core/src/db/read.rs +++ b/core/src/db/read.rs @@ -71,58 +71,6 @@ FROM ClientEvents"# Ok(filtered) } -pub async fn search_events_limited( - pool: &SqlitePool, - event_query: EventQuery, - limit: u32, -) -> Result, Error> { - // TODO do not return sqlx::Error - let mut connection = pool - .acquire() - .await - .context("Failed to aquire connection to db")?; - let parsed_client_events = if let Some(time_range) = &event_query.time_range { - let start = time_range.start; - // let end = (time_range.end + 1 - LODESTONE_EPOCH_MIL.with(|p| *p)) << 22; - let rows = sqlx::query!( - r#" -SELECT -event_value, details, snowflake, level, caused_by_user_id, instance_id -FROM ClientEvents -WHERE snowflake >= ($1) -LIMIT $2"#, - start, - limit, - ) // TODO bit shift - .fetch_all(&mut connection) - .await - .context("Failed to fetch events")?; - - let mut parsed_client_events: Vec = Vec::new(); - for row in rows { - if let Ok(client_event) = serde_json::from_str(&row.event_value) { - parsed_client_events.push(client_event); - } else { - error!("Failed to parse client event: {}", row.event_value); - } - } - parsed_client_events - } else { - return Err(Error { - kind: ErrorKind::BadRequest, - source: eyre!("Queries without time_range are unsupported"), - }); - }; - - println!("{:?}", parsed_client_events); - - let filtered = parsed_client_events - .into_iter() - .filter(|client_event| event_query.filter(client_event)) - .collect(); - println!("{:?}", filtered); - Ok(filtered) -} #[cfg(test)] #[allow(unused_imports)] mod tests { diff --git a/core/src/events.rs b/core/src/events.rs index 9b7725fd..58dcdd33 100644 --- a/core/src/events.rs +++ b/core/src/events.rs @@ -10,7 +10,7 @@ use crate::{ macro_executor::MacroPID, output_types::ClientEvent, traits::{t_macro::ExitStatus, t_player::Player, t_server::State, InstanceInfo}, - types::{EventCount, InstanceUuid, Snowflake, TimeRange}, + types::{InstanceUuid, Snowflake, TimeRange}, }; pub trait EventFilter { diff --git a/core/src/handlers/console.rs b/core/src/handlers/console.rs index 45e08a4b..81fb4ec8 100644 --- a/core/src/handlers/console.rs +++ b/core/src/handlers/console.rs @@ -1,7 +1,6 @@ use crate::error::Error; +use crate::events::{EventQuery, EventType, InstanceEventKind}; use crate::{ - db::read::search_events_limited, - events::{EventQuery, EventType, InstanceEventKind}, 
output_types::ClientEvent, types::{InstanceUuid, TimeRange}, AppState, @@ -13,6 +12,9 @@ use axum::{ }; use serde::{Deserialize, Serialize}; use ts_rs::TS; +use color_eyre::eyre::{eyre, Context}; +use sqlx::sqlite::SqlitePool; +use tracing::error; #[derive(Debug, Serialize, Deserialize, TS)] #[ts(export)] @@ -26,27 +28,58 @@ async fn get_console_messages( Path(uuid): Path, Query(query_params): Query, ) -> Result>, Error> { - let event_instance_ids = vec![InstanceUuid::from(uuid)]; let time_range = TimeRange { start: query_params.start_snowflake_id, end: i64::MAX, }; + let pool = &state.sqlite_pool; + + let mut connection = pool + .acquire() + .await + .context("Failed to aquire connection to db")?; + + let rows = sqlx::query!( + r#" +SELECT +event_value, details, snowflake, level, caused_by_user_id, instance_id +FROM ClientEvents +WHERE snowflake >= ($1) +LIMIT $2"#, + query_params.start_snowflake_id, + query_params.count, + ) + .fetch_all(&mut connection) + .await + .context("Failed to fetch events")?; + + let mut parsed_client_events: Vec = Vec::new(); + for row in rows { + if let Ok(client_event) = serde_json::from_str(&row.event_value) { + parsed_client_events.push(client_event); + } else { + error!("Failed to parse client event: {}", row.event_value); + } + } + let event_query = EventQuery { event_levels: None, event_types: Some(vec![EventType::InstanceEvent]), instance_event_types: Some(vec![InstanceEventKind::InstanceOutput]), user_event_types: None, event_user_ids: None, - event_instance_ids: Some(event_instance_ids), + event_instance_ids: Some(vec![InstanceUuid::from(uuid)]), bearer_token: None, - time_range: Some(time_range), + time_range: None, }; - let client_events = - dbg!(search_events_limited(&state.sqlite_pool, event_query, query_params.count).await)?; + let filtered = parsed_client_events + .into_iter() + .filter(|client_event| event_query.filter(client_event)) + .collect(); - return Ok(Json(client_events)); + return Ok(Json(filtered)); } pub fn get_console_routes(state: AppState) -> Router { diff --git a/core/src/types.rs b/core/src/types.rs index 8df70227..f116894c 100644 --- a/core/src/types.rs +++ b/core/src/types.rs @@ -27,13 +27,6 @@ pub struct TimeRange { pub end: i64, } -#[derive(Deserialize, Clone, Debug, TS)] -#[ts(export)] -pub struct EventCount { - pub start: i64, - pub count: i64, -} - impl From for String { fn from(snowflake: Snowflake) -> Self { snowflake.to_string() diff --git a/dashboard/src/data/ConsoleEvent.ts b/dashboard/src/data/ConsoleEvent.ts index 3f7b95e4..848fdc8d 100644 --- a/dashboard/src/data/ConsoleEvent.ts +++ b/dashboard/src/data/ConsoleEvent.ts @@ -41,10 +41,6 @@ export const toConsoleEvent = (event: ClientEvent): ConsoleEvent => { ) ); - console.log(event.snowflake); - getConsoleEvents(event_inner.instance_uuid, { start_snowflake_id: event.snowflake as unknown as bigint, count: 1}) - .then((e) => console.log(e)); - return { timestamp: getSnowflakeTimestamp(event.snowflake), snowflake: event.snowflake, From c9b94ea0350cc0df2dee1467368e17981fabe3d1 Mon Sep 17 00:00:00 2001 From: seliayeu Date: Mon, 13 Nov 2023 12:42:44 -0700 Subject: [PATCH 04/11] add commented example --- dashboard/src/data/ConsoleEvent.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dashboard/src/data/ConsoleEvent.ts b/dashboard/src/data/ConsoleEvent.ts index 848fdc8d..4620ddcf 100644 --- a/dashboard/src/data/ConsoleEvent.ts +++ b/dashboard/src/data/ConsoleEvent.ts @@ -41,6 +41,12 @@ export const toConsoleEvent = (event: ClientEvent): ConsoleEvent => { ) ); + 
// console.log(event.snowflake); + // getConsoleEvents(event_inner.instance_uuid, { start_snowflake_id: event.snowflake as unknown as bigint, count: 5 }) + // .then((e) => console.log(e)); + + + return { timestamp: getSnowflakeTimestamp(event.snowflake), snowflake: event.snowflake, From 597f6525a3b17a620c3e87a21db94f06ba797452 Mon Sep 17 00:00:00 2001 From: seliayeu Date: Mon, 13 Nov 2023 14:02:40 -0700 Subject: [PATCH 05/11] fix endpoint to get pages backwards instead of forwards --- core/src/handlers/console.rs | 32 +++++++++++++++++++++--------- dashboard/src/data/ConsoleEvent.ts | 10 +++++----- 2 files changed, 28 insertions(+), 14 deletions(-) diff --git a/core/src/handlers/console.rs b/core/src/handlers/console.rs index 81fb4ec8..2e028029 100644 --- a/core/src/handlers/console.rs +++ b/core/src/handlers/console.rs @@ -10,11 +10,11 @@ use axum::{ routing::get, Json, Router, }; -use serde::{Deserialize, Serialize}; -use ts_rs::TS; use color_eyre::eyre::{eyre, Context}; +use serde::{Deserialize, Serialize}; use sqlx::sqlite::SqlitePool; use tracing::error; +use ts_rs::TS; #[derive(Debug, Serialize, Deserialize, TS)] #[ts(export)] @@ -39,16 +39,20 @@ async fn get_console_messages( .acquire() .await .context("Failed to aquire connection to db")?; + + let limit_num = &query_params.count * 2 + 10; let rows = sqlx::query!( r#" SELECT event_value, details, snowflake, level, caused_by_user_id, instance_id FROM ClientEvents -WHERE snowflake >= ($1) -LIMIT $2"#, +WHERE snowflake <= ($1) AND event_value IS NOT NULL +ORDER BY snowflake DESC +LIMIT $2 +"#, query_params.start_snowflake_id, - query_params.count, + limit_num, // hacky, but need more since filter ) .fetch_all(&mut connection) .await @@ -56,10 +60,14 @@ LIMIT $2"#, let mut parsed_client_events: Vec = Vec::new(); for row in rows { - if let Ok(client_event) = serde_json::from_str(&row.event_value) { - parsed_client_events.push(client_event); + if let Some(event_value) = &row.event_value { + if let Ok(client_event) = serde_json::from_str(event_value) { + parsed_client_events.push(client_event); + } else { + error!("Failed to parse client event: {}", event_value); + } } else { - error!("Failed to parse client event: {}", row.event_value); + error!("Failed to parse row"); } } @@ -74,11 +82,17 @@ LIMIT $2"#, time_range: None, }; - let filtered = parsed_client_events + let filtered: Vec = parsed_client_events .into_iter() .filter(|client_event| event_query.filter(client_event)) .collect(); + let filtered = if filtered.len() as u32 > query_params.count { + filtered[0..query_params.count as usize].to_vec() + } else { + filtered + }; + return Ok(Json(filtered)); } diff --git a/dashboard/src/data/ConsoleEvent.ts b/dashboard/src/data/ConsoleEvent.ts index 4620ddcf..6f7760e5 100644 --- a/dashboard/src/data/ConsoleEvent.ts +++ b/dashboard/src/data/ConsoleEvent.ts @@ -41,11 +41,11 @@ export const toConsoleEvent = (event: ClientEvent): ConsoleEvent => { ) ); - // console.log(event.snowflake); - // getConsoleEvents(event_inner.instance_uuid, { start_snowflake_id: event.snowflake as unknown as bigint, count: 5 }) - // .then((e) => console.log(e)); - - + if (event.snowflake as unknown as number % 100 == 0) { + console.log(event.snowflake); + getConsoleEvents(event_inner.instance_uuid, { start_snowflake_id: event.snowflake as unknown as bigint, count: 5 }) + .then((e) => console.log(e)); + } return { timestamp: getSnowflakeTimestamp(event.snowflake), From b3ac65a89e35d6d9498d2b7bf1737c3805813d24 Mon Sep 17 00:00:00 2001 From: seliayeu Date: Sun, 19 Nov 
2023 16:50:41 -0700 Subject: [PATCH 06/11] do part of frontend integration --- dashboard/src/components/GameConsole.tsx | 24 +++++++++---------- dashboard/src/data/ConsoleEvent.ts | 6 ----- dashboard/src/data/ConsoleStream.ts | 30 ++++++++++++++++++++---- 3 files changed, 37 insertions(+), 23 deletions(-) diff --git a/dashboard/src/components/GameConsole.tsx b/dashboard/src/components/GameConsole.tsx index d33ae6d8..d4e882d0 100644 --- a/dashboard/src/components/GameConsole.tsx +++ b/dashboard/src/components/GameConsole.tsx @@ -24,7 +24,7 @@ export default function GameConsole() { 'can_access_instance_console', uuid ); - const { consoleLog, consoleStatus } = useConsoleStream(uuid); + const { consoleLog, consoleStatus, fetchConsolePage } = useConsoleStream(uuid, undefined); const [command, setCommand] = useState(''); const { commandHistory, appendCommandHistory } = useContext( CommandHistoryContext @@ -33,9 +33,9 @@ export default function GameConsole() { const listRef = useRef(null); const isAtBottom = listRef.current ? listRef.current.scrollHeight - - listRef.current.scrollTop - - listRef.current.clientHeight < - autoScrollThreshold + listRef.current.scrollTop - + listRef.current.clientHeight < + autoScrollThreshold : false; const oldIsAtBottom = usePrevious(isAtBottom); @@ -129,7 +129,7 @@ export default function GameConsole() { }; return ( -
+
{consoleStatusMessage}} placement="bottom" @@ -139,14 +139,14 @@ export default function GameConsole() { > {!canAccessConsole || consoleStatus === 'no-permission' ? ( @@ -154,26 +154,26 @@ export default function GameConsole() { ) : (
    {consoleLog.map((line) => (
      {line.message}
    ))}
)} -
+
setCommand(e.target.value)} diff --git a/dashboard/src/data/ConsoleEvent.ts b/dashboard/src/data/ConsoleEvent.ts index 6f7760e5..848fdc8d 100644 --- a/dashboard/src/data/ConsoleEvent.ts +++ b/dashboard/src/data/ConsoleEvent.ts @@ -41,12 +41,6 @@ export const toConsoleEvent = (event: ClientEvent): ConsoleEvent => { ) ); - if (event.snowflake as unknown as number % 100 == 0) { - console.log(event.snowflake); - getConsoleEvents(event_inner.instance_uuid, { start_snowflake_id: event.snowflake as unknown as bigint, count: 5 }) - .then((e) => console.log(e)); - } - return { timestamp: getSnowflakeTimestamp(event.snowflake), snowflake: event.snowflake, diff --git a/dashboard/src/data/ConsoleStream.ts b/dashboard/src/data/ConsoleStream.ts index 8ab2bb9b..aa89cae9 100644 --- a/dashboard/src/data/ConsoleStream.ts +++ b/dashboard/src/data/ConsoleStream.ts @@ -4,7 +4,8 @@ import axios from 'axios'; import { useContext, useEffect, useRef, useState } from 'react'; import { LodestoneContext } from './LodestoneContext'; import { ClientEvent } from 'bindings/ClientEvent'; -import { ConsoleEvent, toConsoleEvent} from 'data/ConsoleEvent'; +import { ConsoleEvent, toConsoleEvent } from 'data/ConsoleEvent'; +import { getConsoleEvents } from 'utils/apis'; export type ConsoleStreamStatus = | 'no-permission' @@ -26,10 +27,11 @@ export type ConsoleStreamStatus = * @param uuid the uuid of the instance to subscribe to * @return whatever useQuery returns */ -export const useConsoleStream = (uuid: string) => { +export const useConsoleStream = (uuid: string, logLimit: number | undefined) => { const { core, token } = useContext(LodestoneContext); const { address, port, apiVersion, protocol } = core; const [consoleLog, setConsoleLog] = useState([]); + const [limit, setLimit] = useState(logLimit); const [status, setStatusInner] = useState('loading'); //callbacks should use statusRef.current instead of status const statusRef = useRef('loading'); statusRef.current = status; @@ -55,10 +57,28 @@ export const useConsoleStream = (uuid: string) => { const mergedLog = [...oldLog, ...consoleEvents]; // this is slow ik - return mergedLog.filter( + const filteredLog = mergedLog.filter( (event, index) => mergedLog.findIndex((e) => e.snowflake === event.snowflake) === index ); + + return filteredLog.slice(limit ? -limit : 0); + }); + }; + + const fetchConsolePage = async (snowflake: bigint, count: number) => { + console.log("called with ", snowflake); + console.log(consoleLog[0].snowflake); + const paginatedEvents = await getConsoleEvents(uuid, { start_snowflake_id: snowflake, count: count }) + setLimit(undefined); + setConsoleLog((oldLog) => { + const mergedLog = [...paginatedEvents, ...oldLog]; + const filteredLog = mergedLog.filter( + (event, index) => + mergedLog.findIndex((e) => e.snowflake === event.snowflake) === index + ); + console.log(oldLog.length, filteredLog.length) + return filteredLog.slice(limit ? -limit : 0); }); }; @@ -71,8 +91,7 @@ export const useConsoleStream = (uuid: string) => { try { const websocket = new WebSocket( - `${protocol === 'https' ? 'wss' : 'ws'}://${address}:${ - port ?? LODESTONE_PORT + `${protocol === 'https' ? 'wss' : 'ws'}://${address}:${port ?? 
LODESTONE_PORT }/api/${apiVersion}/instance/${uuid}/console/stream?token=Bearer ${token}` ); @@ -115,5 +134,6 @@ export const useConsoleStream = (uuid: string) => { return { consoleLog, consoleStatus: status, + fetchConsolePage, }; }; From 7f60f904bcd973ed9a0770c8020c36d598659eb3 Mon Sep 17 00:00:00 2001 From: James Huang Date: Sun, 19 Nov 2023 18:52:52 -0500 Subject: [PATCH 07/11] logic for checking scroll direction and position --- dashboard/src/components/GameConsole.tsx | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/dashboard/src/components/GameConsole.tsx b/dashboard/src/components/GameConsole.tsx index d33ae6d8..08f31d54 100644 --- a/dashboard/src/components/GameConsole.tsx +++ b/dashboard/src/components/GameConsole.tsx @@ -6,7 +6,7 @@ import { InstanceContext } from 'data/InstanceContext'; import { CommandHistoryContext } from 'data/CommandHistoryContext'; import { useUserAuthorized } from 'data/UserInfo'; import Tooltip from 'rc-tooltip'; -import { useContext, useEffect } from 'react'; +import React, { useContext, useEffect } from 'react'; import { useRef, useState } from 'react'; import { usePrevious } from 'utils/hooks'; import { DISABLE_AUTOFILL } from 'utils/util'; @@ -29,6 +29,7 @@ export default function GameConsole() { const { commandHistory, appendCommandHistory } = useContext( CommandHistoryContext ); + const [lastScrollPos, setLastScrollPos] = useState(0); const [commandNav, setCommandNav] = useState(commandHistory.length); const listRef = useRef(null); const isAtBottom = listRef.current @@ -162,6 +163,22 @@ export default function GameConsole() {
    { + if (!e.currentTarget || !e.currentTarget.scrollTop) { + return; + } + const prevScrollPos = lastScrollPos; + setLastScrollPos(e.currentTarget.scrollTop) + + const scrollPosDiff = e.currentTarget.scrollTop - prevScrollPos; // should be negative to be considered proper trigger + const triggerThreshhold = e.currentTarget.scrollTop == autoScrollThreshold * 15; + + if (!triggerThreshhold || scrollPosDiff >= 0) { + return; + } + console.log("make a call here!") + + }} > {consoleLog.map((line) => (
  1. Date: Sun, 19 Nov 2023 19:29:27 -0500 Subject: [PATCH 08/11] integrate logic for fetching new logs with scroll check --- dashboard/src/components/GameConsole.tsx | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/dashboard/src/components/GameConsole.tsx b/dashboard/src/components/GameConsole.tsx index e885f5c8..c34a5118 100644 --- a/dashboard/src/components/GameConsole.tsx +++ b/dashboard/src/components/GameConsole.tsx @@ -30,6 +30,8 @@ export default function GameConsole() { CommandHistoryContext ); const [lastScrollPos, setLastScrollPos] = useState(0); + const [additionalLogs, setAdditionaLogs] = useState(0); + const [logsRecentlyIncreased, setLogsRecentlyIncreased] = useState(false); const [commandNav, setCommandNav] = useState(commandHistory.length); const listRef = useRef(null); const isAtBottom = listRef.current @@ -164,19 +166,30 @@ export default function GameConsole() { className="border-gray-faded/30 text-small flex h-0 grow flex-col overflow-y-auto whitespace-pre-wrap break-words rounded-t-lg border-b bg-gray-900 py-3 font-mono font-light tracking-tight text-gray-300" ref={listRef} onScroll={(e: React.SyntheticEvent) => { - if (!e.currentTarget || !e.currentTarget.scrollTop) { + // check that position is close to top + if (!e.currentTarget || !e.currentTarget.scrollTop || logsRecentlyIncreased) { return; } const prevScrollPos = lastScrollPos; setLastScrollPos(e.currentTarget.scrollTop) const scrollPosDiff = e.currentTarget.scrollTop - prevScrollPos; // should be negative to be considered proper trigger - const triggerThreshhold = e.currentTarget.scrollTop == autoScrollThreshold * 15; + const triggerThreshhold = + e.currentTarget.scrollTop >= autoScrollThreshold * 15 && + e.currentTarget.scrollTop <= autoScrollThreshold * 20; // consider range to factor in scroll speed - if (!triggerThreshhold || scrollPosDiff >= 0) { + if (!triggerThreshhold || scrollPosDiff >= 0 || logsRecentlyIncreased) { + //allow new logs to be generated again if user scrolls down first return; } - console.log("make a call here!") + fetchConsolePage(consoleLog[0].snowflake as unknown as bigint, additionalLogs + 40); + setAdditionaLogs(currNumLogs => currNumLogs + 40); + setLogsRecentlyIncreased(true); + + // debounce log generation + setTimeout(() => { + setLogsRecentlyIncreased(false); + }, 1000) }} > From 1460f1ee9398f484711008904bb4fe603b4efa39 Mon Sep 17 00:00:00 2001 From: seliayeu Date: Tue, 21 Nov 2023 20:30:11 -0700 Subject: [PATCH 09/11] fix bug --- dashboard/src/data/ConsoleStream.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dashboard/src/data/ConsoleStream.ts b/dashboard/src/data/ConsoleStream.ts index aa89cae9..c42f3680 100644 --- a/dashboard/src/data/ConsoleStream.ts +++ b/dashboard/src/data/ConsoleStream.ts @@ -72,7 +72,7 @@ export const useConsoleStream = (uuid: string, logLimit: number | undefined) => const paginatedEvents = await getConsoleEvents(uuid, { start_snowflake_id: snowflake, count: count }) setLimit(undefined); setConsoleLog((oldLog) => { - const mergedLog = [...paginatedEvents, ...oldLog]; + const mergedLog = [...paginatedEvents.reverse(), ...oldLog]; const filteredLog = mergedLog.filter( (event, index) => mergedLog.findIndex((e) => e.snowflake === event.snowflake) === index From b42f78fbb1b98382f41a1add475b7082ab056cf0 Mon Sep 17 00:00:00 2001 From: James Huang Date: Thu, 28 Dec 2023 23:35:45 -0800 Subject: [PATCH 10/11] added scroll to bottom on initial component update --- 
dashboard/src/components/GameConsole.tsx | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dashboard/src/components/GameConsole.tsx b/dashboard/src/components/GameConsole.tsx index c34a5118..acc50b6d 100644 --- a/dashboard/src/components/GameConsole.tsx +++ b/dashboard/src/components/GameConsole.tsx @@ -33,6 +33,7 @@ export default function GameConsole() { const [additionalLogs, setAdditionaLogs] = useState(0); const [logsRecentlyIncreased, setLogsRecentlyIncreased] = useState(false); const [commandNav, setCommandNav] = useState(commandHistory.length); + const [initialScroll, setInitialScroll] = useState(false); const listRef = useRef(null); const isAtBottom = listRef.current ? listRef.current.scrollHeight - @@ -55,6 +56,13 @@ export default function GameConsole() { // eslint-disable-next-line react-hooks/exhaustive-deps }, [consoleLog]); + useEffect(() => { + if (listRef.current && !initialScroll) { + setInitialScroll(true); + scrollToBottom(); + } + }) + const sendCommand = (command: string) => { axios({ method: 'post', From f94a9a8a65336577ff7874e579ed1464e3b3697f Mon Sep 17 00:00:00 2001 From: James Huang Date: Sat, 30 Dec 2023 18:13:20 -0800 Subject: [PATCH 11/11] modified mechanism for fetching new logs --- dashboard/src/components/GameConsole.tsx | 43 +++++++++--------------- dashboard/src/components/LogLoading.tsx | 25 ++++++++++++++ 2 files changed, 41 insertions(+), 27 deletions(-) create mode 100644 dashboard/src/components/LogLoading.tsx diff --git a/dashboard/src/components/GameConsole.tsx b/dashboard/src/components/GameConsole.tsx index acc50b6d..69f088c7 100644 --- a/dashboard/src/components/GameConsole.tsx +++ b/dashboard/src/components/GameConsole.tsx @@ -5,6 +5,7 @@ import { useConsoleStream } from 'data/ConsoleStream'; import { InstanceContext } from 'data/InstanceContext'; import { CommandHistoryContext } from 'data/CommandHistoryContext'; import { useUserAuthorized } from 'data/UserInfo'; +import LogLoading from './LogLoading'; import Tooltip from 'rc-tooltip'; import React, { useContext, useEffect } from 'react'; import { useRef, useState } from 'react'; @@ -29,11 +30,10 @@ export default function GameConsole() { const { commandHistory, appendCommandHistory } = useContext( CommandHistoryContext ); - const [lastScrollPos, setLastScrollPos] = useState(0); const [additionalLogs, setAdditionaLogs] = useState(0); - const [logsRecentlyIncreased, setLogsRecentlyIncreased] = useState(false); const [commandNav, setCommandNav] = useState(commandHistory.length); const [initialScroll, setInitialScroll] = useState(false); + const [fetchingItems, setFetchingItems] = useState(false); const listRef = useRef(null); const isAtBottom = listRef.current ? 
listRef.current.scrollHeight - @@ -56,6 +56,7 @@ export default function GameConsole() { // eslint-disable-next-line react-hooks/exhaustive-deps }, [consoleLog]); + // scroll to bottom of screen on initial load useEffect(() => { if (listRef.current && !initialScroll) { setInitialScroll(true); @@ -63,6 +64,15 @@ export default function GameConsole() { } }) + useEffect(() => { + if (!consoleLog || consoleLog.length <= 0) { + return; + } + fetchConsolePage(consoleLog[0].snowflake as unknown as bigint, additionalLogs + 40); + setAdditionaLogs(currNumLogs => currNumLogs + 40); + setFetchingItems(false); + }, [fetchingItems]) + const sendCommand = (command: string) => { axios({ method: 'post', @@ -174,33 +184,12 @@ export default function GameConsole() { className="border-gray-faded/30 text-small flex h-0 grow flex-col overflow-y-auto whitespace-pre-wrap break-words rounded-t-lg border-b bg-gray-900 py-3 font-mono font-light tracking-tight text-gray-300" ref={listRef} onScroll={(e: React.SyntheticEvent) => { - // check that position is close to top - if (!e.currentTarget || !e.currentTarget.scrollTop || logsRecentlyIncreased) { - return; - } - const prevScrollPos = lastScrollPos; - setLastScrollPos(e.currentTarget.scrollTop) - - const scrollPosDiff = e.currentTarget.scrollTop - prevScrollPos; // should be negative to be considered proper trigger - const triggerThreshhold = - e.currentTarget.scrollTop >= autoScrollThreshold * 15 && - e.currentTarget.scrollTop <= autoScrollThreshold * 20; // consider range to factor in scroll speed - - if (!triggerThreshhold || scrollPosDiff >= 0 || logsRecentlyIncreased) { - //allow new logs to be generated again if user scrolls down first - return; - } - fetchConsolePage(consoleLog[0].snowflake as unknown as bigint, additionalLogs + 40); - setAdditionaLogs(currNumLogs => currNumLogs + 40); - setLogsRecentlyIncreased(true); - - // debounce log generation - setTimeout(() => { - setLogsRecentlyIncreased(false); - }, 1000) - + if (e.currentTarget.scrollTop !== 0) return; + setFetchingItems(true); }} > + + {fetchingItems && } {consoleLog.map((line) => (
  2. +
    +
    +
    + +

    {loadingText}

    +
+
+  );
+}
\ No newline at end of file
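Usage sketch for the endpoint these patches introduce (not itself part of the series): a dashboard caller might page backwards through console history with the getConsoleEvents helper added in apis.ts. The instance UUID and page size below are placeholder values, loadOlderConsolePage is a hypothetical wrapper rather than a function in the patches, and an authenticated LodestoneContext/axios setup is assumed.

    import { getConsoleEvents } from 'utils/apis';

    // Fetch up to `count` console events at or before `oldestSnowflake` for one instance.
    // start_snowflake_id is a bigint, matching the generated ConsoleQueryParams binding;
    // the backend returns rows ordered by snowflake DESC, so the page is reversed before
    // being prepended to the log, mirroring what fetchConsolePage does in ConsoleStream.ts.
    async function loadOlderConsolePage(instanceUuid: string, oldestSnowflake: bigint) {
      const page = await getConsoleEvents(instanceUuid, {
        start_snowflake_id: oldestSnowflake,
        count: 40, // placeholder page size; GameConsole.tsx grows its request by 40 per fetch
      });
      return page.reverse(); // oldest-first, ready to prepend to the existing consoleLog
    }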