From 6780ab2947d46a3a3b55156efad45d431541b4ab Mon Sep 17 00:00:00 2001 From: thedonkified Date: Sun, 22 Sep 2024 17:29:25 -0700 Subject: [PATCH] Adds preliminary documentation for ndatabase - Also fixes failing build, although the entities as-is are not going to work --- .github/guides/NDATABASE.md | 111 +++++++++++++ .../{ndatabase.dm => ndatabase/_ndatabase.dm} | 88 +++++----- code/__DEFINES/{ => ndatabase}/clans.dm | 0 code/datums/_ndatabase/code/brsql_adapter.dm | 151 +++++++++--------- .../datums/_ndatabase/code/entity/db_field.dm | 3 +- .../_ndatabase/code/entity/db_field_type.dm | 54 ++++++- code/datums/_ndatabase/code/entity/entity.dm | 3 +- .../_ndatabase/code/entity/entity_meta.dm | 14 +- .../_ndatabase/code/interfaces/adapter.dm | 2 +- code/datums/_ndatabase/code/native_adapter.dm | 38 ++--- .../subsystems/database_query_manager.dm | 52 +++--- .../_ndatabase/subsystems/entity_manager.dm | 46 +++--- code/datums/_ndatabase/tests/test_entity.dm | 2 +- code/datums/entities/chemical_information.dm | 2 +- code/datums/entities/clans.dm | 10 +- code/datums/entities/discord_identifier.dm | 2 +- code/datums/entities/discord_link.dm | 5 +- code/datums/entities/login_triplets.dm | 2 +- code/datums/entities/logs/player_times_log.dm | 2 +- code/datums/entities/map_votes.dm | 2 +- code/datums/entities/mc_controller.dm | 2 +- code/datums/entities/mc_record.dm | 2 +- code/datums/entities/mc_round.dm | 2 +- code/datums/entities/mc_timing_info.dm | 2 +- code/datums/entities/player.dm | 6 +- code/datums/entities/player_job_ban.dm | 7 +- code/datums/entities/player_note.dm | 7 +- code/datums/entities/player_stat.dm | 5 +- code/datums/entities/player_sticky_ban.dm | 9 +- code/datums/entities/player_times.dm | 5 +- code/datums/entities/ticket.dm | 2 +- .../datums/statistics/entities/death_stats.dm | 2 +- code/datums/statistics/entities/map_stats.dm | 2 +- .../datums/statistics/entities/medal_stats.dm | 2 +- .../datums/statistics/entities/round_stats.dm | 2 +- colonialmarines.dme | 4 +- 36 files changed, 393 insertions(+), 257 deletions(-) create mode 100644 .github/guides/NDATABASE.md rename code/__DEFINES/{ndatabase.dm => ndatabase/_ndatabase.dm} (82%) rename code/__DEFINES/{ => ndatabase}/clans.dm (100%) diff --git a/.github/guides/NDATABASE.md b/.github/guides/NDATABASE.md new file mode 100644 index 000000000000..bebd6ff47eca --- /dev/null +++ b/.github/guides/NDATABASE.md @@ -0,0 +1,111 @@ +# Database +The actual interface for interacting with the database we are using. +## Adapter +### Core functions +TBD +#### Sync Table Meta (sync_table_meta) +TBD +#### Sync Table (sync_table) +TBD +#### Sync Index (sync_index) +TBD +#### Read Table (read_table) +TBD +#### Update Table (update_table) +TBD +#### Insert Table (insert_table) +TBD +#### Delete Table (delete_table) +TBD +#### Read Filter (read_filter) +TBD +#### Prepare View (prepare_view) +TBD +#### Read View (read_view) +TBD +### Filters +TBD +# Query +## Read Single (read_single) +Executes the given query, +## Connection +# Schema +Different structures we use for defining the structure of our data within the database. +## Entity +TBD +## Link +TBD +## Index +TBD +## Entity View +Virtual table to view a subset of data from a specific entity and that entity's links. + +Differs from a standard database view in that the fields of an entity view must ALWAYS be derived from its parent entity and its links. + +Standard database views are currently not implemented. 
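+
+As a rough illustration (not the final API), an entity view pairs a `/datum/view_record` — a plain datum whose vars mirror the view's columns — with a `/datum/entity_view_meta` that names the root entity (`root_entity_meta`) and the list of `fields` to project. The sketch below is hypothetical beyond the `root_entity_meta`/`fields` vars used by `prepare_view()` and the `/datum/view_record` pattern already used in `code/datums/entities/`; names such as `example_note_view` are illustrative only:
+
+```dm
+// Hypothetical view over player notes: one row per note, projecting a
+// subset of the note entity's fields. Var and type names are illustrative.
+/datum/view_record/example_note_view
+	var/id
+	var/player_id
+	var/text
+
+/datum/entity_view_meta/example_note_view
+	// Root entity the view is derived from (consumed by prepare_view/read_view in the adapter).
+	root_entity_meta = /datum/entity_meta/player_note
+	// Columns projected from the root entity and its links.
+	fields = list(
+		"id",
+		"player_id",
+		"text",
+	)
+```
+
+Reads then go through the adapter's `prepare_view()`/`read_view()` pair, optionally narrowed by a `/datum/db/filter`.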
+# Database Drivers +## brsql +The Rust MySQL database driver +### [Adapter](#Adapter) +TBD +### [Query](#Query) +See [rust-g query execution code](https://github.com/tgstation/rust-g/blob/9682fc08fe0306247fabc303cc93dd9858f2ce76/src/sql.rs#L147-L226) + +Query response JSON schema: +```json +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "rust-g SQL Query Response", + "description": "The JSON object returned from any rust-g query calls via the `rustg_sql_check_query()` proc.", + "type": "object", + "properties": { + "status": { + "type": "string" + }, + "affected": { + "type": "integer" + }, + "last_insert_id": { + "type": [ "integer", "null" ] + }, + "columns": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string" + } + } + } + }, + "rows": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "null" + }, + { + "type": "array", + "items": { + "type": "number" + } + } + ] + } + } + } +} +``` +## Native +BYOND internal database driver: http://www.byond.com/docs/ref/#/database + +Uses SQLite as its DB +> A /database datum gives you the ability to create or access a database using SQLite diff --git a/code/__DEFINES/ndatabase.dm b/code/__DEFINES/ndatabase/_ndatabase.dm similarity index 82% rename from code/__DEFINES/ndatabase.dm rename to code/__DEFINES/ndatabase/_ndatabase.dm index ba42759dd2d4..8135f90e3151 100644 --- a/code/__DEFINES/ndatabase.dm +++ b/code/__DEFINES/ndatabase/_ndatabase.dm @@ -161,177 +161,177 @@ #define FIELD_INT(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_INT; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_INT(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_INT; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . = ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_BIGINT(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_BIGINT; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_BIGINT(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_BIGINT; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . 
= ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_CHAR(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_CHAR; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_CHAR(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_CHAR; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . = ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_STRING_SMALL(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_SMALL; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_STRING_SMALL(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_SMALL; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . = ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_STRING_MEDIUM(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_MEDIUM; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_STRING_MEDIUM(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_MEDIUM; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . 
= ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_STRING_LARGE(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_LARGE; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_STRING_LARGE(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_LARGE; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . = ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_STRING_MAX(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_MAX; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_STRING_MAX(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_STRING_MAX; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . = ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DATE(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_DATE; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_DATE(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_DATE; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . 
= ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_TEXT(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_TEXT; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_TEXT(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_TEXT; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . = ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_BLOB(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_BLOB; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_BLOB(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_BLOB; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . = ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DECIMAL(entity, field) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_DECIMAL; parent_entity_type = /datum/entity/##entity; }; \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } #define FIELD_DEFAULT_VALUE_DECIMAL(entity, field, default) \ /datum/db_field/##entity/##field{ name = #field; field_type = DB_FIELDTYPE_DECIMAL; parent_entity_type = /datum/entity/##entity; }; \ /datum/db_field/##entity/##field/New() { . 
= ..(); value = default; } \ /datum/entity/##entity/var/datum/db_field/##entity/##field/##field; \ -/datum/entity_meta/##entity/setup_field_types() { \ +/datum/entity_meta/##entity/setup_field_typepaths() { \ ..(); \ - LAZYSET(field_types, #field, /datum/db_field/##entity/##field); \ + LAZYSET(field_typepaths, #field, /datum/db_field/##entity/##field); \ } /// Defines an entity link where `child_entity` refers to `parent_entity` through its column `foreign_key`. diff --git a/code/__DEFINES/clans.dm b/code/__DEFINES/ndatabase/clans.dm similarity index 100% rename from code/__DEFINES/clans.dm rename to code/__DEFINES/ndatabase/clans.dm diff --git a/code/datums/_ndatabase/code/brsql_adapter.dm b/code/datums/_ndatabase/code/brsql_adapter.dm index 44824ac5a520..d8947dd7a74c 100644 --- a/code/datums/_ndatabase/code/brsql_adapter.dm +++ b/code/datums/_ndatabase/code/brsql_adapter.dm @@ -85,53 +85,53 @@ return TRUE -/datum/db/adapter/brsql_adapter/read_table(table_name, list/ids, datum/callback/CB, sync = FALSE) +/datum/db/adapter/brsql_adapter/read_table(table_name, list/ids, datum/callback/on_read_table_callback, sync = FALSE) var/query_gettable = getquery_select_table(table_name, ids) if(sync) - SSdatabase.create_query_sync(query_gettable, CB) + SSdatabase.create_query_sync(query_gettable, on_read_table_callback) else - SSdatabase.create_query(query_gettable, CB) + SSdatabase.create_query(query_gettable, on_read_table_callback) -/datum/db/adapter/brsql_adapter/update_table(table_name, list/values, datum/callback/CB, sync = FALSE) +/datum/db/adapter/brsql_adapter/update_table(table_name, list/values, datum/callback/on_update_table_callback, sync = FALSE) var/list/query_parameters = list() var/query_updatetable = getquery_update_table(table_name, values, query_parameters) if(sync) - SSdatabase.create_parametric_query_sync(query_updatetable, query_parameters, CB) + SSdatabase.create_parametric_query_sync(query_updatetable, query_parameters, on_update_table_callback) else - SSdatabase.create_parametric_query(query_updatetable, query_parameters, CB) + SSdatabase.create_parametric_query(query_updatetable, query_parameters, on_update_table_callback) -/datum/db/adapter/brsql_adapter/insert_table(table_name, list/values, datum/callback/CB, sync = FALSE) +/datum/db/adapter/brsql_adapter/insert_table(table_name, list/values, datum/callback/on_insert_table_callback, sync = FALSE) set waitfor = FALSE var/length = length(values) var/list/query_parameters = list() var/query_inserttable = getquery_insert_table(table_name, values, query_parameters) - var/datum/callback/callback = CALLBACK(src, TYPE_PROC_REF(/datum/db/adapter/brsql_adapter, after_insert_table), CB, length, table_name) + var/datum/callback/on_insert_table_callback_wrapper = CALLBACK(src, TYPE_PROC_REF(/datum/db/adapter/brsql_adapter, after_insert_table), on_insert_table_callback, length, table_name) if(sync) - SSdatabase.create_parametric_query_sync(query_inserttable, query_parameters, callback) + SSdatabase.create_parametric_query_sync(query_inserttable, query_parameters, on_insert_table_callback_wrapper) else - SSdatabase.create_parametric_query(query_inserttable, query_parameters, callback) + SSdatabase.create_parametric_query(query_inserttable, query_parameters, on_insert_table_callback_wrapper) -/datum/db/adapter/brsql_adapter/proc/after_insert_table(datum/callback/CB, length, table_name, uid, list/results, datum/db/query/brsql/query) - CB.Invoke(query.last_insert_id) +/// Wrapper to on_insert_table_callback that passes the only value it 
cares about: the id of the newly inserted table +/datum/db/adapter/brsql_adapter/proc/after_insert_table(datum/callback/on_insert_table_callback, length, table_name, uid, list/results, datum/db/query/brsql/query) + on_insert_table_callback.Invoke(query.last_insert_id) - -/datum/db/adapter/brsql_adapter/delete_table(table_name, list/ids, datum/callback/CB, sync = FALSE) +/datum/db/adapter/brsql_adapter/delete_table(table_name, list/ids, datum/callback/on_delete_table_callback, sync = FALSE) var/query_deletetable = getquery_delete_table(table_name, ids) if(sync) - SSdatabase.create_query_sync(query_deletetable, CB) + SSdatabase.create_query_sync(query_deletetable, on_delete_table_callback) else - SSdatabase.create_query(query_deletetable, CB) + SSdatabase.create_query(query_deletetable, on_delete_table_callback) -/datum/db/adapter/brsql_adapter/read_filter(table_name, datum/db/filter, datum/callback/CB, sync = FALSE) +/datum/db/adapter/brsql_adapter/read_filter(table_name, datum/db/filter, datum/callback/on_read_filter_callback, sync = FALSE) var/list/query_parameters = list() var/query_gettable = getquery_filter_table(table_name, filter, query_parameters) if(sync) - SSdatabase.create_parametric_query_sync(query_gettable, query_parameters, CB) + SSdatabase.create_parametric_query_sync(query_gettable, query_parameters, on_read_filter_callback) else - SSdatabase.create_parametric_query(query_gettable, query_parameters, CB) + SSdatabase.create_parametric_query(query_gettable, query_parameters, on_read_filter_callback) -/datum/db/adapter/brsql_adapter/read_view(datum/entity_view_meta/view, datum/db/filter/filter, datum/callback/CB, sync=FALSE) +/datum/db/adapter/brsql_adapter/read_view(datum/entity_view_meta/view, datum/db/filter/filter, datum/callback/on_read_view_callback, sync=FALSE) var/v_key = "v_[view.type]" var/list/query_parameters = list() var/datum/db/brsql_cached_query/cached_view = cached_queries[v_key] @@ -139,11 +139,11 @@ return null var/query_getview = cached_view.spawn_query(filter, query_parameters) if(sync) - SSdatabase.create_parametric_query_sync(query_getview, query_parameters, CB) + SSdatabase.create_parametric_query_sync(query_getview, query_parameters, on_read_view_callback) else - SSdatabase.create_parametric_query(query_getview, query_parameters, CB) + SSdatabase.create_parametric_query(query_getview, query_parameters, on_read_view_callback) -/datum/db/adapter/brsql_adapter/sync_table(type_name, table_name, list/field_types) +/datum/db/adapter/brsql_adapter/sync_table(type_name, table_name, list/datum/db_field/field_typepaths) var/list/query_parameters = list() var/query_gettable = getquery_systable_gettable(table_name, query_parameters) var/datum/db/query_response/table_meta = SSdatabase.create_parametric_query_sync(query_gettable, query_parameters) @@ -151,12 +151,12 @@ issue_log += "Unable to access system table, error: '[table_meta.error]'" return FALSE // OH SHIT OH FUCK if(!length(table_meta.results)) // Table doesn't exist - return internal_create_table(table_name, field_types) && internal_record_table_in_sys(type_name, table_name, field_types) + return internal_create_table(table_name, field_typepaths) && internal_record_table_in_sys(type_name, table_name, field_typepaths) var/id = table_meta.results[1][DB_DEFAULT_ID_FIELD] - var/old_fields = savetext2fields(table_meta.results[1]["fields_current"]) + var/old_field_typepaths = savetext2fields(table_meta.results[1]["fields_current"]) var/old_hash = table_meta.results[1]["fields_hash"] - var/field_text = 
fields2savetext(field_types) + var/field_text = fields2savetext(field_typepaths) var/new_hash = sha1(field_text) if(old_hash == new_hash) @@ -166,10 +166,10 @@ // check if we have any records if(tablecount == 0) // just MURDER IT - return internal_drop_table(table_name) && internal_create_table(table_name, field_types) && internal_record_table_in_sys(type_name, table_name, field_types, id) + return internal_drop_table(table_name) && internal_create_table(table_name, field_typepaths) && internal_record_table_in_sys(type_name, table_name, field_typepaths, id) - return internal_drop_backup_table(table_name) && internal_create_backup_table(table_name, old_fields) && internal_migrate_to_backup(table_name, old_fields) && \ - internal_update_table(table_name, field_types, old_fields) && internal_record_table_in_sys(type_name, table_name, field_types, id) + return internal_drop_backup_table(table_name) && internal_create_backup_table(table_name, old_field_typepaths) && internal_migrate_to_backup(table_name, old_field_typepaths) && \ + internal_update_table(table_name, field_typepaths, old_field_typepaths) && internal_record_table_in_sys(type_name, table_name, field_typepaths, id) /datum/db/adapter/brsql_adapter/sync_index(index_name, table_name, list/fields, unique, cluster) var/list/query_parameters = list() @@ -192,17 +192,17 @@ // Index can be updated only by recreating it return internal_drop_index(index_name, table_name) && internal_create_index(index_name, table_name, fields, unique, cluster) && internal_record_index_in_sys(index_name, table_name, fields, id) -/datum/db/adapter/brsql_adapter/proc/internal_create_table(table_name, field_types) - var/query = getquery_systable_maketable(table_name, field_types) +/datum/db/adapter/brsql_adapter/proc/internal_create_table(table_name, field_typepaths) + var/query = getquery_systable_maketable(table_name, field_typepaths) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to create new table [table_name], error: '[sit_check.error]'" return FALSE // OH SHIT OH FUCK return TRUE -/datum/db/adapter/brsql_adapter/proc/internal_record_table_in_sys(type_name, table_name, field_types, id) +/datum/db/adapter/brsql_adapter/proc/internal_record_table_in_sys(type_name, table_name, field_typepaths, id) var/list/query_parameters = list() - var/query = getquery_systable_recordtable(type_name, table_name, field_types, query_parameters, id) + var/query = getquery_systable_recordtable(type_name, table_name, field_typepaths, query_parameters, id) var/datum/db/query_response/sit_check = SSdatabase.create_parametric_query_sync(query, query_parameters) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to record meta for table [table_name], error: '[sit_check.error]'" @@ -244,12 +244,7 @@ return TRUE /datum/db/adapter/brsql_adapter/proc/internal_drop_backup_table(table_name) - var/query = getcommand_droptable("[BRSQL_BACKUP_PREFIX][table_name]") - var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) - if(sit_check.status != DB_QUERY_FINISHED) - issue_log += "Unable to drop table [table_name], error: '[sit_check.error]'" - return FALSE // OH SHIT OH FUCK - return TRUE + return internal_drop_table("[BRSQL_BACKUP_PREFIX][table_name]") // returns -1 if shit is fucked, otherwise returns count /datum/db/adapter/brsql_adapter/proc/internal_table_count(table_name) @@ -271,29 +266,29 @@ return 1 return value 
-/datum/db/adapter/brsql_adapter/proc/internal_create_backup_table(table_name, field_types) - var/query = getquery_systable_maketable("[BRSQL_BACKUP_PREFIX][table_name]", field_types) +/datum/db/adapter/brsql_adapter/proc/internal_create_backup_table(table_name, field_typepaths) + var/query = getquery_systable_maketable("[BRSQL_BACKUP_PREFIX][table_name]", field_typepaths) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to create backup for table [table_name], error: '[sit_check.error]'" return FALSE // OH SHIT OH FUCK return TRUE -/datum/db/adapter/brsql_adapter/proc/internal_migrate_table(table_name, list/field_types_old) +/datum/db/adapter/brsql_adapter/proc/internal_migrate_table(table_name, list/field_typepaths_old) var/list/fields = list(DB_DEFAULT_ID_FIELD) - for(var/field in field_types_old) + for(var/field in field_typepaths_old) fields += field var/query = getquery_insert_from_backup(table_name, fields) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) - issue_log += "Unable to migrate table [table_name] to backup, error: '[sit_check.error]'" + issue_log += "Unable to migrate table [table_name] from backup, error: '[sit_check.error]'" return FALSE // OH SHIT OH FUCK return TRUE -/datum/db/adapter/brsql_adapter/proc/internal_migrate_to_backup(table_name, list/field_types_old) +/datum/db/adapter/brsql_adapter/proc/internal_migrate_to_backup(table_name, list/field_typepaths_old) var/list/fields = list(DB_DEFAULT_ID_FIELD) - for(var/field in field_types_old) + for(var/field in field_typepaths_old) fields += field var/query = getquery_insert_into_backup(table_name, fields) @@ -303,25 +298,25 @@ return FALSE // OH SHIT OH FUCK return TRUE -/datum/db/adapter/brsql_adapter/proc/internal_update_table(table_name, list/field_types_new, list/field_types_old) - for(var/field in field_types_old) - if(!field_types_new[field]) +/datum/db/adapter/brsql_adapter/proc/internal_update_table(table_name, list/field_typepaths_new, list/field_typepaths_old) + for(var/field in field_typepaths_old) + if(!field_typepaths_new[field]) var/query = getquery_update_table_delete_column(table_name, field) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to update table `[table_name]`, error: '[sit_check.error]'" return FALSE // OH SHIT OH FUCK - for(var/field in field_types_new) - if(!field_types_old[field]) - var/query = getquery_update_table_add_column(table_name, field, field_types_new[field]) + for(var/field in field_typepaths_new) + if(!field_typepaths_old[field]) + var/query = getquery_update_table_add_column(table_name, field_typepaths_new[field]) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to update table `[table_name]`, error: '[sit_check.error]'" return FALSE // OH SHIT OH FUCK else - if(field_types_old[field] != field_types_new[field]) - var/query = getquery_update_table_change_column(table_name, field, field_types_new[field]) + if(field_typepaths_old[field] != field_typepaths_new[field]) + var/query = getquery_update_table_change_column(table_name, field_typepaths_new[field]) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to update table `[table_name]`, error: 
'[sit_check.error]'" @@ -377,15 +372,15 @@ ); "} -/datum/db/adapter/brsql_adapter/proc/getquery_systable_maketable(table_name, field_types) +/datum/db/adapter/brsql_adapter/proc/getquery_systable_maketable(table_name, field_typepaths) return {" CREATE TABLE `[connection.database]`.`[table_name]` ( - id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY, [fields2text(field_types)] + id BIGINT NOT NULL AUTO_INCREMENT PRIMARY KEY, [fields2text(field_typepaths)] ); "} -/datum/db/adapter/brsql_adapter/proc/getquery_systable_recordtable(type_name, table_name, field_types, list/qpar, id = null) - var/field_text = fields2savetext(field_types) +/datum/db/adapter/brsql_adapter/proc/getquery_systable_recordtable(type_name, table_name, field_typepaths, list/qpar, id = null) + var/field_text = fields2savetext(field_typepaths) var/new_hash = sha1(field_text) qpar.Add("[type_name]") qpar.Add("[table_name]") @@ -559,11 +554,11 @@ SELECT max(id) + 1 as total FROM `[connection.database]`.`[table_name]` "} -/datum/db/adapter/brsql_adapter/proc/getquery_update_table_add_column(table_name, column_name, column_type) - return "ALTER TABLE `[connection.database]`.`[table_name]` ADD COLUMN `[column_name]` [fieldtype2text(column_type)];" +/datum/db/adapter/brsql_adapter/proc/getquery_update_table_add_column(table_name, datum/db_field/column) + return "ALTER TABLE `[connection.database]`.`[table_name]` ADD COLUMN `[column::name]` [fieldtype2text(column::field_type)];" -/datum/db/adapter/brsql_adapter/proc/getquery_update_table_change_column(table_name, column_name, column_type) - return "ALTER TABLE `[connection.database]`.`[table_name]` MODIFY COLUMN `[column_name]` [fieldtype2text(column_type)];" +/datum/db/adapter/brsql_adapter/proc/getquery_update_table_change_column(table_name, datum/db_field/column) + return "ALTER TABLE `[connection.database]`.`[table_name]` MODIFY COLUMN `[column::name]` [fieldtype2text(column::field_type)];" /datum/db/adapter/brsql_adapter/proc/getquery_update_table_delete_column(table_name, column_name) return "ALTER TABLE `[connection.database]`.`[table_name]` DROP COLUMN `[column_name]`;" @@ -594,25 +589,32 @@ return "DECIMAL(18,5)" return FALSE -/datum/db/adapter/brsql_adapter/proc/fields2text(list/L) +/datum/db/adapter/brsql_adapter/proc/fields2text(list/datum/db_field/field_typepaths) var/list/result = list() - for(var/item in L) - result += "[item] [fieldtype2text(L[item])]" + for(var/field in field_typepaths) + var/datum/db_field/field_typepath = field_typepaths[field] + result += "[field_typepath::name] [fieldtype2text(field_typepath::field_type)]" return jointext(result, ",") -/datum/db/adapter/brsql_adapter/proc/fields2savetext(list/L) +/datum/db/adapter/brsql_adapter/proc/fields2savetext(list/datum/db_field/field_typepaths) var/list/result = list() - for(var/item in L) - result += "[item]:[L[item]]" + for(var/field in field_typepaths) + var/datum/db_field/field_typepath = field_typepaths[field] + result += "[field_typepath::name]:[field_typepath::field_type::type_id]" return jointext(result, ",") +#define FIELD_NAME_IDX 1 +#define FIELD_TYPE_ID_IDX 2 /datum/db/adapter/brsql_adapter/proc/savetext2fields(text) var/list/result = list() - var/list/split1 = splittext(text, ",") - for(var/field in split1) - var/list/split2 = splittext(field, ":") - result[split2[1]] = text2num(split2[2]) + var/list/fields_as_savetext = splittext(text, ",") + for(var/field_as_savetext in fields_as_savetext) + var/list/field_information = splittext(field_as_savetext, ":") + var/field_type_id = 
text2num(field_information[FIELD_TYPE_ID_IDX]) + result[field_information[FIELD_NAME_IDX]] = GLOB.db_field_types[field_type_id] return result +#undef FIELD_NAME_IDX +#undef FIELD_TYPE_ID_IDX /datum/db/adapter/brsql_adapter/prepare_view(datum/entity_view_meta/view) var/list/datum/entity_meta/meta_to_load = list(BRSQL_ROOT_NAME = view.root_entity_meta) @@ -678,9 +680,8 @@ var/query_text = "SELECT " for(var/fld in view.fields) var/field = field_alias[fld] - var/datum/db/native_function/native_function = field - // this is a function? - if(istype(native_function)) + if(istype(field, /datum/db/native_function)) + var/datum/db/native_function/native_function = field field = internal_proc_to_text(native_function, field_alias, pre_pflds) query_text += "[field] as `[fld]`, " query_text += "1 as is_view " diff --git a/code/datums/_ndatabase/code/entity/db_field.dm b/code/datums/_ndatabase/code/entity/db_field.dm index 4213cce5e4df..eb3997ab65e7 100644 --- a/code/datums/_ndatabase/code/entity/db_field.dm +++ b/code/datums/_ndatabase/code/entity/db_field.dm @@ -2,7 +2,8 @@ /// The name of the field on the DB, should only contain lowercase letters and underscores. var/name /// The type on the DB, should be a subtype of /datum/db_field_type (see: `__DEFINES/ndatabase.dm`). - var/field_type + /// Not an actual instance, just a typepath but casted to get the value specified in the field type definition. + var/datum/db_field_type/field_type /// The actual value of the field (not necessarily persisted to DB) var/value /// The typepath of the entity datum that holds the field diff --git a/code/datums/_ndatabase/code/entity/db_field_type.dm b/code/datums/_ndatabase/code/entity/db_field_type.dm index b854f532187f..8555ca1ecd44 100644 --- a/code/datums/_ndatabase/code/entity/db_field_type.dm +++ b/code/datums/_ndatabase/code/entity/db_field_type.dm @@ -1,16 +1,48 @@ -/// Should be TRUE for everything but interface parent types +// These IDs must be sequential and MUST maintain order specified here. Can add more types if needed but the type ID needs to be incremented. +#define TYPE_ID_INT 1 +#define TYPE_ID_BIGINT 2 +#define TYPE_ID_CHAR 3 +#define TYPE_ID_STRING_SMALL 4 +#define TYPE_ID_STRING_MEDIUM 5 +#define TYPE_ID_STRING_LARGE 6 +#define TYPE_ID_STRING_MAX 7 +#define TYPE_ID_DATE 8 +#define TYPE_ID_TEXT 9 +#define TYPE_ID_BLOB 10 +#define TYPE_ID_DECIMAL 11 + +/// A standard list where each index is mapped to a specific DB field type +/// by the field type's `type_id` var. 
+GLOBAL_LIST_INIT(db_field_types, setup_db_field_types()) + +/proc/setup_db_field_types() + var/list/result = list() + for (var/datum/db_field_type/field_type in subtypesof(/datum/db_field_type)) + if (!field_type::valid) + continue + var/type_id = field_type::type_id + if (length(result) < type_id) + result.len = type_id + result[type_id] = field_type + return result + /datum/db_field_type + /// Should be TRUE for everything but interface parent types var/valid = FALSE + var/type_id /datum/db_field_type/int valid = TRUE + type_id = TYPE_ID_INT /datum/db_field_type/bigint valid = TRUE + type_id = TYPE_ID_BIGINT /// Field that allows only 1 symbol /datum/db_field_type/char valid = TRUE + type_id = TYPE_ID_CHAR /datum/db_field_type/string valid = FALSE @@ -18,28 +50,48 @@ /// Field that allows 16 symbols /datum/db_field_type/string/small valid = TRUE + type_id = TYPE_ID_STRING_SMALL /// Field that allows 64 symbols /datum/db_field_type/string/medium valid = TRUE + type_id = TYPE_ID_STRING_MEDIUM /// Field that allows 256 symbols /datum/db_field_type/string/large valid = TRUE + type_id = TYPE_ID_STRING_LARGE /// Field that allows 4000 symbols /datum/db_field_type/string/max valid = TRUE + type_id = TYPE_ID_STRING_MAX /datum/db_field_type/date valid = TRUE + type_id = TYPE_ID_DATE /// Field that allows any amount of symbols but really inefficient /datum/db_field_type/text valid = TRUE + type_id = TYPE_ID_TEXT /datum/db_field_type/blob valid = TRUE + type_id = TYPE_ID_BLOB /datum/db_field_type/decimal valid = TRUE + type_id = TYPE_ID_DECIMAL + +#undef TYPE_ID_INT +#undef TYPE_ID_BIGINT +#undef TYPE_ID_CHAR +#undef TYPE_ID_STRING_SMALL +#undef TYPE_ID_STRING_MEDIUM +#undef TYPE_ID_STRING_LARGE +#undef TYPE_ID_STRING_MAX +#undef TYPE_ID_DATE +#undef TYPE_ID_TEXT +#undef TYPE_ID_BLOB +#undef TYPE_ID_DECIMAL diff --git a/code/datums/_ndatabase/code/entity/entity.dm b/code/datums/_ndatabase/code/entity/entity.dm index 59a85ebf466d..5c3e1290cc51 100644 --- a/code/datums/_ndatabase/code/entity/entity.dm +++ b/code/datums/_ndatabase/code/entity/entity.dm @@ -22,7 +22,6 @@ /datum/entity var/id var/status - var/datum/db_field_type/field_type var/datum/entity_meta/metadata var/__key_synced = FALSE @@ -85,6 +84,6 @@ CB.Invoke(src) /datum/entity/proc/assign_values(list/values, list/ignore = list()) - for(var/F in metadata.field_types) + for(var/F in metadata.field_typepaths) if(!(ignore.Find(F))) vars[F] = values[F] diff --git a/code/datums/_ndatabase/code/entity/entity_meta.dm b/code/datums/_ndatabase/code/entity/entity_meta.dm index 8f8b58ac3f63..bbc5397ae9a3 100644 --- a/code/datums/_ndatabase/code/entity/entity_meta.dm +++ b/code/datums/_ndatabase/code/entity/entity_meta.dm @@ -22,7 +22,8 @@ /datum/entity_meta var/entity_type var/table_name - var/list/datum/db_field/field_types + var/list/datum/db_field/field_typepaths + /// Whether we are persisting the entity in the DB var/active_entity = TRUE var/key_field = null @@ -52,10 +53,11 @@ key_managed = list() inbound_links = list() outbound_links = list() - setup_field_types() + setup_field_typepaths() -/// Proc to populate field_types list, mainly used for the no-boiler plate entity definitions -/datum/entity_meta/proc/setup_field_types() +/// Proc to populate field_typepaths list, mainly used for the no-boiler plate entity definitions +/datum/entity_meta/proc/setup_field_typepaths() + PRIVATE_PROC(TRUE) // Should always be calling other overrides of the proc, entities are final classes any way SHOULD_CALL_PARENT(TRUE) return @@ -64,7 +66,7 @@ 
/datum/entity_meta/proc/map(datum/entity/ET, list/values) var/strid = "[values[DB_DEFAULT_ID_FIELD]]" ET.id = strid - for(var/F in field_types) + for(var/F in field_typepaths) ET.vars[F] = values[F] // redefine this for faster operations @@ -72,7 +74,7 @@ var/list/values = list() if(include_id) values[DB_DEFAULT_ID_FIELD] = ET.id - for(var/F in field_types) + for(var/F in field_typepaths) values[F] = ET.vars[F] return values diff --git a/code/datums/_ndatabase/code/interfaces/adapter.dm b/code/datums/_ndatabase/code/interfaces/adapter.dm index 33270fe5de41..76a81d846143 100644 --- a/code/datums/_ndatabase/code/interfaces/adapter.dm +++ b/code/datums/_ndatabase/code/interfaces/adapter.dm @@ -24,7 +24,7 @@ /datum/db/adapter/proc/sync_table_meta() return -/datum/db/adapter/proc/sync_table(type_name, table_name, list/field_types) +/datum/db/adapter/proc/sync_table(type_name, table_name, list/field_typepaths) return /datum/db/adapter/proc/sync_index(index_name, table_name, list/fields, unique, cluster) diff --git a/code/datums/_ndatabase/code/native_adapter.dm b/code/datums/_ndatabase/code/native_adapter.dm index d5956ca8d85f..ab53b4df093f 100644 --- a/code/datums/_ndatabase/code/native_adapter.dm +++ b/code/datums/_ndatabase/code/native_adapter.dm @@ -131,7 +131,7 @@ else SSdatabase.create_parametric_query(query_getview, qpars, CB) -/datum/db/adapter/native_adapter/sync_table(type_name, table_name, list/field_types) +/datum/db/adapter/native_adapter/sync_table(type_name, table_name, list/field_typepaths) var/list/qpars = list() var/query_gettable = getquery_systable_gettable(table_name, qpars) var/datum/db/query_response/table_meta = SSdatabase.create_parametric_query_sync(query_gettable, qpars) @@ -139,12 +139,12 @@ issue_log += "Unable to access system table, error: '[table_meta.error]'" return FALSE // OH SHIT OH FUCK if(!length(table_meta.results)) // Table doesn't exist - return internal_create_table(table_name, field_types) && internal_record_table_in_sys(type_name, table_name, field_types) + return internal_create_table(table_name, field_typepaths) && internal_record_table_in_sys(type_name, table_name, field_typepaths) var/id = table_meta.results[1][DB_DEFAULT_ID_FIELD] var/old_fields = savetext2fields(table_meta.results[1]["fields_current"]) var/old_hash = table_meta.results[1]["fields_hash"] - var/field_text = fields2savetext(field_types) + var/field_text = fields2savetext(field_typepaths) var/new_hash = sha1(field_text) if(old_hash == new_hash) @@ -154,24 +154,24 @@ // check if we have any records if(tablecount == 0) // just MURDER IT - return internal_drop_table(table_name) && internal_create_table(table_name, field_types) && internal_record_table_in_sys(type_name, table_name, field_types, id) + return internal_drop_table(table_name) && internal_create_table(table_name, field_typepaths) && internal_record_table_in_sys(type_name, table_name, field_typepaths, id) return internal_drop_backup_table(table_name) && internal_create_backup_table(table_name, old_fields) && internal_migrate_to_backup(table_name, old_fields) && \ - internal_drop_table(table_name) && internal_create_table(table_name, field_types) && internal_migrate_table(table_name, old_fields) && internal_record_table_in_sys(type_name, table_name, field_types, id) + internal_drop_table(table_name) && internal_create_table(table_name, field_typepaths) && internal_migrate_table(table_name, old_fields) && internal_record_table_in_sys(type_name, table_name, field_typepaths, id) 
-/datum/db/adapter/native_adapter/proc/internal_create_table(table_name, field_types) - var/query = getquery_systable_maketable(table_name, field_types) +/datum/db/adapter/native_adapter/proc/internal_create_table(table_name, field_typepaths) + var/query = getquery_systable_maketable(table_name, field_typepaths) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to create new table [table_name], error: '[sit_check.error]'" return FALSE // OH SHIT OH FUCK return TRUE -/datum/db/adapter/native_adapter/proc/internal_record_table_in_sys(type_name, table_name, field_types, id) +/datum/db/adapter/native_adapter/proc/internal_record_table_in_sys(type_name, table_name, field_typepaths, id) var/list/qpars = list() - var/query = getquery_systable_recordtable(type_name, table_name, field_types, qpars, id) + var/query = getquery_systable_recordtable(type_name, table_name, field_typepaths, qpars, id) var/datum/db/query_response/sit_check = SSdatabase.create_parametric_query_sync(query, qpars) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to record meta for table [table_name], error: '[sit_check.error]'" @@ -214,17 +214,17 @@ return 1 return value -/datum/db/adapter/native_adapter/proc/internal_create_backup_table(table_name, field_types) - var/query = getquery_systable_maketable("[NATIVE_BACKUP_PREFIX][table_name]", field_types) +/datum/db/adapter/native_adapter/proc/internal_create_backup_table(table_name, field_typepaths) + var/query = getquery_systable_maketable("[NATIVE_BACKUP_PREFIX][table_name]", field_typepaths) var/datum/db/query_response/sit_check = SSdatabase.create_query_sync(query) if(sit_check.status != DB_QUERY_FINISHED) issue_log += "Unable to create backup for table [table_name], error: '[sit_check.error]'" return FALSE // OH SHIT OH FUCK return TRUE -/datum/db/adapter/native_adapter/proc/internal_migrate_table(table_name, list/field_types_old) +/datum/db/adapter/native_adapter/proc/internal_migrate_table(table_name, list/field_typepaths_old) var/list/fields = list(DB_DEFAULT_ID_FIELD) - for(var/field in field_types_old) + for(var/field in field_typepaths_old) fields += field var/query = getquery_insert_from_backup(table_name, fields) @@ -234,9 +234,9 @@ return FALSE // OH SHIT OH FUCK return TRUE -/datum/db/adapter/native_adapter/proc/internal_migrate_to_backup(table_name, list/field_types_old) +/datum/db/adapter/native_adapter/proc/internal_migrate_to_backup(table_name, list/field_typepaths_old) var/list/fields = list(DB_DEFAULT_ID_FIELD) - for(var/field in field_types_old) + for(var/field in field_typepaths_old) fields += field var/query = getquery_insert_into_backup(table_name, fields) @@ -268,15 +268,15 @@ DROP TABLE IF EXISTS [table_name] "} -/datum/db/adapter/native_adapter/proc/getquery_systable_maketable(table_name, field_types) +/datum/db/adapter/native_adapter/proc/getquery_systable_maketable(table_name, field_typepaths) return {" CREATE TABLE [table_name] ( - id BIGINT NOT NULL PRIMARY KEY, [fields2text(field_types)] + id BIGINT NOT NULL PRIMARY KEY, [fields2text(field_typepaths)] ); "} -/datum/db/adapter/native_adapter/proc/getquery_systable_recordtable(type_name, table_name, field_types, list/qpar, id = null) - var/field_text = fields2savetext(field_types) +/datum/db/adapter/native_adapter/proc/getquery_systable_recordtable(type_name, table_name, field_typepaths, list/qpar, id = null) + var/field_text = fields2savetext(field_typepaths) var/new_hash = sha1(field_text) 
qpar.Add("[type_name]") qpar.Add("[table_name]") diff --git a/code/datums/_ndatabase/subsystems/database_query_manager.dm b/code/datums/_ndatabase/subsystems/database_query_manager.dm index 356644140c2e..0912cecf2c54 100644 --- a/code/datums/_ndatabase/subsystems/database_query_manager.dm +++ b/code/datums/_ndatabase/subsystems/database_query_manager.dm @@ -127,50 +127,50 @@ GLOBAL_REAL(SSdatabase, /datum/controller/subsystem/database_query_manager) return query_actual /datum/controller/subsystem/database_query_manager/proc/create_query(query_text, success_callback, fail_callback, unique_query_id) - var/datum/db/query_response/query_response = new() - query_response.query_text = query_text - query_response.success_callback = success_callback - query_response.fail_callback = fail_callback + var/datum/db/query_response/query = new() + query.query_text = query_text + query.success_callback = success_callback + query.fail_callback = fail_callback if(unique_query_id) - query_response.unique_query_id = unique_query_id - queries_standby[query_response] = query_text + query.unique_query_id = unique_query_id + queries_standby[query] = query_text // if DB supports this /datum/controller/subsystem/database_query_manager/proc/create_parametric_query(query_text, parameters, success_callback, fail_callback, unique_query_id) - var/datum/db/query_response/query_response = new() + var/datum/db/query_response/query = new() var/list/query_parameters = list() query_parameters.Add(query_text) if(parameters) query_parameters.Add(parameters) - query_response.query_text = query_text - query_response.success_callback = success_callback - query_response.fail_callback = fail_callback + query.query_text = query_text + query.success_callback = success_callback + query.fail_callback = fail_callback if(unique_query_id) - query_response.unique_query_id = unique_query_id - queries_standby[query_response] = query_parameters + query.unique_query_id = unique_query_id + queries_standby[query] = query_parameters // Do not use this if you don't know why this exists /datum/controller/subsystem/database_query_manager/proc/create_query_sync(query_text, success_callback, fail_callback) - var/datum/db/query_response/query_response = new() - query_response.query = connection.query(query_text) - query_response.query_text = query_text - query_response.success_callback = success_callback - query_response.fail_callback = fail_callback - UNTIL(query_response.process()) - return query_response + var/datum/db/query_response/query = new() + query.query = connection.query(query_text) + query.query_text = query_text + query.success_callback = success_callback + query.fail_callback = fail_callback + UNTIL(query.process()) + return query /datum/controller/subsystem/database_query_manager/proc/create_parametric_query_sync(query_text, parameters, success_callback, fail_callback) - var/datum/db/query_response/query_response = new() + var/datum/db/query_response/query = new() var/list/query_parameters = list() query_parameters += query_text if(parameters) query_parameters += parameters - query_response.query = connection.query(arglist(query_parameters)) - query_response.query_text = query_text - query_response.success_callback = success_callback - query_response.fail_callback = fail_callback - UNTIL(query_response.process()) - return query_response + query.query = connection.query(arglist(query_parameters)) + query.query_text = query_text + query.success_callback = success_callback + query.fail_callback = fail_callback + UNTIL(query.process()) + 
return query /proc/loadsql(filename) var/list/Lines = file2list(filename) diff --git a/code/datums/_ndatabase/subsystems/entity_manager.dm b/code/datums/_ndatabase/subsystems/entity_manager.dm index 09c5a60dc4c3..74dab079a1e6 100644 --- a/code/datums/_ndatabase/subsystems/entity_manager.dm +++ b/code/datums/_ndatabase/subsystems/entity_manager.dm @@ -91,14 +91,14 @@ GLOBAL_REAL(SSentity_manager, /datum/controller/subsystem/entity_manager) /datum/controller/subsystem/entity_manager/proc/prepare_tables() adapter.sync_table_meta() - for(var/ET in tables) - var/datum/entity_meta/EM = tables[ET] - adapter.sync_table(EM.entity_type, EM.table_name, EM.field_types) - if(EM.indexes) - for(var/datum/db/index/I in EM.indexes) - adapter.sync_index(I.name, EM.table_name, I.fields, I.hints & DB_INDEXHINT_UNIQUE, I.hints & DB_INDEXHINT_CLUSTER) - if(EM.key_field) - adapter.sync_index("keyfield_index_[EM.key_field]", EM.table_name, list(EM.key_field), TRUE, TRUE) + for(var/entity_type in tables) + var/datum/entity_meta/entity_meta = tables[entity_type] + adapter.sync_table(entity_meta.entity_type, entity_meta.table_name, entity_meta.field_typepaths) + if(entity_meta.indexes) + for(var/datum/db/index/index in entity_meta.indexes) + adapter.sync_index(index.name, entity_meta.table_name, index.fields, index.hints & DB_INDEXHINT_UNIQUE, index.hints & DB_INDEXHINT_CLUSTER) + if(entity_meta.key_field) + adapter.sync_index("keyfield_index_[entity_meta.key_field]", entity_meta.table_name, list(entity_meta.key_field), TRUE, TRUE) /datum/controller/subsystem/entity_manager/fire(resumed = FALSE) @@ -134,16 +134,16 @@ GLOBAL_REAL(SSentity_manager, /datum/controller/subsystem/entity_manager) var/currid = text2num("[first_id]") meta.inserting = list() // order between those two has to be same - for(var/datum/entity/IE in inserted_entities) - IE.id = "[currid]" - meta.on_insert(IE) - meta.on_action(IE) + for(var/datum/entity/inserted_entity in inserted_entities) + inserted_entity.id = "[currid]" + meta.on_insert(inserted_entity) + meta.on_action(inserted_entity) currid++ - if(IE.status == DB_ENTITY_STATE_ADD_DETACH) - qdel(IE) + if(inserted_entity.status == DB_ENTITY_STATE_ADD_DETACH) + qdel(inserted_entity) continue - IE.status = DB_ENTITY_STATE_SYNCED - meta.managed["[IE.id]"] = IE + inserted_entity.status = DB_ENTITY_STATE_SYNCED + meta.managed["[inserted_entity.id]"] = inserted_entity /datum/controller/subsystem/entity_manager/proc/do_update(datum/entity_meta/meta) var/list/datum/entity/to_update = meta.to_update @@ -158,10 +158,10 @@ GLOBAL_REAL(SSentity_manager, /datum/controller/subsystem/entity_manager) adapter.update_table(meta.table_name, unmap, CALLBACK(src, TYPE_PROC_REF(/datum/controller/subsystem/entity_manager, after_update), meta, to_update)) /datum/controller/subsystem/entity_manager/proc/after_update(datum/entity_meta/meta, list/datum/entity/updated_entities) - for(var/datum/entity/IE in updated_entities) - IE.status = DB_ENTITY_STATE_SYNCED - meta.on_update(IE) - meta.on_action(IE) + for(var/datum/entity/updated_entity in updated_entities) + updated_entity.status = DB_ENTITY_STATE_SYNCED + meta.on_update(updated_entity) + meta.on_action(updated_entity) /datum/controller/subsystem/entity_manager/proc/do_delete(datum/entity_meta/meta) var/list/datum/entity/to_delete = meta.to_delete @@ -175,9 +175,9 @@ GLOBAL_REAL(SSentity_manager, /datum/controller/subsystem/entity_manager) adapter.delete_table(meta.table_name, ids, CALLBACK(src, TYPE_PROC_REF(/datum/controller/subsystem/entity_manager, after_delete), 
meta, to_delete)) /datum/controller/subsystem/entity_manager/proc/after_delete(datum/entity_meta/meta, list/datum/entity/deleted_entities) - for(var/datum/entity/IE in deleted_entities) - IE.status = DB_ENTITY_STATE_BROKEN - meta.on_delete(IE) + for(var/datum/entity/deleted_entity in deleted_entities) + deleted_entity.status = DB_ENTITY_STATE_BROKEN + meta.on_delete(deleted_entity) /datum/controller/subsystem/entity_manager/proc/do_select(datum/entity_meta/meta) var/list/datum/entity/to_select = meta.to_read diff --git a/code/datums/_ndatabase/tests/test_entity.dm b/code/datums/_ndatabase/tests/test_entity.dm index 3bc9cc829bf4..fefef7b21749 100644 --- a/code/datums/_ndatabase/tests/test_entity.dm +++ b/code/datums/_ndatabase/tests/test_entity.dm @@ -6,7 +6,7 @@ /datum/entity_meta/test_entity entity_type = /datum/entity/test_entity table_name = "test_table" - field_types = list("name"=DB_FIELDTYPE_STRING_MEDIUM, "description"=DB_FIELDTYPE_STRING_MAX, "value"=DB_FIELDTYPE_BIGINT) + field_typepaths = list("name"=DB_FIELDTYPE_STRING_MEDIUM, "description"=DB_FIELDTYPE_STRING_MAX, "value"=DB_FIELDTYPE_BIGINT) // redefine this for faster operations /datum/entity_meta/test_entity/map(datum/entity/test_entity/ET, list/values) diff --git a/code/datums/entities/chemical_information.dm b/code/datums/entities/chemical_information.dm index 9ef702156006..b40297563a7f 100644 --- a/code/datums/entities/chemical_information.dm +++ b/code/datums/entities/chemical_information.dm @@ -21,7 +21,7 @@ /datum/entity_meta/chemical_information entity_type = /datum/entity/chemical_information table_name = "chemical_information" - field_types = list( + field_typepaths = list( "nutriment_factor" = DB_FIELDTYPE_DECIMAL, "custom_metabolism" = DB_FIELDTYPE_DECIMAL, "overdose" = DB_FIELDTYPE_INT, diff --git a/code/datums/entities/clans.dm b/code/datums/entities/clans.dm index 0471257d2b2c..6ea4db6b4abc 100644 --- a/code/datums/entities/clans.dm +++ b/code/datums/entities/clans.dm @@ -19,7 +19,7 @@ BSQL_PROTECT_DATUM(/datum/entity/clan) /datum/entity_meta/clan entity_type = /datum/entity/clan table_name = "clans" - field_types = list( + field_typepaths = list( "name" = DB_FIELDTYPE_STRING_MEDIUM, "description" = DB_FIELDTYPE_STRING_MAX, "honor" = DB_FIELDTYPE_BIGINT, @@ -34,7 +34,7 @@ BSQL_PROTECT_DATUM(/datum/entity/clan) /datum/entity_meta/clan_player entity_type = /datum/entity/clan_player table_name = "clans_player" - field_types = list( + field_typepaths = list( "player_id" = DB_FIELDTYPE_BIGINT, "clan_rank" = DB_FIELDTYPE_BIGINT, "permissions" = DB_FIELDTYPE_BIGINT, @@ -56,17 +56,11 @@ BSQL_PROTECT_DATUM(/datum/entity/clan) child_entity = /datum/entity/clan_player child_foreign_key = "player_id" - parent_name = "player" - child_name = "clan_player" - /datum/entity_link/clan_to_player parent_entity = /datum/entity/clan child_entity = /datum/entity/clan_player child_foreign_key = "clan_id" - parent_name = "clan" - child_name = "clan_player" - /datum/view_record/clan_playerbase_view var/clan_id var/player_id diff --git a/code/datums/entities/discord_identifier.dm b/code/datums/entities/discord_identifier.dm index 7475af74ae3c..7b03c7bca40f 100644 --- a/code/datums/entities/discord_identifier.dm +++ b/code/datums/entities/discord_identifier.dm @@ -14,7 +14,7 @@ table_name = "discord_identifiers" key_field = "identifier" - field_types = list( + field_typepaths = list( "identifier" = DB_FIELDTYPE_STRING_LARGE, "playerid" = DB_FIELDTYPE_BIGINT, "realtime" = DB_FIELDTYPE_BIGINT, diff --git 
diff --git a/code/datums/entities/discord_link.dm b/code/datums/entities/discord_link.dm
index 8d91c517821b..34923755ed49 100644
--- a/code/datums/entities/discord_link.dm
+++ b/code/datums/entities/discord_link.dm
@@ -7,7 +7,7 @@
 	table_name = "discord_links"
 	key_field = "discord_id"

-	field_types = list(
+	field_typepaths = list(
 		"player_id" = DB_FIELDTYPE_BIGINT,
 		"discord_id" = DB_FIELDTYPE_STRING_MEDIUM,
 	)
@@ -32,7 +32,4 @@
 	child_entity = /datum/entity/discord_link
 	child_foreign_key = "player_id"

-	parent_name = "player"
-	child_name = "discord_link_id"
-
diff --git a/code/datums/entities/login_triplets.dm b/code/datums/entities/login_triplets.dm
index 545f83b90c77..378bef335294 100644
--- a/code/datums/entities/login_triplets.dm
+++ b/code/datums/entities/login_triplets.dm
@@ -11,7 +11,7 @@
 /datum/entity_meta/login_triplet
 	entity_type = /datum/entity/login_triplet
 	table_name = "login_triplets"
-	field_types = list(
+	field_typepaths = list(
 		"ckey" = DB_FIELDTYPE_STRING_MEDIUM,
 		"ip1" = DB_FIELDTYPE_INT,
 		"ip2" = DB_FIELDTYPE_INT,
diff --git a/code/datums/entities/logs/player_times_log.dm b/code/datums/entities/logs/player_times_log.dm
index 1702feb51911..8f4d07965e1d 100644
--- a/code/datums/entities/logs/player_times_log.dm
+++ b/code/datums/entities/logs/player_times_log.dm
@@ -20,7 +20,7 @@ BSQL_PROTECT_DATUM(/datum/entity/player_time_log)
 /datum/entity_meta/player_time_log
 	entity_type = /datum/entity/player_time_log
 	table_name = "log_player_playtime"
-	field_types = list(
+	field_typepaths = list(
 		"player_id" = DB_FIELDTYPE_BIGINT,
 		"round_id" = DB_FIELDTYPE_BIGINT,

diff --git a/code/datums/entities/map_votes.dm b/code/datums/entities/map_votes.dm
index f509a4af3b7f..af6bb4f7e008 100644
--- a/code/datums/entities/map_votes.dm
+++ b/code/datums/entities/map_votes.dm
@@ -6,7 +6,7 @@
 /datum/entity_meta/map_vote
 	entity_type = /datum/entity/map_vote
 	table_name = "map_vote"
-	field_types = list(
+	field_typepaths = list(
 		"map_name"=DB_FIELDTYPE_STRING_LARGE,
 		"total_votes"=DB_FIELDTYPE_BIGINT,
 	)
diff --git a/code/datums/entities/mc_controller.dm b/code/datums/entities/mc_controller.dm
index fe8359327942..9e8eba3f2098 100644
--- a/code/datums/entities/mc_controller.dm
+++ b/code/datums/entities/mc_controller.dm
@@ -6,7 +6,7 @@
 	entity_type = /datum/entity/mc_controller
 	table_name = "mc_controller"
 	key_field = "controller_type"
-	field_types = list(
+	field_typepaths = list(
 		"controller_type"=DB_FIELDTYPE_STRING_LARGE,
 		"wait_time"=DB_FIELDTYPE_INT,
 	)
diff --git a/code/datums/entities/mc_record.dm b/code/datums/entities/mc_record.dm
index f81c141bd1e3..0b17d00c13db 100644
--- a/code/datums/entities/mc_record.dm
+++ b/code/datums/entities/mc_record.dm
@@ -7,7 +7,7 @@
 /datum/entity_meta/mc_record
 	entity_type = /datum/entity/mc_record
 	table_name = "mc_record"
-	field_types = list(
+	field_typepaths = list(
 		"round_time"=DB_FIELDTYPE_BIGINT,
 		"round_id"=DB_FIELDTYPE_BIGINT,
 		"controller_id"=DB_FIELDTYPE_BIGINT,
diff --git a/code/datums/entities/mc_round.dm b/code/datums/entities/mc_round.dm
index cc1045e6778d..a6938618d991 100644
--- a/code/datums/entities/mc_round.dm
+++ b/code/datums/entities/mc_round.dm
@@ -4,6 +4,6 @@
 /datum/entity_meta/mc_round
 	entity_type = /datum/entity/mc_round
 	table_name = "mc_round"
-	field_types = list(
+	field_typepaths = list(
 		"map_name"=DB_FIELDTYPE_STRING_LARGE,
 	)
diff --git a/code/datums/entities/mc_timing_info.dm b/code/datums/entities/mc_timing_info.dm
index e186bc03b286..b3c2b8650546 100644
--- a/code/datums/entities/mc_timing_info.dm
+++ b/code/datums/entities/mc_timing_info.dm
@@ -9,7 +9,7 @@
 /datum/entity_meta/mc_timing_info
 	entity_type = /datum/entity/mc_timing_info
 	table_name = "mc_timing_info"
-	field_types = list(
+	field_typepaths = list(
 		"round_id"=DB_FIELDTYPE_BIGINT,
 		"round_time"=DB_FIELDTYPE_INT,
 		"client_count"=DB_FIELDTYPE_INT,
diff --git a/code/datums/entities/player.dm b/code/datums/entities/player.dm
index 39f235283f9c..b314740719b7 100644
--- a/code/datums/entities/player.dm
+++ b/code/datums/entities/player.dm
@@ -61,7 +61,7 @@ BSQL_PROTECT_DATUM(/datum/entity/player)
 	entity_type = /datum/entity/player
 	table_name = "players"
 	key_field = "ckey"
-	field_types = list(
+	field_typepaths = list(
 		"ckey" = DB_FIELDTYPE_STRING_MEDIUM,
 		"last_known_ip" = DB_FIELDTYPE_STRING_SMALL,
 		"last_known_cid" = DB_FIELDTYPE_STRING_SMALL,
@@ -741,14 +741,10 @@ BSQL_PROTECT_DATUM(/datum/entity/player)
 	child_entity = /datum/entity/player
 	child_foreign_key = "time_ban_admin_id"

-	parent_name = "banning_admin"
-
-
 /datum/entity_link/player_to_permabanning_admin
 	parent_entity = /datum/entity/player
 	child_entity = /datum/entity/player
 	child_foreign_key = "permaban_admin_id"
-	parent_name = "permabanning_admin"

 /datum/view_record/players
 	var/id
diff --git a/code/datums/entities/player_job_ban.dm b/code/datums/entities/player_job_ban.dm
index 7d9b89a8f7b6..8db2f6a93079 100644
--- a/code/datums/entities/player_job_ban.dm
+++ b/code/datums/entities/player_job_ban.dm
@@ -15,7 +15,7 @@ BSQL_PROTECT_DATUM(/datum/entity/player_job_ban)
 /datum/entity_meta/player_job_ban
 	entity_type = /datum/entity/player_job_ban
 	table_name = "player_job_bans"
-	field_types = list(
+	field_typepaths = list(
 		"player_id"=DB_FIELDTYPE_BIGINT,
 		"admin_id"=DB_FIELDTYPE_BIGINT,
 		"text"=DB_FIELDTYPE_STRING_MAX,
@@ -39,12 +39,7 @@ BSQL_PROTECT_DATUM(/datum/entity/player_job_ban)
 	child_entity = /datum/entity/player_job_ban
 	child_foreign_key = "player_id"

-	parent_name = "player"
-	child_name = "jobbans"
-
 /datum/entity_link/admin_to_player_job_bans
 	parent_entity = /datum/entity/player
 	child_entity = /datum/entity/player_job_ban
 	child_foreign_key = "admin_id"
-
-	parent_name = "admin"
diff --git a/code/datums/entities/player_note.dm b/code/datums/entities/player_note.dm
index 33b6aa1f4938..b4630328aaf8 100644
--- a/code/datums/entities/player_note.dm
+++ b/code/datums/entities/player_note.dm
@@ -21,7 +21,7 @@ BSQL_PROTECT_DATUM(/datum/entity/player_note)
 /datum/entity_meta/player_note
 	entity_type = /datum/entity/player_note
 	table_name = "player_notes"
-	field_types = list(
+	field_typepaths = list(
 		"player_id" = DB_FIELDTYPE_BIGINT,
 		"admin_id" = DB_FIELDTYPE_BIGINT,
 		"text" = DB_FIELDTYPE_STRING_MAX,
@@ -51,16 +51,11 @@ BSQL_PROTECT_DATUM(/datum/entity/player_note)
 	child_entity = /datum/entity/player_note
 	child_foreign_key = "player_id"

-	parent_name = "player"
-	child_name = "notes"
-
 /datum/entity_link/admin_to_player_notes
 	parent_entity = /datum/entity/player
 	child_entity = /datum/entity/player_note
 	child_foreign_key = "admin_id"

-	parent_name = "admin"
-
 /datum/view_record/note_view
 	var/id
 	var/player_ckey
diff --git a/code/datums/entities/player_stat.dm b/code/datums/entities/player_stat.dm
index b0b5b37e1e29..7e125377f136 100644
--- a/code/datums/entities/player_stat.dm
+++ b/code/datums/entities/player_stat.dm
@@ -9,7 +9,7 @@ BSQL_PROTECT_DATUM(/datum/entity/player_stat)
 /datum/entity_meta/player_stat
 	entity_type = /datum/entity/player_stat
 	table_name = "player_stat"
-	field_types = list(
+	field_typepaths = list(
 		"player_id" = DB_FIELDTYPE_BIGINT,
 		"stat_id" = DB_FIELDTYPE_STRING_LARGE,
 		"stat_number" = DB_FIELDTYPE_BIGINT,
@@ -23,6 +23,3 @@ BSQL_PROTECT_DATUM(/datum/entity/player_stat)
 	parent_entity = /datum/entity/player
 	child_entity = /datum/entity/player_stat
 	child_foreign_key = "player_id"
-
-	parent_name = "player"
-	child_name = "player_stats"
diff --git a/code/datums/entities/player_sticky_ban.dm b/code/datums/entities/player_sticky_ban.dm
index 9cc139df8dd4..8ecdde97cba6 100644
--- a/code/datums/entities/player_sticky_ban.dm
+++ b/code/datums/entities/player_sticky_ban.dm
@@ -11,7 +11,7 @@ BSQL_PROTECT_DATUM(/datum/entity/stickyban)
 /datum/entity_meta/stickyban
 	entity_type = /datum/entity/stickyban
 	table_name = "stickyban"
-	field_types = list(
+	field_typepaths = list(
 		"identifier" = DB_FIELDTYPE_STRING_LARGE,
 		"reason" = DB_FIELDTYPE_STRING_MAX,
 		"message" = DB_FIELDTYPE_STRING_MAX,
@@ -46,7 +46,6 @@ BSQL_PROTECT_DATUM(/datum/entity/stickyban)
 	parent_entity = /datum/entity/player
 	child_entity = /datum/entity/stickyban
 	child_foreign_key = "adminid"
-	parent_name = "stickybanning_admin"

 /datum/entity/stickyban_matched_ckey
 	var/ckey
@@ -56,7 +55,7 @@ BSQL_PROTECT_DATUM(/datum/entity/stickyban)
 /datum/entity_meta/stickyban_matched_ckey
 	entity_type = /datum/entity/stickyban_matched_ckey
 	table_name = "stickyban_matched_ckey"
-	field_types = list(
+	field_typepaths = list(
 		"ckey" = DB_FIELDTYPE_STRING_LARGE,
 		"linked_stickyban" = DB_FIELDTYPE_BIGINT,
 		"whitelisted" = DB_FIELDTYPE_INT,
@@ -86,7 +85,7 @@ BSQL_PROTECT_DATUM(/datum/entity/stickyban)
 /datum/entity_meta/stickyban_matched_cid
 	entity_type = /datum/entity/stickyban_matched_cid
 	table_name = "stickyban_matched_cid"
-	field_types = list(
+	field_typepaths = list(
 		"cid" = DB_FIELDTYPE_STRING_LARGE,
 		"linked_stickyban" = DB_FIELDTYPE_BIGINT,
 	)
@@ -113,7 +112,7 @@ BSQL_PROTECT_DATUM(/datum/entity/stickyban)
 /datum/entity_meta/stickyban_matched_ip
 	entity_type = /datum/entity/stickyban_matched_ip
 	table_name = "stickyban_matched_ip"
-	field_types = list(
+	field_typepaths = list(
 		"ip" = DB_FIELDTYPE_STRING_LARGE,
 		"linked_stickyban" = DB_FIELDTYPE_BIGINT,
 	)
diff --git a/code/datums/entities/player_times.dm b/code/datums/entities/player_times.dm
index 43498709806b..de04d6c95b78 100644
--- a/code/datums/entities/player_times.dm
+++ b/code/datums/entities/player_times.dm
@@ -12,7 +12,7 @@ BSQL_PROTECT_DATUM(/datum/entity/player_time)
 /datum/entity_meta/player_time
 	entity_type = /datum/entity/player_time
 	table_name = "player_playtime"
-	field_types = list(
+	field_typepaths = list(
 		"player_id" = DB_FIELDTYPE_BIGINT,
 		"role_id" = DB_FIELDTYPE_STRING_LARGE,
 		"total_minutes" = DB_FIELDTYPE_BIGINT,
@@ -26,9 +26,6 @@ BSQL_PROTECT_DATUM(/datum/entity/player_time)
 	child_entity = /datum/entity/player_time
 	child_foreign_key = "player_id"

-	parent_name = "player"
-	child_name = "player_times"
-
 /datum/view_record/playtime
 	var/player_id
 	var/role_id
diff --git a/code/datums/entities/ticket.dm b/code/datums/entities/ticket.dm
index e34bf4241b2c..d96ea679ef11 100644
--- a/code/datums/entities/ticket.dm
+++ b/code/datums/entities/ticket.dm
@@ -13,7 +13,7 @@ BSQL_PROTECT_DATUM(/datum/entity/ticket)
 /datum/entity_meta/ticket
 	entity_type = /datum/entity/ticket
 	table_name = "ticket"
-	field_types = list(
+	field_typepaths = list(
 		"ticket"=DB_FIELDTYPE_BIGINT,
 		"action"=DB_FIELDTYPE_STRING_LARGE,
 		"message"=DB_FIELDTYPE_STRING_MAX,
diff --git a/code/datums/statistics/entities/death_stats.dm b/code/datums/statistics/entities/death_stats.dm
index de2d9cc71526..2998a8f5946e 100644
--- a/code/datums/statistics/entities/death_stats.dm
+++ b/code/datums/statistics/entities/death_stats.dm
@@ -34,7 +34,7 @@
 /datum/entity_meta/statistic_death
 	entity_type = /datum/entity/statistic/death
 	table_name = "log_player_statistic_death"
-	field_types = list(
+	field_typepaths = list(
 		"player_id" = DB_FIELDTYPE_BIGINT,
 		"round_id" = DB_FIELDTYPE_BIGINT,

diff --git a/code/datums/statistics/entities/map_stats.dm b/code/datums/statistics/entities/map_stats.dm
index ec3ebb79b448..5464fb266e28 100644
--- a/code/datums/statistics/entities/map_stats.dm
+++ b/code/datums/statistics/entities/map_stats.dm
@@ -12,7 +12,7 @@
 	entity_type = /datum/entity/statistic/map
 	table_name = "maps"
 	key_field = "map_name"
-	field_types = list(
+	field_typepaths = list(
 		"map_name" = DB_FIELDTYPE_STRING_LARGE,
 		"total_rounds" = DB_FIELDTYPE_BIGINT,

diff --git a/code/datums/statistics/entities/medal_stats.dm b/code/datums/statistics/entities/medal_stats.dm
index c5684e3cd9a4..95eb8f08bc33 100644
--- a/code/datums/statistics/entities/medal_stats.dm
+++ b/code/datums/statistics/entities/medal_stats.dm
@@ -13,7 +13,7 @@
 /datum/entity_meta/statistic_medal
 	entity_type = /datum/entity/statistic/medal
 	table_name = "log_player_statistic_medal"
-	field_types = list(
+	field_typepaths = list(
 		"player_id" = DB_FIELDTYPE_BIGINT,
 		"round_id" = DB_FIELDTYPE_BIGINT,

diff --git a/code/datums/statistics/entities/round_stats.dm b/code/datums/statistics/entities/round_stats.dm
index 79493ca87ef0..08a0e341d9a8 100644
--- a/code/datums/statistics/entities/round_stats.dm
+++ b/code/datums/statistics/entities/round_stats.dm
@@ -56,7 +56,7 @@
 	entity_type = /datum/entity/statistic/round
 	table_name = "rounds"
 	key_field = "round_id"
-	field_types = list(
+	field_typepaths = list(
 		"round_id" = DB_FIELDTYPE_BIGINT,
 		"round_name" = DB_FIELDTYPE_STRING_LARGE,

diff --git a/colonialmarines.dme b/colonialmarines.dme
index ae5bea645e81..371fd65d5431 100644
--- a/colonialmarines.dme
+++ b/colonialmarines.dme
@@ -45,7 +45,6 @@
 #include "code\__DEFINES\camera.dm"
 #include "code\__DEFINES\chat.dm"
 #include "code\__DEFINES\chemistry.dm"
-#include "code\__DEFINES\clans.dm"
 #include "code\__DEFINES\client_prefs.dm"
 #include "code\__DEFINES\colours.dm"
 #include "code\__DEFINES\combat.dm"
@@ -83,7 +82,6 @@
 #include "code\__DEFINES\mobs.dm"
 #include "code\__DEFINES\mode.dm"
 #include "code\__DEFINES\movement.dm"
-#include "code\__DEFINES\ndatabase.dm"
 #include "code\__DEFINES\nightmare.dm"
 #include "code\__DEFINES\objects.dm"
 #include "code\__DEFINES\origins.dm"
@@ -143,6 +141,8 @@
 #include "code\__DEFINES\dcs\signals\atom\mob\living\signals_human.dm"
 #include "code\__DEFINES\dcs\signals\atom\mob\living\signals_living.dm"
 #include "code\__DEFINES\dcs\signals\atom\mob\living\signals_xeno.dm"
+#include "code\__DEFINES\ndatabase\_ndatabase.dm"
+#include "code\__DEFINES\ndatabase\clans.dm"
 #include "code\__DEFINES\paygrade_defs\civilian.dm"
 #include "code\__DEFINES\paygrade_defs\cmb.dm"
 #include "code\__DEFINES\paygrade_defs\dutch.dm"
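
Every entity definition touched above follows the same pattern: the column map is renamed from `field_types` to `field_typepaths`, while the values stay the existing `DB_FIELDTYPE_*` constants and the surrounding `entity_type`, `table_name`, and optional `key_field` declarations are untouched. Below is a minimal sketch of an entity/meta pair under the renamed variable, modeled on the test entity in this patch; the `example_note` names are hypothetical and not part of the codebase.

```dm
// Hypothetical example entity, illustrative only.
/datum/entity/example_note
	var/title
	var/body

/datum/entity_meta/example_note
	entity_type = /datum/entity/example_note
	table_name = "example_note"
	key_field = "title" // optional; see the index note below
	field_typepaths = list(
		"title" = DB_FIELDTYPE_STRING_MEDIUM,
		"body" = DB_FIELDTYPE_STRING_MAX,
	)
```

Assuming this meta is collected into `SSentity_manager.tables` the same way the existing metas are, `prepare_tables()` will create and sync an `example_note` table from this map at startup.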
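`prepare_tables()` drives all schema sync from the meta itself: `sync_table()` receives `field_typepaths`, every `/datum/db/index` in `indexes` becomes a `sync_index()` call using its `name`, `fields`, and the `DB_INDEXHINT_UNIQUE` / `DB_INDEXHINT_CLUSTER` bits of its `hints`, and a `key_field`, if set, additionally gets a unique, clustered `keyfield_index_<field>`. The sketch below shows what an explicit index declaration for the hypothetical meta above could look like; the subtype name and the `New()` wiring are assumptions, and only the `name`/`fields`/`hints` vars are taken from the loop in this patch.

```dm
// Assumed wiring, for illustration; prepare_tables() only reads name, fields, and hints.
/datum/db/index/example_note_title
	name = "example_note_title"
	fields = list("title")
	hints = DB_INDEXHINT_UNIQUE

/datum/entity_meta/example_note/New()
	. = ..()
	indexes = list(new /datum/db/index/example_note_title)

// On sync, prepare_tables() would then issue, in effect:
//   adapter.sync_index("example_note_title", "example_note", list("title"), TRUE, FALSE) // unique, not clustered
//   adapter.sync_index("keyfield_index_title", "example_note", list("title"), TRUE, TRUE) // from key_field
```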