Merge branch 'master' into crash-chance
Showing 58 changed files with 621 additions and 107 deletions.
@@ -21,7 +21,7 @@ jobs:
         run: |
           ~/dmdoc
           touch dmdoc/.nojekyll
-          echo codedocs.tgstation13.org > dmdoc/CNAME
+          echo docs.cm-ss13.com > dmdoc/CNAME
       - name: Deploy
         uses: JamesIves/[email protected]
         with:
@@ -0,0 +1,132 @@
/// Sends collected statistics to an InfluxDB v2 backend periodically
SUBSYSTEM_DEF(influxdriver)
	name = "InfluxDB Driver"
	wait = 10 SECONDS
	init_order = SS_INIT_INFLUXDRIVER
	priority = SS_PRIORITY_INFLUXDRIVER
	runlevels = RUNLEVELS_DEFAULT|RUNLEVEL_LOBBY

	var/list/send_queue = list()

	/// Maximum number of metric lines to send in a single request.
	/// This is necessary because sending a lot of metrics can get expensive
	/// and drive the subsystem into overtime, but we can't split the work as it'd be even less efficient.
	var/max_batch = 150

	/// Last timestamp in microseconds
	var/timestamp_cache_realtime
	/// Last tick time the timestamp was taken at
	var/timestamp_cache_worldtime

/datum/controller/subsystem/influxdriver/Initialize()
	var/period = text2num(CONFIG_GET(number/influxdb_send_period))
	if(isnum(period))
		wait = max(period * (1 SECONDS), 2 SECONDS)
	return SS_INIT_SUCCESS

/datum/controller/subsystem/influxdriver/stat_entry(msg)
	msg += "period=[wait] queue=[length(send_queue)]"
	return ..()

/datum/controller/subsystem/influxdriver/proc/unix_timestamp_string() // pending change to rust-g
	return RUSTG_CALL(RUST_G, "unix_timestamp")()

/datum/controller/subsystem/influxdriver/proc/update_timestamp()
	PRIVATE_PROC(TRUE)
	// We make only one request to rustg per game tick, so we cache the result per world.time
	var/whole_timestamp = unix_timestamp_string() // Format "7129739474.4758981" - timestamp with up to 7-8 decimals
	var/list/tsparts = splittext(whole_timestamp, ".")
	var/fractional = copytext(pad_trailing(tsparts[2], "0", 6), 1, 7) // in microseconds
	timestamp_cache_worldtime = world.time
	timestamp_cache_realtime = "[tsparts[1]][fractional]"
/datum/controller/subsystem/influxdriver/fire(resumed)
	var/maxlen = min(length(send_queue)+1, max_batch)
	var/list/queue = send_queue.Copy(1, maxlen)
	send_queue.Cut(1, maxlen)
	flush_queue(queue)

/// Flushes a batch of measurements to the InfluxDB backend
/datum/controller/subsystem/influxdriver/proc/flush_queue(list/queue)
	PRIVATE_PROC(TRUE)

	var/host = CONFIG_GET(string/influxdb_host)
	var/token = CONFIG_GET(string/influxdb_token)
	var/bucket = CONFIG_GET(string/influxdb_bucket)
	var/org = CONFIG_GET(string/influxdb_org)

	if(!host || !token || !bucket || !org)
		can_fire = FALSE
		return

	if(!length(queue))
		return // Nothing to do

	var/url = "[host]/api/v2/write?org=[org]&bucket=[bucket]&precision=us" // microseconds
	var/list/headers = list()
	headers["Authorization"] = "Token [token]"
	headers["Content-Type"] = "text/plain; charset=utf-8"
	headers["Accept"] = "application/json"

	var/datum/http_request/request = new
	var/payload = ""
	for(var/line in queue)
		payload += "[line]\n"
	request.prepare(RUSTG_HTTP_METHOD_POST, url, payload, headers)
	request.begin_async()
	// TODO: possibly check back on the result of the request later
/// Enqueues the given measurement values for sending to the InfluxDB backend - round_id and timestamp are filled in automatically
/datum/controller/subsystem/influxdriver/proc/enqueue_stats(measurement, list/tags, list/fields)
	. = FALSE
	var/valid = FALSE
	var/serialized = "[measurement],round_id=[GLOB.round_id]"
	if(tags)
		for(var/tag in tags)
			var/serialized_tag = serialize_field(tag, tags[tag])
			if(serialized_tag)
				serialized += ",[serialized_tag]"
	serialized += " "
	var/comma = ""
	for(var/field in fields)
		var/serialized_field = serialize_field(field, fields[field])
		if(serialized_field)
			valid = TRUE
			serialized += "[comma][serialized_field]"
			comma = ","
	if(!valid)
		CRASH("Attempted to serialize an invalid measurement to the InfluxDB backend (likely has no fields)")
	if(timestamp_cache_worldtime != world.time)
		update_timestamp()
	serialized += " [timestamp_cache_realtime]"
	send_queue += serialized
	return TRUE

/// Enqueues a single value in a cruder, simpler format, sent directly as: measurement,round_id=... count=value
/datum/controller/subsystem/influxdriver/proc/enqueue_stats_crude(measurement, value, field_name = "count")
	. = FALSE
	var/serialized_field = serialize_field(field_name, value)
	if(!length(serialized_field))
		return
	if(timestamp_cache_worldtime != world.time)
		update_timestamp()
	var/serialized = "[measurement],round_id=[GLOB.round_id] [serialized_field] [timestamp_cache_realtime]"
	send_queue += serialized
	return TRUE

/// Puts a single field or tag value into InfluxDB line protocol format
/datum/controller/subsystem/influxdriver/proc/serialize_field(field, value)
	var/static/regex/whitelistedCharacters = regex(@{"([^a-zA-Z0-9_]+)"}, "g")
	var/sanitized_field = whitelistedCharacters.Replace("[field]", "")
	if(!length(sanitized_field) || copytext(sanitized_field, 1, 2) == "_")
		CRASH("Invalid tag/field for InfluxDB serialization: '[sanitized_field]' (original: '[field]')")
	var/sanitized_value
	if(isnum(value))
		sanitized_value = value
	else if(istext(value))
		sanitized_value = whitelistedCharacters.Replace("[value]", "")
		if(!length(sanitized_value) || copytext(sanitized_value, 1, 2) == "_")
			CRASH("Invalid value for InfluxDB serialization: '[sanitized_value]' (original: '[value]')")
	else
		CRASH("Invalid value type passed for InfluxDB serialization: '[value]'")
	return "[sanitized_field]=[sanitized_value]"
@@ -0,0 +1,47 @@
SUBSYSTEM_DEF(influxmcstats)
	name = "InfluxDB MC Stats"
	wait = 60 SECONDS
	priority = SS_PRIORITY_INFLUXMCSTATS
	init_order = SS_INIT_INFLUXMCSTATS
	runlevels = RUNLEVEL_LOBBY|RUNLEVELS_DEFAULT
	flags = SS_KEEP_TIMING
	var/checkpoint = 0
	var/list/subsystem_name_cache = list()

/datum/controller/subsystem/influxmcstats/Initialize()
	var/period = text2num(CONFIG_GET(number/influxdb_mcstats_period))
	if(isnum(period))
		wait = max(period * (1 SECONDS), 10 SECONDS)
	return SS_INIT_SUCCESS

/datum/controller/subsystem/influxmcstats/stat_entry(msg)
	msg += "period=[wait] checkpoint=[checkpoint]"
	return ..()

/datum/controller/subsystem/influxmcstats/fire(resumed)
	if(!SSinfluxdriver.can_fire)
		can_fire = FALSE
		return

	var/list/data = list()
	data["time_dilation_current"] = SStime_track.time_dilation_current
	data["time_dilation_avg"] = SStime_track.time_dilation_avg
	data["time_dilation_avg_slow"] = SStime_track.time_dilation_avg_slow
	data["time_dilation_avg_fast"] = SStime_track.time_dilation_avg_fast
	SSinfluxdriver.enqueue_stats("tidi", null, data)

	SSinfluxdriver.enqueue_stats("cpu", null, list("cpu" = world.cpu, "map_cpu" = world.map_cpu))

	var/static/regex/get_last_path_element = regex(@{"/([^/]+)$"})
	checkpoint++
	for(var/datum/controller/subsystem/SS in Master.subsystems)
		if(!SS.can_fire)
			continue
		if(!subsystem_name_cache[SS.type])
			get_last_path_element.Find("[SS.type]")
			subsystem_name_cache[SS.type] = "SS[get_last_path_element.group[1]]"
		var/SSname = subsystem_name_cache[SS.type]
		if(!SSname)
			stack_trace("Influx MC Stats couldn't name a subsystem, type=[SS.type]")
			continue
		SSinfluxdriver.enqueue_stats("sstimings", list("ss" = SSname), list("cost" = SS.cost, "tick_overrun" = SS.tick_overrun, "tick_usage" = SS.tick_usage, "wait" = SS.wait))