Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Export logs in real time using vector #812

Open
wants to merge 14 commits into
base: main
Choose a base branch
from
13 changes: 12 additions & 1 deletion .env
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,17 @@ NEXTAUTH_SECRET=secret
RETRACED_HOST_URL=http://retraced-api:3000/auditlog
RETRACED_EXTERNAL_URL=http://localhost:3000/auditlog

# Export Logs
EXPORT_DATADOG_API_KEY=
EXPORT_WEBHOOK_URL=http://vector:9000
EXPORT_WEBHOOK_USERNAME=admin
EXPORT_WEBHOOK_PASSWORD=admin
EXPORT_DDSOURCE=local-dev-machine
EXPORT_DDTAGS="Audit-Logs, Retraced, BoxyHQ"
EXPORT_DATADOG_HOSTNAME="127.0.0.1"
EXPORT_SERVICE="Retraced-audit-logs"
EXPORT_DATADOG_REGION=us
EXPORT_DATADOG_SITE=datadoghq.com
# OpenTelemetry
# https://opentelemetry.io/docs/concepts/sdk-configuration/otlp-exporter-configuration/
# If you have any issues with using the otel exporter and want to enable debug logs
Expand All @@ -40,4 +51,4 @@ OTEL_EXPORTER_OTLP_METRICS_HEADERS=
GEOIPUPDATE_LICENSE_KEY=
GEOIPUPDATE_ACCOUNT_ID=
GEOIPUPDATE_USE_MMDB=
GEOIPUPDATE_DB_DIR=/etc/mmdb
GEOIPUPDATE_DB_DIR=/etc/mmdb
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -106,5 +106,6 @@ test-results.xml
.env.development.local
.env.test.local
.env.production.local
vector/*
mmdb/**/**
GeoIP.conf
GeoIP.conf
18 changes: 18 additions & 0 deletions docker-compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -221,5 +221,23 @@ services:
depends_on:
- "retraced-api"
restart: "always"

vector:
image: timberio/vector:0.X-alpine
environment:
- EXPORT_WEBHOOK_USERNAME=${EXPORT_WEBHOOK_USERNAME}
- EXPORT_WEBHOOK_PASSWORD=${EXPORT_WEBHOOK_PASSWORD}
- EXPORT_DDSOURCE=${EXPORT_DDSOURCE}
- EXPORT_DDTAGS=${EXPORT_DDTAGS}
- EXPORT_DATADOG_HOSTNAME=${EXPORT_DATADOG_HOSTNAME}
- EXPORT_SERVICE=${EXPORT_SERVICE}
- EXPORT_DATADOG_API_KEY=${EXPORT_DATADOG_API_KEY}
- EXPORT_DATADOG_REGION=${EXPORT_DATADOG_REGION}
- EXPORT_DATADOG_SITE=${EXPORT_DATADOG_SITE}
volumes:
- ./vector.toml:/etc/vector/vector.toml
- ./vector/data:/var/lib/vector/
networks:
- retraced
volumes:
mmdb:
2 changes: 2 additions & 0 deletions src/_processor/workers/saveEventToElasticsearch.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import _ from "lodash";
import moment from "moment";
import { Clock } from "../common";
import { ClientWithRetry, getESWithRetry } from "../../persistence/elasticsearch";
import sendToWebhook from "../../ee/export/index";
import { instrumented, recordOtelHistogram } from "../../metrics/opentelemetry/instrumentation";

export class ElasticsearchSaver {
Expand All @@ -25,6 +26,7 @@ export class ElasticsearchSaver {
const alias = `retraced.${jobObj.projectId}.${jobObj.environmentId}.current`;
try {
await this.esIndex(event, alias);
sendToWebhook(event);
} catch (e) {
e.retry = true;
throw e;
Expand Down
3 changes: 3 additions & 0 deletions src/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,9 @@ export default {
env.RETRACED_NO_ANALYTICS ||
process.env.DO_NOT_TRACK ||
env.DO_NOT_TRACK,
EXPORT_WEBHOOK_URL: process.env.EXPORT_WEBHOOK_URL || env.EXPORT_WEBHOOK_URL,
EXPORT_WEBHOOK_USERNAME: process.env.EXPORT_WEBHOOK_USERNAME || env.EXPORT_WEBHOOK_USERNAME,
EXPORT_WEBHOOK_PASSWORD: process.env.EXPORT_WEBHOOK_PASSWORD || env.EXPORT_WEBHOOK_PASSWORD,
GEOIPUPDATE_USE_MMDB: process.env.GEOIPUPDATE_USE_MMDB || env.GEOIPUPDATE_USE_MMDB,
GEOIPUPDATE_DB_DIR: process.env.GEOIPUPDATE_DB_DIR || env.GEOIPUPDATE_DB_DIR || "/etc/mmdb",
};
7 changes: 7 additions & 0 deletions src/ee/ENTERPRISE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Enterprise Edition

Welcome to the Enterprise Edition ("/ee") of BoxyHQ.

The [/ee](https://github.com/retracedhq/retraced/tree/main/ee) subfolder is the place for all the **Enterprise** features for this repository.

> _❗ NOTE: This section is copyrighted (unlike the rest of our [repository](https://github.com/retracedhq/retraced)). You are not allowed to use this code without obtaining a proper [license](https://boxyhq.com/pricing) first. ❗_
1 change: 1 addition & 0 deletions src/ee/LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
The BoxyHQ Enterprise Edition (EE) license (the “EE License”)
3 changes: 3 additions & 0 deletions src/ee/export/Readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Export Audit-logs in real time using Vector

This feature uses Vector to export audit logs to Datadog and other destinations in real time, as the logs are indexed.
31 changes: 31 additions & 0 deletions src/ee/export/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import axios from "axios";
import config from "../../config";
import { logger } from "../../logger";

/**
 * Fire-and-forget export of an audit event to the configured Vector webhook.
 *
 * No-op when `EXPORT_WEBHOOK_URL` is not configured. Failures are logged and
 * swallowed deliberately: export must never break event ingestion.
 *
 * @param event - The audit event as indexed into Elasticsearch. Its `raw`
 *                field is omitted from the export payload. The caller's
 *                object is NOT mutated.
 */
export default function sendToWebhook(event: any): void {
  if (!config.EXPORT_WEBHOOK_URL) {
    return;
  }

  // Strip the bulky `raw` field without mutating the caller's event object
  // (the original `delete event.raw` mutated a shared object that the
  // indexing pipeline may still reference).
  const { raw: _raw, ...payload } = event;

  axios
    .post(
      config.EXPORT_WEBHOOK_URL,
      {
        message: JSON.stringify(payload),
      },
      {
        // Basic auth is optional — only sent when both parts are configured,
        // matching the `auth.username`/`auth.password` settings in vector.toml.
        auth:
          config.EXPORT_WEBHOOK_USERNAME && config.EXPORT_WEBHOOK_PASSWORD
            ? {
                username: config.EXPORT_WEBHOOK_USERNAME,
                password: config.EXPORT_WEBHOOK_PASSWORD,
              }
            : undefined,
      }
    )
    // NOTE: order matters. The original chained `.catch(...).then(...)`,
    // which made the success log fire unconditionally (a handled rejection
    // resolves the chain), so failures logged BOTH messages.
    .then(() => {
      logger.info(`[VECTOR EXPORT] Sent to webhook`);
    })
    .catch(() => {
      logger.info(`[VECTOR EXPORT] Failed to send to webhook`);
    });
}
39 changes: 39 additions & 0 deletions vector.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Vector pipeline: receive audit events over HTTP, enrich with Datadog
# metadata, and ship them to Datadog Logs.
# All ${EXPORT_*} placeholders are substituted by Vector at startup from the
# container environment (see the `vector` service in docker-compose.yaml).
data_dir = "/var/lib/vector/"

# The data source that Vector will collect logs from.
# This is the endpoint sendToWebhook (src/ee/export/index.ts) POSTs to;
# the basic-auth credentials below must match EXPORT_WEBHOOK_USERNAME /
# EXPORT_WEBHOOK_PASSWORD used by the API.
[sources.webhook]
type = "http_server" # The protocol to use
address = "0.0.0.0:9000" # The address to bind to
healthcheck = true # Enable built-in health checks
body_size_limit = "1mb" # Maximum size of request body
auth.password = "${EXPORT_WEBHOOK_PASSWORD}"
auth.username = "${EXPORT_WEBHOOK_USERNAME}"

# The transformation(s) to apply to each event
[transforms.add_datadog_info]
type = "remap"
inputs = [ "webhook" ]
source = """
# Set the values of the output object
.ddsource = "${EXPORT_DDSOURCE}"
.ddtags = "${EXPORT_DDTAGS}"
.hostname = "${EXPORT_DATADOG_HOSTNAME}"
.service = "${EXPORT_SERVICE}"
"""

# The destination(s) to send the events to
[sinks.datadog_sink]
type = "datadog_logs"
inputs = [ "add_datadog_info" ]
default_api_key = "${EXPORT_DATADOG_API_KEY}"
compression = "gzip"
region = "${EXPORT_DATADOG_REGION}"
site = "${EXPORT_DATADOG_SITE}"
# End-to-end acknowledgements: the source only ACKs once the sink accepts.
acknowledgements.enabled = true
healthcheck.enabled = true
# Throttle delivery to at most 10 in-flight requests / 10 requests per second.
request.concurrency = 10
request.rate_limit_duration_secs = 1
request.rate_limit_num = 10
# Buffer to disk so events survive restarts and Datadog outages.
buffer.type = "disk"
# ~1 GiB (1073741952 bytes; not exactly 1 GB)
buffer.max_size = 1073741952