Skip to content

Commit

Permalink
Revert "add logo"
Browse files Browse the repository at this point in the history
This reverts commit c05e6f0.
  • Loading branch information
daschr committed Mar 19, 2024
1 parent c05e6f0 commit 7d93ef1
Show file tree
Hide file tree
Showing 19 changed files with 1,626 additions and 34 deletions.
20 changes: 20 additions & 0 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
name: Rust

on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]

env:
CARGO_TERM_COLOR: always

jobs:
build:

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- name: Build
run: cargo build --verbose
14 changes: 14 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
32 changes: 32 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
[package]
name = "logsqlite"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
axum = { version= "0.6.20", features= ["macros"]}
futures = "0.3.28"
serde_json = "1.0.105"
serde = {version = "1.0.183", features = ["derive"]}
hyperlocal = "0.8"
hyper = "0.14.27"
tokio = {version = "1.29.1", features = ["fs", "io-util", "rt-multi-thread", "macros", "time", "sync"] }
chrono = "0.4.26"
bytes = "1"
futures-util = { version = "0.3", default-features = false, features = ["alloc"] }
prost = { version= "0.12" }
http = "0.2"
log = "0.4"
simple_logger = "4.2"
configparser = "3.0"
sqlx = { version = "0.7.3", features = ["runtime-tokio", "macros", "sqlite"]}
bincode = "1.3.3"

[build-dependencies]
prost-build = "0.12"

[profile.release]
lto=true
strip=true
24 changes: 24 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# syntax=docker/dockerfile:1.7-labs

# Stage 1: dependency prefetch. Builds all crate dependencies against a
# dummy main.rs so they are compiled and cached in this layer; only a
# Cargo.toml change invalidates the cache.
FROM rust:bookworm as prefetch
WORKDIR /src/logsqlite
RUN apt update && apt install -y build-essential libsqlite3-0 libsqlite3-dev protobuf-compiler && apt clean
COPY Cargo.toml /src/logsqlite/
RUN mkdir /src/logsqlite/src
RUN echo 'fn main() {}' >/src/logsqlite/src/main.rs
RUN cargo b --release

# Stage 2: real build, reusing the dependency cache from the prefetch stage.
FROM prefetch as build
WORKDIR /src/logsqlite
COPY Cargo.toml build.rs /src/logsqlite/
COPY src /src/logsqlite/src
RUN cargo b --release && cp target/*/logsqlite .

# Stage 3: minimal runtime image; its filesystem becomes the plugin rootfs
# (see build_plugin.sh).
FROM debian:bookworm as logsqlite_rootfs
RUN apt update && apt install -y libsqlite3-0 && apt clean
RUN mkdir -p /var/spool/logsqlite /etc/logsqlite /run/docker/plugins
COPY --from=build /src/logsqlite/logsqlite /bin/logsqlite
COPY conf.ini /etc/logsqlite/
VOLUME /var/spool/logsqlite
WORKDIR /bin/
ENTRYPOINT [ "/bin/logsqlite", "/etc/logsqlite/conf.ini" ]
21 changes: 21 additions & 0 deletions LICENSE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2023 David Schramm

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
24 changes: 24 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# logsqlite
Moby/Docker logging driver plugin which uses sqlite3 databases.

Allows **faster** querying of logs (e.g. using `docker logs --since/--until`) than the default JSON File logging driver

# Building
* `cargo b --release`

# Installation (systemd)
1. `cp logsqlite /usr/local/bin/`
2. `mkdir /etc/logsqlite && cp conf.ini /etc/logsqlite/`
3. `cp logsqlite.service /etc/systemd/system/ && systemctl daemon-reload`
4. `mkdir /var/spool/logsqlite/`
5. `systemctl enable logsqlite && systemctl start logsqlite`

# Configuration
See `conf.ini`

# Using the driver
- as the default logging driver:
- add `"log-driver": "logsqlite"` in the `daemon.json`
- per container
- `docker run --log-driver logsqlite`
- or in the docker-compose: https://docs.docker.com/compose/compose-file/compose-file-v3/#logging
6 changes: 6 additions & 0 deletions build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
extern crate prost_build;

fn main() {
println!("cargo:rerun-if-changed=src/entry.proto");
prost_build::compile_protos(&["src/entry.proto"], &["src/"]).unwrap();
}
13 changes: 13 additions & 0 deletions build_plugin.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/bin/bash
# Builds and registers the logsqlite Docker logging-driver plugin:
#   1. build the rootfs image from the Dockerfile,
#   2. export a throwaway container's filesystem into plugin/rootfs/,
#   3. register it with `docker plugin create`.
# Requires docker and sudo (the exported tar preserves root ownership).
set -e
docker build -t logsqlite_rootfs .

# Create (but never start) a container just so its filesystem can be exported.
id=$(docker create logsqlite_rootfs true)

mkdir -p plugin/rootfs

docker export "$id" | sudo tar -x -C plugin/rootfs

# -v also removes anonymous volumes; -f in case the create left odd state.
docker rm -vf "$id"

# plugin/ must contain config.json next to rootfs/.
docker plugin create logsqlite ./plugin/
16 changes: 16 additions & 0 deletions conf.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
[general]
# the directory where docker plugins sockets are placed in
# (usually you can leave this to the default path)
# don't forget to also adapt your systemd-service if you change this one
plugins_dir=/run/docker/plugins/

# the directory where logsqlite puts the databases into
databases_dir=/var/spool/logsqlite/

# this database is used for keeping the state of to-be-logged containers
# across restarts of logsqlite
state_database=/etc/logsqlite/state.db

[cleanup]
# in which interval the log cleaner runs, in seconds
interval=600
34 changes: 0 additions & 34 deletions images/LogSQlite Main Logo.svg

This file was deleted.

17 changes: 17 additions & 0 deletions logsqlite.service
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
[Unit]
Description=Moby/Docker logdriver for logging into sqlite databases

[Service]
Type=simple
User=root
Group=root
Restart=always
#RemainAfterExit=no
#Environment="DEBUG=true"
WorkingDirectory=/var/spool/logsqlite/
ExecStartPre=-/usr/bin/rm /var/run/docker/plugins/logsqlite.sock
ExecStart=/usr/local/bin/logsqlite /etc/logsqlite/conf.ini
#StandardOutput=null

[Install]
WantedBy=multi-user.target
27 changes: 27 additions & 0 deletions plugin/config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
{
"description": "Sqlite Logging Driver",
"documentation": "https://github.com/daschr/logsqlite",
"entrypoint": [
"/bin/logsqlite",
"/etc/logsqlite/conf.ini"
],
"network": {
"type": "host"
},
"interface": {
"types": [
"docker.logdriver/1.0"
],
"socket": "logsqlite.sock"
},
"env": [
{
"name": "DEBUG",
"description": "Log debug messages",
"value": "false",
"settable": [
"value"
]
}
]
}
159 changes: 159 additions & 0 deletions src/cleaner.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,159 @@
use log::{debug, error, info};
use sqlx::{Connection, SqliteConnection};
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::Duration;
use tokio::{sync::RwLock, time};

use crate::config::LogConfig;

/// Background log cleaner shared across the service.
///
/// Tracks which containers are being logged and enforces their retention
/// limits (max age and/or max line count) against the per-container sqlite
/// databases. Cheap to clone: all mutable state lives behind `Arc`s.
#[derive(Clone)]
pub struct LogCleaner {
    // Maps a container's log fifo path to its container id, so a container
    // can be unregistered by fifo path (see `remove`).
    fifos: Arc<RwLock<HashMap<PathBuf, String>>>,
    // Retention configuration per container id; `run` iterates over this.
    containers: Arc<RwLock<HashMap<String, LogConfig>>>,
    // Directory holding one sqlite database per container, named by container id
    // (see the `sqlite://{dbs_path}/{container}` URL built in `run`).
    dbs_path: String,
}

impl LogCleaner {
pub fn new(dbs_path: String) -> Self {
LogCleaner {
fifos: Arc::new(RwLock::new(HashMap::new())),
containers: Arc::new(RwLock::new(HashMap::new())),
dbs_path,
}
}

/// Registers `container_id` for periodic cleanup, keyed both by its log
/// fifo path (for later removal via `remove`) and by container id (for the
/// cleanup loop in `run`).
pub async fn add(&self, container_id: &str, fifo: PathBuf, log_conf: LogConfig) {
    let id = container_id.to_string();

    // Each write guard is dropped before the next lock is taken.
    self.fifos.write().await.insert(fifo, id.clone());
    self.containers.write().await.insert(id, log_conf);
}

/// Unregisters the container associated with `fifo`.
///
/// Returns the container's `LogConfig` if it was registered, `None` if the
/// fifo was unknown (or the container had already been removed).
pub async fn remove(&self, fifo: &Path) -> Option<LogConfig> {
    // `?` replaces the manual `match … None => return None` — it
    // short-circuits with None when the fifo is not registered.
    let container_id = self.fifos.write().await.remove(fifo)?;

    self.containers.write().await.remove(&container_id)
}

/// Returns the ROWID of the first row of the last `tail` rows of `logs`
/// (i.e. rows with a smaller ROWID are eligible for deletion), or 0 when
/// the table is empty or holds at most `tail` rows.
///
/// NOTE(review): relies on SQLite returning rows in ROWID order without an
/// explicit ORDER BY — true for a plain rowid table scan, but an
/// `ORDER BY ROWID` would make the intent explicit; confirm.
async fn get_first_tail_rowid(
    con: &mut SqliteConnection,
    tail: u64,
) -> Result<u64, sqlx::Error> {
    // count(*) always yields exactly one row, so RowNotFound cannot occur
    // here; any error is a real database failure and is propagated.
    let count = sqlx::query_as::<sqlx::Sqlite, (i64,)>("SELECT count(*) FROM logs")
        .fetch_one(&mut *con)
        .await?
        .0 as u64;

    // Bug fix: an empty table has no first row, and the OFFSET query below
    // would return Err(RowNotFound). Report "nothing to delete" instead.
    if count == 0 {
        return Ok(0);
    }

    // Keep the last `tail` rows: skip the preceding `count - tail` rows
    // (0 when the table holds at most `tail` rows).
    let offset = count.saturating_sub(tail) as i64;

    let rowid = sqlx::query_as::<sqlx::Sqlite, (i64,)>("SELECT ROWID FROM logs LIMIT 1 OFFSET ?1")
        .bind(offset)
        .fetch_one(&mut *con)
        .await?
        .0 as u64;

    Ok(rowid)
}

/// Applies the retention limits from `log_conf` to one container database:
/// deletes rows older than `cleanup_age` and/or rows beyond the newest
/// `cleanup_max_lines` rows. A no-op when neither limit is set (callers
/// only register configs with at least one limit).
async fn cleanup_db(
    &self,
    log_conf: &LogConfig,
    con: &mut SqliteConnection,
) -> Result<(), sqlx::Error> {
    // Age cutoff in nanoseconds since the Unix epoch, if an age limit is
    // set. Hoisted here so the two age-based arms below don't duplicate it.
    // `ts` is compared in nanoseconds, matching the original queries.
    let max_time_ns: Option<i64> = log_conf.cleanup_age.map(|age| {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .expect("system clock is before the Unix epoch");
        // Panics if `age` exceeds the time since the epoch — same behavior
        // as the original `max_time -= cleanup_age`.
        (now - age).as_nanos() as i64
    });

    // Smallest ROWID to keep, if a line-count limit is set.
    let min_rowid: Option<i64> = match log_conf.cleanup_max_lines {
        Some(max_lines) => {
            Some(Self::get_first_tail_rowid(&mut *con, max_lines as u64).await? as i64)
        }
        None => None,
    };

    match (max_time_ns, min_rowid) {
        (Some(ts), Some(rowid)) => {
            debug!(
                "cleanup: DELETE FROM logs WHERE ts < {} OR ROWID < {}",
                ts, rowid
            );
            sqlx::query("DELETE FROM logs WHERE ts < ?1 OR ROWID < ?2")
                .bind(ts)
                .bind(rowid)
                .execute(&mut *con)
                .await?;
        }
        (Some(ts), None) => {
            debug!("cleanup: DELETE FROM logs WHERE ts < {}", ts);
            sqlx::query("DELETE FROM logs WHERE ts < ?1")
                .bind(ts)
                .execute(&mut *con)
                .await?;
        }
        (None, Some(rowid)) => {
            debug!("cleanup: DELETE FROM logs WHERE ROWID < {}", rowid);
            sqlx::query("DELETE FROM logs WHERE ROWID < ?1")
                .bind(rowid)
                .execute(&mut *con)
                .await?;
        }
        (None, None) => (), // never happens: registration requires a limit
    }

    Ok(())
}

/// Runs the cleanup loop forever: every `cleanup_interval`, opens each
/// registered container's database and applies its retention limits.
/// Per-container failures are logged and skipped; this function never
/// returns (the `Result` type exists for signature compatibility).
pub async fn run(&self, cleanup_interval: Duration) -> Result<(), sqlx::Error> {
    loop {
        info!("starting cleanup");
        // NOTE(review): the read lock is held across the awaits below, so
        // `add`/`remove` block for the duration of a cleanup pass — confirm
        // this is acceptable, or snapshot the map before iterating.
        for (container, log_conf) in self.containers.read().await.iter() {
            debug!(
                "[cleanup] cleaning up container: {}, max_age: {:?} max_lines: {:?}",
                container, log_conf.cleanup_age, log_conf.cleanup_max_lines
            );
            // One sqlite database per container, named by container id.
            let db_url = format!("sqlite://{}/{}", self.dbs_path, container);
            match SqliteConnection::connect(&db_url).await {
                Err(e) => {
                    // Skip just this container; continue with the rest.
                    error!("[cleanup] failed to open connection: {:?}", e);
                }
                Ok(mut con) => {
                    // Fix: pass `log_conf` directly — the original `&log_conf`
                    // created a needless `&&LogConfig` (clippy: needless_borrow).
                    if let Err(e) = self.cleanup_db(log_conf, &mut con).await {
                        error!("[cleanup] could not cleanup {}: {:?}", container, e);
                    }
                }
            }
        }
        info!("cleanup done");

        time::sleep(cleanup_interval).await;
    }
}
}
Loading

0 comments on commit 7d93ef1

Please sign in to comment.