This repository was archived by the owner on May 27, 2025. It is now read-only.

Commit b24feaa

feat(PROTO-945): implement Redis-based rate limiting and update dependencies
- Added RedisRateLimit for distributed rate limiting, allowing connection tracking across multiple instances.
- Updated Cargo.toml to include new dependencies for Redis and related packages.
- Enhanced README with Redis integration instructions and usage examples.
- Modified main.rs to support Redis configuration via command-line arguments.
- Updated Cargo.lock with new package versions and dependencies.
1 parent 1460721 commit b24feaa

5 files changed (+498, -10 lines)


Cargo.lock

Lines changed: 110 additions & 4 deletions
Generated file; diff not rendered by default.

Cargo.toml

Lines changed: 3 additions & 0 deletions
@@ -23,6 +23,9 @@ metrics-derive = "0.1"
 thiserror = "2.0.11"
 serde_json = "1.0.138"
 hostname = "0.4.0"
+redis = "0.24.0"
+redis-test = "0.9.0"
+
 
 [features]
 integration = []

README.md

Lines changed: 28 additions & 1 deletion
@@ -22,7 +22,7 @@ You can build and test the project using [Cargo](https://doc.rust-lang.org/cargo
 # Build the project
 cargo build
 
-# Run all the tests
+# Run all the tests (requires a local Redis installation)
 cargo test --all-features
 ```
 

@@ -35,3 +35,30 @@ You can see a full list of parameters by running:
 
 `docker run ghcr.io/base/flashblocks-websocket-proxy:master --help`
 
+### Redis Integration
+
+The proxy supports distributed rate limiting with Redis. This is useful when running multiple instances of the proxy behind a load balancer, as it allows rate limits to be enforced across all instances.
+
+To enable Redis integration, use the following parameters:
+
+- `--redis-url` - Redis connection URL (e.g., `redis://localhost:6379`)
+- `--redis-key-prefix` - Prefix for Redis keys (default: `flashblocks`)
+
+Example:
+
+```bash
+docker run ghcr.io/base/flashblocks-websocket-proxy:master \
+  --upstream-ws wss://your-sequencer-endpoint \
+  --redis-url redis://redis:6379 \
+  --global-connections-limit 1000 \
+  --per-ip-connections-limit 10
+```
+
+When Redis is enabled, the following features are available:
+
+- Distributed rate limiting across multiple proxy instances
+- Connection tracking persists even if the proxy instance restarts
+- More accurate global connection limiting in multi-instance deployments
+
+If the Redis connection fails, the proxy will automatically fall back to in-memory rate limiting.
+
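The distributed limiting described above comes down to keeping the connection counters in Redis rather than in each process's memory, so every proxy instance checks against the same numbers. Below is a minimal sketch of that idea using the `redis` crate added in this commit; the key layout (`<prefix>:conn:<ip>`) and the function name are illustrative assumptions, not the proxy's actual implementation.

```rust
use redis::Commands;

/// Illustrative only: admit a connection if a shared counter stays under `limit`.
/// The key layout (`<prefix>:conn:<ip>`) is an assumption made for this sketch.
fn try_admit(
    conn: &mut redis::Connection,
    prefix: &str,
    ip: &str,
    limit: u64,
) -> redis::RedisResult<bool> {
    let key = format!("{prefix}:conn:{ip}");
    // INCR is atomic, so concurrent proxy instances all see a consistent count.
    let active: u64 = conn.incr(&key, 1u64)?;
    if active > limit {
        // Over the limit: undo the increment and reject this connection.
        let _: u64 = conn.decr(&key, 1u64)?;
        return Ok(false);
    }
    Ok(true)
}
```

A real implementation would also want an expiry or periodic reconciliation on these keys so counters do not leak if an instance dies before decrementing; how this commit handles that is not visible in the diffs rendered here.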

src/main.rs

Lines changed: 51 additions & 5 deletions
@@ -8,14 +8,15 @@ mod server;
 mod subscriber;
 
 use crate::metrics::Metrics;
-use crate::rate_limit::InMemoryRateLimit;
+use crate::rate_limit::{InMemoryRateLimit, RateLimit};
 use crate::registry::Registry;
 use crate::server::Server;
 use crate::subscriber::WebsocketSubscriber;
 use axum::http::Uri;
 use clap::Parser;
 use dotenv::dotenv;
 use metrics_exporter_prometheus::PrometheusBuilder;
+use rate_limit::RedisRateLimit;
 use std::net::SocketAddr;
 use std::sync::Arc;
 use tokio::signal::unix::{signal, SignalKind};

@@ -96,6 +97,21 @@ struct Args {
     /// Maximum backoff allowed for upstream connections
     #[arg(long, env, default_value = "20")]
     subscriber_max_interval: u64,
+
+    #[arg(
+        long,
+        env,
+        help = "Redis URL for distributed rate limiting (e.g., redis://localhost:6379). If not provided, in-memory rate limiting will be used."
+    )]
+    redis_url: Option<String>,
+
+    #[arg(
+        long,
+        env,
+        default_value = "flashblocks",
+        help = "Prefix for Redis keys"
+    )]
+    redis_key_prefix: String,
 }
 
 #[tokio::main]

@@ -176,10 +192,40 @@ async fn main() {
 
     let registry = Registry::new(sender, metrics.clone());
 
-    let rate_limiter = Arc::new(InMemoryRateLimit::new(
-        args.global_connections_limit,
-        args.per_ip_connections_limit,
-    ));
+    let rate_limiter = match &args.redis_url {
+        Some(redis_url) => {
+            info!(message = "Using Redis rate limiter", redis_url = redis_url);
+            match RedisRateLimit::new(
+                redis_url,
+                args.global_connections_limit,
+                args.per_ip_connections_limit,
+                &args.redis_key_prefix,
+            ) {
+                Ok(limiter) => {
+                    info!(message = "Connected to Redis successfully");
+                    Arc::new(limiter) as Arc<dyn RateLimit>
+                }
+                Err(e) => {
+                    error!(
+                        message =
+                            "Failed to connect to Redis, falling back to in-memory rate limiting",
+                        error = e.to_string()
+                    );
+                    Arc::new(InMemoryRateLimit::new(
+                        args.global_connections_limit,
+                        args.per_ip_connections_limit,
+                    )) as Arc<dyn RateLimit>
+                }
+            }
+        }
+        None => {
+            info!(message = "Using in-memory rate limiter");
+            Arc::new(InMemoryRateLimit::new(
+                args.global_connections_limit,
+                args.per_ip_connections_limit,
+            )) as Arc<dyn RateLimit>
+        }
+    };
 
     let server = Server::new(
         args.listen_addr,
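`src/rate_limit.rs` is not among the files rendered on this page, so the shapes below are only a sketch inferred from the call sites above: the `RedisRateLimit::new` signature mirrors how `main.rs` calls it, while the `RateLimit` trait methods are hypothetical placeholders standing in for whatever the real trait defines.

```rust
use redis::Client;
use std::net::IpAddr;

// Hypothetical trait surface: the real trait lives in src/rate_limit.rs, which
// is not shown in this diff. Send + Sync are needed for the Arc<dyn RateLimit>
// used in main.rs above.
pub trait RateLimit: Send + Sync {
    /// Hypothetical method: admit or reject a new connection from `ip`.
    fn try_accept(&self, ip: IpAddr) -> bool;
    /// Hypothetical method: release a previously admitted connection.
    fn release(&self, ip: IpAddr);
}

// Field names and limit types are assumptions; only the constructor arguments
// are visible in main.rs.
pub struct RedisRateLimit {
    client: Client,
    global_limit: u64,
    per_ip_limit: u64,
    key_prefix: String,
}

impl RedisRateLimit {
    /// Signature inferred from the call in main.rs. Returning Err from here is
    /// what sends main.rs down the in-memory fallback branch.
    pub fn new(
        redis_url: &str,
        global_limit: u64,
        per_ip_limit: u64,
        key_prefix: &str,
    ) -> redis::RedisResult<Self> {
        let client = Client::open(redis_url)?;
        // Probe the server so a bad URL or unreachable Redis fails fast,
        // matching the "Connected to Redis successfully" log above.
        client.get_connection()?;
        Ok(Self {
            client,
            global_limit,
            per_ip_limit,
            key_prefix: key_prefix.to_string(),
        })
    }
    // The impl of RateLimit for RedisRateLimit (the counter updates themselves)
    // is omitted here; the INCR/DECR sketch in the README section above gives
    // the flavor of what it would do.
}
```

Because `new` returns a `Result`, a misconfigured or unreachable Redis degrades the proxy to `InMemoryRateLimit` rather than preventing startup, which is the fallback behaviour the README documents.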
