diff --git a/README.md b/README.md index 3fb16b94..547c9af8 100644 --- a/README.md +++ b/README.md @@ -245,6 +245,7 @@ descriptors: - name: (optional) unit: requests_per_unit: + unit_multiplier: shadow_mode: (optional) detailed_metric: (optional) descriptors: (optional block) @@ -262,11 +263,15 @@ effectively whitelisted. Otherwise, nested descriptors allow more complex matchi rate_limit: unit: requests_per_unit: + unit_multiplier: ``` The rate limit block specifies the actual rate limit that will be used when there is a match. Currently the service supports per second, minute, hour, and day limits. More types of limits may be added in the future based on user demand. +The optional `unit_multiplier` scales `unit` to create custom rate limit durations. +This makes durations such as 30 seconds or 5 minutes possible. +A `unit_multiplier` of 0 is invalid; omitting the field defaults the multiplier to 1, so the duration equals the unit (e.g. 1 minute). ### Replaces diff --git a/src/config/config_impl.go b/src/config/config_impl.go index 0c4152a6..865f9e2f 100644 --- a/src/config/config_impl.go +++ b/src/config/config_impl.go @@ -20,7 +20,8 @@ type yamlReplaces struct { type YamlRateLimit struct { RequestsPerUnit uint32 `yaml:"requests_per_unit"` Unit string - Unlimited bool `yaml:"unlimited"` + UnitMultiplier *uint32 `yaml:"unit_multiplier"` + Unlimited bool `yaml:"unlimited"` Name string Replaces []yamlReplaces } @@ -68,23 +69,26 @@ var validKeys = map[string]bool{ "name": true, "replaces": true, "detailed_metric": true, + "unit_multiplier": true, } // Create a new rate limit config entry. // @param requestsPerUnit supplies the requests per unit of time for the entry. // @param unit supplies the unit of time for the entry. +// @param unitMultiplier supplies the multiplier for the unit of time for the entry. // @param rlStats supplies the stats structure associated with the RateLimit // @param unlimited supplies whether the rate limit is unlimited // @return the new config entry.
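+// Example: unit=MINUTE with unitMultiplier=5 describes a 5-minute window; the effective
+// period in seconds is UnitToDivider(unit) * unitMultiplier (60 * 5 = 300).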
func NewRateLimit(requestsPerUnit uint32, unit pb.RateLimitResponse_RateLimit_Unit, rlStats stats.RateLimitStats, - unlimited bool, shadowMode bool, name string, replaces []string, detailedMetric bool) *RateLimit { - + unlimited bool, shadowMode bool, name string, replaces []string, detailedMetric bool, unitMultiplier uint32, +) *RateLimit { return &RateLimit{ FullKey: rlStats.GetKey(), Stats: rlStats, Limit: &pb.RateLimitResponse_RateLimit{ RequestsPerUnit: requestsPerUnit, Unit: unit, + UnitMultiplier: unitMultiplier, }, Unlimited: unlimited, ShadowMode: shadowMode, @@ -99,8 +103,8 @@ func (this *rateLimitDescriptor) dump() string { ret := "" if this.limit != nil { ret += fmt.Sprintf( - "%s: unit=%s requests_per_unit=%d, shadow_mode: %t\n", this.limit.FullKey, - this.limit.Limit.Unit.String(), this.limit.Limit.RequestsPerUnit, this.limit.ShadowMode) + "%s: unit=%s, unit_multiplier=%d, requests_per_unit=%d, shadow_mode: %t\n", this.limit.FullKey, + this.limit.Limit.Unit.String(), this.limit.Limit.UnitMultiplier, this.limit.Limit.RequestsPerUnit, this.limit.ShadowMode) } for _, descriptor := range this.descriptors { ret += descriptor.dump() @@ -143,8 +147,7 @@ func (this *rateLimitDescriptor) loadDescriptors(config RateLimitConfigToLoad, p if descriptorConfig.RateLimit != nil { unlimited := descriptorConfig.RateLimit.Unlimited - value, present := - pb.RateLimitResponse_RateLimit_Unit_value[strings.ToUpper(descriptorConfig.RateLimit.Unit)] + value, present := pb.RateLimitResponse_RateLimit_Unit_value[strings.ToUpper(descriptorConfig.RateLimit.Unit)] validUnit := present && value != int32(pb.RateLimitResponse_RateLimit_UNKNOWN) if unlimited { @@ -159,6 +162,18 @@ func (this *rateLimitDescriptor) loadDescriptors(config RateLimitConfigToLoad, p fmt.Sprintf("invalid rate limit unit '%s'", descriptorConfig.RateLimit.Unit))) } + var unitMultiplier uint32 + if descriptorConfig.RateLimit.UnitMultiplier == nil { + unitMultiplier = 1 + } else { + unitMultiplier = *descriptorConfig.RateLimit.UnitMultiplier + if unitMultiplier == 0 { + panic(newRateLimitConfigError( + config.Name, + "invalid unit multiplier of 0")) + } + } + replaces := make([]string, len(descriptorConfig.RateLimit.Replaces)) for i, e := range descriptorConfig.RateLimit.Replaces { replaces[i] = e.Name @@ -168,10 +183,12 @@ func (this *rateLimitDescriptor) loadDescriptors(config RateLimitConfigToLoad, p descriptorConfig.RateLimit.RequestsPerUnit, pb.RateLimitResponse_RateLimit_Unit(value), statsManager.NewStats(newParentKey), unlimited, descriptorConfig.ShadowMode, descriptorConfig.RateLimit.Name, replaces, descriptorConfig.DetailedMetric, + unitMultiplier, ) + rateLimitDebugString = fmt.Sprintf( - " ratelimit={requests_per_unit=%d, unit=%s, unlimited=%t, shadow_mode=%t}", rateLimit.Limit.RequestsPerUnit, - rateLimit.Limit.Unit.String(), rateLimit.Unlimited, rateLimit.ShadowMode) + " ratelimit={requests_per_unit=%d, unit=%s, unit_multiplier=%d, unlimited=%t, shadow_mode=%t}", rateLimit.Limit.RequestsPerUnit, + rateLimit.Limit.Unit.String(), unitMultiplier, rateLimit.Unlimited, rateLimit.ShadowMode) for _, replaces := range descriptorConfig.RateLimit.Replaces { if replaces.Name == "" { @@ -277,8 +294,8 @@ func (this *rateLimitConfigImpl) Dump() string { } func (this *rateLimitConfigImpl) GetLimit( - ctx context.Context, domain string, descriptor *pb_struct.RateLimitDescriptor) *RateLimit { - + ctx context.Context, domain string, descriptor *pb_struct.RateLimitDescriptor, +) *RateLimit { logger.Debugf("starting get limit lookup") var 
rateLimit *RateLimit = nil value := this.domains[domain] @@ -300,6 +317,7 @@ func (this *rateLimitConfigImpl) GetLimit( "", []string{}, false, + 1, ) return rateLimit } @@ -352,7 +370,10 @@ func (this *rateLimitConfigImpl) GetLimit( descriptorsMap = nextDescriptor.descriptors } else { if rateLimit != nil && rateLimit.DetailedMetric { - rateLimit = NewRateLimit(rateLimit.Limit.RequestsPerUnit, rateLimit.Limit.Unit, this.statsManager.NewStats(rateLimit.FullKey), rateLimit.Unlimited, rateLimit.ShadowMode, rateLimit.Name, rateLimit.Replaces, rateLimit.DetailedMetric) + rateLimit = NewRateLimit(rateLimit.Limit.RequestsPerUnit, rateLimit.Limit.Unit, + this.statsManager.NewStats(rateLimit.FullKey), rateLimit.Unlimited, + rateLimit.ShadowMode, rateLimit.Name, rateLimit.Replaces, + rateLimit.DetailedMetric, rateLimit.Limit.UnitMultiplier) } break @@ -417,8 +438,8 @@ func ConfigFileContentToYaml(fileName, content string) *YamlRoot { // @param mergeDomainConfigs defines whether multiple configurations referencing the same domain will be merged or rejected throwing an error. // @return a new config. func NewRateLimitConfigImpl( - configs []RateLimitConfigToLoad, statsManager stats.Manager, mergeDomainConfigs bool) RateLimitConfig { - + configs []RateLimitConfigToLoad, statsManager stats.Manager, mergeDomainConfigs bool, +) RateLimitConfig { ret := &rateLimitConfigImpl{map[string]*rateLimitDomain{}, statsManager, mergeDomainConfigs} for _, config := range configs { ret.loadConfig(config) @@ -430,8 +451,8 @@ func NewRateLimitConfigImpl( type rateLimitConfigLoaderImpl struct{} func (this *rateLimitConfigLoaderImpl) Load( - configs []RateLimitConfigToLoad, statsManager stats.Manager, mergeDomainConfigs bool) RateLimitConfig { - + configs []RateLimitConfigToLoad, statsManager stats.Manager, mergeDomainConfigs bool, +) RateLimitConfig { return NewRateLimitConfigImpl(configs, statsManager, mergeDomainConfigs) } diff --git a/src/limiter/base_limiter.go b/src/limiter/base_limiter.go index b76366cf..69d66960 100644 --- a/src/limiter/base_limiter.go +++ b/src/limiter/base_limiter.go @@ -33,7 +33,8 @@ type LimitInfo struct { } func NewRateLimitInfo(limit *config.RateLimit, limitBeforeIncrease uint32, limitAfterIncrease uint32, - nearLimitThreshold uint32, overLimitThreshold uint32) *LimitInfo { + nearLimitThreshold uint32, overLimitThreshold uint32, +) *LimitInfo { return &LimitInfo{ limit: limit, limitBeforeIncrease: limitBeforeIncrease, limitAfterIncrease: limitAfterIncrease, nearLimitThreshold: nearLimitThreshold, overLimitThreshold: overLimitThreshold, @@ -43,7 +44,8 @@ func NewRateLimitInfo(limit *config.RateLimit, limitBeforeIncrease uint32, limit // Generates cache keys for given rate limit request. Each cache key is represented by a concatenation of // domain, descriptor and current timestamp. func (this *BaseRateLimiter) GenerateCacheKeys(request *pb.RateLimitRequest, - limits []*config.RateLimit, hitsAddend uint32) []CacheKey { + limits []*config.RateLimit, hitsAddend uint32, +) []CacheKey { assert.Assert(len(request.Descriptors) == len(limits)) cacheKeys := make([]CacheKey, len(request.Descriptors)) now := this.timeSource.UnixNow() @@ -79,7 +81,8 @@ func (this *BaseRateLimiter) IsOverLimitThresholdReached(limitInfo *LimitInfo) b // Generates response descriptor status based on cache key, over the limit with local cache, over the limit and // near the limit thresholds. Thresholds are checked in order and are mutually exclusive. 
func (this *BaseRateLimiter) GetResponseDescriptorStatus(key string, limitInfo *LimitInfo, - isOverLimitWithLocalCache bool, hitsAddend uint32) *pb.RateLimitResponse_DescriptorStatus { + isOverLimitWithLocalCache bool, hitsAddend uint32, +) *pb.RateLimitResponse_DescriptorStatus { if key == "" { return this.generateResponseDescriptorStatus(pb.RateLimitResponse_OK, nil, 0) @@ -113,7 +116,8 @@ func (this *BaseRateLimiter) GetResponseDescriptorStatus(key string, limitInfo * // similar to mongo_1h, mongo_2h, etc. In the hour 1 (0h0m - 0h59m), the cache key is mongo_1h, we start // to get ratelimited in the 50th minute, the ttl of local_cache will be set as 1 hour(0h50m-1h49m). // In the time of 1h1m, since the cache key becomes different (mongo_2h), it won't get ratelimited. - err := this.localCache.Set([]byte(key), []byte{}, int(utils.UnitToDivider(limitInfo.limit.Limit.Unit))) + + err := this.localCache.Set([]byte(key), []byte{}, int(utils.UnitToDividerWithMultiplier(limitInfo.limit.Limit.Unit, limitInfo.limit.Limit.UnitMultiplier))) if err != nil { logger.Errorf("Failing to set local cache key: %s", key) } @@ -140,7 +144,8 @@ func (this *BaseRateLimiter) GetResponseDescriptorStatus(key string, limitInfo * } func NewBaseRateLimit(timeSource utils.TimeSource, jitterRand *rand.Rand, expirationJitterMaxSeconds int64, - localCache *freecache.Cache, nearLimitRatio float32, cacheKeyPrefix string, statsManager stats.Manager) *BaseRateLimiter { + localCache *freecache.Cache, nearLimitRatio float32, cacheKeyPrefix string, statsManager stats.Manager, +) *BaseRateLimiter { return &BaseRateLimiter{ timeSource: timeSource, JitterRand: jitterRand, @@ -194,13 +199,14 @@ func (this *BaseRateLimiter) increaseShadowModeStats(isOverLimitWithLocalCache b } func (this *BaseRateLimiter) generateResponseDescriptorStatus(responseCode pb.RateLimitResponse_Code, - limit *pb.RateLimitResponse_RateLimit, limitRemaining uint32) *pb.RateLimitResponse_DescriptorStatus { + limit *pb.RateLimitResponse_RateLimit, limitRemaining uint32, +) *pb.RateLimitResponse_DescriptorStatus { if limit != nil { return &pb.RateLimitResponse_DescriptorStatus{ Code: responseCode, CurrentLimit: limit, LimitRemaining: limitRemaining, - DurationUntilReset: utils.CalculateReset(&limit.Unit, this.timeSource), + DurationUntilReset: utils.CalculateReset(&limit.Unit, this.timeSource, limit.UnitMultiplier), } } else { return &pb.RateLimitResponse_DescriptorStatus{ diff --git a/src/limiter/cache_key.go b/src/limiter/cache_key.go index 4aeab204..d3778ae4 100644 --- a/src/limiter/cache_key.go +++ b/src/limiter/cache_key.go @@ -46,8 +46,8 @@ func isPerSecondLimit(unit pb.RateLimitResponse_RateLimit_Unit) bool { // @param now supplies the current unix time. // @return CacheKey struct. 
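+// When a unit_multiplier is configured, the timestamp in the key is floored to multiples of
+// UnitToDivider(unit) * unit_multiplier seconds (e.g. 300-second buckets for MINUTE with multiplier 5).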
func (this *CacheKeyGenerator) GenerateCacheKey( - domain string, descriptor *pb_struct.RateLimitDescriptor, limit *config.RateLimit, now int64) CacheKey { - + domain string, descriptor *pb_struct.RateLimitDescriptor, limit *config.RateLimit, now int64, +) CacheKey { if limit == nil { return CacheKey{ Key: "", @@ -70,7 +70,8 @@ func (this *CacheKeyGenerator) GenerateCacheKey( b.WriteByte('_') } - divider := utils.UnitToDivider(limit.Limit.Unit) + divider := utils.UnitToDividerWithMultiplier(limit.Limit.Unit, limit.Limit.UnitMultiplier) + b.WriteString(strconv.FormatInt((now/divider)*divider, 10)) return CacheKey{ diff --git a/src/memcached/cache_impl.go b/src/memcached/cache_impl.go index a79451df..61da4141 100644 --- a/src/memcached/cache_impl.go +++ b/src/memcached/cache_impl.go @@ -64,8 +64,8 @@ var _ limiter.RateLimitCache = (*rateLimitMemcacheImpl)(nil) func (this *rateLimitMemcacheImpl) DoLimit( ctx context.Context, request *pb.RateLimitRequest, - limits []*config.RateLimit) []*pb.RateLimitResponse_DescriptorStatus { - + limits []*config.RateLimit, +) []*pb.RateLimitResponse_DescriptorStatus { logger.Debugf("starting cache lookup") // request.HitsAddend could be 0 (default value) if not specified by the caller in the Ratelimit request. @@ -148,7 +148,8 @@ func (this *rateLimitMemcacheImpl) DoLimit( } func (this *rateLimitMemcacheImpl) increaseAsync(cacheKeys []limiter.CacheKey, isOverLimitWithLocalCache []bool, - limits []*config.RateLimit, hitsAddend uint64) { + limits []*config.RateLimit, hitsAddend uint64, +) { defer this.waitGroup.Done() for i, cacheKey := range cacheKeys { if cacheKey.Key == "" || isOverLimitWithLocalCache[i] { @@ -157,7 +158,7 @@ func (this *rateLimitMemcacheImpl) increaseAsync(cacheKeys []limiter.CacheKey, i _, err := this.client.Increment(cacheKey.Key, hitsAddend) if err == memcache.ErrCacheMiss { - expirationSeconds := utils.UnitToDivider(limits[i].Limit.Unit) + expirationSeconds := utils.UnitToDividerWithMultiplier(limits[i].Limit.Unit, limits[i].Limit.UnitMultiplier) if this.expirationJitterMaxSeconds > 0 { expirationSeconds += this.jitterRand.Int63n(this.expirationJitterMaxSeconds) } @@ -290,7 +291,8 @@ func runAsync(task func()) { } func NewRateLimitCacheImpl(client Client, timeSource utils.TimeSource, jitterRand *rand.Rand, - expirationJitterMaxSeconds int64, localCache *freecache.Cache, statsManager stats.Manager, nearLimitRatio float32, cacheKeyPrefix string) limiter.RateLimitCache { + expirationJitterMaxSeconds int64, localCache *freecache.Cache, statsManager stats.Manager, nearLimitRatio float32, cacheKeyPrefix string, +) limiter.RateLimitCache { return &rateLimitMemcacheImpl{ client: client, timeSource: timeSource, @@ -303,7 +305,8 @@ func NewRateLimitCacheImpl(client Client, timeSource utils.TimeSource, jitterRan } func NewRateLimitCacheImplFromSettings(s settings.Settings, timeSource utils.TimeSource, jitterRand *rand.Rand, - localCache *freecache.Cache, scope gostats.Scope, statsManager stats.Manager) limiter.RateLimitCache { + localCache *freecache.Cache, scope gostats.Scope, statsManager stats.Manager, +) limiter.RateLimitCache { return NewRateLimitCacheImpl( CollectStats(newMemcacheFromSettings(s), scope.Scope("memcache")), timeSource, diff --git a/src/redis/fixed_cache_impl.go b/src/redis/fixed_cache_impl.go index 4ec34b3d..1ecde682 100644 --- a/src/redis/fixed_cache_impl.go +++ b/src/redis/fixed_cache_impl.go @@ -44,8 +44,8 @@ func pipelineAppendtoGet(client Client, pipeline *Pipeline, key string, result * func (this 
*fixedRateLimitCacheImpl) DoLimit( ctx context.Context, request *pb.RateLimitRequest, - limits []*config.RateLimit) []*pb.RateLimitResponse_DescriptorStatus { - + limits []*config.RateLimit, +) []*pb.RateLimitResponse_DescriptorStatus { logger.Debugf("starting cache lookup") // request.HitsAddend could be 0 (default value) if not specified by the caller in the RateLimit request. @@ -152,7 +152,7 @@ func (this *fixedRateLimitCacheImpl) DoLimit( logger.Debugf("looking up cache key: %s", cacheKey.Key) - expirationSeconds := utils.UnitToDivider(limits[i].Limit.Unit) + expirationSeconds := utils.UnitToDividerWithMultiplier(limits[i].Limit.Unit, limits[i].Limit.UnitMultiplier) if this.baseRateLimiter.ExpirationJitterMaxSeconds > 0 { expirationSeconds += this.baseRateLimiter.JitterRand.Int63n(this.baseRateLimiter.ExpirationJitterMaxSeconds) } @@ -218,7 +218,8 @@ func (this *fixedRateLimitCacheImpl) Flush() {} func NewFixedRateLimitCacheImpl(client Client, perSecondClient Client, timeSource utils.TimeSource, jitterRand *rand.Rand, expirationJitterMaxSeconds int64, localCache *freecache.Cache, nearLimitRatio float32, cacheKeyPrefix string, statsManager stats.Manager, - stopCacheKeyIncrementWhenOverlimit bool) limiter.RateLimitCache { + stopCacheKeyIncrementWhenOverlimit bool, +) limiter.RateLimitCache { return &fixedRateLimitCacheImpl{ client: client, perSecondClient: perSecondClient, diff --git a/src/service/ratelimit.go b/src/service/ratelimit.go index e918d9bb..0fdd4e47 100644 --- a/src/service/ratelimit.go +++ b/src/service/ratelimit.go @@ -138,8 +138,9 @@ func (this *service) constructLimitsToCheck(request *pb.RateLimitRequest, ctx co logger.Debugf("descriptor is unlimited, not passing to the cache") } else { logger.Debugf( - "applying limit: %d requests per %s, shadow_mode: %t", + "applying limit: %d requests per %d %s, shadow_mode: %t", limitsToCheck[i].Limit.RequestsPerUnit, + limitsToCheck[i].Limit.UnitMultiplier, limitsToCheck[i].Limit.Unit.String(), limitsToCheck[i].ShadowMode, ) @@ -177,8 +178,8 @@ func (this *service) constructLimitsToCheck(request *pb.RateLimitRequest, ctx co const MaxUint32 = uint32(1<<32 - 1) func (this *service) shouldRateLimitWorker( - ctx context.Context, request *pb.RateLimitRequest) *pb.RateLimitResponse { - + ctx context.Context, request *pb.RateLimitRequest, +) *pb.RateLimitResponse { checkServiceErr(request.Domain != "", "rate limit domain must not be empty") checkServiceErr(len(request.Descriptors) != 0, "rate limit descriptor list must not be empty") @@ -258,18 +259,18 @@ func (this *service) rateLimitRemainingHeader(descriptor *pb.RateLimitResponse_D } func (this *service) rateLimitResetHeader( - descriptor *pb.RateLimitResponse_DescriptorStatus) *core.HeaderValue { - + descriptor *pb.RateLimitResponse_DescriptorStatus, +) *core.HeaderValue { return &core.HeaderValue{ Key: this.customHeaderResetHeader, - Value: strconv.FormatInt(utils.CalculateReset(&descriptor.CurrentLimit.Unit, this.customHeaderClock).GetSeconds(), 10), + Value: strconv.FormatInt(utils.CalculateReset(&descriptor.CurrentLimit.Unit, this.customHeaderClock, descriptor.CurrentLimit.UnitMultiplier).GetSeconds(), 10), } } func (this *service) ShouldRateLimit( ctx context.Context, - request *pb.RateLimitRequest) (finalResponse *pb.RateLimitResponse, finalError error) { - + request *pb.RateLimitRequest, +) (finalResponse *pb.RateLimitResponse, finalError error) { // Generate trace _, span := tracer.Start(ctx, "ShouldRateLimit Execution", trace.WithAttributes( @@ -316,8 +317,8 @@ func (this 
*service) GetCurrentConfig() (config.RateLimitConfig, bool) { } func NewService(cache limiter.RateLimitCache, configProvider provider.RateLimitConfigProvider, statsManager stats.Manager, - health *server.HealthChecker, clock utils.TimeSource, shadowMode, forceStart bool, healthyWithAtLeastOneConfigLoad bool) RateLimitServiceServer { - + health *server.HealthChecker, clock utils.TimeSource, shadowMode, forceStart bool, healthyWithAtLeastOneConfigLoad bool, +) RateLimitServiceServer { newService := &service{ configLock: sync.RWMutex{}, configUpdateEvent: configProvider.ConfigUpdateEvent(), diff --git a/src/utils/utilities.go b/src/utils/utilities.go index 48f7f7ca..4c7c45c5 100644 --- a/src/utils/utilities.go +++ b/src/utils/utilities.go @@ -31,8 +31,22 @@ func UnitToDivider(unit pb.RateLimitResponse_RateLimit_Unit) int64 { panic("should not get here") } -func CalculateReset(unit *pb.RateLimitResponse_RateLimit_Unit, timeSource TimeSource) *duration.Duration { - sec := UnitToDivider(*unit) +// Convert a rate limit into a time divider reflecting the unit multiplier. +// @param unit supplies the unit to convert. +// @param unitMultiplier supplies the unit multiplier to scale the unit with. +// @return the scaled divider to use in time computations. +func UnitToDividerWithMultiplier(unit pb.RateLimitResponse_RateLimit_Unit, unitMultiplier uint32) int64 { + // TODO: Should this stay as a safe-guard? + // Already validated in rateLimitDescriptor::loadDescriptors, here redundantly to avoid the risk of multiplying with 0 + if unitMultiplier == 0 { + unitMultiplier = 1 + } + + return UnitToDivider(unit) * int64(unitMultiplier) +} + +func CalculateReset(unit *pb.RateLimitResponse_RateLimit_Unit, timeSource TimeSource, unitMultiplier uint32) *duration.Duration { + sec := UnitToDividerWithMultiplier(*unit, unitMultiplier) now := timeSource.UnixNow() return &duration.Duration{Seconds: sec - now%sec} } diff --git a/test/config/basic_config.yaml b/test/config/basic_config.yaml index 1ce7c9af..68129e89 100644 --- a/test/config/basic_config.yaml +++ b/test/config/basic_config.yaml @@ -68,3 +68,4 @@ descriptors: rate_limit: unit: minute requests_per_unit: 70 + unit_multiplier: 5 diff --git a/test/config/config_test.go b/test/config/config_test.go index bceaa2e6..54610eef 100644 --- a/test/config/config_test.go +++ b/test/config/config_test.go @@ -947,3 +947,42 @@ func TestDetailedMetric(t *testing.T) { }) } } + +func TestUnitMultiplier(t *testing.T) { + assert := assert.New(t) + stats := stats.NewStore(stats.NewNullSink(), false) + + rlConfig := config.NewRateLimitConfigImpl(loadFile("unit_multiplier.yaml"), mockstats.NewMockStatManager(stats), false) + rlConfig.Dump() + + rl := rlConfig.GetLimit( + context.TODO(), "test-domain", + &pb_struct.RateLimitDescriptor{ + Entries: []*pb_struct.RateLimitDescriptor_Entry{{Key: "key1"}}, + }) + + assert.EqualValues(20, rl.Limit.RequestsPerUnit) + assert.Equal(pb.RateLimitResponse_RateLimit_MINUTE, rl.Limit.Unit) + assert.EqualValues(5, rl.Limit.UnitMultiplier) + + rl = rlConfig.GetLimit( + context.TODO(), "test-domain", + &pb_struct.RateLimitDescriptor{ + Entries: []*pb_struct.RateLimitDescriptor_Entry{{Key: "key2"}}, + }) + + assert.EqualValues(25, rl.Limit.RequestsPerUnit) + assert.Equal(pb.RateLimitResponse_RateLimit_MINUTE, rl.Limit.Unit) + assert.EqualValues(1, rl.Limit.UnitMultiplier) +} + +func TestZeroUnitMultiplier(t *testing.T) { + expectConfigPanic( + t, + func() { + config.NewRateLimitConfigImpl( + loadFile("zero_unit_multiplier.yaml"), + 
mockstats.NewMockStatManager(stats.NewStore(stats.NewNullSink(), false)), false) + }, + "zero_unit_multiplier.yaml: invalid unit multiplier of 0") +} diff --git a/test/config/unit_multiplier.yaml b/test/config/unit_multiplier.yaml new file mode 100644 index 00000000..983199a2 --- /dev/null +++ b/test/config/unit_multiplier.yaml @@ -0,0 +1,14 @@ +domain: test-domain +descriptors: + # some unit_multiplier + - key: key1 + rate_limit: + unit: minute + requests_per_unit: 20 + unit_multiplier: 5 + + # no unit_multiplier + - key: key2 + rate_limit: + unit: minute + requests_per_unit: 25 diff --git a/test/config/zero_unit_multiplier.yaml b/test/config/zero_unit_multiplier.yaml new file mode 100644 index 00000000..091ca3f2 --- /dev/null +++ b/test/config/zero_unit_multiplier.yaml @@ -0,0 +1,8 @@ +domain: test-domain +descriptors: + # 0 unit_multiplier + - key: key1 + rate_limit: + unit: second + requests_per_unit: 5 + unit_multiplier: 0 diff --git a/test/limiter/base_limiter_test.go b/test/limiter/base_limiter_test.go index b3babcee..43164e43 100644 --- a/test/limiter/base_limiter_test.go +++ b/test/limiter/base_limiter_test.go @@ -29,7 +29,7 @@ func TestGenerateCacheKeys(t *testing.T) { timeSource.EXPECT().UnixNow().Return(int64(1234)) baseRateLimit := limiter.NewBaseRateLimit(timeSource, rand.New(jitterSource), 3600, nil, 0.8, "", sm) request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} assert.Equal(uint64(0), limits[0].Stats.TotalHits.Value()) cacheKeys := baseRateLimit.GenerateCacheKeys(request, limits, 1) assert.Equal(1, len(cacheKeys)) @@ -48,7 +48,7 @@ func TestGenerateCacheKeysPrefix(t *testing.T) { timeSource.EXPECT().UnixNow().Return(int64(1234)) baseRateLimit := limiter.NewBaseRateLimit(timeSource, rand.New(jitterSource), 3600, nil, 0.8, "prefix:", sm) request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} assert.Equal(uint64(0), limits[0].Stats.TotalHits.Value()) cacheKeys := baseRateLimit.GenerateCacheKeys(request, limits, 1) assert.Equal(1, len(cacheKeys)) @@ -56,6 +56,24 @@ func TestGenerateCacheKeysPrefix(t *testing.T) { assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) } +func TestGenerateCacheKeysWithUnitModifier(t *testing.T) { + assert := assert.New(t) + controller := gomock.NewController(t) + defer controller.Finish() + timeSource := mock_utils.NewMockTimeSource(controller) + statsStore := stats.NewStore(stats.NewNullSink(), false) + sm := mockstats.NewMockStatManager(statsStore) + timeSource.EXPECT().UnixNow().Return(int64(1234)) + baseRateLimit := limiter.NewBaseRateLimit(timeSource, nil, 3600, nil, 0.8, "", sm) + request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 30)} + assert.Equal(uint64(0), 
limits[0].Stats.TotalHits.Value()) + cacheKeys := baseRateLimit.GenerateCacheKeys(request, limits, 1) + assert.Equal(1, len(cacheKeys)) + assert.Equal("domain_key_value_1230", cacheKeys[0].Key) + assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) +} + func TestOverLimitWithLocalCache(t *testing.T) { assert := assert.New(t) controller := gomock.NewController(t) @@ -102,7 +120,7 @@ func TestGetResponseStatusOverLimitWithLocalCache(t *testing.T) { statsStore := stats.NewStore(stats.NewNullSink(), false) sm := mockstats.NewMockStatManager(statsStore) baseRateLimit := limiter.NewBaseRateLimit(timeSource, nil, 3600, nil, 0.8, "", sm) - limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} limitInfo := limiter.NewRateLimitInfo(limits[0], 2, 6, 4, 5) // As `isOverLimitWithLocalCache` is passed as `true`, immediate response is returned with no checks of the limits. responseStatus := baseRateLimit.GetResponseDescriptorStatus("key", limitInfo, true, 2) @@ -125,7 +143,7 @@ func TestGetResponseStatusOverLimitWithLocalCacheShadowMode(t *testing.T) { sm := mockstats.NewMockStatManager(statsStore) baseRateLimit := limiter.NewBaseRateLimit(timeSource, nil, 3600, nil, 0.8, "", sm) // This limit is in ShadowMode - limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, true, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, true, "", nil, false, 1)} limitInfo := limiter.NewRateLimitInfo(limits[0], 2, 6, 4, 5) // As `isOverLimitWithLocalCache` is passed as `true`, immediate response is returned with no checks of the limits. 
responseStatus := baseRateLimit.GetResponseDescriptorStatus("key", limitInfo, true, 2) @@ -149,7 +167,7 @@ func TestGetResponseStatusOverLimit(t *testing.T) { localCache := freecache.NewCache(100) sm := mockstats.NewMockStatManager(statsStore) baseRateLimit := limiter.NewBaseRateLimit(timeSource, nil, 3600, localCache, 0.8, "", sm) - limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} limitInfo := limiter.NewRateLimitInfo(limits[0], 2, 7, 4, 5) responseStatus := baseRateLimit.GetResponseDescriptorStatus("key", limitInfo, false, 1) assert.Equal(pb.RateLimitResponse_OVER_LIMIT, responseStatus.GetCode()) @@ -175,7 +193,7 @@ func TestGetResponseStatusOverLimitShadowMode(t *testing.T) { sm := mockstats.NewMockStatManager(statsStore) baseRateLimit := limiter.NewBaseRateLimit(timeSource, nil, 3600, localCache, 0.8, "", sm) // Key is in shadow_mode: true - limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, true, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(5, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, true, "", nil, false, 1)} limitInfo := limiter.NewRateLimitInfo(limits[0], 2, 7, 4, 5) responseStatus := baseRateLimit.GetResponseDescriptorStatus("key", limitInfo, false, 1) assert.Equal(pb.RateLimitResponse_OK, responseStatus.GetCode()) @@ -197,7 +215,7 @@ func TestGetResponseStatusBelowLimit(t *testing.T) { statsStore := stats.NewStore(stats.NewNullSink(), false) sm := mockstats.NewMockStatManager(statsStore) baseRateLimit := limiter.NewBaseRateLimit(timeSource, nil, 3600, nil, 0.8, "", sm) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} limitInfo := limiter.NewRateLimitInfo(limits[0], 2, 6, 9, 10) responseStatus := baseRateLimit.GetResponseDescriptorStatus("key", limitInfo, false, 1) assert.Equal(pb.RateLimitResponse_OK, responseStatus.GetCode()) @@ -218,7 +236,7 @@ func TestGetResponseStatusBelowLimitShadowMode(t *testing.T) { statsStore := stats.NewStore(stats.NewNullSink(), false) sm := mockstats.NewMockStatManager(statsStore) baseRateLimit := limiter.NewBaseRateLimit(timeSource, nil, 3600, nil, 0.8, "", sm) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, true, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, true, "", nil, false, 1)} limitInfo := limiter.NewRateLimitInfo(limits[0], 2, 6, 9, 10) responseStatus := baseRateLimit.GetResponseDescriptorStatus("key", limitInfo, false, 1) assert.Equal(pb.RateLimitResponse_OK, responseStatus.GetCode()) diff --git a/test/limiter/cache_key_test.go b/test/limiter/cache_key_test.go new file mode 100644 index 00000000..56eb523b --- /dev/null +++ b/test/limiter/cache_key_test.go @@ -0,0 +1,98 @@ +package limiter + +import ( + "strconv" + "strings" + "testing" + "time" + + pb "github.com/envoyproxy/go-control-plane/envoy/service/ratelimit/v3" + 
"github.com/golang/mock/gomock" + stats "github.com/lyft/gostats" + "github.com/stretchr/testify/assert" + + "github.com/envoyproxy/ratelimit/src/config" + "github.com/envoyproxy/ratelimit/src/limiter" + "github.com/envoyproxy/ratelimit/test/common" + mockstats "github.com/envoyproxy/ratelimit/test/mocks/stats" +) + +func TestCacheKeyGenerator(t *testing.T) { + cacheKeyGenerator := limiter.NewCacheKeyGenerator("") + assert := assert.New(t) + controller := gomock.NewController(t) + defer controller.Finish() + + statsStore := stats.NewStore(stats.NewNullSink(), false) + sm := mockstats.NewMockStatManager(statsStore) + request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) + timestamp := time.Date(2024, 5, 4, 12, 30, 15, 30, time.UTC) + + limit := config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1) + cacheKey := cacheKeyGenerator.GenerateCacheKey("domain", request.Descriptors[0], limit, timestamp.Unix()) + assert.Equal("domain_key_value_1714825815", cacheKey.Key) // Rounded down to the nearest seconds (2024-05-04 12:30:15) + assert.True(cacheKey.PerSecond) + + limit = config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key_value"), false, false, "", nil, false, 1) + cacheKey = cacheKeyGenerator.GenerateCacheKey("domain", request.Descriptors[0], limit, timestamp.Unix()) + assert.Equal("domain_key_value_1714825800", cacheKey.Key) // Rounded down to the nearest minute (2024-05-04 12:30:00) + assert.False(cacheKey.PerSecond) + + limit = config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key_value"), false, false, "", nil, false, 1) + cacheKey = cacheKeyGenerator.GenerateCacheKey("domain", request.Descriptors[0], limit, timestamp.Unix()) + assert.Equal("domain_key_value_1714824000", cacheKey.Key) // Rounded down to the nearest hour (2024-05-04 12:00:00) + assert.False(cacheKey.PerSecond) + + timestamp = time.Date(2024, 5, 4, 12, 59, 59, 30, time.UTC) + limit = config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key_value"), false, false, "", nil, false, 1) + cacheKey = cacheKeyGenerator.GenerateCacheKey("domain", request.Descriptors[0], limit, timestamp.Unix()) + assert.Equal("domain_key_value_1714824000", cacheKey.Key) // Also rounded down to 2024-05-04 12:00:00 + assert.False(cacheKey.PerSecond) +} + +func TestCacheKeyGeneratorWithUnitMultiplier(t *testing.T) { + cacheKeyGenerator := limiter.NewCacheKeyGenerator("") + assert := assert.New(t) + controller := gomock.NewController(t) + defer controller.Finish() + + statsStore := stats.NewStore(stats.NewNullSink(), false) + sm := mockstats.NewMockStatManager(statsStore) + request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) + + // Rounding down to nearest 5 minutes (creating 5-minute long buckets) + limit := config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key_value"), false, false, "", nil, false, 5) + testsRoundingDownToNearestFiveMinutes := []struct { + input time.Time + expectedTimestamp time.Time + }{ + {time.Date(2024, 5, 4, 12, 0, 15, 30, time.UTC), time.Date(2024, 5, 4, 12, 0, 0, 0, time.UTC)}, + {time.Date(2024, 5, 4, 12, 1, 15, 30, time.UTC), time.Date(2024, 5, 4, 12, 0, 0, 0, time.UTC)}, + {time.Date(2024, 5, 4, 12, 4, 15, 30, time.UTC), time.Date(2024, 5, 4, 12, 0, 0, 0, time.UTC)}, + {time.Date(2024, 5, 4, 12, 5, 15, 30, time.UTC), time.Date(2024, 5, 4, 12, 5, 0, 0, time.UTC)}, + } + + for _, testCase := range 
testsRoundingDownToNearestFiveMinutes { + cacheKey := cacheKeyGenerator.GenerateCacheKey("domain", request.Descriptors[0], limit, testCase.input.Unix()) + unixTime, _ := strconv.ParseInt(strings.Replace(cacheKey.Key, "domain_key_value_", "", 1), 10, 64) + assert.Equal(testCase.expectedTimestamp, time.Unix(unixTime, 0).UTC()) + } + + // Rounding down to nearest 2 hours (creating 2-hour long buckets) + limit = config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key_value"), false, false, "", nil, false, 2) + testsRoundingDownToNearestThreeHours := []struct { + input time.Time + expectedTimestamp time.Time + }{ + {time.Date(2024, 5, 4, 12, 0, 15, 30, time.UTC), time.Date(2024, 5, 4, 12, 0, 0, 0, time.UTC)}, + {time.Date(2024, 5, 4, 13, 0, 15, 30, time.UTC), time.Date(2024, 5, 4, 12, 0, 0, 0, time.UTC)}, + {time.Date(2024, 5, 4, 14, 4, 15, 30, time.UTC), time.Date(2024, 5, 4, 14, 0, 0, 0, time.UTC)}, + {time.Date(2024, 5, 4, 15, 5, 15, 30, time.UTC), time.Date(2024, 5, 4, 14, 0, 0, 0, time.UTC)}, + } + + for _, testCase := range testsRoundingDownToNearestThreeHours { + cacheKey := cacheKeyGenerator.GenerateCacheKey("domain", request.Descriptors[0], limit, testCase.input.Unix()) + unixTime, _ := strconv.ParseInt(strings.Replace(cacheKey.Key, "domain_key_value_", "", 1), 10, 64) + assert.Equal(testCase.expectedTimestamp, time.Unix(unixTime, 0).UTC()) + } +} diff --git a/test/memcached/cache_impl_test.go b/test/memcached/cache_impl_test.go index 14022ecb..8c931c98 100644 --- a/test/memcached/cache_impl_test.go +++ b/test/memcached/cache_impl_test.go @@ -53,10 +53,10 @@ func TestMemcached(t *testing.T) { client.EXPECT().Increment("domain_key_value_1234", uint64(1)).Return(uint64(5), nil) request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -77,12 +77,12 @@ func TestMemcached(t *testing.T) { }, 1) limits = []*config.RateLimit{ nil, - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key2_value2_subkey2_subvalue2"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key2_value2_subkey2_subvalue2"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ {Code: pb.RateLimitResponse_OK, CurrentLimit: nil, LimitRemaining: 0}, - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: 
utils.CalculateReset(&limits[1].Limit.Unit, timeSource, limits[1].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[1].Stats.TotalHits.Value()) @@ -111,13 +111,13 @@ func TestMemcached(t *testing.T) { {{"key3", "value3"}, {"subkey3", "subvalue3"}}, }, 1) limits = []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key3_value3"), false, false, "", nil, false), - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_DAY, sm.NewStats("key3_value3_subkey3_subvalue3"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key3_value3"), false, false, "", nil, false, 1), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_DAY, sm.NewStats("key3_value3_subkey3_subvalue3"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource, limits[1].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -150,10 +150,10 @@ func TestMemcachedGetError(t *testing.T) { client.EXPECT().Increment("domain_key_value_1234", uint64(1)).Return(uint64(5), nil) request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -168,10 +168,10 @@ func TestMemcachedGetError(t *testing.T) { client.EXPECT().Increment("domain_key_value1_1234", uint64(1)).Return(uint64(5), nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value1"}}}, 1) - limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value1"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value1"), false, false, "", nil, false, 1)} assert.Equal( - 
[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -183,7 +183,8 @@ func TestMemcachedGetError(t *testing.T) { func testLocalCacheStats(localCacheStats stats.StatGenerator, statsStore stats.Store, sink *common.TestStatSink, expectedHitCount int, expectedMissCount int, expectedLookUpCount int, expectedExpiredCount int, - expectedEntryCount int) func(*testing.T) { + expectedEntryCount int, +) func(*testing.T) { return func(t *testing.T) { localCacheStats.GenerateStats() statsStore.Flush() @@ -241,12 +242,12 @@ func TestOverLimitWithLocalCache(t *testing.T) { request := common.NewRateLimitRequest("domain", [][][2]string{{{"key4", "value4"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false), + config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -267,7 +268,7 @@ func TestOverLimitWithLocalCache(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) @@ -288,7 +289,7 @@ func TestOverLimitWithLocalCache(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) @@ -306,7 +307,7 @@ func TestOverLimitWithLocalCache(t *testing.T) { client.EXPECT().Increment("domain_key4_value4_997200", uint64(1)).Times(0) assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: 
utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(4), limits[0].Stats.TotalHits.Value()) @@ -342,12 +343,12 @@ func TestNearLimit(t *testing.T) { request := common.NewRateLimitRequest("domain", [][][2]string{{{"key4", "value4"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false), + config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -364,7 +365,7 @@ func TestNearLimit(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) @@ -382,7 +383,7 @@ func TestNearLimit(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) @@ -399,10 +400,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().Increment("domain_key5_value5_1234", uint64(3)).Return(uint64(5), nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key5", "value5"}}}, 3) - limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key5_value5"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key5_value5"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 15, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 15, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, 
limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -417,10 +418,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().Increment("domain_key6_value6_1234", uint64(2)).Return(uint64(7), nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key6", "value6"}}}, 2) - limits = []*config.RateLimit{config.NewRateLimit(8, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key6_value6"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(8, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key6_value6"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -435,10 +436,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().Increment("domain_key7_value7_1234", uint64(3)).Return(uint64(19), nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key7", "value7"}}}, 3) - limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key7_value7"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key7_value7"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -453,10 +454,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().Increment("domain_key8_value8_1234", uint64(3)).Return(uint64(22), nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key8", "value8"}}}, 3) - limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key8_value8"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key8_value8"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), 
limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(2), limits[0].Stats.OverLimit.Value()) @@ -471,10 +472,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().Increment("domain_key9_value9_1234", uint64(7)).Return(uint64(22), nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key9", "value9"}}}, 7) - limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key9_value9"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key9_value9"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(7), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(2), limits[0].Stats.OverLimit.Value()) @@ -489,10 +490,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().Increment("domain_key10_value10_1234", uint64(3)).Return(uint64(30), nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key10", "value10"}}}, 3) - limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key10_value10"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key10_value10"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(3), limits[0].Stats.OverLimit.Value()) @@ -533,10 +534,10 @@ func TestMemcacheWithJitter(t *testing.T) { ).Return(nil) request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), 
 limits[0].Stats.OverLimit.Value())
@@ -576,10 +577,10 @@ func TestMemcacheAdd(t *testing.T) {
 		uint64(2), nil)
 
 	request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1)
-	limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)}
+	limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)}
 
 	assert.Equal(
-		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}},
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
 		cache.DoLimit(context.Background(), request, limits))
 	assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value())
 	assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value())
@@ -600,10 +601,10 @@ func TestMemcacheAdd(t *testing.T) {
 	).Return(nil)
 
 	request = common.NewRateLimitRequest("domain", [][][2]string{{{"key2", "value2"}}}, 1)
-	limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key2_value2"), false, false, "", nil, false)}
+	limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key2_value2"), false, false, "", nil, false, 1)}
 
 	assert.Equal(
-		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}},
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
 		cache.DoLimit(context.Background(), request, limits))
 	assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value())
 	assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value())
@@ -673,7 +674,7 @@ func TestMemcachedTracer(t *testing.T) {
 	client.EXPECT().Increment("domain_key_value_1234", uint64(1)).Return(uint64(5), nil)
 
 	request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1)
-	limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)}
+	limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)}
 
 	cache.DoLimit(context.Background(), request, limits)
 
@@ -694,3 +695,45 @@ func getMultiResult(vals map[string]int) map[string]*memcache.Item {
 	}
 	return result
 }
+
+func TestMemcacheWithUnitMultiplier(t *testing.T) {
+	assert := assert.New(t)
+	controller := gomock.NewController(t)
+	defer controller.Finish()
+
+	timeSource := mock_utils.NewMockTimeSource(controller)
+	client := mock_memcached.NewMockClient(controller)
+	statsStore := stats.NewStore(stats.NewNullSink(), false)
+	sm := mockstats.NewMockStatManager(statsStore)
+	cache := memcached.NewRateLimitCacheImpl(client, timeSource, nil, 0, nil, sm, 0.8, "")
+
+	timeSource.EXPECT().UnixNow().Return(int64(1234)).MaxTimes(3)
+
+	// Key is not found in memcache
+	client.EXPECT().GetMulti([]string{"domain_key_value_1230"}).Return(nil, nil)
+	// First increment attempt will fail
+	client.EXPECT().Increment("domain_key_value_1230", uint64(1)).Return(
+		uint64(0), memcache.ErrCacheMiss)
+	// Add succeeds
+	client.EXPECT().Add(
+		&memcache.Item{
+			Key:   "domain_key_value_1230",
+			Value: []byte(strconv.FormatUint(1, 10)),
+			// 30 seconds due to the unit multiplier
+			Expiration: int32(30),
+		},
+	).Return(nil)
+
+	request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1)
+	limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 30)}
+
+	assert.Equal(
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 9, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
+		cache.DoLimit(context.Background(), request, limits))
+	assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value())
+	assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value())
+	assert.Equal(uint64(0), limits[0].Stats.NearLimit.Value())
+	assert.Equal(uint64(1), limits[0].Stats.WithinLimit.Value())
+
+	cache.Flush()
+}
diff --git a/test/redis/bench_test.go b/test/redis/bench_test.go
index b0b2a215..a7d45e18 100644
--- a/test/redis/bench_test.go
+++ b/test/redis/bench_test.go
@@ -49,7 +49,7 @@ func BenchmarkParallelDoLimit(b *testing.B) {
 		cache := redis.NewFixedRateLimitCacheImpl(client, nil, utils.NewTimeSourceImpl(), rand.New(utils.NewLockedSource(time.Now().Unix())), 10, nil, 0.8, "", sm, true)
 
 		request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1)
-		limits := []*config.RateLimit{config.NewRateLimit(1000000000, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)}
+		limits := []*config.RateLimit{config.NewRateLimit(1000000000, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)}
 
 		// wait for the pool to fill up
 		for {
diff --git a/test/redis/fixed_cache_impl_test.go b/test/redis/fixed_cache_impl_test.go
index 8c7f3c47..fe418ca9 100644
--- a/test/redis/fixed_cache_impl_test.go
+++ b/test/redis/fixed_cache_impl_test.go
@@ -69,10 +69,10 @@ func testRedis(usePerSecondRedis bool) func(*testing.T) {
 		clientUsed.EXPECT().PipeDo(gomock.Any()).Return(nil)
 
 		request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1)
-		limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)}
+		limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)}
 
 		assert.Equal(
-			[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}},
+			[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
 			cache.DoLimit(context.Background(), request, limits))
 		assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value())
 		assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value())
@@ -94,12 +94,12 @@ func testRedis(usePerSecondRedis bool) func(*testing.T) {
 			}, 1)
 		limits =
[]*config.RateLimit{ nil, - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key2_value2_subkey2_subvalue2"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key2_value2_subkey2_subvalue2"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ {Code: pb.RateLimitResponse_OK, CurrentLimit: nil, LimitRemaining: 0}, - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource, limits[1].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[1].Stats.TotalHits.Value()) @@ -124,13 +124,13 @@ func testRedis(usePerSecondRedis bool) func(*testing.T) { {{"key3", "value3"}, {"subkey3", "subvalue3"}}, }, 1) limits = []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key3_value3"), false, false, "", nil, false), - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_DAY, sm.NewStats("key3_value3_subkey3_subvalue3"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key3_value3"), false, false, "", nil, false, 1), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_DAY, sm.NewStats("key3_value3_subkey3_subvalue3"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource, limits[1].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -146,7 +146,8 @@ func testRedis(usePerSecondRedis bool) func(*testing.T) { func testLocalCacheStats(localCacheStats gostats.StatGenerator, statsStore gostats.Store, sink *common.TestStatSink, expectedHitCount int, expectedMissCount int, expectedLookUpCount int, expectedExpiredCount int, - expectedEntryCount int) func(*testing.T) { + expectedEntryCount int, +) func(*testing.T) { return func(t *testing.T) { localCacheStats.GenerateStats() statsStore.Flush() @@ -196,20 +197,20 @@ func TestOverLimitWithLocalCache(t *testing.T) { // Test Near Limit Stats. 
Under Near Limit Ratio timeSource.EXPECT().UnixNow().Return(int64(1000000)).MaxTimes(3) - client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key4_value4_997200", uint32(1)).SetArg(1, uint32(11)).DoAndReturn(pipeAppend) + client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key4_value4_993600", uint32(1)).SetArg(1, uint32(11)).DoAndReturn(pipeAppend) client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), - "EXPIRE", "domain_key4_value4_997200", int64(3600)).DoAndReturn(pipeAppend) + "EXPIRE", "domain_key4_value4_993600", int64(7200)).DoAndReturn(pipeAppend) client.EXPECT().PipeDo(gomock.Any()).Return(nil) request := common.NewRateLimitRequest("domain", [][][2]string{{{"key4", "value4"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false), + config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false, 2), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -223,14 +224,14 @@ func TestOverLimitWithLocalCache(t *testing.T) { // Test Near Limit Stats. At Near Limit Ratio, still OK timeSource.EXPECT().UnixNow().Return(int64(1000000)).MaxTimes(3) - client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key4_value4_997200", uint32(1)).SetArg(1, uint32(13)).DoAndReturn(pipeAppend) + client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key4_value4_993600", uint32(1)).SetArg(1, uint32(13)).DoAndReturn(pipeAppend) client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), - "EXPIRE", "domain_key4_value4_997200", int64(3600)).DoAndReturn(pipeAppend) + "EXPIRE", "domain_key4_value4_993600", int64(7200)).DoAndReturn(pipeAppend) client.EXPECT().PipeDo(gomock.Any()).Return(nil) assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) @@ -244,14 +245,14 @@ func TestOverLimitWithLocalCache(t *testing.T) { // Test Over limit stats timeSource.EXPECT().UnixNow().Return(int64(1000000)).MaxTimes(3) - client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key4_value4_997200", uint32(1)).SetArg(1, uint32(16)).DoAndReturn(pipeAppend) + client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key4_value4_993600", uint32(1)).SetArg(1, uint32(16)).DoAndReturn(pipeAppend) client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), - "EXPIRE", "domain_key4_value4_997200", int64(3600)).DoAndReturn(pipeAppend) + "EXPIRE", "domain_key4_value4_993600", int64(7200)).DoAndReturn(pipeAppend) 
client.EXPECT().PipeDo(gomock.Any()).Return(nil) assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) @@ -270,7 +271,7 @@ func TestOverLimitWithLocalCache(t *testing.T) { "EXPIRE", "domain_key4_value4_997200", int64(3600)).Times(0) assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(4), limits[0].Stats.TotalHits.Value()) @@ -304,12 +305,12 @@ func TestNearLimit(t *testing.T) { request := common.NewRateLimitRequest("domain", [][][2]string{{{"key4", "value4"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false), + config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -326,7 +327,7 @@ func TestNearLimit(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) @@ -344,7 +345,7 @@ func TestNearLimit(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) @@ -360,10 +361,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().PipeDo(gomock.Any()).Return(nil) request = common.NewRateLimitRequest("domain", 
[][][2]string{{{"key5", "value5"}}}, 3) - limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key5_value5"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key5_value5"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 15, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 15, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -377,10 +378,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().PipeDo(gomock.Any()).Return(nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key6", "value6"}}}, 2) - limits = []*config.RateLimit{config.NewRateLimit(8, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key6_value6"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(8, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key6_value6"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -394,10 +395,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().PipeDo(gomock.Any()).Return(nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key7", "value7"}}}, 3) - limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key7_value7"), false, false, "", nil, false)} + limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key7_value7"), false, false, "", nil, false, 1)} assert.Equal( - []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}}, + []*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -411,10 +412,10 @@ func TestNearLimit(t *testing.T) { client.EXPECT().PipeDo(gomock.Any()).Return(nil) request = common.NewRateLimitRequest("domain", [][][2]string{{{"key8", "value8"}}}, 3) - limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key8_value8"), false, 
false, "", nil, false)}
+	limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key8_value8"), false, false, "", nil, false, 1)}
 
 	assert.Equal(
-		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}},
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
 		cache.DoLimit(context.Background(), request, limits))
 	assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value())
 	assert.Equal(uint64(2), limits[0].Stats.OverLimit.Value())
@@ -428,10 +429,10 @@ func TestNearLimit(t *testing.T) {
 	client.EXPECT().PipeDo(gomock.Any()).Return(nil)
 
 	request = common.NewRateLimitRequest("domain", [][][2]string{{{"key9", "value9"}}}, 7)
-	limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key9_value9"), false, false, "", nil, false)}
+	limits = []*config.RateLimit{config.NewRateLimit(20, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key9_value9"), false, false, "", nil, false, 1)}
 
 	assert.Equal(
-		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}},
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
 		cache.DoLimit(context.Background(), request, limits))
 	assert.Equal(uint64(7), limits[0].Stats.TotalHits.Value())
 	assert.Equal(uint64(2), limits[0].Stats.OverLimit.Value())
@@ -445,10 +446,10 @@ func TestNearLimit(t *testing.T) {
 	client.EXPECT().PipeDo(gomock.Any()).Return(nil)
 
 	request = common.NewRateLimitRequest("domain", [][][2]string{{{"key10", "value10"}}}, 3)
-	limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key10_value10"), false, false, "", nil, false)}
+	limits = []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key10_value10"), false, false, "", nil, false, 1)}
 
 	assert.Equal(
-		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}},
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OVER_LIMIT, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
 		cache.DoLimit(context.Background(), request, limits))
 	assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value())
 	assert.Equal(uint64(3), limits[0].Stats.OverLimit.Value())
@@ -456,6 +457,54 @@ func TestNearLimit(t *testing.T) {
 	assert.Equal(uint64(0), limits[0].Stats.WithinLimit.Value())
 }
 
+func TestRedisWithUnitMultiplier(t *testing.T) {
+	assert := assert.New(t)
+	controller := gomock.NewController(t)
+	defer controller.Finish()
+
+	client := mock_redis.NewMockClient(controller)
+	timeSource := mock_utils.NewMockTimeSource(controller)
+	statsStore := gostats.NewStore(gostats.NewNullSink(), false)
+	sm := stats.NewMockStatManager(statsStore)
+	cache := redis.NewFixedRateLimitCacheImpl(client, nil, timeSource, rand.New(rand.NewSource(1)), 0, nil, 0.8, "", sm, false)
+
+	timeSource.EXPECT().UnixNow().Return(int64(1000000)).MaxTimes(3)
+	client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key_value_999990", uint32(1)).SetArg(1, uint32(5)).DoAndReturn(pipeAppend)
+	client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "EXPIRE", "domain_key_value_999990", int64(30)).DoAndReturn(pipeAppend)
+	client.EXPECT().PipeDo(gomock.Any()).Return(nil)
+
+	request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1)
+	limits := []*config.RateLimit{
+		config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 30),
+	}
+
+	assert.Equal(
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
+		cache.DoLimit(context.Background(), request, limits))
+	assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value())
+	assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value())
+	assert.Equal(uint64(0), limits[0].Stats.NearLimit.Value())
+	assert.Equal(uint64(1), limits[0].Stats.WithinLimit.Value())
+
+	timeSource.EXPECT().UnixNow().Return(int64(1000000)).MaxTimes(3)
+	client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "INCRBY", "domain_key2_value2_999600", uint32(1)).SetArg(1, uint32(5)).DoAndReturn(pipeAppend)
+	client.EXPECT().PipeAppend(gomock.Any(), gomock.Any(), "EXPIRE", "domain_key2_value2_999600", int64(600)).DoAndReturn(pipeAppend)
+	client.EXPECT().PipeDo(gomock.Any()).Return(nil)
+
+	request = common.NewRateLimitRequest("domain", [][][2]string{{{"key2", "value2"}}}, 1)
+	limits = []*config.RateLimit{
+		config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, sm.NewStats("key2_value2"), false, false, "", nil, false, 10),
+	}
+
+	assert.Equal(
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}},
+		cache.DoLimit(context.Background(), request, limits))
+	assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value())
+	assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value())
+	assert.Equal(uint64(0), limits[0].Stats.NearLimit.Value())
+	assert.Equal(uint64(1), limits[0].Stats.WithinLimit.Value())
+}
+
 func TestRedisWithJitter(t *testing.T) {
 	assert := assert.New(t)
 	controller := gomock.NewController(t)
@@ -475,10 +524,10 @@ func TestRedisWithJitter(t *testing.T) {
 	client.EXPECT().PipeDo(gomock.Any()).Return(nil)
 
 	request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1)
-	limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)}
+	limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)}
 
 	assert.Equal(
-		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}},
+		[]*pb.RateLimitResponse_DescriptorStatus{{Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 5,
DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}}, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) assert.Equal(uint64(0), limits[0].Stats.OverLimit.Value()) @@ -510,12 +559,12 @@ func TestOverLimitWithLocalCacheShadowRule(t *testing.T) { request := common.NewRateLimitRequest("domain", [][][2]string{{{"key4", "value4"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, true, "", nil, false), + config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, true, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -536,7 +585,7 @@ func TestOverLimitWithLocalCacheShadowRule(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) @@ -558,7 +607,7 @@ func TestOverLimitWithLocalCacheShadowRule(t *testing.T) { // The result should be OK since limit is in ShadowMode assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) @@ -580,7 +629,7 @@ func TestOverLimitWithLocalCacheShadowRule(t *testing.T) { // The result should be OK since limit is in ShadowMode assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) @@ -618,7 +667,7 @@ func TestRedisTracer(t *testing.T) { client.EXPECT().PipeDo(gomock.Any()).Return(nil) request := common.NewRateLimitRequest("domain", [][][2]string{{{"key", "value"}}}, 1) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, 
pb.RateLimitResponse_RateLimit_SECOND, sm.NewStats("key_value"), false, false, "", nil, false, 1)} cache.DoLimit(context.Background(), request, limits) spanStubs := testSpanExporter.GetSpans() @@ -658,14 +707,14 @@ func TestOverLimitWithStopCacheKeyIncrementWhenOverlimitConfig(t *testing.T) { request := common.NewRateLimitRequest("domain", [][][2]string{{{"key4", "value4"}}, {{"key5", "value5"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false), - config.NewRateLimit(14, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key5_value5"), false, false, "", nil, false), + config.NewRateLimit(15, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key4_value4"), false, false, "", nil, false, 1), + config.NewRateLimit(14, pb.RateLimitResponse_RateLimit_HOUR, sm.NewStats("key5_value5"), false, false, "", nil, false, 1), } assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[1].Limit, LimitRemaining: 3, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 4, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[1].Limit, LimitRemaining: 3, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource, limits[1].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(1), limits[0].Stats.TotalHits.Value()) @@ -696,8 +745,8 @@ func TestOverLimitWithStopCacheKeyIncrementWhenOverlimitConfig(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[1].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 2, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[1].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource, limits[1].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(2), limits[0].Stats.TotalHits.Value()) @@ -728,8 +777,8 @@ func TestOverLimitWithStopCacheKeyIncrementWhenOverlimitConfig(t *testing.T) { assert.Equal( []*pb.RateLimitResponse_DescriptorStatus{ - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource)}, - {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[1].Limit, LimitRemaining: 0, DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[0].Limit, LimitRemaining: 1, DurationUntilReset: utils.CalculateReset(&limits[0].Limit.Unit, timeSource, limits[0].Limit.UnitMultiplier)}, + {Code: pb.RateLimitResponse_OK, CurrentLimit: limits[1].Limit, LimitRemaining: 0, 
DurationUntilReset: utils.CalculateReset(&limits[1].Limit.Unit, timeSource, limits[1].Limit.UnitMultiplier)}, }, cache.DoLimit(context.Background(), request, limits)) assert.Equal(uint64(3), limits[0].Stats.TotalHits.Value()) diff --git a/test/service/ratelimit_test.go b/test/service/ratelimit_test.go index 4d2cde53..e769f078 100644 --- a/test/service/ratelimit_test.go +++ b/test/service/ratelimit_test.go @@ -153,7 +153,7 @@ func TestService(test *testing.T) { request = common.NewRateLimitRequest( "different-domain", [][][2]string{{{"foo", "bar"}}, {{"hello", "world"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false, 1), nil, } t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) @@ -187,7 +187,7 @@ func TestService(test *testing.T) { // Config should still be valid. Also make sure order does not affect results. limits = []*config.RateLimit{ nil, - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false, 1), } t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[1]).Return(limits[1]) @@ -241,7 +241,7 @@ func TestServiceGlobalShadowMode(test *testing.T) { // Global Shadow mode limits := []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false, 1), nil, } t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) @@ -281,8 +281,8 @@ func TestRuleShadowMode(test *testing.T) { request := common.NewRateLimitRequest( "different-domain", [][][2]string{{{"foo", "bar"}}, {{"hello", "world"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, true, "", nil, false), - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, true, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, true, "", nil, false, 1), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, true, "", nil, false, 1), } t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[1]).Return(limits[1]) @@ -314,8 +314,8 @@ func TestMixedRuleShadowMode(test *testing.T) { request := common.NewRateLimitRequest( "different-domain", [][][2]string{{{"foo", "bar"}}, {{"hello", "world"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, true, "", nil, false), - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", 
nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, true, "", nil, false, 1), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false, 1), } t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[1]).Return(limits[1]) @@ -374,7 +374,7 @@ func TestServiceWithCustomRatelimitHeaders(test *testing.T) { request := common.NewRateLimitRequest( "different-domain", [][][2]string{{{"foo", "bar"}}, {{"hello", "world"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false, 1), nil, } t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) @@ -427,7 +427,7 @@ func TestServiceWithDefaultRatelimitHeaders(test *testing.T) { request := common.NewRateLimitRequest( "different-domain", [][][2]string{{{"foo", "bar"}}, {{"hello", "world"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false, 1), nil, } t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) @@ -487,7 +487,7 @@ func TestCacheError(test *testing.T) { service := t.setupBasicService() request := common.NewRateLimitRequest("different-domain", [][][2]string{{{"foo", "bar"}}}, 1) - limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false)} + limits := []*config.RateLimit{config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("key"), false, false, "", nil, false, 1)} t.config.EXPECT().GetLimit(context.Background(), "different-domain", request.Descriptors[0]).Return(limits[0]) t.cache.EXPECT().DoLimit(context.Background(), request, limits).Do( func(context.Context, *pb.RateLimitRequest, []*config.RateLimit) { @@ -529,9 +529,9 @@ func TestUnlimited(test *testing.T) { request := common.NewRateLimitRequest( "some-domain", [][][2]string{{{"foo", "bar"}}, {{"hello", "world"}}, {{"baz", "qux"}}}, 1) limits := []*config.RateLimit{ - config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("foo_bar"), false, false, "", nil, false), + config.NewRateLimit(10, pb.RateLimitResponse_RateLimit_MINUTE, t.statsManager.NewStats("foo_bar"), false, false, "", nil, false, 1), nil, - config.NewRateLimit(55, pb.RateLimitResponse_RateLimit_SECOND, t.statsManager.NewStats("baz_qux"), true, false, "", nil, false), + config.NewRateLimit(55, pb.RateLimitResponse_RateLimit_SECOND, t.statsManager.NewStats("baz_qux"), true, false, "", nil, false, 1), } t.config.EXPECT().GetLimit(context.Background(), "some-domain", request.Descriptors[0]).Return(limits[0]) t.config.EXPECT().GetLimit(context.Background(), "some-domain", request.Descriptors[1]).Return(limits[1])
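The unit-multiplier tests above all assert the same window arithmetic: the cache key suffix is the request time truncated to a window whose length is the unit's base duration scaled by `unit_multiplier`, and the key expiry equals that scaled duration. The sketch below is illustrative only; `unitSeconds` and `windowStart` are hypothetical helper names, not part of the ratelimit codebase, but the arithmetic reproduces the values the mocks expect.

```go
package main

import "fmt"

// unitSeconds returns the base duration of a rate limit unit in seconds.
// Hypothetical helper used only for this illustration.
func unitSeconds(unit string) int64 {
	switch unit {
	case "SECOND":
		return 1
	case "MINUTE":
		return 60
	case "HOUR":
		return 3600
	case "DAY":
		return 86400
	default:
		return 0
	}
}

// windowStart truncates now to the start of the current window, where the
// window length is the unit's base duration multiplied by unitMultiplier.
// This is the arithmetic behind the key suffixes and expirations asserted
// in the tests above.
func windowStart(now int64, unit string, unitMultiplier int64) int64 {
	window := unitSeconds(unit) * unitMultiplier
	return now - (now % window)
}

func main() {
	// TestMemcacheWithUnitMultiplier: SECOND x 30 at now=1234 -> key suffix 1230, expiry 30s.
	fmt.Println(windowStart(1234, "SECOND", 30), unitSeconds("SECOND")*30)
	// TestOverLimitWithLocalCache: HOUR x 2 at now=1000000 -> key suffix 993600, EXPIRE 7200.
	fmt.Println(windowStart(1000000, "HOUR", 2), unitSeconds("HOUR")*2)
	// TestRedisWithUnitMultiplier: SECOND x 30 at now=1000000 -> key suffix 999990, EXPIRE 30.
	fmt.Println(windowStart(1000000, "SECOND", 30), unitSeconds("SECOND")*30)
	// TestRedisWithUnitMultiplier: MINUTE x 10 at now=1000000 -> key suffix 999600, EXPIRE 600.
	fmt.Println(windowStart(1000000, "MINUTE", 10), unitSeconds("MINUTE")*10)
}
```

Run as-is, the sketch prints the key-suffix and expiry pairs asserted by TestMemcacheWithUnitMultiplier, TestOverLimitWithLocalCache, and TestRedisWithUnitMultiplier above.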