Skip to content

Commit

Permalink
build: enable unused warnings (#1723)
Browse files Browse the repository at this point in the history
These were disabled some time back during some version
updates where something wasn't working correctly. Re-enable
and clean up a bunch of unused warnings.
  • Loading branch information
brharrington authored Nov 14, 2024
1 parent c053239 commit 193314c
Show file tree
Hide file tree
Showing 39 changed files with 58 additions and 127 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ object Ticks {
} else {
valueTickSizes
.find(t => r / t._1 <= n)
.fold(sciTicks(v1, v2, n))(t => decimalTicks(v1, v2, n, t, scale))
.fold(sciTicks(v1, v2))(t => decimalTicks(v1, v2, n, t, scale))
}
}

Expand Down Expand Up @@ -313,15 +313,15 @@ object Ticks {

binaryValueTickSizes
.find(t => r / t._1 <= n)
.fold(sciTicks(v1, v2, n))(t => binaryTicks(v1, v2, t))
.fold(sciTicks(v1, v2))(t => binaryTicks(v1, v2, t))
}

def duration(v1: Double, v2: Double, n: Int): List[ValueTick] = {
val r = validateAndGetRange(v1, v2)

durationValueTickSizes
.find(t => r / t._1 <= n)
.fold(sciTicks(v1, v2, n))(t => durationTicks(v1, v2, t))
.fold(sciTicks(v1, v2))(t => durationTicks(v1, v2, t))
}

/**
Expand Down Expand Up @@ -364,7 +364,7 @@ object Ticks {
if (range < 1e-12) 1.0 else range
}

private def sciTicks(v1: Double, v2: Double, n: Int): List[ValueTick] = {
private def sciTicks(v1: Double, v2: Double): List[ValueTick] = {
List(ValueTick(v1, 0.0), ValueTick(v2, 0.0))
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -232,7 +232,7 @@ class TicksSuite extends FunSuite {
}

test("sanity check, 0 to y") {
for (i <- 0 until 100; j <- 2 until 10) {
for (_ <- 0 until 100; j <- 2 until 10) {
val v = Random.nextDouble() * 1e12
try {
val ticks = Ticks.value(0.0, v, j)
Expand All @@ -245,7 +245,7 @@ class TicksSuite extends FunSuite {
}

test("sanity check, y1 to y2") {
for (i <- 0 until 100; j <- 2 until 10) {
for (_ <- 0 until 100; j <- 2 until 10) {
val v1 = Random.nextDouble() * 1e4
val v2 = v1 + Random.nextDouble() * 1e3
try {
Expand Down Expand Up @@ -379,7 +379,7 @@ class TicksSuite extends FunSuite {
}

test("binary sanity check, 0 to y") {
for (i <- 0 until 100; j <- 2 until 10) {
for (_ <- 0 until 100; j <- 2 until 10) {
val v = Random.nextDouble() * 1e12
try {
val ticks = Ticks.binary(0.0, v, j)
Expand All @@ -392,7 +392,7 @@ class TicksSuite extends FunSuite {
}

test("binary sanity check, y1 to y2") {
for (i <- 0 until 100; j <- 2 until 10) {
for (_ <- 0 until 100; j <- 2 until 10) {
val v1 = Random.nextDouble() * 1e4
val v2 = v1 + Random.nextDouble() * 1e3
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ class MemoryBlockStore(step: Long, blockSize: Int, numBlocks: Int) extends Block
}
}

private def fill(blk: Block, buf: Array[Double], start: Long, end: Long, aggr: Int): Unit = {
private def fill(blk: Block, buf: Array[Double], start: Long, end: Long): Unit = {
val s = start / step
val e = end / step
val bs = blk.start / step
Expand Down Expand Up @@ -198,7 +198,7 @@ class MemoryBlockStore(step: Long, blockSize: Int, numBlocks: Int) extends Block
val buffer = ArrayHelper.fill(size, Double.NaN)
var pos = 0
while (pos < numBlocks) {
if (blocks(pos) != null) fill(blocks(pos), buffer, start, end, aggr)
if (blocks(pos) != null) fill(blocks(pos), buffer, start, end)
pos += 1
}
buffer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ class RoaringTagIndex[T <: TaggedItem](items: Array[T], stats: IndexStats) exten
case LessThan(k, v) => lessThan(k, v, false)
case LessThanEqual(k, v) => lessThan(k, v, true)
case q: In => findImpl(q.toOrQuery, offset)
case q: PatternQuery => strPattern(q, offset)
case q: PatternQuery => strPattern(q)
case HasKey(k) => hasKey(k, offset)
case True => all.clone()
case False => new RoaringBitmap()
Expand Down Expand Up @@ -289,7 +289,7 @@ class RoaringTagIndex[T <: TaggedItem](items: Array[T], stats: IndexStats) exten
}
}

private def strPattern(q: Query.PatternQuery, offset: Int): RoaringBitmap = {
private def strPattern(q: Query.PatternQuery): RoaringBitmap = {
val kp = keyMap.get(q.k, -1)
val vidx = itemIndex.get(kp)
if (vidx == null) new RoaringBitmap()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ class TaggedItemIndex private (
case LessThan(k, v) => lessThan(k, v, false)
case LessThanEqual(k, v) => lessThan(k, v, true)
case q: In => find(q.toOrQuery, offset)
case q: PatternQuery => strPattern(q, offset)
case q: PatternQuery => strPattern(q)
case HasKey(k) => hasKey(k, offset)
case True => all.clone()
case False => new RoaringBitmap()
Expand Down Expand Up @@ -164,7 +164,7 @@ class TaggedItemIndex private (
}
}

private def strPattern(q: Query.PatternQuery, offset: Int): RoaringBitmap = {
private def strPattern(q: Query.PatternQuery): RoaringBitmap = {
val kp = keyMap.get(q.k, -1)
val vidx = keyValueIdx.get(kp)
if (vidx == null) new RoaringBitmap()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -861,7 +861,7 @@ case class RollupBlock(sum: Block, count: Block, min: Block, max: Block) extends
def update(pos: Int, value: Double): Unit = {
if (!value.isNaN) {
updateSum(pos, value)
updateCount(pos, value)
updateCount(pos)
updateMin(pos, value)
updateMax(pos, value)
}
Expand All @@ -872,7 +872,7 @@ case class RollupBlock(sum: Block, count: Block, min: Block, max: Block) extends
block.update(pos, Math.addNaN(block.get(pos), value))
}

private def updateCount(pos: Int, value: Double): Unit = {
private def updateCount(pos: Int): Unit = {
val block = count.asInstanceOf[MutableBlock]
block.update(pos, Math.addNaN(block.get(pos), 1.0))
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ package com.netflix.atlas.core.util
*/
object Step {

private final val oneMilli = 1L
private final val oneSecond = 1000L
private final val oneMinute = 60000L
private final val oneHour = 60 * oneMinute
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -470,10 +470,10 @@ object Strings {
parseRefVar(refs, r)
case UnixDate(d) =>
val t = d.toLong match {
case v if v <= Integer.MAX_VALUE => Instant.ofEpochSecond(v)
case v if v <= millisCutoff => Instant.ofEpochMilli(v)
case v if v <= microsCutoff => ofEpoch(v, 1_000_000L, 1_000L)
case v => ofEpoch(v, 1_000_000_000L, 1L)
case v if v <= secondsCutoff => Instant.ofEpochSecond(v)
case v if v <= millisCutoff => Instant.ofEpochMilli(v)
case v if v <= microsCutoff => ofEpoch(v, 1_000_000L, 1_000L)
case v => ofEpoch(v, 1_000_000_000L, 1L)
}
ZonedDateTime.ofInstant(t, tz)
case str =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ package com.netflix.atlas.core.index

import com.netflix.atlas.core.model.Query
import com.netflix.atlas.core.model.Tag
import com.netflix.atlas.core.model.*
import com.netflix.atlas.core.model.TimeSeries
import munit.FunSuite

object TagIndexSuite {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,8 +32,6 @@ class ClampSuite extends FunSuite {
)
)

private val des = StatefulExpr.Des(DataExpr.Sum(Query.Equal("name", "cpu")), 2, 0.1, 0.02)

def eval(expr: TimeSeriesExpr, data: List[List[Datapoint]]): List[List[TimeSeries]] = {
var state = Map.empty[StatefulExpr, Any]
data.map { ts =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@ package com.netflix.atlas.core.model
import com.netflix.atlas.core.stacklang.Interpreter
import munit.FunSuite

import scala.language.postfixOps

class PerStepSuite extends FunSuite {

private val interpreter = Interpreter(MathVocabulary.allWords)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -323,7 +323,6 @@ class PercentilesSuite extends FunSuite {
assertEquals(data.size, 2)
List("even", "odd").zip(data).foreach {
case (m, t) =>
val estimate = t.data(0L)
assertEquals(t.tags, Map("name" -> "test", "mode" -> m, "percentile" -> " 90.0"))
assertEquals(t.label, f"(percentile((mode=$m), 90.0) * 1000.0)")
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,6 @@ class StringsSuite extends FunSuite {
}

test("parseDate, epoch + 4h") {
val ref = ZonedDateTime.of(2012, 2, 2, 3, 0, 0, 0, ZoneOffset.UTC)
val expected = ZonedDateTime.of(1970, 1, 1, 4, 0, 0, 0, ZoneOffset.UTC)
assertEquals(parseDate("epoch+4h", ZoneOffset.UTC), expected)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,6 @@ package com.netflix.atlas.core.validation

import com.typesafe.config.Config

class ConfigConstructorTestRule(config: Config) extends TagRule {
class ConfigConstructorTestRule(@scala.annotation.nowarn config: Config) extends TagRule {
override def validate(k: String, v: String): String = TagRule.Pass
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,10 @@ import org.apache.pekko.http.scaladsl.model.headers.*
import org.apache.pekko.stream.scaladsl.Compression
import org.apache.pekko.stream.scaladsl.Source
import org.apache.pekko.util.ByteString
import com.fasterxml.jackson.annotation.JsonProperty
import com.netflix.atlas.json.Json
import com.netflix.atlas.pekko.ByteStringInputStream
import com.typesafe.scalalogging.StrictLogging

import java.nio.charset.StandardCharsets
import scala.util.Failure
import scala.util.Success

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@ import com.netflix.atlas.core.model.DataExpr
import com.netflix.atlas.core.model.Query
import com.netflix.atlas.eval.model.AggrDatapoint
import com.netflix.atlas.eval.model.AggrValuesInfo
import com.netflix.atlas.eval.model.ExprType
import com.netflix.atlas.eval.model.LwcExpression
import com.netflix.atlas.eval.model.LwcMessages
import com.netflix.atlas.eval.model.TimeGroup
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ import com.netflix.atlas.core.model.TimeSeries
import com.netflix.atlas.core.util.IdentityMap
import com.netflix.atlas.eval.model.ArrayData
import com.netflix.atlas.eval.model.ChunkData
import com.netflix.atlas.eval.model.ExprType
import com.netflix.atlas.eval.model.TimeGroup
import com.netflix.atlas.eval.model.TimeGroupsTuple
import com.netflix.atlas.eval.model.TimeSeriesMessage
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ import java.nio.file.Path
import java.nio.file.Paths
import java.util.UUID
import org.apache.pekko.NotUsed
import org.apache.pekko.http.scaladsl.model.HttpRequest
import org.apache.pekko.http.scaladsl.model.Uri
import org.apache.pekko.stream.IOResult
import org.apache.pekko.stream.Materializer
Expand All @@ -36,7 +35,6 @@ import com.netflix.atlas.eval.model.ExprType
import com.netflix.atlas.eval.stream.Evaluator.DataSource
import com.netflix.atlas.eval.stream.Evaluator.DataSources
import com.netflix.atlas.json.JsonSupport
import com.netflix.atlas.pekko.AccessLogger
import com.netflix.atlas.pekko.DiagnosticMessage
import com.netflix.atlas.pekko.PekkoHttpClient
import com.netflix.atlas.pekko.StreamOps
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@ import org.apache.pekko.http.scaladsl.model.HttpResponse
import org.apache.pekko.stream.scaladsl.Flow
import com.netflix.atlas.eval.stream.Evaluator.DataSource
import com.netflix.atlas.eval.stream.Evaluator.DataSources
import com.netflix.atlas.json.JsonSupport
import com.netflix.atlas.pekko.AccessLogger

import scala.util.Try

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,6 @@ class EddaSourceSuite extends FunSuite {
}

test("substitute, IPv6 preferred if available") {
val uri = "http://{ip}:{port}"
val instance = EddaSource.Instance("i-1", Some("1.2.3.4"), Some("::1"))
assertEquals(instance.substitute("http://{ip}:{port}"), "http://[::1]:7101")
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,11 @@ import java.time.Duration
import java.util.concurrent.TimeoutException
import java.util.concurrent.atomic.AtomicInteger
import org.apache.pekko.actor.ActorSystem
import org.apache.pekko.stream.scaladsl.BroadcastHub
import org.apache.pekko.stream.scaladsl.Flow
import org.apache.pekko.stream.scaladsl.Keep
import org.apache.pekko.stream.scaladsl.Sink
import org.apache.pekko.stream.scaladsl.Source
import org.apache.pekko.stream.scaladsl.StreamConverters
import com.netflix.atlas.chart.util.SrcPath
import com.netflix.atlas.core.model.FilterExpr.Filter
import com.netflix.atlas.core.util.Streams
import com.netflix.atlas.eval.model.ArrayData
import com.netflix.atlas.eval.model.LwcDatapoint
import com.netflix.atlas.eval.model.LwcDiagnosticMessage
Expand All @@ -44,9 +40,7 @@ import com.netflix.atlas.eval.stream.Evaluator.MessageEnvelope
import com.netflix.atlas.json.Json
import com.netflix.atlas.json.JsonSupport
import com.netflix.atlas.pekko.DiagnosticMessage
import com.netflix.atlas.pekko.StreamOps
import com.netflix.spectator.api.DefaultRegistry
import com.netflix.spectator.api.NoopRegistry
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigValueFactory
import nl.jqno.equalsverifier.EqualsVerifier
Expand All @@ -57,7 +51,6 @@ import org.apache.pekko.util.ByteString

import java.nio.file.Path
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.Promise
import scala.concurrent.duration.DurationInt
import scala.jdk.CollectionConverters.CollectionHasAsScala
Expand Down Expand Up @@ -815,7 +808,7 @@ class EvaluatorSuite extends FunSuite {

val evaluator = new Evaluator(config, registry, system)

val future = Source(sampleData(1, 10))
val future = Source(input)
.via(Flow.fromProcessor(() => evaluator.createDatapointProcessor(sources)))
.runWith(Sink.seq[MessageEnvelope])
Await.result(future, scala.concurrent.duration.Duration.Inf).toList
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@ import com.netflix.atlas.eval.model.AggrDatapoint
import com.netflix.atlas.eval.model.LwcMessages
import com.netflix.atlas.eval.stream.Evaluator.DataSource
import com.netflix.atlas.eval.stream.Evaluator.DataSources
import com.netflix.atlas.json.JsonSupport
import com.netflix.atlas.pekko.DiagnosticMessage
import com.typesafe.config.ConfigFactory
import munit.FunSuite
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ import com.netflix.spectator.api.Registry
import com.typesafe.config.ConfigFactory
import org.apache.pekko.http.scaladsl.model.StatusCodes

import scala.util.Failure
import scala.util.Success
import scala.util.Try

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ class IntIntMap {
val map = new java.util.HashMap[Int, Int](10)
var i = 0
while (i < values800.length) {
map.compute(values800(i), (k, v) => v + 1)
map.compute(values800(i), (_, v) => v + 1)
i += 1
}
bh.consume(map)
Expand All @@ -94,7 +94,7 @@ class IntIntMap {
val map = new java.util.HashMap[Int, Int](10)
var i = 0
while (i < values8k.length) {
map.compute(values8k(i), (k, v) => v + 1)
map.compute(values8k(i), (_, v) => v + 1)
i += 1
}
bh.consume(map)
Expand Down
Loading

0 comments on commit 193314c

Please sign in to comment.