Commit
The goal is to allow deploying a dedicated FLP instance as just a k8s cache component, alongside the "traditional" FLP deployments. The k8s cache is a Kafka producer, and the traditional instances are consumers.
- New FLP binary, k8s-cache, that just starts the informers and writes update events to Kafka
- New optional config for the kubernetes enrichment, allowing it to read from Kafka instead of using informers
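For context, here is a conceptual sketch (not the code of this commit) of the producer side described above: an informer watches Kubernetes objects and publishes each update event to Kafka, which the traditional FLP instances then consume for enrichment. All names, the broker address, and the topic are illustrative.

// Conceptual sketch only, not the commit's implementation: a Pod informer
// publishing update events to Kafka, illustrating the producer/consumer split.
package main

import (
	"context"
	"encoding/json"
	"log"

	kafkago "github.com/segmentio/kafka-go"
	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		log.Fatal(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	// Placeholder broker and topic.
	writer := &kafkago.Writer{Addr: kafkago.TCP("my-kafka-broker:9092"), Topic: "k8s-cache-updates"}

	factory := informers.NewSharedInformerFactory(client, 0)
	podInformer := factory.Core().V1().Pods().Informer()
	podInformer.AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			pod := obj.(*corev1.Pod)
			// Serialize the metadata and publish it as one Kafka message.
			payload, _ := json.Marshal(pod.ObjectMeta)
			_ = writer.WriteMessages(context.Background(), kafkago.Message{Value: payload})
		},
	})

	stop := make(chan struct{})
	factory.Start(stop)
	<-stop
}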
Showing 25 changed files with 1,033 additions and 431 deletions.
@@ -1,4 +1,5 @@
/flowlogs-pipeline
/confgenerator
/k8s-cache
/bin/
cover.out
@@ -0,0 +1,78 @@
package main

import (
	"flag"
	"fmt"
	"os"

	"github.com/netobserv/flowlogs-pipeline/pkg/api"
	"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/transform/kubernetes"
	"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/utils"
	"github.com/sirupsen/logrus"
	"gopkg.in/yaml.v2"
)

var (
	buildVersion = "unknown"
	buildDate    = "unknown"
	app          = "flp-cache"
	configPath   = flag.String("config", "", "path to a config file")
	versionFlag  = flag.Bool("v", false, "print version")
	log          = logrus.WithField("module", "main")
)

type Config struct {
	KubeConfigPath string          `yaml:"kubeConfigPath"`
	KafkaConfig    api.EncodeKafka `yaml:"kafkaConfig"`
	PProfPort      int32           `yaml:"pprofPort"` // TODO: manage pprof
	LogLevel       string          `yaml:"logLevel"`
}

func main() {
	flag.Parse()

	appVersion := fmt.Sprintf("%s [build version: %s, build date: %s]", app, buildVersion, buildDate)
	if *versionFlag {
		fmt.Println(appVersion)
		os.Exit(0)
	}

	cfg, err := readConfig(*configPath)
	if err != nil {
		log.WithError(err).Fatal("error reading config file")
	}

	lvl, err := logrus.ParseLevel(cfg.LogLevel)
	if err != nil {
		log.Errorf("Log level %s not recognized, using info", cfg.LogLevel)
		lvl = logrus.InfoLevel
	}
	logrus.SetLevel(lvl)
	log.Infof("Starting %s at log level %s", appVersion, lvl)
	log.Infof("Configuration: %#v", cfg)

	err = kubernetes.InitInformerDatasource(cfg.KubeConfigPath, &cfg.KafkaConfig)
	if err != nil {
		log.WithError(err).Fatal("error initializing Kubernetes & informers")
	}

	stopCh := utils.SetupElegantExit()
	<-stopCh
}

func readConfig(path string) (*Config, error) {
	var cfg Config
	if len(path) == 0 {
		return &cfg, nil
	}
	yamlFile, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	err = yaml.Unmarshal(yamlFile, &cfg)
	if err != nil {
		return nil, err
	}

	return &cfg, nil
}
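For reference, a minimal sketch of what a config file for this binary could look like, parsed through the same yaml tags as the Config struct above. The kafkaConfig keys (address, topic) assume the yaml tags of api.EncodeKafka, and the broker address, topic name, and struct name are placeholders, not values from this commit.

// Illustrative sketch only: a sample config run through the same yaml tags.
package main

import (
	"fmt"

	"github.com/netobserv/flowlogs-pipeline/pkg/api"
	"gopkg.in/yaml.v2"
)

// cacheConfig mirrors the Config struct of the k8s-cache binary above.
type cacheConfig struct {
	KubeConfigPath string          `yaml:"kubeConfigPath"`
	KafkaConfig    api.EncodeKafka `yaml:"kafkaConfig"`
	PProfPort      int32           `yaml:"pprofPort"`
	LogLevel       string          `yaml:"logLevel"`
}

const sampleConfig = `
kubeConfigPath: ""   # empty: use in-cluster configuration
logLevel: debug
kafkaConfig:
  address: my-kafka-broker:9092
  topic: k8s-cache-updates
`

func main() {
	var cfg cacheConfig
	if err := yaml.Unmarshal([]byte(sampleConfig), &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg)
}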
@@ -0,0 +1,29 @@
# We do not use --platform feature to auto fill this ARG because of incompatibility between podman and docker
ARG TARGETPLATFORM=linux/amd64
ARG BUILDPLATFORM=linux/amd64
FROM --platform=$BUILDPLATFORM docker.io/library/golang:1.22 as builder

ARG TARGETPLATFORM
ARG TARGETARCH=amd64
WORKDIR /app

# Copy source code
COPY go.mod .
COPY go.sum .
COPY Makefile .
COPY .mk/ .mk/
COPY .bingo/ .bingo/
COPY vendor/ vendor/
COPY .git/ .git/
COPY cmd/ cmd/
COPY pkg/ pkg/

RUN git status --porcelain
RUN GOARCH=$TARGETARCH make build_k8s_cache

# final stage
FROM --platform=$TARGETPLATFORM registry.access.redhat.com/ubi9/ubi-minimal:9.4

COPY --from=builder /app/k8s-cache /app/

ENTRYPOINT ["/app/k8s-cache"]
@@ -0,0 +1,117 @@
package kafka

import (
	"errors"
	"os"
	"time"

	"github.com/netobserv/flowlogs-pipeline/pkg/api"
	"github.com/netobserv/flowlogs-pipeline/pkg/pipeline/utils"
	kafkago "github.com/segmentio/kafka-go"
	"github.com/sirupsen/logrus"
)

var klog = logrus.WithField("component", "kafka-reader")

const defaultBatchReadTimeout = int64(1000)
const defaultKafkaBatchMaxLength = 500
const defaultKafkaCommitInterval = 500

func NewReader(config *api.IngestKafka) (*kafkago.Reader, int, error) {
	startOffsetString := config.StartOffset
	var startOffset int64
	switch startOffsetString {
	case "FirstOffset", "":
		startOffset = kafkago.FirstOffset
	case "LastOffset":
		startOffset = kafkago.LastOffset
	default:
		startOffset = kafkago.FirstOffset
		klog.Errorf("illegal value for StartOffset: %s; using default", startOffsetString)
	}
	klog.Debugf("startOffset = %v", startOffset)
	groupBalancers := make([]kafkago.GroupBalancer, 0)
	for _, gb := range config.GroupBalancers {
		switch gb {
		case "range":
			groupBalancers = append(groupBalancers, &kafkago.RangeGroupBalancer{})
		case "roundRobin":
			groupBalancers = append(groupBalancers, &kafkago.RoundRobinGroupBalancer{})
		case "rackAffinity":
			groupBalancers = append(groupBalancers, &kafkago.RackAffinityGroupBalancer{})
		default:
			klog.Warningf("unknown group balancer %q; using roundRobin", gb)
			groupBalancers = append(groupBalancers, &kafkago.RoundRobinGroupBalancer{})
		}
	}

	batchReadTimeout := defaultBatchReadTimeout
	if config.BatchReadTimeout != 0 {
		batchReadTimeout = config.BatchReadTimeout
	}
	klog.Debugf("batchReadTimeout = %d", batchReadTimeout)

	commitInterval := int64(defaultKafkaCommitInterval)
	if config.CommitInterval != 0 {
		commitInterval = config.CommitInterval
	}
	klog.Debugf("commitInterval = %d", commitInterval)

	dialer := &kafkago.Dialer{
		Timeout:   kafkago.DefaultDialer.Timeout,
		DualStack: kafkago.DefaultDialer.DualStack,
	}
	if config.TLS != nil {
		klog.Infof("Using TLS configuration: %v", config.TLS)
		tlsConfig, err := config.TLS.Build()
		if err != nil {
			return nil, 0, err
		}
		dialer.TLS = tlsConfig
	}

	if config.SASL != nil {
		m, err := utils.SetupSASLMechanism(config.SASL)
		if err != nil {
			return nil, 0, err
		}
		dialer.SASLMechanism = m
	}

	readerConfig := kafkago.ReaderConfig{
		Brokers:        config.Brokers,
		Topic:          config.Topic,
		GroupID:        config.GroupID,
		GroupBalancers: groupBalancers,
		StartOffset:    startOffset,
		CommitInterval: time.Duration(commitInterval) * time.Millisecond,
		Dialer:         dialer,
	}

	if readerConfig.GroupID == "" {
		// Use hostname
		readerConfig.GroupID = os.Getenv("HOSTNAME")
	}

	if config.PullQueueCapacity > 0 {
		readerConfig.QueueCapacity = config.PullQueueCapacity
	}

	if config.PullMaxBytes > 0 {
		readerConfig.MaxBytes = config.PullMaxBytes
	}

	bml := defaultKafkaBatchMaxLength
	if config.BatchMaxLen != 0 {
		bml = config.BatchMaxLen
	}

	klog.Debugf("reader config: %#v", readerConfig)

	kafkaReader := kafkago.NewReader(readerConfig)
	if kafkaReader == nil {
		return nil, 0, errors.New("NewIngestKafka: failed to create kafka-go reader")
	}

	return kafkaReader, bml, nil
}
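NewReader returns both a configured kafka-go reader and the batch max length. Below is a minimal usage sketch from the consumer side; the import path of this new package, the broker address, topic, and group ID are assumptions, not values taken from the commit.

// Illustrative usage sketch only: drive the reader returned by NewReader.
package main

import (
	"context"

	"github.com/netobserv/flowlogs-pipeline/pkg/api"
	"github.com/netobserv/flowlogs-pipeline/pkg/kafka" // assumed import path for the package above
	"github.com/sirupsen/logrus"
)

func main() {
	reader, batchMaxLen, err := kafka.NewReader(&api.IngestKafka{
		Brokers: []string{"my-kafka-broker:9092"}, // placeholder broker
		Topic:   "k8s-cache-updates",              // placeholder topic
		GroupID: "flp-enrichers",                  // placeholder consumer group
	})
	if err != nil {
		logrus.WithError(err).Fatal("cannot create Kafka reader")
	}
	defer reader.Close()

	// Read up to one batch of messages; with a GroupID set, kafka-go commits
	// offsets automatically at the configured CommitInterval.
	ctx := context.Background()
	for i := 0; i < batchMaxLen; i++ {
		msg, err := reader.ReadMessage(ctx)
		if err != nil {
			logrus.WithError(err).Error("read failed")
			break
		}
		logrus.Debugf("received %d bytes from partition %d", len(msg.Value), msg.Partition)
	}
}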