Merge pull request #21 from chrroberts-pure/update-dep
v1.0.2 - upgrade dependencies for CVE & docker improvements
genegr authored Mar 28, 2023
2 parents b9f2768 + 1694d16 commit 50905c4
Showing 6 changed files with 106 additions and 536 deletions.
23 changes: 14 additions & 9 deletions Makefile
@@ -2,7 +2,7 @@ GOCMD=go
GOTEST=$(GOCMD) test
GOVET=$(GOCMD) vet
BINARY_NAME=pure-fb-om-exporter
VERSION?=1.0.1
VERSION?=1.0.2
SERVICE_PORT?=9491
DOCKER_REGISTRY?= quay.io/purestorage/
EXPORT_RESULT?=false # for CI please set EXPORT_RESULT to true
@@ -18,9 +18,13 @@ RESET := $(shell tput -Txterm sgr0)
all: help

## Build:
build: ## Build your project and put the output binary in out/bin/
init:
$(GOCMD) mod init $(MODULE_NAME)
$(GOCMD) mod tidy

build: ## Build project and put the output binary in out/bin/
mkdir -p out/bin
GO111MODULE=on $(GOCMD) build -o out/bin/$(BINARY_NAME) cmd/fb-om-exporter/main.go
CGO_ENABLED=0 GO111MODULE=on $(GOCMD) build -a -tags 'netgo osusergo static_build' -ldflags='-X main.version=v$(VERSION)' -o out/bin/$(BINARY_NAME) cmd/fb-om-exporter/main.go

clean: ## Remove build related file
rm -fr ./bin
@@ -53,15 +57,15 @@ lint: lint-go lint-dockerfile lint-yaml ## Run all available linters

lint-dockerfile: ## Lint your Dockerfile
# If dockerfile is present we lint it.
ifeq ($(shell test -e ./Dockerfile && echo -n yes),yes)
ifeq ($(shell test -e ./build/docker/Dockerfile && echo -n yes),yes)
$(eval CONFIG_OPTION = $(shell [ -e $(shell pwd)/.hadolint.yaml ] && echo "-v $(shell pwd)/.hadolint.yaml:/root/.config/hadolint.yaml" || echo "" ))
$(eval OUTPUT_OPTIONS = $(shell [ "${EXPORT_RESULT}" == "true" ] && echo "--format checkstyle" || echo "" ))
$(eval OUTPUT_FILE = $(shell [ "${EXPORT_RESULT}" == "true" ] && echo "| tee /dev/tty > checkstyle-report.xml" || echo "" ))
docker run --rm -i $(CONFIG_OPTION) hadolint/hadolint hadolint $(OUTPUT_OPTIONS) - < ./Dockerfile $(OUTPUT_FILE)
$(eval OUTPUT_OPTIONS = $(shell [ "${EXPORT_RESULT}" = "true" ] && echo "--format checkstyle" || echo "" ))
$(eval OUTPUT_FILE = $(shell [ "${EXPORT_RESULT}" = "true" ] && echo "| tee /dev/tty > checkstyle-report.xml" || echo "" ))
docker run --rm -i $(CONFIG_OPTION) hadolint/hadolint hadolint $(OUTPUT_OPTIONS) - < ./build/docker/Dockerfile $(OUTPUT_FILE)
endif

lint-go: ## Use golintci-lint on your project
$(eval OUTPUT_OPTIONS = $(shell [ "${EXPORT_RESULT}" == "true" ] && echo "--out-format checkstyle ./... | tee /dev/tty > checkstyle-report.xml" || echo "" ))
$(eval OUTPUT_OPTIONS = $(shell [ "${EXPORT_RESULT}" = "true" ] && echo "--out-format checkstyle ./... | tee /dev/tty > checkstyle-report.xml" || echo "" ))
docker run --rm -v $(shell pwd):/app -w /app golangci/golangci-lint:latest-alpine golangci-lint run --deadline=65s $(OUTPUT_OPTIONS)

lint-yaml: ## Use yamllint on the yaml file of your projects
@@ -73,7 +77,7 @@ endif

## Docker:
docker-build: ## Use the dockerfile to build the container
docker build --rm --tag $(BINARY_NAME) --file build/docker/Dockerfile .
docker build --rm --tag $(BINARY_NAME) --build-arg VERSION=$(VERSION) --file build/docker/Dockerfile .

docker-release: ## Release the container with tag latest and version
docker tag $(BINARY_NAME) $(DOCKER_REGISTRY)$(BINARY_NAME):latest
@@ -93,3 +97,4 @@ help: ## Show this help.
if (/^[a-zA-Z_-]+:.*?##.*$$/) {printf " ${YELLOW}%-20s${GREEN}%s${RESET}\n", $$1, $$2} \
else if (/^## .*$$/) {printf " ${CYAN}%s${RESET}\n", substr($$1,4)} \
}' $(MAKEFILE_LIST)

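The reworked `build` target above disables cgo, builds a statically linked binary, and stamps the release number into it via `-ldflags='-X main.version=v$(VERSION)'`. Below is a minimal sketch (not the exporter's actual source; the file and output names are illustrative) of how that linker flag overrides a package-level variable. It matches the change in `cmd/fb-om-exporter/main.go` further down, where the hard-coded `1.0.1` default becomes `development` so that only the Makefile and Dockerfile carry the release number.

```go
// version_demo.go - standalone sketch of link-time version stamping.
// Built plainly it prints "development"; built with
//   go build -ldflags="-X main.version=v1.0.2" -o demo version_demo.go
// the linker replaces the value and it prints "v1.0.2".
package main

import "fmt"

// Package-level string variable targeted by -X main.version=...
var version string = "development"

func main() {
	fmt.Println("pure-fb-om-exporter", version)
}
```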
98 changes: 49 additions & 49 deletions README.md
@@ -76,12 +76,12 @@ Authentication is used by the exporter as the mechanism to cross authenticate to
The exporter understands the following requests:


URL | GET parameters | description
---|---|---
http://\<exporter-host\>:\<port\>/metrics | endpoint | Full array metrics
http://\<exporter-host\>:\<port\>/metrics/array | endpoint | Array metrics
http://\<exporter-host\>:\<port\>/metrics/clients | endpoint | Clients metrics
http://\<exporter-host\>:\<port\>/metrics/usage | endpoint | Quotas usage metrics
| URL | GET parameters | description |
| ------------------------------------------------- | -------------- | -------------------- |
| http://\<exporter-host\>:\<port\>/metrics | endpoint | Full array metrics |
| http://\<exporter-host\>:\<port\>/metrics/array | endpoint | Array metrics |
| http://\<exporter-host\>:\<port\>/metrics/clients | endpoint | Clients metrics |
| http://\<exporter-host\>:\<port\>/metrics/usage | endpoint | Quotas usage metrics |


Depending on the target array, scraping the whole set of metrics could result in timeout issues; in that case it is suggested either to increase the scrape timeout or to scrape each individual endpoint instead.
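As a hedged sketch of the per-endpoint alternative, the snippet below queries each split path with an explicit client timeout. The host name, port, and the `endpoint` value are placeholders, and any authorization header required by the exporter's token handling (described in the README's authentication section) is omitted here.

```go
// Sketch only: scrape the split endpoints individually instead of the
// full /metrics path, with a per-request timeout budget.
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 30 * time.Second}
	paths := []string{"/metrics/array", "/metrics/clients", "/metrics/usage"}
	for _, p := range paths {
		// "endpoint" selects which FlashBlade the exporter should query.
		resp, err := client.Get("http://exporter-host:9491" + p + "?endpoint=fb01.example.com")
		if err != nil {
			fmt.Println(p, "error:", err)
			continue
		}
		body, _ := io.ReadAll(resp.Body)
		resp.Body.Close()
		fmt.Printf("%s -> %d (%d metric bytes)\n", p, resp.StatusCode, len(body))
	}
}
```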
@@ -112,49 +112,49 @@ A simple but complete example to deploy a full monitoring stack on kubernetes ca

### Metrics Collected

|Metric Name |Description |
|------------------------------------------------------|---------------------------------------------------------|
|purefb_alerts_open |FlashBlade open alert events |
|purefb_info |FlashBlade system information |
|purefb_array_http_specific_performance_latency_usec |FlashBlade array HTTP specific latency |
|purefb_array_http_specific_performance_throughput_iops|FlashBlade array HTTP specific throughput |
|purefb_array_nfs_specific_performance_latency_usec |FlashBlade array NFS specific latency |
|purefb_array_nfs_specific_performance_throughput_iops |FlashBlade array NFS specific throughput |
|purefb_array_performance_latency_usec |FlashBlade array latency |
|purefb_array_performance_throughput_iops |FlashBlade array throughput |
|purefb_array_performance_bandwidth_bytes |FlashBlade array throughput |
|purefb_array_performance_average_bytes |FlashBlade array average operations size |
|purefb_array_performance_replication |FlashBlade array replication throughput |
|purefb_array_s3_performance_latency_usec |FlashBlade array latency |
|purefb_array_s3_performance_throughput_iops |FlashBlade array throughput |
|purefb_array_space_data_reduction_ratio |FlashBlade space data reduction |
|purefb_array_space_bytes |FlashBlade space in bytes |
|purefb_array_space_parity |FlashBlade space parity |
|purefb_array_space_utilization |FlashBlade array space utilization in percent |
|purefb_buckets_performance_latency_usec |FlashBlade buckets latency |
|purefb_buckets_performance_throughput_iops |FlashBlade buckets throughput |
|purefb_buckets_performance_bandwidth_bytes |FlashBlade buckets bandwidth |
|purefb_buckets_performance_average_bytes |FlashBlade buckets average operations size |
|purefb_buckets_s3_specific_performance_latency_usec |FlashBlade buckets S3 specific latency |
|purefb_buckets_s3_specific_performance_throughput_iops|FlashBlade buckets S3 specific throughput |
|purefb_buckets_space_data_reduction_ratio |FlashBlade buckets space data reduction |
|purefb_buckets_space_bytes |FlashBlade buckets space in bytes |
|purefb_clients_performance_latency_usec |FlashBlade clients latency |
|purefb_clients_performance_throughput_iops |FlashBlade clients throughput |
|purefb_clients_performance_bandwidth_bytes |FlashBlade clients bandwidth |
|purefb_clients_performance_average_bytes |FlashBlade clients average operations size |
|purefb_file_systems_performance_latency_usec |FlashBlade file systems latency |
|purefb_file_systems_performance_throughput_iops |FlashBlade file systems throughput |
|purefb_file_systems_performance_bandwidth_bytes |FlashBlade file systems bandwidth |
|purefb_file_systems_performance_average_bytes |FlashBlade file systems average operations size |
|purefb_file_systems_space_data_reduction_ratio |FlashBlade file systems space data reduction |
|purefb_file_systems_space_bytes |FlashBlade file systems space in bytes |
|purefb_hardware_health |FlashBlade hardware component health status |
|purefb_hardware_connectors_performance_throughput_pkts|FlashBlade hardware connectors performance throughput |
|purefb_hardware_connectors_performance_bandwidth_bytes|FlashBlade hardware connectors performance bandwidth |
|purefb_shardware_connectors_performance_errors |FlashBlade hardware connectors performance errors per sec|
|purefb_file_system_usage_users_bytes |FlashBlade file system users usage |
|purefb_file_system_usage_groups_bytes |FlashBlade file system groups usage |
| Metric Name | Description |
| ------------------------------------------------------ | --------------------------------------------------------- |
| purefb_alerts_open | FlashBlade open alert events |
| purefb_info | FlashBlade system information |
| purefb_array_http_specific_performance_latency_usec | FlashBlade array HTTP specific latency |
| purefb_array_http_specific_performance_throughput_iops | FlashBlade array HTTP specific throughput |
| purefb_array_nfs_specific_performance_latency_usec | FlashBlade array NFS specific latency |
| purefb_array_nfs_specific_performance_throughput_iops | FlashBlade array NFS specific throughput |
| purefb_array_performance_latency_usec | FlashBlade array latency |
| purefb_array_performance_throughput_iops | FlashBlade array throughput |
| purefb_array_performance_bandwidth_bytes | FlashBlade array throughput |
| purefb_array_performance_average_bytes | FlashBlade array average operations size |
| purefb_array_performance_replication | FlashBlade array replication throughput |
| purefb_array_s3_performance_latency_usec | FlashBlade array latency |
| purefb_array_s3_performance_throughput_iops | FlashBlade array throughput |
| purefb_array_space_data_reduction_ratio | FlashBlade space data reduction |
| purefb_array_space_bytes | FlashBlade space in bytes |
| purefb_array_space_parity | FlashBlade space parity |
| purefb_array_space_utilization | FlashBlade array space utilization in percent |
| purefb_buckets_performance_latency_usec | FlashBlade buckets latency |
| purefb_buckets_performance_throughput_iops | FlashBlade buckets throughput |
| purefb_buckets_performance_bandwidth_bytes | FlashBlade buckets bandwidth |
| purefb_buckets_performance_average_bytes | FlashBlade buckets average operations size |
| purefb_buckets_s3_specific_performance_latency_usec | FlashBlade buckets S3 specific latency |
| purefb_buckets_s3_specific_performance_throughput_iops | FlashBlade buckets S3 specific throughput |
| purefb_buckets_space_data_reduction_ratio | FlashBlade buckets space data reduction |
| purefb_buckets_space_bytes | FlashBlade buckets space in bytes |
| purefb_clients_performance_latency_usec | FlashBlade clients latency |
| purefb_clients_performance_throughput_iops | FlashBlade clients throughput |
| purefb_clients_performance_bandwidth_bytes | FlashBlade clients bandwidth |
| purefb_clients_performance_average_bytes | FlashBlade clients average operations size |
| purefb_file_systems_performance_latency_usec | FlashBlade file systems latency |
| purefb_file_systems_performance_throughput_iops | FlashBlade file systems throughput |
| purefb_file_systems_performance_bandwidth_bytes | FlashBlade file systems bandwidth |
| purefb_file_systems_performance_average_bytes | FlashBlade file systems average operations size |
| purefb_file_systems_space_data_reduction_ratio | FlashBlade file systems space data reduction |
| purefb_file_systems_space_bytes | FlashBlade file systems space in bytes |
| purefb_hardware_health | FlashBlade hardware component health status |
| purefb_hardware_connectors_performance_throughput_pkts | FlashBlade hardware connectors performance throughput |
| purefb_hardware_connectors_performance_bandwidth_bytes | FlashBlade hardware connectors performance bandwidth |
| purefb_shardware_connectors_performance_errors | FlashBlade hardware connectors performance errors per sec |
| purefb_file_system_usage_users_bytes | FlashBlade file system users usage |
| purefb_file_system_usage_groups_bytes | FlashBlade file system groups usage |

## Monitoring On-Premise with Prometheus and Grafana
Take a holistic overview of your Pure Storage FlashBlade estate on-premise with Prometheus and Grafana to summarize statistics such as:
11 changes: 8 additions & 3 deletions build/docker/Dockerfile
@@ -1,4 +1,5 @@
FROM golang:alpine
FROM golang:alpine as build
ARG VERSION=development

WORKDIR /usr/src/app

@@ -7,8 +8,12 @@ COPY go.mod go.sum ./
RUN go mod download && go mod verify

COPY . .
RUN go build -v -o /usr/local/bin/pure-fb-om-exporter cmd/fb-om-exporter/main.go
RUN CGO_ENABLED=1 go build -a -tags 'netgo osusergo static_build' -ldflags="-X main.version=v$VERSION" -v -o /usr/local/bin/pure-fb-om-exporter cmd/fb-om-exporter/main.go


# alpine is used here as it seems to be the minimal image that passes quay.io vulnerability scan
FROM alpine
COPY --from=build /usr/local/bin/pure-fb-om-exporter /pure-fb-om-exporter
EXPOSE 9491
ENTRYPOINT ["/usr/local/bin/pure-fb-om-exporter"]
ENTRYPOINT ["/pure-fb-om-exporter"]
CMD ["--host", "0.0.0.0", "--port", "9491"]
4 changes: 2 additions & 2 deletions cmd/fb-om-exporter/main.go
@@ -14,7 +14,7 @@ import (
"github.com/prometheus/client_golang/prometheus/promhttp"
)

var version string = "1.0.1"
var version string = "development"
var debug bool = false

func main() {
@@ -25,7 +25,7 @@ func main() {
flag.Parse()
addr := fmt.Sprintf("%s:%d", *host, *port)
debug = *d
log.Printf("Start Pure Storage FlashBlade exporter v%s on %s", version, addr)
log.Printf("Start Pure Storage FlashBlade exporter %s on %s", version, addr)

http.HandleFunc("/", index)
http.HandleFunc("/metrics/array", func(w http.ResponseWriter, r *http.Request) {
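For context, here is a minimal sketch (not the exporter's actual collector wiring) of the handler pattern visible in this hunk: each `/metrics/*` path gets its own handler, and the FlashBlade to scrape is chosen via the `endpoint` query parameter. The response body and error handling are illustrative; the real exporter serves Prometheus metrics via promhttp.

```go
// Sketch of the per-path handler registration pattern shown above.
package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	http.HandleFunc("/metrics/array", func(w http.ResponseWriter, r *http.Request) {
		target := r.URL.Query().Get("endpoint")
		if target == "" {
			http.Error(w, "missing endpoint parameter", http.StatusBadRequest)
			return
		}
		// A real handler would register a FlashBlade collector for "target"
		// and serve its metrics through promhttp.
		fmt.Fprintf(w, "# array metrics for %s would be served here\n", target)
	})
	log.Fatal(http.ListenAndServe("0.0.0.0:9491", nil))
}
```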
22 changes: 11 additions & 11 deletions go.mod
@@ -1,21 +1,21 @@
module purestorage/fb-openmetrics-exporter

go 1.19
go 1.20

require (
github.com/go-resty/resty/v2 v2.7.0
github.com/prometheus/client_golang v1.13.0
github.com/prometheus/client_golang v1.14.0
)

require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.37.0 // indirect
github.com/prometheus/procfs v0.8.0 // indirect
golang.org/x/net v0.0.0-20220225172249-27dd8689420f // indirect
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a // indirect
google.golang.org/protobuf v1.28.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/prometheus/client_model v0.3.0 // indirect
github.com/prometheus/common v0.42.0 // indirect
github.com/prometheus/procfs v0.9.0 // indirect
golang.org/x/net v0.8.0 // indirect
golang.org/x/sys v0.6.0 // indirect
google.golang.org/protobuf v1.30.0 // indirect
)
