Skip to content

Commit 4442a58

Browse files
authored
feat: api cache system with dragonfly (#436)
**Describe the pull request** This pull request introduces a new feature to our API: a cache system implemented with Dragonfly. The cache system is primarily used to cache all proxied data from the intranet, with the aim of preventing bursts of requests against the intra API. By caching frequently requested data, the system reduces the load on the intra API, resulting in improved performance and response times. Dragonfly, a robust caching store, is used to handle the caching mechanism effectively, providing features such as cache expiration and invalidation. The cache system is designed to handle cache expiration gracefully and to ensure that the most up-to-date data is served to users. **Checklist** - [ ] I have linked the related issue to this pull request - [ ] I have made the modifications or added the tests related to my PR - [ ] I have added/updated the documentation for my PR - [ ] I marked my PR as Ready for Review only once every item on this checklist was checked **Breaking changes?** no
1 parent 8d0f22e commit 4442a58

29 files changed

+759
-85
lines changed

.devcontainer/Dockerfile

+5-5
Original file line numberDiff line numberDiff line change
@@ -12,13 +12,13 @@ RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/
1212
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
1313
&& apt-get -y install --no-install-recommends gnupg2 postgresql-client \
1414
iputils-ping dnsutils vim htop nano sudo curl build-essential zsh wget \
15-
fonts-powerline tig ca-certificates software-properties-common && \
15+
fonts-powerline tig ca-certificates software-properties-common redis-tools && \
1616
# Register kubectl source list
17-
curl -fsSLo /usr/share/keyrings/kubernetes-archive-keyring.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg && \
18-
echo "deb [signed-by=/usr/share/keyrings/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list && \
17+
curl -fsSLo /etc/apt/trusted.gpg.d/kubernetes-archive-keyring.gpg https://dl.k8s.io/apt/doc/apt-key.gpg && \
18+
echo "deb [signed-by=/etc/apt/trusted.gpg.d/kubernetes-archive-keyring.gpg] https://apt.kubernetes.io/ kubernetes-xenial main" | tee /etc/apt/sources.list.d/kubernetes.list && \
1919
# Register helm source list
20-
curl https://baltocdn.com/helm/signing.asc | gpg --dearmor | tee /usr/share/keyrings/helm.gpg > /dev/null && \
21-
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/helm.gpg] https://baltocdn.com/helm/stable/debian/ all main" | tee /etc/apt/sources.list.d/helm-stable-debian.list && \
20+
curl https://baltocdn.com/helm/signing.asc | gpg --dearmor | tee /etc/apt/trusted.gpg.d/helm.gpg > /dev/null && \
21+
echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/trusted.gpg.d/helm.gpg] https://baltocdn.com/helm/stable/debian/ all main" | tee /etc/apt/sources.list.d/helm-stable-debian.list && \
2222
# Run install of kubectl, helm and terraform
2323
apt-get update && \
2424
apt-get install kubectl helm && \

.devcontainer/docker-compose.yaml

+13
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ services:
2525
GO_ENV: development
2626
APP_VERSION: indev
2727
DATABASE_URL: postgresql://postgres:postgres@database:5432/s42?sslmode=disable
28+
KEYVALUE_STORE_URL: redis://:@dragonfly:6379
2829
AMQP_URL: amqp://rabbitmq:rabbitmq@rabbitmq:5672
2930
CORS_ORIGIN: http://localhost:3000
3031
SEARCHENGINE_MEILISEARCH_HOST: http://meilisearch:7700
@@ -60,6 +61,17 @@ services:
6061
# Add "forwardPorts": ["5432"] to **devcontainer.json** to forward PostgreSQL locally.
6162
# (Adding the "ports" property to this file will not forward from a Codespace.)
6263

64+
dragonfly:
65+
hostname: s42-dragonfly
66+
image: 'docker.dragonflydb.io/dragonflydb/dragonfly'
67+
restart: unless-stopped
68+
ulimits:
69+
memlock: -1
70+
volumes:
71+
- dragonfly-data:/data
72+
# Use "forwardPorts" in **devcontainer.json** to forward an workspace port locally.
73+
# (Adding the "ports" property to this file will not forward from a Codespace.)
74+
6375
rabbitmq:
6476
hostname: s42-rabbitmq
6577
image: ghcr.io/42atomys/s42-rabbitmq:3.10.2-management
@@ -91,6 +103,7 @@ services:
91103

92104
volumes:
93105
postgres-data:
106+
dragonfly-data:
94107
meilisearch-data:
95108
minio-data:
96109

.devcontainer/postStartCommand.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ make -f build/Makefile devcontainer-init
4141
# Create the s42-users bucket
4242
go install github.com/minio/mc@latest
4343
mc alias set s3 http://minio:9000 $AWS_ACCESS_KEY_ID $AWS_SECRET_ACCESS_KEY
44-
mc mb s3/$S3_BUCKET_USERS --ignore-existing --region europe-west1
44+
mc mb s3/s42-users --ignore-existing --region europe-west1
4545

4646
# Install and configure kubeseal
4747
go install github.com/bitnami-labs/sealed-secrets/cmd/[email protected]

.github/workflows/linters.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ jobs:
3030
- name: Setup go
3131
uses: actions/setup-go@v4
3232
with:
33-
go-version: "1.18"
33+
go-version: "1.20"
3434
check-latest: true
3535
- name: Setup protoc
3636
uses: arduino/setup-protoc@v1

.github/workflows/tests.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ jobs:
126126
- name: Setup go
127127
uses: actions/setup-go@v4
128128
with:
129-
go-version: "1.18"
129+
go-version: "1.20"
130130
check-latest: true
131131
- name: Setup protoc
132132
uses: arduino/setup-protoc@v1

.vscode/launch.json

+19-4
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,9 @@
88
"mode": "debug",
99
"program": "${workspaceFolder}/main.go",
1010
"args": ["serve", "api", "-g"],
11+
"env": {
12+
"DEBUG": "true"
13+
},
1114
"showLog": true
1215
},
1316
{
@@ -16,7 +19,10 @@
1619
"request": "launch",
1720
"mode": "debug",
1821
"program": "${workspaceFolder}/main.go",
19-
"args": ["serve", "jwtks"]
22+
"args": ["serve", "jwtks"],
23+
"env": {
24+
"DEBUG": "true"
25+
}
2026
},
2127
{
2228
"name": "Launch Interface (with chrome debug)",
@@ -36,23 +42,32 @@
3642
"request": "launch",
3743
"mode": "debug",
3844
"program": "${workspaceFolder}/main.go",
39-
"args": ["jobs", "webhooks"]
45+
"args": ["jobs", "webhooks"],
46+
"env": {
47+
"DEBUG": "true"
48+
}
4049
},
4150
{
4251
"name": "Launch crawler (campus)",
4352
"type": "go",
4453
"request": "launch",
4554
"mode": "debug",
4655
"program": "${workspaceFolder}/main.go",
47-
"args": ["jobs", "crawler", "campus"]
56+
"args": ["jobs", "crawler", "campus"],
57+
"env": {
58+
"DEBUG": "true"
59+
}
4860
},
4961
{
5062
"name": "Launch crawler (locations)",
5163
"type": "go",
5264
"request": "launch",
5365
"mode": "debug",
5466
"program": "${workspaceFolder}/main.go",
55-
"args": ["jobs", "crawler", "locations"]
67+
"args": ["jobs", "crawler", "locations"],
68+
"env": {
69+
"DEBUG": "true"
70+
}
5671
}
5772
]
5873
}

.vscode/tasks.json

+17
Original file line numberDiff line numberDiff line change
@@ -198,6 +198,23 @@
198198
"panel": "dedicated"
199199
}
200200
},
201+
{
202+
"label": "Run dragonfly cli (redis-cli)",
203+
"detail": "Execute redis-cli on container",
204+
"type": "process",
205+
"isBackground": true,
206+
"command": "redis-cli",
207+
"icon": {
208+
"id": "debug-start",
209+
"color": "terminal.ansiRed"
210+
},
211+
"problemMatcher": [],
212+
"args": ["-h", "dragonfly", "-p", "6379"],
213+
"presentation": {
214+
"focus": true,
215+
"panel": "dedicated"
216+
}
217+
},
201218
{
202219
"label": "Populate DB with campus",
203220
"detail": "Execute the campus crawler to populate your development database with campus (need 42 credentials)",

build/Dockerfile

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# GOLANG BUILD - BUILD
2-
FROM golang:1.18 AS go-build
2+
FROM golang:1.20 AS go-build
33

44
WORKDIR /build
55
COPY . /build

cmd/api.go

+18-17
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,9 @@ package cmd
22

33
import (
44
"bytes"
5+
"context"
56
"fmt"
6-
"io/ioutil"
7+
"io"
78
"net/http"
89
"os"
910
"strings"
@@ -20,11 +21,13 @@ import (
2021
"github.com/rs/cors"
2122
"github.com/rs/zerolog/log"
2223
"github.com/spf13/cobra"
24+
"github.com/vektah/gqlparser/v2/gqlerror"
2325
"go.opentelemetry.io/otel"
2426

2527
"atomys.codes/stud42/internal/api"
2628
modelsutils "atomys.codes/stud42/internal/models"
2729
"atomys.codes/stud42/internal/pkg/searchengine"
30+
"atomys.codes/stud42/pkg/cache"
2831
"atomys.codes/stud42/pkg/otelgql"
2932
)
3033

@@ -39,27 +42,25 @@ var apiCmd = &cobra.Command{
3942
Short: "Serve the API in production",
4043

4144
PreRun: func(cmd *cobra.Command, args []string) {
42-
if err := modelsutils.Connect(); err != nil {
43-
log.Fatal().Err(err).Msg("failed to connect to database")
44-
}
45-
46-
if err := modelsutils.Migrate(); err != nil {
47-
log.Fatal().Err(err).Msg("failed to migrate database")
48-
}
49-
5045
searchengine.Initizialize()
5146
},
5247

5348
Run: func(cmd *cobra.Command, args []string) {
5449
tracer := otel.GetTracerProvider().Tracer("graphql-api")
55-
srv := handler.NewDefaultServer(api.NewSchema(modelsutils.Client(), tracer))
56-
// srv.SetRecoverFunc(func(ctx context.Context, err interface{}) error {
57-
// // notify bug tracker...
58-
// log.Error().Err(err.(error)).Msg("unhandled error")
59-
// return gqlerror.Errorf("Internal server error!")
60-
// })
50+
cacheClient, _ := cmd.Context().Value(keyValueCtxKey{}).(*cache.Client)
51+
gqlCacheClient, err := cacheClient.NewGQLCache(30 * time.Minute)
52+
if err != nil {
53+
log.Fatal().Err(err).Msg("failed to init gql cache")
54+
}
6155

56+
srv := handler.NewDefaultServer(api.NewSchema(modelsutils.Client(), cacheClient, tracer))
57+
srv.SetRecoverFunc(func(ctx context.Context, err interface{}) error {
58+
// notify bug tracker...
59+
log.Error().Err(err.(error)).Msg("unhandled api error")
60+
return gqlerror.Errorf("Internal server error!")
61+
})
6262
srv.Use(entgql.Transactioner{TxOpener: modelsutils.Client()})
63+
srv.Use(extension.AutomaticPersistedQuery{Cache: gqlCacheClient})
6364
srv.Use(extension.FixedComplexityLimit(64))
6465
srv.Use(otelgql.Middleware(tracer))
6566

@@ -96,7 +97,7 @@ var apiCmd = &cobra.Command{
9697
const _50KB = (1 << 10) * 50
9798

9899
limitedBody := http.MaxBytesReader(w, r.Body, _50KB)
99-
bodyBytes, err := ioutil.ReadAll(limitedBody)
100+
bodyBytes, err := io.ReadAll(limitedBody)
100101
limitedBody.Close()
101102

102103
// if r.Body reach the max size limit, the request will be canceled
@@ -106,7 +107,7 @@ var apiCmd = &cobra.Command{
106107
return
107108
}
108109

109-
r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
110+
r.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
110111
h.ServeHTTP(w, r)
111112
}
112113
return http.HandlerFunc(fn)

cmd/campus.go

-9
Original file line numberDiff line numberDiff line change
@@ -15,15 +15,6 @@ import (
1515
var campusCmd = &cobra.Command{
1616
Use: "campus",
1717
Short: "Crawl all campus of 42 network and update the database",
18-
PreRun: func(cmd *cobra.Command, args []string) {
19-
if err := modelsutils.Connect(); err != nil {
20-
log.Fatal().Err(err).Msg("failed to connect to database")
21-
}
22-
23-
if err := modelsutils.Migrate(); err != nil {
24-
log.Fatal().Err(err).Msg("failed to migrate database")
25-
}
26-
},
2718
Run: func(cmd *cobra.Command, args []string) {
2819
log.Info().Msg("Start the crawling of all campus of 42 network")
2920
campuses, err := duoapi.CampusAll(cmd.Context())

cmd/locations.go

-11
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,6 @@ import (
1515
"atomys.codes/stud42/internal/models/generated/campus"
1616
"atomys.codes/stud42/internal/models/generated/location"
1717
"atomys.codes/stud42/internal/models/generated/user"
18-
"atomys.codes/stud42/internal/pkg/searchengine"
1918
"atomys.codes/stud42/pkg/duoapi"
2019
)
2120

@@ -25,16 +24,6 @@ var locationsCmd = &cobra.Command{
2524
Short: "Crawl all active locations of specific campus and update the database",
2625
Long: `Crawl all active locations of specific campus and update the database.
2726
For any closed locations, the location will be marked as inactive in the database.`,
28-
PreRun: func(cmd *cobra.Command, args []string) {
29-
if err := modelsutils.Connect(); err != nil {
30-
log.Fatal().Err(err).Msg("failed to connect to database")
31-
}
32-
33-
if err := modelsutils.Migrate(); err != nil {
34-
log.Fatal().Err(err).Msg("failed to migrate database")
35-
}
36-
searchengine.Initizialize()
37-
},
3827
Run: func(cmd *cobra.Command, args []string) {
3928
var campusID = cmd.Flag("campus_id").Value.String()
4029
db := modelsutils.Client()

cmd/reindexusers.go

-10
Original file line numberDiff line numberDiff line change
@@ -22,16 +22,6 @@ This operation is useful when the meilisearch index is corrupted or when the
2222
meilisearch index is not up to date. This operation will take a long time to
2323
complete. This operation will drop the meilisearch index and recreate it with
2424
all the users.`,
25-
PreRun: func(cmd *cobra.Command, args []string) {
26-
if err := modelsutils.Connect(); err != nil {
27-
log.Fatal().Err(err).Msg("failed to connect to database")
28-
}
29-
30-
if err := modelsutils.Migrate(); err != nil {
31-
log.Fatal().Err(err).Msg("failed to migrate database")
32-
}
33-
searchengine.Initizialize()
34-
},
3525
Run: func(cmd *cobra.Command, args []string) {
3626
log.Info().Msg("Prepare the re-indexation of the users")
3727

cmd/root.go

+26
Original file line numberDiff line numberDiff line change
@@ -4,17 +4,43 @@ import (
44
"context"
55
"strings"
66

7+
modelsutils "atomys.codes/stud42/internal/models"
8+
"atomys.codes/stud42/pkg/cache"
79
"github.com/rs/zerolog/log"
810
"github.com/spf13/cobra"
911
"github.com/spf13/viper"
1012
)
1113

1214
var cfgFile string
1315

16+
type keyValueCtxKey struct{}
17+
1418
// rootCmd represents the base command when called without any subcommands
1519
var rootCmd = &cobra.Command{
1620
Use: "api",
1721
Short: "stud42 API",
22+
PersistentPreRun: func(cmd *cobra.Command, args []string) {
23+
var cacheClient *cache.Client
24+
var err error
25+
26+
keyValueStoreUrl := viper.GetString("keyvalue-store-url")
27+
if keyValueStoreUrl != "" {
28+
cacheClient, err = cache.NewClient(viper.GetString("keyvalue-store-url"))
29+
if err != nil {
30+
log.Fatal().Err(err).Msg("failed to create cache")
31+
}
32+
33+
cmd.SetContext(context.WithValue(cmd.Context(), keyValueCtxKey{}, cacheClient))
34+
}
35+
36+
if modelsutils.Connect(cacheClient) != nil {
37+
log.Fatal().Msg("Failed to connect to database")
38+
}
39+
40+
if err := modelsutils.Migrate(); err != nil {
41+
log.Fatal().Err(err).Msg("failed to migrate database")
42+
}
43+
},
1844
}
1945

2046
// Execute adds all child commands to the root command and sets flags appropriately.

deploy/stacks/apps/s42/api.tf

+8
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,9 @@ module "api" {
5151
DATABASE_HOST = "postgres.${var.namespace}.svc.cluster.local"
5252
DATABASE_NAME = "s42"
5353
DATABASE_URL = "postgresql://postgres:$(DATABASE_PASSWORD)@$(DATABASE_HOST):5432/$(DATABASE_NAME)?sslmode=disable"
54+
KEYVALUE_STORE_HOST = "dragonfly.${var.namespace}.svc.cluster.local"
55+
KEYVALUE_STORE_PORT = "6379"
56+
KEYVALUE_STORE_URL = "redis://:$(DFLY_PASSWORD)@$(KEYVALUE_STORE_HOST):$(KEYVALUE_STORE_PORT)"
5457
SEARCHENGINE_MEILISEARCH_HOST = "http://meilisearch.${var.namespace}.svc.cluster.local:7700"
5558
}
5659

@@ -60,6 +63,11 @@ module "api" {
6063
name = "postgres-credentials"
6164
}
6265

66+
DFLY_PASSWORD = {
67+
key = "DFLY_PASSWORD"
68+
name = "dragonfly-credentials"
69+
}
70+
6371
GITHUB_TOKEN = {
6472
key = "GITHUB_TOKEN"
6573
name = "github-token"

0 commit comments

Comments
 (0)