Skip to content

Commit

Permalink
Merge pull request #115 from sandagomipieris/master
Browse files Browse the repository at this point in the history
Adding scrapy app server helm artifacts
  • Loading branch information
sajithaliyanage authored Jul 11, 2021
2 parents a6c0785 + 178038f commit 7b269fa
Show file tree
Hide file tree
Showing 4 changed files with 139 additions and 1 deletion.
8 changes: 8 additions & 0 deletions crawlerx_helm/templates/scrapy_app/configmap.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
---
# ConfigMap consumed by the scrapy app Deployment via envFrom; exposes the
# in-cluster service hostnames for MongoDB and RabbitMQ as env variables.
apiVersion: v1
kind: ConfigMap
metadata:
  name: {{ include "crawlerx_helm.fullname" . }}-scrapy-configs
  # Pin to the release namespace, matching the sibling scrapy Deployment
  # and Service manifests (the original omitted namespace here only).
  namespace: {{ .Release.Namespace }}
data:
  # Quote templated values so an empty expansion cannot render as null.
  MONGODB_URL: "{{ include "crawlerx_helm.fullname" . }}-mongodb-service"
  RABBITMQ_URL: "{{ include "crawlerx_helm.fullname" . }}-rabbitmq-service"
75 changes: 75 additions & 0 deletions crawlerx_helm/templates/scrapy_app/deployment.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
# Deployment for the scrapy app server (scrapyd, HTTP API on port 6800).
apiVersion: apps/v1
kind: Deployment
metadata:
  name: {{ include "crawlerx_helm.fullname" . }}-scrapy-deployment
  namespace: {{ .Release.Namespace }}
  labels:
    chart: '{{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }}'
spec:
  replicas: {{ .Values.crawlerx.deployment.scrapyApp.replicas }}
  strategy:
    type: RollingUpdate
    rollingUpdate:
      # FIX: this previously read ...rollingUpdate.maxSurge for BOTH fields,
      # so the maxUnavailable value configured in values.yaml was ignored.
      maxUnavailable: {{ .Values.crawlerx.deployment.scrapyApp.strategy.rollingUpdate.maxUnavailable }}
      maxSurge: {{ .Values.crawlerx.deployment.scrapyApp.strategy.rollingUpdate.maxSurge }}
  selector:
    matchLabels:
      app: {{ include "crawlerx_helm.fullname" . }}-scrapy-selector
      version: "current"
  template:
    metadata:
      labels:
        app: {{ include "crawlerx_helm.fullname" . }}-scrapy-selector
        version: "current"
    spec:
      containers:
        # FIX: was "crawlerx_web_appication" — underscores are not permitted
        # in container names (DNS-1123 label), and this is the scrapy app,
        # not the web application.
        - name: "crawlerx-scrapy-app"
          {{- if .Values.crawlerx.deployment.scrapyApp.dockerRegistry }}
          image: "{{ .Values.crawlerx.deployment.scrapyApp.dockerRegistry }}/{{ .Values.crawlerx.deployment.scrapyApp.imageName }}:{{ .Values.crawlerx.deployment.scrapyApp.imageTag }}"
          {{- else }}
          image: "{{ .Values.crawlerx.deployment.scrapyApp.imageName }}:{{ .Values.crawlerx.deployment.scrapyApp.imageTag }}"
          {{- end }}
          imagePullPolicy: Always
          ports:
            # scrapyd HTTP/API port; also the target of both probes below.
            - containerPort: 6800
          resources:
            requests:
              memory: {{ .Values.crawlerx.deployment.scrapyApp.resources.requests.memory }}
              cpu: {{ .Values.crawlerx.deployment.scrapyApp.resources.requests.cpu }}
            limits:
              memory: {{ .Values.crawlerx.deployment.scrapyApp.resources.limits.memory }}
              cpu: {{ .Values.crawlerx.deployment.scrapyApp.resources.limits.cpu }}
          {{- if .Values.crawlerx.deployment.scrapyApp.envs }}
          env:
            {{- range $key, $val := .Values.crawlerx.deployment.scrapyApp.envs }}
            - name: {{ $key }}
              value: {{ $val | quote }}
            {{- end }}
          {{- end }}
          envFrom:
            - configMapRef:
                # FIX: was ...-backend-configs, which left the new
                # ...-scrapy-configs ConfigMap added alongside this
                # Deployment (MONGODB_URL / RABBITMQ_URL) entirely unused.
                name: {{ include "crawlerx_helm.fullname" . }}-scrapy-configs
          {{- if .Values.crawlerx.deployment.scrapyApp.livenessProbe }}
          # FIX: the condition previously tested .Values.image.livenessProbe,
          # a path this chart's values.yaml does not define, and the action
          # was never closed — one missing end broke template rendering.
          livenessProbe:
            exec:
              command:
                - /bin/sh
                - -c
                - nc -z localhost 6800
            initialDelaySeconds: {{ .Values.crawlerx.deployment.scrapyApp.livenessProbe.initialDelaySeconds }}
            periodSeconds: {{ .Values.crawlerx.deployment.scrapyApp.livenessProbe.periodSeconds }}
          {{- end }}
          readinessProbe:
            exec:
              command:
                - /bin/sh
                - -c
                - nc -z localhost 6800
            initialDelaySeconds: {{ .Values.crawlerx.deployment.scrapyApp.readinessProbe.initialDelaySeconds }}
            periodSeconds: {{ .Values.crawlerx.deployment.scrapyApp.readinessProbe.periodSeconds }}
      {{- if .Values.crawlerx.deployment.scrapyApp.imagePullSecrets }}
      imagePullSecrets:
        - name: {{ .Values.crawlerx.deployment.scrapyApp.imagePullSecrets }}
      {{- else if and (not (eq .Values.crawlerx.subscription.username "")) (not (eq .Values.crawlerx.subscription.password "")) }}
      # Fallback secret created from the subscription credentials.
      imagePullSecrets:
        - name: crawlerx-deployment-secret
      {{- end }}
14 changes: 14 additions & 0 deletions crawlerx_helm/templates/scrapy_app/service.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# ClusterIP Service exposing the scrapyd HTTP/API port of the scrapy app pods.
apiVersion: v1
kind: Service
metadata:
  name: {{ include "crawlerx_helm.fullname" . }}-scrapy-service
  # FIX: removed the space before the colon ("namespace :"), which yamllint
  # rejects and which some parsers mishandle.
  namespace: {{ .Release.Namespace }}
  labels:
    chart: "{{ .Chart.Name }}-{{ .Chart.Version | replace "+" "_" }}"
spec:
  type: ClusterIP
  ports:
    - name: http
      port: 6800
      # Explicit targetPort documents the container port even though it
      # defaults to `port` when omitted.
      targetPort: 6800
  selector:
    app: "{{ include "crawlerx_helm.fullname" . }}-scrapy-selector"
43 changes: 42 additions & 1 deletion crawlerx_helm/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ crawlerx:
envs:
# ENV_NAME: ENV_VALUE

webApp:
backend:
# dockerRegistry: ""
imageName: "scorelabs/crawlerx-backend"
imageTag: "1.0.0"
Expand Down Expand Up @@ -90,6 +90,47 @@ crawlerx:
envs:
# ENV_NAME: ENV_VALUE

    # Values for the scrapy app server (scrapyd) deployment; consumed by the
    # templates under templates/scrapy_app/ (deployment, service, configmap).
    scrapyApp:
      # Optional private registry, prefixed to imageName when set.
      # dockerRegistry: ""
      imageName: "scorelabs/crawlerx-scrapy-app"
      imageTag: "1.0.0"
      # Optional image pull secret name for the registry above.
      # imagePullSecrets: secret
      # Number of deployment replicas
      replicas: 1
      strategy:
        rollingUpdate:
          # The maximum number of pods that can be scheduled above the desired number of pods.
          maxSurge: 1
          # The maximum number of pods that can be unavailable during the update.
          maxUnavailable: 0
      # Indicates whether the container is running.
      livenessProbe:
        # Number of seconds after the container has started before liveness probes are initiated.
        initialDelaySeconds: 35
        # How often (in seconds) to perform the probe.
        periodSeconds: 10
      # Indicates whether the container is ready to service requests.
      readinessProbe:
        # Number of seconds after the container has started before readiness probes are initiated.
        initialDelaySeconds: 35
        # How often (in seconds) to perform the probe.
        periodSeconds: 10
      # These are the minimum resource recommendations
      resources:
        requests:
          # The minimum amount of memory that should be allocated for a Pod
          memory: "512Mi"
          # The minimum amount of CPU that should be allocated for a Pod
          cpu: "400m"
        limits:
          # The maximum amount of memory that should be allocated for a Pod
          memory: "1Gi"
          # The maximum amount of CPU that should be allocated for a Pod
          cpu: "1000m"
      # Environment variables for the deployment.
      envs:
        # ENV_NAME: ENV_VALUE

# Ingress level values for the CrawlerX deployments
ingress:
webApp:
Expand Down

0 comments on commit 7b269fa

Please sign in to comment.