# argo_workflow.yaml
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: docker-hub-classification-
spec:
  entrypoint: default
  # Create a volume for containers to store their output data.
  volumeClaimTemplates:
  - metadata:
      name: workdir
    spec:
      accessModes: ["ReadWriteOnce"]
      resources:
        requests:
          storage: 10Gi
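  # Every container step below mounts this "workdir" claim at
  # {{workflow.parameters.mount-path}}; that shared volume is how the output
  # of one step becomes the input of the next.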
  # Arguments of the workflow
  arguments:
    parameters:
    # model version
    - name: model-version
      value: "v1"
    # The name of the S3 bucket where the data is stored.
    - name: bucket
      value: "<bucket-name>"
    # Docker registry username
    - name: username
      value: "dev"
    # The path to the input data in the S3 bucket, in csv.gz format
    - name: input-data-key
      value: "<input-data-key-path>"
    # The path to the Docker cert bundle in the S3 bucket (e.g. a Docker UCP bundle)
    - name: docker-cert-key
      value: "<docker-cert-key-path>"
    # mount path
    - name: mount-path
      value: /mnt/workspace/data
    # loss function
    - name: loss
      value: binary_crossentropy
    # Fraction of the dataset held out to test the model (e.g. 0.2 == 20%)
    - name: test-size
      value: "0.2"
    # batch size
    - name: batch-size
      value: "100"
    # number of epochs
    - name: epochs
      value: "15"
    # validation split
    - name: validation-split
      value: "0.1"
    # output train data CSV file name
    - name: output-train-csv
      value: train_data.csv
    # output test data CSV file name
    - name: output-test-csv
      value: test_data.csv
    # output model file name
    - name: output-model
      value: hub_classifier.h5
    # output vectorized descriptions file name
    - name: output-vectorized-descriptions
      value: vectorized_descriptions.pckl
    # output raw data CSV file name
    - name: output-raw-csv
      value: hub_stackshare_combined_v2.csv
    # selected categories
    - name: selected-categories
      value: "devops,build-test-deploy,languages & frameworks,data stores,programming languages,application hosting,databases,web servers,application utilities,support-sales-and-marketing,operating systems,monitoring tools,continuous integration,self-hosted blogging / cms,open source service discovery,message queue,frameworks (full stack),in-memory databases,crm,search as a service,log management,monitoring,collaboration,virtual machine platforms & containers,server configuration and automation,big data tools,database tools,machine learning tools,code collaboration & version_control,load balancer / reverse proxy,web cache,java build tools,search engines,container tools,package managers,project management,infrastructure build tools,static site generators,code review,microframeworks (backend),assets and media,version control system,front end package manager,headless browsers,data science notebooks,ecommerce,background processing,cross-platform mobile development,issue tracking,analytics,secrets management,text editor,graph databases,cluster management,exception monitoring,business tools,business intelligence,localhost tools,realtime backend / api,microservices tools,chatops,git tools,hosted package repository,js build tools / js task runners,libraries,platform as a service,general analytics,group chat & notifications,browser testing,serverless / task processing,css pre-processors / extensions,image processing and management,integrated development environment,stream processing,cross-platform desktop development,continuous deployment,machine learning,data science,monitoring metrics,metrics,continuous delivery,build automation"
    # The container registry prefix prepended to image names; if set, it must
    # include a trailing slash (e.g. "registry.example.com/"). Empty means Docker Hub.
    - name: registry
      value: ""
    # The container image to use in the workflow steps
    - name: image-name
      value: data-team/base:latest
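  # A minimal sketch of submitting this workflow with the Argo CLI, overriding
  # the placeholder parameters (the bucket, key, and registry values below are
  # hypothetical):
  #
  #   argo submit argo_workflow.yaml \
  #     -p bucket=my-data-bucket \
  #     -p input-data-key=raw/hub_stackshare_combined_v2.csv.gz \
  #     -p registry=registry.example.com/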
  templates:
  ##################################
  # Define the steps of the workflow
  ##################################
  - name: default
    steps:
    - - name: import-data
        template: import-data
    - - name: process-data
        template: process-data
    - - name: training
        template: training
    - - name: build-push-image
        template: build-push-image
    - - name: deploy-model
        template: deploy-model
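    # Note: each "- -" above opens a new step group, so these five steps run
    # strictly in sequence; multiple steps listed within the same group would
    # run in parallel.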
  #################################################
  # Import / Unzip
  # Imports the input data (and, optionally, the Docker certs) and unpacks them.
  #################################################
  - name: import-data
    container:
      image: alpine:latest
      command: [sh, -c]
      args: [
        "gzip -d {{workflow.parameters.mount-path}}/{{workflow.parameters.output-raw-csv}}.gz",
        # To also unpack a Docker cert bundle, append:
        # "; mkdir {{workflow.parameters.mount-path}}/docker-cert-bundle; unzip {{workflow.parameters.mount-path}}/docker-cert-bundle.zip -d {{workflow.parameters.mount-path}}/docker-cert-bundle"
      ]
      volumeMounts:
      - name: workdir
        mountPath: "{{workflow.parameters.mount-path}}/"
    inputs:
      artifacts:
      - name: data
        path: "{{workflow.parameters.mount-path}}/{{workflow.parameters.output-raw-csv}}.gz"
        s3:
          endpoint: s3.amazonaws.com
          bucket: "{{workflow.parameters.bucket}}"
          key: "{{workflow.parameters.input-data-key}}"
          accessKeySecret:
            name: s3-credentials
            key: accessKey
          secretKeySecret:
            name: s3-credentials
            key: secretKey
      # - name: docker-cert-bundle
      #   path: "{{workflow.parameters.mount-path}}/docker-cert-bundle.zip"
      #   s3:
      #     endpoint: s3.amazonaws.com
      #     bucket: "{{workflow.parameters.bucket}}"
      #     key: "{{workflow.parameters.docker-cert-key}}"
      #     accessKeySecret:
      #       name: s3-credentials
      #       key: accessKey
      #     secretKeySecret:
      #       name: s3-credentials
      #       key: secretKey
    outputs:
      artifacts:
      - name: raw-csv
        path: "{{workflow.parameters.mount-path}}/{{workflow.parameters.output-raw-csv}}"
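  # The s3-credentials secret referenced above is assumed to already exist in
  # the cluster; a minimal sketch of creating it (placeholder values):
  #
  #   kubectl create secret generic s3-credentials \
  #     --from-literal=accessKey=<aws-access-key-id> \
  #     --from-literal=secretKey=<aws-secret-access-key>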
  #########################################################################
  # Process Data
  #########################################################################
  - name: process-data
    container:
      image: "{{workflow.parameters.registry}}{{workflow.parameters.image-name}}"
      imagePullPolicy: "IfNotPresent"
      command: [sh, -c]
      args:
        [
          "python /src/process_data.py --mount_path {{workflow.parameters.mount-path}} --input_csv {{workflow.parameters.output-raw-csv}} --output_train_csv {{workflow.parameters.output-train-csv}} --output_test_csv {{workflow.parameters.output-test-csv}} --test_size {{workflow.parameters.test-size}} --selected_categories '{{workflow.parameters.selected-categories}}'",
        ]
      volumeMounts:
      - name: workdir
        mountPath: "{{workflow.parameters.mount-path}}/"
    outputs:
      artifacts:
      - name: output-train-csv
        path: "{{workflow.parameters.mount-path}}/{{workflow.parameters.output-train-csv}}"
      - name: output-test-csv
        path: "{{workflow.parameters.mount-path}}/{{workflow.parameters.output-test-csv}}"
      - name: selected-categories
        path: "{{workflow.parameters.mount-path}}/selected_categories.pckl"
  #######################################
  # Training and ML model extraction
  #######################################
  - name: training
    container:
      image: "{{workflow.parameters.registry}}{{workflow.parameters.image-name}}"
      imagePullPolicy: "IfNotPresent"
      command: [sh, -c]
      args:
        [
          "python /src/train.py --mount_path {{workflow.parameters.mount-path}} --input_train_csv {{workflow.parameters.output-train-csv}} --input_test_csv {{workflow.parameters.output-test-csv}} --output_model {{workflow.parameters.output-model}} --output_vectorized_descriptions {{workflow.parameters.output-vectorized-descriptions}}; cp /src/models/* {{workflow.parameters.mount-path}}/",
        ]
      volumeMounts:
      - name: workdir
        mountPath: "{{workflow.parameters.mount-path}}/"
    outputs:
      artifacts:
      - name: output-model
        path: "{{workflow.parameters.mount-path}}/{{workflow.parameters.output-model}}"
      - name: output-vectorized-descriptions
        path: "{{workflow.parameters.mount-path}}/{{workflow.parameters.output-vectorized-descriptions}}"
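  # The model and vectorizer written above stay on the shared "workdir" volume,
  # where the build-push-image step below packages them into a serving image.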
  #######################################
  # Build and push a Docker image using the Seldon-Core Docker wrapper
  #######################################
  - name: build-push-image
    container:
      image: docker:17.10
      command: [sh, -c]
      args:
        [
          "cd {{workflow.parameters.mount-path}}; sleep 15; rm *.csv; docker run -v {{workflow.parameters.mount-path}}:/model seldonio/core-python-wrapper:0.7 /model DockerHubClassification {{workflow.parameters.model-version}} {{workflow.parameters.registry}}{{workflow.parameters.username}} --base-image=python:3.6 --image-name=dockerhubclassifier; cd build/; ./build_image.sh; echo $DOCKER_PASSWORD | docker login -u $DOCKER_USERNAME --password-stdin; ./push_image.sh",
        ]
      volumeMounts:
      - name: workdir
        mountPath: "{{workflow.parameters.mount-path}}/"
      env:
      - name: DOCKER_HOST # the Docker daemon can be accessed on the standard port on localhost (the dind sidecar)
        value: 127.0.0.1
      - name: DOCKER_USERNAME # name of env var
        valueFrom:
          secretKeyRef:
            name: docker-credentials # name of an existing k8s secret
            key: username # 'key' subcomponent of the secret
      - name: DOCKER_PASSWORD # name of env var
        valueFrom:
          secretKeyRef:
            name: docker-credentials # name of an existing k8s secret
            key: password # 'key' subcomponent of the secret
    sidecars:
    - name: dind
      image: docker:17.10-dind # Docker already provides an image for running a Docker daemon
      securityContext:
        privileged: true # the Docker daemon can only run in a privileged container
      # mirrorVolumeMounts will mount the same volumes specified in the main container
      # to the sidecar (including artifacts), at the same mountPaths. This enables the
      # dind daemon to (partially) see the same filesystem as the main container in
      # order to use features such as Docker volume binding.
      mirrorVolumeMounts: true
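  # The docker-credentials secret referenced above is assumed to already exist;
  # a minimal sketch of creating it (placeholder values):
  #
  #   kubectl create secret generic docker-credentials \
  #     --from-literal=username=<registry-username> \
  #     --from-literal=password=<registry-password>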
  #######################################
  # Deploy model
  #######################################
  - name: deploy-model
    resource: # indicates that this is a resource template
      action: apply # can be any kubectl action (e.g. create, delete, apply, patch)
      # successCondition: ?
      manifest: |
        apiVersion: "machinelearning.seldon.io/v1alpha2"
        kind: "SeldonDeployment"
        metadata:
          labels:
            app: "seldon"
          name: "docker-hub-classification-model-serving-{{workflow.parameters.model-version}}"
          namespace: kubeflow
        spec:
          annotations:
            deployment_version: "{{workflow.parameters.model-version}}"
            project_name: "Docker Hub ML Project"
          name: "docker-hub-classifier"
          predictors:
          - annotations:
              predictor_version: "{{workflow.parameters.model-version}}"
            componentSpecs:
            - spec:
                containers:
                - image: "{{workflow.parameters.registry}}{{workflow.parameters.username}}/dockerhubclassifier:{{workflow.parameters.model-version}}"
                  imagePullPolicy: "Always"
                  name: "docker-hub-classification-model-serving-{{workflow.parameters.model-version}}"
            graph:
              children: []
              endpoint:
                type: "REST"
              name: "docker-hub-classification-model-serving-{{workflow.parameters.model-version}}"
              type: "MODEL"
            name: "docker-hub-classification-model-serving-{{workflow.parameters.model-version}}"
            replicas: 3
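# Once the SeldonDeployment is available, the model serves a REST endpoint.
# A minimal sketch of querying it, assuming Ambassador routes Seldon traffic
# (the host, route, and payload shape are hypothetical and depend on the
# cluster and on how the wrapped model expects its features):
#
#   curl -X POST \
#     http://<ambassador-host>/seldon/docker-hub-classification-model-serving-v1/api/v0.1/predictions \
#     -H 'Content-Type: application/json' \
#     -d '{"data": {"ndarray": [["example docker hub repository description"]]}}'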