forked from OpenJobDescription/openjd-specifications
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathalgorithmic-art.yaml
194 lines (185 loc) · 7.36 KB
/
algorithmic-art.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# ----
# Demonstrates
# ----
# This demonstrates a Job that will render a series of animation frames
# and encode videos from the created frames. Notably, this shows off:
# - Step Environment to create and use an ephemeral Python virtual environment
# in the Session working directory.
# - Using associative combination of Task Parameters to create a wedge-style
# render step where we create multiple animations that differ only in "simulation"
# metaparameters.
# - Step dependencies where the Step to encode videos must wait for the rendering
# Step to complete before it can run.
# - The use of embedded files
# - Job parameters; including path-typed parameters.
# - Path mapping a workstation path to a render-host path.
#
# Run with:
# mkdir outputs
# openjd run algorithmic-art.yaml \
# --job-param RenderScript=$(pwd)/scripts/algorithmic-art.py \
# --job-param OutputDirectory=$(pwd)/outputs \
# --job-param NumAnimationFrames=50 \
# --step RenderImages
# openjd run algorithmic-art.yaml \
# --job-param RenderScript=$(pwd)/scripts/algorithmic-art.py \
# --job-param OutputDirectory=$(pwd)/outputs \
# --job-param NumAnimationFrames=50 \
# --step EncodeVideos
#
# To see path mapping in action (see: https://github.com/OpenJobDescription/openjd-specifications/wiki/How-Jobs-Are-Run#path-mapping):
# mkdir local_outputs remote_outputs
# PATH_MAPPING_RULES="{\"version\":\"pathmapping-1.0\", \"path_mapping_rules\": [{\"source_path_format\": \"POSIX\", \"source_path\": \"$(pwd)/local_outputs\", \"destination_path\": \"$(pwd)/remote_outputs\"}]}"
# openjd run algorithmic-art.yaml \
# --job-param RenderScript=$(pwd)/scripts/algorithmic-art.py \
# --job-param OutputDirectory=$(pwd)/local_outputs \
# --job-param NumAnimationFrames=50 \
# --step RenderImages \
# --path-mapping-rules "${PATH_MAPPING_RULES}"
# openjd run algorithmic-art.yaml \
# --job-param RenderScript=$(pwd)/scripts/algorithmic-art.py \
#    --job-param OutputDirectory=$(pwd)/local_outputs \
# --job-param NumAnimationFrames=50 \
# --step EncodeVideos \
# --path-mapping-rules "${PATH_MAPPING_RULES}"
# Notice how the value of "OutputDirectory" is $(pwd)/local_outputs, but the files are written to
# $(pwd)/remote_outputs
#
# ----
# Requirements (tested with ffmpeg 4.4.2)
# ----
# - bash shell
# - python 3.9+
# - ffmpeg
#
# -----
# Contributors to this template:
# Daniel Neilson (https://github.com/ddneilson)
specificationVersion: jobtemplate-2023-09
# The name can be used by the render manager as the name of the created job, so we can
# use job parameters to differentiate submission names.
name: AlgorithmicArtSample-{{Param.NumAnimationFrames}}
parameterDefinitions:
# PATH-typed so the value is subject to any path mapping rules supplied at run time.
- name: RenderScript
  type: PATH
  description: >
    The fully-qualified location of the 'algorithmic-art.py' script file in the filesystem.
# PATH-typed; both steps below write their outputs under this directory.
- name: OutputDirectory
  type: PATH
  description: >
    The fully-qualified location on the disk to put the output files generated by this job.
# Upper bound of the Frame task-parameter range in the RenderImages step.
- name: NumAnimationFrames
  type: INT
  description: >
    The number of animation frames to generate for each of the videos.
steps:
- name: RenderImages
  description: >
    This generates animation frames for some algorithmic art. Multiple sequences of animation frames
    are generated -- one for each of a set of input metaparameters to the image generation algorithm.
  parameterSpace:
    taskParameterDefinitions:
    # The wedge parameters: StarFactor & SwirlFactor
    # Both StarFactor & SwirlFactor must have the exact same number of values
    # since they will be associatively combined.
    # Note: If you change the values for StarFactor and/or SwirlFactor here, then you
    # must also change them in the EncodeVideos step's parameter space definition.
    - name: StarFactor
      type: INT
      range: "3-5"
    - name: SwirlFactor
      type: FLOAT
      range: [ 5.5, 10.0, 15.0 ]
    - name: Frame
      type: INT
      range: "0-{{Param.NumAnimationFrames}}"
    # Generate all of the frames for each combination of StarFactor & SwirlFactor
    # This yields the parameter space:
    #  {StarFactor=3,SwirlFactor=5.5,Frame=0..MaxFrame},
    #  {StarFactor=4,SwirlFactor=10.0,Frame=0..MaxFrame},
    #  {StarFactor=5,SwirlFactor=15.0,Frame=0..MaxFrame}
    combination: "(StarFactor,SwirlFactor) * Frame"
  script:
    actions:
      onRun:
        command: python
        args:
        - "{{Param.RenderScript}}"
        - "{{Param.OutputDirectory}}"
        - "{{Task.Param.Frame}} of {{Param.NumAnimationFrames}}"
        - "{{Task.Param.StarFactor}}"
        - "{{Task.Param.SwirlFactor}}"
  # Step Environments are entered at the start of the Session, before any of this
  # Step's Tasks run, and exited at the end of the Session.
  stepEnvironments:
  - name: PythonVenv
    description: >
      Creates a Python virtual environment in the Session Working directory, installs the
      required Python libraries, and ensures that actions that are run while this Environment
      is active will automatically be using the virtual environment.
    script:
      actions:
        onEnter:
          command: bash
          args: [ "{{Env.File.Enter }}" ]
        onExit:
          command: bash
          args: [ "{{Env.File.Exit}}" ]
      embeddedFiles:
      - name: Enter
        type: TEXT
        runnable: True
        data: |
          python3 -m venv .venv
          source .venv/bin/activate
          pip install --upgrade pip
          pip install numpy pillow
          # Export the environment variables required to have an activated virtual env.
          # Open Job Description intercepts the stdout of all running processes and looks for
          # lines starting in "openjd_<something>" as an instruction to the runtime.
          # These environment variables will no longer be applied once this environment is exited.
          echo "openjd_env: PATH=${PATH}"
          echo "openjd_env: VIRTUAL_ENV=${VIRTUAL_ENV}"
          echo "openjd_env: VIRTUAL_ENV_PROMPT=${VIRTUAL_ENV_PROMPT}"
      - name: Exit
        type: TEXT
        runnable: True
        data: |
          # Delete the virtual environment.
          # Not strictly required since the Session Working Directory will be deleted
          # at the end of the Session, but included for demonstration.
          rm -rf .venv
# Encodes one video per (StarFactor, SwirlFactor) pair from the frames rendered above.
- name: EncodeVideos
  # All RenderImages Tasks must complete before any EncodeVideos Task may run.
  dependencies:
  - dependsOn: RenderImages
  parameterSpace:
    taskParameterDefinitions:
    # These values must mirror the StarFactor/SwirlFactor definitions in the
    # RenderImages step so each Task encodes an existing frame sequence.
    - name: StarFactor
      type: INT
      range: "3-5"
    - name: SwirlFactor
      type: FLOAT
      range: [ 5.5, 10.0, 15.0 ]
    # This yields the parameter space:
    #  {StarFactor=3,SwirlFactor=5.5},
    #  {StarFactor=4,SwirlFactor=10.0},
    #  {StarFactor=5,SwirlFactor=15.0}
    combination: "(StarFactor,SwirlFactor)"
  script:
    actions:
      onRun:
        command: bash
        args: [ "{{Task.File.Encode}}"]
    embeddedFiles:
    - name: Encode
      type: TEXT
      runnable: True
      # NOTE(review): -start_number 1 assumes the render script numbers frame files
      # from 1, while the RenderImages Frame range starts at 0; -frames:v 100 also
      # caps each video at 100 frames regardless of NumAnimationFrames. Confirm both
      # against the render script's output naming.
      data: |
        # Reference: https://academysoftwarefoundation.github.io/EncodingGuidelines/Quickstart.html
        FILE_PREFIX="{{Param.OutputDirectory}}/algart-{{Task.Param.StarFactor}}-{{Task.Param.SwirlFactor}}"
        ffmpeg -y -r 10 -start_number 1 -i ${FILE_PREFIX}-%04d.png -pix_fmt yuv420p \
          -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
          -frames:v 100 -c:v libx264 -preset slower \
          -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc iec61966-2-1 \
          -movflags faststart ${FILE_PREFIX}.mp4