-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy path: install.sh
213 lines (170 loc) · 8.85 KB
/
install.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
#!/bin/bash
# install.sh — download, configure and launch Wiki.js as a Cloud Foundry app.
# Executed by an IBM Cloud Continuous Delivery pipeline; prints environment
# diagnostics first so pipeline logs show what the container provides.
echo "============================="
echo "OS Environment Check"
echo "============================="
echo "-> Check root in Docker container, non-root users are 1000 and above"
id -u
whoami
echo "-> Check OS is Ubuntu"
# Read the release file directly instead of `cat | grep` (useless use of cat, SC2002)
grep "PRETTY_NAME" /etc/os-release
echo "-> Check dpkg is installed"
dpkg --version | grep "dpkg"
echo "-> Check curl is installed"
curl -V | grep "curl"
echo "-> Check wget is installed"
wget --version | grep "GNU Wget"
#echo "============================="
#echo "Pre-requisite OS Packages"
#echo "============================="
# IBM Cloud Delivery Pipelines are permitted sudo to apt-get and dpkg
# Documented here, but this is held under .bluemix folder
# See documentation: https://cloud.ibm.com/docs/services/ContinuousDelivery?topic=ContinuousDelivery-deliverypipeline_about#deliverypipeline_jobs
#echo "-> Installing necessary OS Packages"
#sudo apt-get --assume-yes install -y yarn g++ make python curl git openssh gnupg
echo "============================="
echo "Wiki.js - binaries download via Bash"
echo "============================="
echo "-> Creating Directory Structure"
mkdir -p wiki
mkdir -p logs
#chown -R node:node ./wiki ./logs
echo "-> Fetching latest Wiki.js version..."
# Query the GitHub Releases API for the latest tag. Parse the JSON with jq
# (already a dependency of this script — see the VCAP_* handling below)
# instead of the fragile grep/cut/tr chain, which breaks on formatting changes.
WIKIJS_LATEST_VERSION=$(
  curl -s https://api.github.com/repos/Requarks/wiki/releases/latest \
    | jq -r '.tag_name'
)
echo "Wiki.js version is $WIKIJS_LATEST_VERSION"
echo "-> Fetching latest Wiki.js version build release..."
# Use cURL follow re-direct and retain re-direct filename
# Leveraging the lessons learnt from Gist here - https://gist.github.com/steinwaywhw/a4cd19cda655b8249d908261a62687f8
# DO NOT USE auto-generated GitHub tarball_url or zipball_url, which is a snapshot of the GitHub repository source code at time of Release
# Instead use the Release's published/packaged "assets" via browser_download_url,
# excluding the Windows build. jq filtering (already a dependency of this script)
# replaces the fragile grep/cut/tr chain.
WIKIJS_LATEST_DL_URL=$(
  curl -s https://api.github.com/repos/Requarks/wiki/releases/latest \
    | jq -r '.assets[].browser_download_url | select(contains("windows") | not)'
)
# Quote the URL so an empty or unexpected value cannot word-split/glob (SC2086)
curl -s -O -J -L "$WIKIJS_LATEST_DL_URL"
WIKIJS_LATEST_DL_FILE=$(find . -type f -iname '*wiki*.tar.gz' -print)
tar xzf "$WIKIJS_LATEST_DL_FILE" -C ./wiki
rm "$WIKIJS_LATEST_DL_FILE"
echo "Downloaded file is $WIKIJS_LATEST_DL_FILE from $WIKIJS_LATEST_DL_URL"
echo "Extracted to $PWD/wiki"
echo "Removed file $WIKIJS_LATEST_DL_FILE"
# tar on macOS would not work with above, as this requires filename after operators
#######################################
# Download and install the latest Pandoc release (.deb) plus TeX Live for
# PDF output. Requires dpkg/apt-get privileges (pipeline grants sudo-less
# root in the build container).
# Globals:  PANDOC_LATEST_VERSION, PANDOC_LATEST_DL_URL,
#           PANDOC_LATEST_DL_FILE (all written)
# Outputs:  progress messages to stdout
#######################################
function pandoc_install()
{
  echo "-> Fetching latest Pandoc version..."
  # jq parses the Releases API JSON directly — more robust than grep/cut/tr
  PANDOC_LATEST_VERSION=$(
    curl -s https://api.github.com/repos/jgm/pandoc/releases/latest \
      | jq -r '.tag_name'
  )
  echo "Pandoc version is $PANDOC_LATEST_VERSION"
  echo "-> Fetching latest Pandoc version build release..."
  # Use cURL follow re-direct and retain re-direct filename
  # Leveraging the lessons learnt from Gist here - https://gist.github.com/steinwaywhw/a4cd19cda655b8249d908261a62687f8
  # DO NOT USE auto-generated GitHub tarball_url or zipball_url, which is a snapshot of the repository source code
  # Instead use the Release's published "assets" via browser_download_url, keeping only the Debian package
  PANDOC_LATEST_DL_URL=$(
    curl -s https://api.github.com/repos/jgm/pandoc/releases/latest \
      | jq -r '.assets[].browser_download_url | select(endswith(".deb"))'
  )
  # Quote all expansions so filenames/URLs cannot word-split or glob (SC2086)
  curl -s -O -J -L "$PANDOC_LATEST_DL_URL"
  PANDOC_LATEST_DL_FILE=$(find . -type f -iname '*pandoc*.deb' -print)
  echo "Downloaded file is $PANDOC_LATEST_DL_FILE from $PANDOC_LATEST_DL_URL"
  echo "Installing Pandoc..."
  #sudo dpkg -i "$PANDOC_LATEST_DL_FILE"
  dpkg -i "$PANDOC_LATEST_DL_FILE"
  echo "Removing Pandoc .deb file"
  rm "$PANDOC_LATEST_DL_FILE"
  echo "---"
  echo "For Pandoc PDF output, install LaTeX distribution TeX Live"
  #sudo apt-get --assume-yes install texlive
  apt-get --assume-yes install texlive
}
# Described here for re-use by others as a standalone install script,
# but this is executed by IBM Cloud Continuous Delivery pipeline where sudo privileges are available for dpkg
#pandoc_install
echo "============================="
echo "Wiki.js - prepare CF App for deployment to IBM Cloud Foundry public"
echo "============================="
# Automatically generate new env variables from the Cloud Foundry Public System-Provided
# Environment Variables for a CF Service Instance on IBM Cloud.
# jq -r emits raw (unquoted) strings, replacing the previous `sed` quote-stripping,
# and "$VCAP_SERVICES" is quoted so the JSON cannot word-split or glob-expand (SC2086).
export cf_auto_env_db_service_cred_user=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].credentials.connection.postgres.authentication.username')
export cf_auto_env_db_service_cred_password=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].credentials.connection.postgres.authentication.password')
export cf_auto_env_db_service_database_schema=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].credentials.connection.postgres.database')
export cf_auto_env_db_service_host=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].credentials.connection.postgres.hosts[].hostname')
export cf_auto_env_db_service_port=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].credentials.connection.postgres.hosts[].port')
export cf_auto_env_db_service_name=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].instance_name')
#export cf_auto_env_db_service_name=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].name')
export cf_auto_env_db_service_product_label=$(echo "$VCAP_SERVICES" | jq -r '."databases-for-postgresql"[].label')
# Same derivation for the deployed CF Application's VCAP_APPLICATION JSON
export cf_auto_env_app_id=$(echo "$VCAP_APPLICATION" | jq -r '.application_id')
export cf_auto_env_app_name=$(echo "$VCAP_APPLICATION" | jq -r '.application_name')
export cf_auto_env_app_uris=$(echo "$VCAP_APPLICATION" | jq -r '.application_uris[]')
export cf_auto_env_api=$(echo "$VCAP_APPLICATION" | jq -r '.cf_api')
# Region is the token between the app hostname prefix and ".cf." in the first URI
export cf_auto_env_region=$(echo "$VCAP_APPLICATION" | jq -r '.application_uris[]' | sed -e 's/^wikijs-cf\.//' -e 's/\.cf\..*//' | head -n 1)
export cf_auto_env_org_space_name=$(echo "$VCAP_APPLICATION" | jq -r '.space_name')
# Workaround because wiki/config.yml does not accept Environment Variables, therefore inject into config.yml with sed.
# '0,/RE/' (GNU sed address range) limits each substitution to the first matching line.
# NOTE(review): values containing '/' or '&' would break these sed replacements — confirm credentials never contain them.
sed -i "0,/RE/s/ host:/ host: $cf_auto_env_db_service_host/" ./wiki/config.yml
sed -i "0,/RE/s/ port:/ port: $cf_auto_env_db_service_port/" ./wiki/config.yml
sed -i "0,/RE/s/ user:/ user: $cf_auto_env_db_service_cred_user/" ./wiki/config.yml
sed -i "0,/RE/s/ pass:/ pass: $cf_auto_env_db_service_cred_password/" ./wiki/config.yml
sed -i "0,/RE/s/ db:/ db: $cf_auto_env_db_service_database_schema/" ./wiki/config.yml
# Workaround if choose not to use provided node_modules folder from Wiki.js Release package
# NOTE: If execute npm install without removing provided node_modules, errors will occur such as "404 Not Found - GET https://registry.npmjs.org/elasticsearch6 - Not found"
echo "-> Replacing node_modules; deletion then npm install"
rm -rf ./wiki/node_modules
# Abort if the directory is missing so npm install cannot run in the wrong directory (SC2164)
cd wiki || exit 1
npm install
echo "============================="
echo "Completed downloading and preparing CF App of Wiki.js version $WIKIJS_LATEST_VERSION, beginning npm start (i.e. node server)"
echo ""
# shellcheck disable=SC2086 -- intentional word-splitting: flatten the
# newline-separated URI list onto one line so awk can pick fields 1 and 2
cf_url1=$(echo $cf_auto_env_app_uris | awk '{print $1}')
cf_url2=$(echo $cf_auto_env_app_uris | awk '{print $2}')
echo "Starting on URLs:"
echo "https://$cf_url1"
echo "https://$cf_url2"
echo "============================="
# Optional diagnostics: flip to "TRUE" to dump the CF-provided environment
# and directory contents into the pipeline log.
DEBUGGING="FALSE"
if [ "$DEBUGGING" = "TRUE" ]; then
  echo "============================="
  echo "DEBUGGING"
  echo "============================="
  echo "---"
  echo "-> DEBUGGING - echo custom generated Environment Variables from the Cloud Foundry Public System-Provided Environment Variables"
  env | grep "cf_auto_env_.*" | sort
  echo "----"
  echo "-> DEBUGGING - echo standard Cloud Foundry Public System-Provided Environment Variables for CF Container System Variables on IBM Cloud"
  env | grep "BLUEMIX.*"
  env | grep "^BUILD_DIR="
  env | grep "^CACHE_DIR="
  env | grep "CF_.*" | sort
  env | grep "^HOME="
  env | grep "INIT_.*"
  env | grep "MEMORY_.*"
  env | grep "^PATH="
  env | grep "^PORT="
  env | grep "^PWD="
  echo "----"
  echo "-> DEBUGGING - Home Directory"
  echo "Directory Contents of $PWD"
  ls -lha
  echo "-> DEBUGGING - Extracted Wiki.js Directory"
  echo "Directory Contents of $PWD/wiki"
  # Fixed: './$PWD/wiki' prepended '.' to an absolute path, producing a doubled
  # path (e.g. './/home/app/home/app/wiki') that never exists.
  # NOTE(review): the script cd'd into ./wiki earlier, so the extracted files
  # may already be in $PWD itself — confirm which listing is intended.
  ls -lha "$PWD/wiki"
fi