warehouse-upload-to-storage-bucket.sh

#!/usr/bin/env bash
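#
# Moves new ledger fragments produced by `warehouse.sh` from
# ~/"$STORAGE_BUCKET".inbox into ~/"$STORAGE_BUCKET", uploads each fragment's
# rocksdb/ directory to BigTable (when GOOGLE_APPLICATION_CREDENTIALS is set)
# and compresses it, then mirrors ~/"$STORAGE_BUCKET" to gs://$STORAGE_BUCKET
# and removes the local copy.
#
# Usage: warehouse-upload-to-storage-bucket.sh <path-to-identity-keypair>
#
# The identity keypair is only used to derive the pubkey that tags the
# metrics datapoints emitted by this script.
#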
PATH_TO_IDENTITY_KEYPAIR=$1
set -x
set -e
shopt -s nullglob
here=$(dirname "$0")
panic() {
echo "error: $*" >&2
exit 1
}
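
# service-env.sh and service-env-warehouse-*.sh are expected to supply the
# deployment configuration, including STORAGE_BUCKET (and, if BigTable
# uploads are wanted, GOOGLE_APPLICATION_CREDENTIALS)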
#shellcheck source=/dev/null
source ~/service-env.sh
#shellcheck source=/dev/null
source ~/service-env-warehouse-*.sh
#shellcheck source=./configure-metrics.sh
source "$here"/configure-metrics.sh
if [[ -z $STORAGE_BUCKET ]]; then
  echo STORAGE_BUCKET environment variable not defined
  exit 1
fi
identity_keypair=$PATH_TO_IDENTITY_KEYPAIR
identity_pubkey=$(solana-keygen pubkey "$identity_keypair")
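
# Metrics helpers: emit a datapoint tagged with this node's identity pubkey.
# $metricsWriteDatapoint is expected to be provided by configure-metrics.sh,
# sourced above.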
datapoint_error() {
  declare event=$1
  declare args=$2

  declare comma=
  if [[ -n $args ]]; then
    comma=,
  fi

  $metricsWriteDatapoint "infra-warehouse-node,host_id=$identity_pubkey error=1,event=\"$event\"$comma$args"
}
datapoint() {
  declare event=$1
  declare args=$2

  declare comma=
  if [[ -n $args ]]; then
    comma=,
  fi

  $metricsWriteDatapoint "infra-warehouse-node,host_id=$identity_pubkey error=0,event=\"$event\"$comma$args"
}
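
# Main loop: stage new ledger fragments, upload/compress their rocksdb/
# directories, then rsync everything to the storage bucket and remove the
# local copy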
while true; do
  # Look for new ledger fragments from `warehouse.sh`
  for new_ledger in ~/"$STORAGE_BUCKET".inbox/*; do
    mkdir -p ~/"$STORAGE_BUCKET"
    mv "$new_ledger" ~/"$STORAGE_BUCKET"
  done

  # Check for rocksdb/ directories and compress them
  for rocksdb in ~/"$STORAGE_BUCKET"/*/rocksdb; do
    SECONDS=
    (
      cd "$(dirname "$rocksdb")"
      declare archive_dir=$PWD
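
      # Upload the ledger to BigTable at most once per fragment; a .bigtable
      # marker file records that the upload already succeeded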
      if [[ -n $GOOGLE_APPLICATION_CREDENTIALS ]]; then
        if [[ ! -f "$archive_dir"/.bigtable ]]; then
          echo "Uploading $archive_dir to BigTable"
          SECONDS=
          while ! timeout 48h solana-ledger-tool --ledger "$archive_dir" bigtable upload; do
            echo "bigtable upload failed..."
            datapoint_error bigtable-upload-failure
            sleep 30
          done
          touch "$archive_dir"/.bigtable
          echo Ledger upload to bigtable took $SECONDS seconds
          datapoint bigtable-upload-complete "duration_secs=$SECONDS"
        fi
      fi
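
      # Replace the rocksdb/ directory with a compressed archive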
echo "Creating rocksdb.tar.bz2 in $archive_dir"
rm -rf rocksdb.tar.bz2
tar jcf rocksdb.tar.bz2 rocksdb
rm -rf rocksdb
echo "$archive_dir/rocksdb.tar.bz2 created in $SECONDS seconds"
)
datapoint created-rocksdb-tar-bz2 "duration_secs=$SECONDS"
done

  if [[ ! -d ~/"$STORAGE_BUCKET" ]]; then
    echo "Nothing to upload, ~/$STORAGE_BUCKET does not exist"
    sleep 60m
    continue
  fi
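
  # Make sure no gsutil processes left over from a previous iteration are
  # still running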
  killall gsutil || true

  SECONDS=
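  # Mirror the staging directory into the storage bucket, retrying until the
  # rsync succeeds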
  (
    # export GOOGLE_APPLICATION_CREDENTIALS=
    set -x
    while ! timeout 8h gsutil -m rsync -r ~/"$STORAGE_BUCKET" gs://"$STORAGE_BUCKET"/; do
      echo "gsutil rsync failed..."
      datapoint_error gsutil-rsync-failure
      sleep 30
    done
  )
  echo Ledger upload to storage bucket took $SECONDS seconds
  datapoint ledger-upload-complete "duration_secs=$SECONDS"
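
  # The ledger has been synced to the bucket; drop the local staging copy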
  rm -rf ~/"$STORAGE_BUCKET"
done