forked from Seagate/cortx
-
Notifications
You must be signed in to change notification settings - Fork 0
/
weekly_report.sh
executable file
·118 lines (95 loc) · 4.01 KB
/
weekly_report.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
#! /bin/bash
# Weekly CORTX community metrics driver: scrapes data sources and mails reports.
#
# This historically ran on Windows Subsystem for Linux where mail was flakey
# ('sudo service postfix status' may be needed). It was migrated to ssc-vm,
# but cron there doesn't load the env vars we need, so manually source the
# .bashrc (probably not the right way to do this, but it works).
. ~/.bashrc

# Resolve the directory containing this script so the relative ./script.py
# paths below work regardless of cron's working directory. (Historically this
# only worked when the script was called with its full path.)
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd "$DIR" || exit 1

# Mail subject prefixes and sender/recipient address.
mail_verbose_prefix="Weekly CORTX Verbose Reports"
mail_scrape_prefix="Weekly CORTX Scraping"
mail_subj_prefix="Weekly CORTX Community Report"
#mail_subj_prefix="TESTING COMMUNITY METRICS" # use this for testing
Email="[email protected]"
server=gtx201.nrm.minn.seagate.com

# start with a git pull in case things were updated elsewhere
git pull

# Use the command line to control what runs. By default, both the scrape and
# the report happen; pass 'noscrape' or 'noreport' to skip one phase.
scrape=1
report=1
if [ "$1" == "noscrape" ]; then
  scrape=0
elif [ "$1" == "noreport" ]; then
  report=0
fi
# Run a command, capture all of its output (stdout+stderr), and mail it.
# Arguments:
#   $1 - command line to run; deliberately expanded unquoted below so that a
#        string like "./scrape_metrics.py -t Ceph" word-splits into program
#        plus arguments
#   $2 - mail subject line
#   $3 - sender/recipient email address
function run_command {
  local Command=$1
  local subject=$2
  local Email=$3
  echo "Command $Command , subj $subject , email $Email"
  local tfile
  tfile=$(mktemp /tmp/cortx_community.XXXXXXXXX.txt)
  # Intentionally unquoted: $Command carries the program and its flags.
  $Command &> "$tfile"
  mail -s "$subject" -r "$Email" "$Email" < "$tfile"
  rm -f -- "$tfile"   # don't leak temp files in /tmp on every weekly run
}
# Mail the weekly personal-activity report for a group of contributors.
# Globals read: mail_verbose_prefix, Email (set at the top of this script).
# Arguments:
#   $1 - group selector passed through to get_personal_activity.py
#   $2 - human-readable group name used in the mail subject
function group_activity {
  local group=$1
  local gname=$2
  local tfile
  tfile=$(mktemp /tmp/cortx_community.XXXXXXXXX)
  ./get_personal_activity.py "$group" -w > "$tfile"
  mail -s "$mail_verbose_prefix - $gname Activity" -r "$Email" "$Email" < "$tfile"
  rm -f -- "$tfile"   # clean up the temp file after it has been mailed
}
# Publish a generated html report: copy it to the web server's 'latest'
# directory and also archive a date-stamped copy in a per-report directory.
# Globals read: server (set at the top of this script).
# Arguments:
#   $1 - report basename; the freshly generated /tmp/<report>*html is used
#        (old copies were removed before nbconvert ran)
#   $2 - remote archive directory under public_html for the dated copy
function scp_report {
  local report=$1
  local directory=$2
  local src ts base src2
  src=$(ls /tmp/"$report"*html)
  ts=$(date +%Y-%m-%d)
  base=$(basename "$src" .html)
  src2=/tmp/$base.$ts.html
  scp "$src" 535110@"$server":/home/535110/public_html/latest
  cp "$src" "$src2"
  scp "$src2" 535110@"$server":/home/535110/public_html/"$directory"
}
# ---------------------------------------------------------------------------
# Scrape phase: pull fresh data from Slack, the GitHub projects boards, and
# GitHub metrics for CORTX plus comparable projects, mailing each tool's
# output, then commit the resulting pickle files.
# ---------------------------------------------------------------------------
if [ "$scrape" == 1 ]; then
  echo "Doing scrape"
  run_command "./scrape_slack.py" "$mail_scrape_prefix - Slack" "$Email"
  run_command "./scrape_projects.py -v" "$mail_scrape_prefix - Projects" "$Email"
  run_command "./scrape_metrics.py CORTX" "$mail_scrape_prefix - Github" "$Email"
  for p in 'Ceph' 'MinIO' 'DAOS' 'Swift' 'OpenIO' 'ECS'
  do
    run_command "./scrape_metrics.py -t $p" "$mail_scrape_prefix - $p Github" "$Email"
  done
  ./commit_pickles.sh | mail -s "Weekly Pickle Commit for CORTX Community" -r "$Email" "$Email"
fi
# ---------------------------------------------------------------------------
# Report phase: mail per-group activity reports, render the jupyter notebooks
# to html/slides and publish them via scp_report, then mail summary stats
# with the full metrics attached as CSV.
# ---------------------------------------------------------------------------
if [ "$report" == 1 ]; then
  echo "Doing report"
  ts=$(date +%Y-%m-%d)

  # mail activity reports
  for group in 'EU R&D' Innersource External Unknown
  do
    group_activity "$group" "$group"
  done
  group_activity 'VenkyOS,johnbent,justinzw,TechWriter-Mayur,hessio,Saumya-Sunder,novium258' 'Open Source Team'

  # Shared nbconvert flags; left unquoted at use sites on purpose so the
  # string word-splits into individual flags.
  jupyter_args="--ExecutePreprocessor.timeout=180 --output-dir=/tmp --no-input"
  /bin/rm -rf /tmp/CORTX_Metrics_* # clean up any old output first

  exec_report=CORTX_Metrics_Topline_Report
  jupyter nbconvert --execute --to slides --SlidesExporter.reveal_theme=serif --SlidesExporter.reveal_scroll=True $jupyter_args --output "$exec_report" "$exec_report.ipynb"
  scp_report "$exec_report" exec_reports

  cc_report=CORTX_Metrics_Community_Activity
  jupyter nbconvert --execute --to html $jupyter_args --output "$cc_report" "$cc_report.ipynb"
  scp_report "$cc_report" community_reports

  bulk_report=CORTX_Metrics_Graphs
  jupyter nbconvert --execute --to html $jupyter_args --output "$bulk_report" "$bulk_report.ipynb"
  scp_report "$bulk_report" bulk_graphs

  compare_report=CORTX_Metrics_Compare_Projects
  jupyter nbconvert --execute --to html $jupyter_args --output "$compare_report" "$compare_report.ipynb"
  scp_report "$compare_report" compare_projects

  # mail the metrics as a CSV attachment, with the summary in the mail body
  tfile="/tmp/cortx_community_stats.$ts.csv"
  tfile2="/tmp/cortx_community_stats.$ts.txt"
  printf "Weekly autogenerated reports are available at http://gtx201.nrm.minn.seagate.com/~535110/. Enjoy!\n\nSummary Stats Also Below and attached as CSV.\n" > "$tfile2"
  ./print_metrics.py -c -a -s | grep -v '^Statistics' > "$tfile"
  ./print_metrics.py >> "$tfile2"
  mail -s "$mail_subj_prefix - Report Available Plus Summary plus Attached CSV" -r "$Email" -a "$tfile" "$Email" < "$tfile2"
fi
exit