-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathstrassen-unit-intermediates.py
151 lines (118 loc) · 3.99 KB
/
strassen-unit-intermediates.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
import boto3
import json
import pickle
import time
from collections import namedtuple
import aws
from split import Split
### NUMPY, SCIPY, SKLEARN MAGIC
# Pre-load the shared libraries bundled under ./lib via ctypes before
# importing numpy (presumably native BLAS/LAPACK deps missing from the
# Lambda runtime — TODO confirm against the deployment package).
import os
import ctypes
import platform
if platform.system() != 'Darwin': # don't do this on my local machine
    for d, _, files in os.walk('lib'):
        for f in files:
            if f.endswith('.a'):
                # static archives cannot be dlopen'ed; skip them
                continue
            ctypes.cdll.LoadLibrary(os.path.join(d, f))
import numpy as np
### NUMPY, SCIPY, SKLEARN MAGIC END
# Deployment tag echoed in every handler response (see execute()).
deploy_nr = 'INT202'
s3_client = boto3.client('s3')
# Milliseconds spent in the most recent upload_to_s3() call. Module-level
# so execute() can include it in the returned time profile; upload_to_s3()
# resets it to 0 before each upload.
s3_upload_time = 0
# HANDLERS
# Sample input event for the intermediate_* handlers below: matA/matB
# describe the block-partitioned input matrices in S3, result the output
# location, and split/unit select which Strassen sub-problem to compute.
'''
{
    "state-machine-name": "v4_to10_bs4k-8kx8k",
    "executionName": "v4_to10_bs4k-8kx8k",
    "matA": {
        "bucket": "jmue-multiplication-benchmarks",
        "folder": "sq_8kx8k_bs2k",
        "block-size": 2000,
        "columns": 8000,
        "rows": 8000
    },
    "matB": {
        "bucket": "jmue-multiplication-benchmarks",
        "folder": "sq_8kx8k_bs2k-2",
        "block-size": 2000,
        "columns": 8000,
        "rows": 8000
    },
    "result": {
        "bucket": "jmue-multiplication-benchmarks",
        "folder": "sq_8kx8k_bs2k-result",
        "block-size": 2000,
        "split": {
            "x": 0,
            "y": 0
        }
    },
    "split": 0,
    "unit": 0,
    "split-size": 4000
}
'''
def intermediate_0(event, context):
    """Lambda entry point for Strassen intermediate m_0; delegates to execute()."""
    return execute(m_0, 0, event, context)
def intermediate_1(event, context):
    """Lambda entry point for Strassen intermediate m_1; delegates to execute()."""
    return execute(m_1, 1, event, context)
def intermediate_2(event, context):
    """Lambda entry point for Strassen intermediate m_2; delegates to execute()."""
    return execute(m_2, 2, event, context)
def intermediate_3(event, context):
    """Lambda entry point for Strassen intermediate m_3; delegates to execute()."""
    return execute(m_3, 3, event, context)
def intermediate_4(event, context):
    """Lambda entry point for Strassen intermediate m_4; delegates to execute()."""
    return execute(m_4, 4, event, context)
def intermediate_5(event, context):
    """Lambda entry point for Strassen intermediate m_5; delegates to execute()."""
    return execute(m_5, 5, event, context)
def intermediate_6(event, context):
    """Lambda entry point for Strassen intermediate m_6; delegates to execute()."""
    return execute(m_6, 6, event, context)
def execute(intermediate_method, intermediate_index, event, context):
    """Compute one Strassen intermediate product and upload it to S3.

    Builds the left/right input splits described by the event, applies
    `intermediate_method` to them, stores the product under a key that
    encodes split/unit/intermediate indices, and returns a summary dict
    whose time profile (all values in milliseconds) is derived from the
    Lambda context's remaining-time counter.
    """
    remaining_at_start = context.get_remaining_time_in_millis()

    result_split = Split(event['result'], event['result']['split'], event['split-size'])
    left_split = Split.left_inputsplit_for(event['matA'], result_split, event['unit'])
    right_split = Split.right_inputsplit_for(event['matB'], result_split, event['unit'])

    product = intermediate_method(left_split, right_split)

    s3_key = "S{}_U{}_m{}".format(event['split'], event['unit'], intermediate_index)
    upload_to_s3(product, event, context, key=s3_key)
    aws.cleanup_tmp()

    download_ms = left_split.s3_download_time + right_split.s3_download_time
    return {
        'intermediate': intermediate_index,
        'split': event['split'],
        'unit': event['unit'],
        'time-profile': {
            # s3_upload_time is the module global set by upload_to_s3() above
            's3-up': s3_upload_time,
            's3-down': download_ms,
            'execution': remaining_at_start - context.get_remaining_time_in_millis(),
            'lambda': 'intermediate'
        },
        'deploy-nr': deploy_nr,
        'remaining_time_at_exec_start': remaining_at_start
    }
# INTERMEDIATE METHODS
# x is left input split, y is the right input split
def m_0(x, y):
    """Strassen product M1 = (A11 + A22) · (B11 + B22)."""
    left = x.block(0, 0) + x.block(1, 1)
    right = y.block(0, 0) + y.block(1, 1)
    return left.dot(right)
def m_1(x, y):
    """Strassen product M2 = (A21 + A22) · B11."""
    left = x.block(1, 0) + x.block(1, 1)
    return left.dot(y.block(0, 0))
def m_2(x, y):
    """Strassen product M3 = A11 · (B12 - B22)."""
    right = y.block(0, 1) - y.block(1, 1)
    return x.block(0, 0).dot(right)
def m_3(x, y):
    """Strassen product M4 = A22 · (B21 - B11)."""
    right = y.block(1, 0) - y.block(0, 0)
    return x.block(1, 1).dot(right)
def m_4(x, y):
    """Strassen product M5 = (A11 + A12) · B22."""
    left = x.block(0, 0) + x.block(0, 1)
    return left.dot(y.block(1, 1))
def m_5(x, y):
    """Strassen product M6 = (A21 - A11) · (B11 + B12)."""
    left = x.block(1, 0) - x.block(0, 0)
    right = y.block(0, 0) + y.block(0, 1)
    return left.dot(right)
def m_6(x, y):
    """Strassen product M7 = (A12 - A22) · (B21 + B22)."""
    left = x.block(0, 1) - x.block(1, 1)
    right = y.block(1, 0) + y.block(1, 1)
    return left.dot(right)
def upload_to_s3(result, event, context, key):
    """Write an intermediate result block to the event's result S3 location.

    Records the elapsed upload time (in ms, measured via the Lambda
    context's remaining-time counter) in the module global
    `s3_upload_time`, which execute() reports in its time profile.
    The global is zeroed first so a failed upload leaves it at 0.
    """
    global s3_upload_time
    s3_upload_time = 0
    before = context.get_remaining_time_in_millis()
    target = event['result']
    aws.write_to_s3(
        data=result,
        bucket=target['bucket'],
        folder=target['folder'],
        key=key,
        s3_client=Split.s3_client
    )
    s3_upload_time = before - context.get_remaining_time_in_millis()