-
Notifications
You must be signed in to change notification settings - Fork 100
Expand file tree
/
Copy pathfunction_cloudflare.py
More file actions
56 lines (42 loc) · 1.6 KB
/
function_cloudflare.py
File metadata and controls
56 lines (42 loc) · 1.6 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import datetime
import os
from pyodide.ffi import run_sync
from pyodide.http import pyfetch
from . import storage
# Storage client shared by all invocations of this worker
# (get_instance() presumably returns a module-level singleton — verify in `storage`).
client = storage.storage.get_instance()
# Custom User-Agent sent with every benchmark download so the traffic is
# identifiable as SeBS benchmark activity by the target servers.
SEBS_USER_AGENT = "SeBS/1.2 (https://github.com/spcl/serverless-benchmarks) SeBS Benchmark Suite/1.2"
async def do_request(url, download_path):
    """Download *url* via the Workers fetch API and write the body to *download_path*.

    Uses the SeBS User-Agent so the request is identifiable as benchmark traffic.

    Raises:
        OSError: if the server responds with an HTTP error status
            (``raise_for_status``) or the file cannot be written.
    """
    headers = {'User-Agent': SEBS_USER_AGENT}
    res = await pyfetch(url, headers=headers)
    # Fail fast on 4xx/5xx responses: without this check an HTML error page
    # would be silently persisted (and later uploaded) as the downloaded object.
    res.raise_for_status()
    bs = await res.bytes()
    with open(download_path, 'wb') as f:
        f.write(bs)
def handler(event):
    """Benchmark entry point: fetch a URL, then re-upload the file to storage.

    Expects ``event`` to carry ``bucket.bucket``, ``bucket.output`` and
    ``object.url``. Returns a dict with the uploaded object's location under
    ``result`` and timing figures (in microseconds) under ``measurement``.
    """
    bucket_cfg = event.get('bucket')
    bucket = bucket_cfg.get('bucket')
    output_prefix = bucket_cfg.get('output')
    url = event.get('object').get('url')

    name = os.path.basename(url)
    download_path = '/tmp/{}'.format(name)

    # Time the download (run synchronously on the worker's event loop).
    process_begin = datetime.datetime.now()
    run_sync(do_request(url, download_path))
    size = os.path.getsize(download_path)
    process_end = datetime.datetime.now()

    # Time the upload to benchmark storage separately.
    upload_begin = datetime.datetime.now()
    key_name = client.upload(bucket, os.path.join(output_prefix, name), download_path)
    upload_end = datetime.datetime.now()

    # Dividing a timedelta by a 1-microsecond timedelta yields a float count
    # of microseconds.
    one_us = datetime.timedelta(microseconds=1)
    process_time = (process_end - process_begin) / one_us
    upload_time = (upload_end - upload_begin) / one_us

    return {
        'result': {
            'bucket': bucket,
            'url': url,
            'key': key_name,
        },
        'measurement': {
            # NOTE(review): download metrics are hard-coded to 0 here; the
            # download cost appears to be folded into compute_time — confirm
            # against the other platform variants of this benchmark.
            'download_time': 0,
            'download_size': 0,
            'upload_time': upload_time,
            'upload_size': size,
            'compute_time': process_time,
        },
    }