forked from ray-project/ray
-
Notifications
You must be signed in to change notification settings - Fork 0
/
copy_files.py
127 lines (104 loc) · 3.48 KB
/
copy_files.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
import argparse
import functools
import os
import subprocess
import sys
import time
from collections import OrderedDict

import requests
from aws_requests_auth.boto_utils import BotoAWSRequestsAuth
def retry(f):
    """Decorator: retry *f* up to 5 times while it returns an HTTP 5xx.

    Expects *f* to return a ``requests.Response``-like object exposing
    ``status_code`` and ``text``.  Sleeps 5 seconds between attempts and
    terminates the process (exit code 1) if every attempt fails.
    """

    @functools.wraps(f)  # preserve __name__/__doc__ of the wrapped function
    def inner(*args, **kwargs):
        for _ in range(5):
            resp = f(*args, **kwargs)
            print("Getting Presigned URL, status_code", resp.status_code)
            if resp.status_code < 500:
                return resp
            print("errored, retrying...")
            print(resp.text)
            time.sleep(5)
        # Reaching here means all 5 attempts returned a server error.
        print("still errored after many retries")
        sys.exit(1)

    return inner
@retry
def perform_auth():
    """Request the presigned upload payload for the current Buildkite job.

    Signs the GET with SigV4 using the ambient AWS credentials and returns
    the raw response; ``@retry`` re-issues the call on 5xx answers.
    """
    signer = BotoAWSRequestsAuth(
        aws_host="vop4ss7n22.execute-api.us-west-2.amazonaws.com",
        aws_region="us-west-2",
        aws_service="execute-api",
    )
    job_params = {"job_id": os.environ["BUILDKITE_JOB_ID"]}
    return requests.get(
        "https://vop4ss7n22.execute-api.us-west-2.amazonaws.com/endpoint/",
        auth=signer,
        params=job_params,
    )
def handle_docker_login(resp):
    """Log into Docker Hub as ``raytravisbot`` using the password in *resp*.

    *resp* is the auth-service response; its JSON body must contain a
    ``docker_password`` field.  The password is piped to ``docker login``
    via ``--password-stdin`` so it never appears in the process list or
    shell history (unlike the former ``--password <pwd>`` form).

    Raises:
        subprocess.CalledProcessError: if the docker login fails.
    """
    pwd = resp.json()["docker_password"]
    subprocess.run(
        ["docker", "login", "--username", "raytravisbot", "--password-stdin"],
        input=pwd.encode(),
        check=True,
    )
def gather_paths(dir_path):
    """Return the list of local file paths to upload for *dir_path*.

    *dir_path* may use ``/`` separators on any platform; they are
    normalized to ``os.path.sep`` first.  A directory yields its immediate
    entries (non-recursive); anything else yields a one-element list.

    Raises:
        FileNotFoundError: if the normalized path does not exist.
        (The original used ``assert``, which is stripped under ``-O``.)
    """
    dir_path = dir_path.replace("/", os.path.sep)
    if not os.path.exists(dir_path):
        raise FileNotFoundError(f"upload path does not exist: {dir_path}")
    if os.path.isdir(dir_path):
        return [os.path.join(dir_path, name) for name in os.listdir(dir_path)]
    return [dir_path]
# Maps the --destination CLI value to the key in the auth service's JSON
# response whose value holds the upload target (presumably an S3
# presigned-POST payload with "url" and "fields" — see upload_paths).
# All wheel/jar destinations share one payload; bazel logs use another.
dest_resp_mapping = {
    "wheels": "presigned_resp_prod_wheels",
    "branch_wheels": "presigned_resp_prod_wheels",
    "jars": "presigned_resp_prod_wheels",
    "branch_jars": "presigned_resp_prod_wheels",
    "logs": "presigned_logs",
}
def upload_paths(paths, resp, destination):
    """Upload each local file in *paths* using the presigned POST in *resp*.

    Args:
        paths: local file paths to upload (from ``gather_paths``).
        resp: auth-service response; ``resp.json()[dest_key]`` must contain
            the POST target ``url`` and the form ``fields`` to submit.
        destination: a key of ``dest_resp_mapping``; selects both the
            presigned payload and the object-key naming scheme below.
    """
    dest_key = dest_resp_mapping[destination]
    c = resp.json()[dest_key]
    sha = os.environ["BUILDKITE_COMMIT"]
    branch = os.environ["BUILDKITE_BRANCH"]
    bk_job_id = os.environ["BUILDKITE_JOB_ID"]
    current_os = sys.platform
    for path in paths:
        fn = os.path.split(path)[-1]
        # Build a fresh form dict per file: the original mutated one shared
        # OrderedDict, carrying stale "key"/"file" entries across iterations.
        of = OrderedDict(c["fields"])
        of["key"] = {
            "wheels": f"latest/{fn}",
            "branch_wheels": f"{branch}/{sha}/{fn}",
            "jars": f"jars/latest/{current_os}/{fn}",
            "branch_jars": f"jars/{branch}/{sha}/{current_os}/{fn}",
            "logs": f"bazel_events/{branch}/{sha}/{bk_job_id}/{fn}",
        }[destination]
        # Context manager closes the handle; the original leaked one open
        # file descriptor per uploaded file.
        with open(path, "rb") as fh:
            of["file"] = fh
            r = requests.post(c["url"], files=of)
        print(f"Uploaded {path} to {of['key']}", r.status_code)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Helper script to upload files to S3 bucket"
)
parser.add_argument("--path", type=str, required=False)
parser.add_argument("--destination", type=str)
args = parser.parse_args()
if os.environ.get("RAYCI_SKIP_UPLOAD", "false") == "true":
print("Skipping upload.")
sys.exit(0)
assert args.destination in {
"branch_jars",
"branch_wheels",
"jars",
"logs",
"wheels",
"docker_login",
}
assert "BUILDKITE_JOB_ID" in os.environ
assert "BUILDKITE_COMMIT" in os.environ
resp = perform_auth()
if args.destination == "docker_login":
handle_docker_login(resp)
else:
paths = gather_paths(args.path)
print("Planning to upload", paths)
upload_paths(paths, resp, args.destination)