-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsetup-localstack.py
More file actions
executable file
·133 lines (94 loc) · 4.61 KB
/
setup-localstack.py
File metadata and controls
executable file
·133 lines (94 loc) · 4.61 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
#!/usr/bin/env python
"""Set up a localstack environment for DistrictGraphs: Lambda, API Gateway, S3."""
import sys, argparse, boto3, glob, socket, posixpath as pp
import os, time, copy
import botocore.exceptions
import deploy

# Baseline configuration shared by every published Lambda function.
common = {'Runtime': 'python3.6', 'Environment': {'Variables': {}}}

# Attach a dead-letter queue only when one is configured in the environment.
dlq_arn = os.environ.get('AWS_LAMBDA_DLQ_ARN')
if dlq_arn is not None:
    common['DeadLetterConfig'] = {'TargetArn': dlq_arn}

parser = argparse.ArgumentParser(description='Set up localstack environment.')
parser.add_argument('code_path', help='Path to Lambda code zip file')
arguments = parser.parse_args()

# Names of things: localstack service endpoints resolved from this host's address.
host_address = socket.gethostbyname(socket.gethostname())
BUCKETNAME = 'districtgraphs' # 'planscore'
ENDPOINT_S3 = f'http://{host_address}:4572'
ENDPOINT_LAM = f'http://{host_address}:4574'
ENDPOINT_API = f'http://{host_address}:4567'
AWS_CREDS = dict(aws_access_key_id='nobody', aws_secret_access_key='nothing')
CODE_PATH = arguments.code_path
# Lambda function setup: publish each handler from the same code bundle.
print('--> Set up Lambda', ENDPOINT_LAM)
lam = boto3.client('lambda', endpoint_url=ENDPOINT_LAM, region_name='us-east-1', **AWS_CREDS)

# Environment handed to every function so it talks back to localstack
# instead of real AWS endpoints.
env = {
    'S3_ENDPOINT_URL': ENDPOINT_S3,
    'LAMBDA_ENDPOINT_URL': ENDPOINT_LAM,
}
print('    Environment:', ' '.join('='.join(pair) for pair in env.items()))

# Publish all three functions in declaration order; ARNs are consumed below.
function_arn1, function_arn2, function_arn3 = [
    deploy.publish_function(lam, function_name, CODE_PATH, env, 'nobody')
    for function_name in (
        'DistrictGraphs-upload_file',
        'DistrictGraphs-read_file',
        'DistrictGraphs-build_district',
    )
]
# API Gateway setup: route the upload/read functions through the REST API,
# then deploy each distinct API id exactly once (hence the set).
print('--> Set up API Gateway', ENDPOINT_API)
api = boto3.client('apigateway', endpoint_url=ENDPOINT_API, region_name='us-east-1', **AWS_CREDS)

api_targets = (
    (function_arn1, 'DistrictGraphs-upload_file'),
    (function_arn2, 'DistrictGraphs-read_file'),
)
rest_api_ids = {
    deploy.update_api(api, 'DistrictGraphs', arn, name, 'Nobody')
    for (arn, name) in api_targets
}
for rest_api_id in rest_api_ids:
    deploy.deploy_api(api, rest_api_id)
# S3 Bucket setup: create the graphs bucket and seed it with a pickled graph.
print('--> Set up S3', ENDPOINT_S3)
s3 = boto3.client('s3', endpoint_url=ENDPOINT_S3, **AWS_CREDS)
print('    Create bucket', BUCKETNAME)
s3.create_bucket(Bucket=BUCKETNAME)
with open('madison3.pickle', 'rb') as file:
    print(f'    Put object graphs/{file.name}')
    data = file.read()
    # The same graph body is stored twice: under its own filename and under
    # the census tabblock key (state 55 = Wisconsin) that readers look up.
    # NOTE(review): 'application/binary' is not a registered MIME type
    # ('application/octet-stream' is conventional) — kept as-is in case a
    # consumer matches on the existing value; confirm before changing.
    s3.put_object(Bucket=BUCKETNAME, Key=f'graphs/{file.name}',
        ACL='public-read', Body=data, ContentType='application/binary')
    s3.put_object(Bucket=BUCKETNAME, Key='graphs/55/55025-tabblock.pickle',
        ACL='public-read', Body=data, ContentType='application/binary')
# Deliberate early stop: everything after this point is leftover scaffolding.
# Use sys.exit() rather than the interactive-only exit() helper, which is
# injected by the site module and may be missing (e.g. under `python -S`).
sys.exit()
# NOTE(review): everything from here to the end of the file is unreachable —
# the script exits unconditionally just above. It appears to be leftover
# PlanScore fixture-loading scaffolding kept for reference; it never runs.
with open('tl_2018_55_tabblock10.shp.pickle.gz', 'rb') as file:
    print(f'    Put object graphs/{file.name}')
    data = file.read()
    s3.put_object(Bucket=BUCKETNAME, Key=f'graphs/{file.name}',
        ACL='public-read', Body=data, ContentType='application/binary',
        ContentEncoding='gzip')
exit()
# NOTE(review): upload() is not defined anywhere in this file — if the exit()
# calls above were ever removed, every call below would raise NameError.
# Confirm the intended helper (presumably a glob-driven S3 uploader) before
# re-enabling this section.
prefix1 = pp.join('data', 'XX', '001')
basedir1 = pp.join(pp.dirname(__file__), 'planscore', 'tests', 'data', 'XX')
upload(prefix1, basedir1, pp.join(basedir1, '12', '*', '*.geojson'))
prefix2 = pp.join('uploads', 'sample-NC-1-992')
basedir2 = pp.join(pp.dirname(__file__), 'data', 'sample-NC-1-992')
upload(prefix2, basedir2, pp.join(basedir2, '*.*'))
upload(prefix2, basedir2, pp.join(basedir2, '*', '*.*'))
prefix3 = pp.join('uploads', 'sample-NC-1-992-simple')
basedir3 = pp.join(pp.dirname(__file__), 'data', 'sample-NC-1-992-simple')
upload(prefix3, basedir3, pp.join(basedir3, '*.*'))
upload(prefix3, basedir3, pp.join(basedir3, '*', '*.*'))
prefix4 = pp.join('uploads', 'sample-NC-1-992-incomplete')
basedir4 = pp.join(pp.dirname(__file__), 'data', 'sample-NC-1-992-incomplete')
upload(prefix4, basedir4, pp.join(basedir4, '*.*'))
prefix5 = pp.join('data', 'XX', '003')
basedir5 = pp.join(pp.dirname(__file__), 'planscore', 'tests', 'data', 'XX-sim')
upload(prefix5, basedir5, pp.join(basedir5, '12', '*', '*.geojson'))
prefix6 = pp.join('uploads', 'sample-NC5.1')
basedir6 = pp.join(pp.dirname(__file__), 'data', 'sample-NC5.1')
upload(prefix6, basedir6, pp.join(basedir6, '*.*'))
upload(prefix6, basedir6, pp.join(basedir6, '*', '*.*'))
# Lambda function setup (second pass, also unreachable): republishes every
# function listed in deploy.functions with a PlanScore-specific environment.
print('--> Set up Lambda', ENDPOINT_LAM)
lam = boto3.client('lambda', endpoint_url=ENDPOINT_LAM, region_name='us-east-1', **AWS_CREDS)
env = {
    'PLANSCORE_SECRET': 'localstack',
    'WEBSITE_BASE': 'http://127.0.0.1:5000/',
    'S3_ENDPOINT_URL': ENDPOINT_S3,
    'LAMBDA_ENDPOINT_URL': ENDPOINT_LAM,
}
print('    Environment:', ' '.join(['='.join(kv) for kv in env.items()]))
for function_name in deploy.functions.keys():
    deploy.publish_function(lam, function_name, CODE_PATH, env, 'nobody')