-
Notifications
You must be signed in to change notification settings - Fork 35
/
Copy pathload.py
164 lines (145 loc) · 5.45 KB
/
load.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
import boto3
import os
import csv
import time
import copy
import argparse
# Single-table design: every Northwind entity type is stored in this one table.
tablename = "northwind"
# NOTE(review): region is hard-coded to us-east-2 — confirm this matches deployment.
dynamodb = boto3.resource('dynamodb', region_name="us-east-2")
def setup():
    """Create the Northwind single-table DynamoDB table and block until it exists.

    The table uses generic partition/sort keys (pk, sk) plus a 'data' attribute
    that feeds the gsi_1 global secondary index (sk as hash, data as range),
    enabling adjacency-list queries in both directions.

    Raises:
        botocore.exceptions.ClientError: if the table already exists or the
            caller lacks permission.
    """
    table = dynamodb.create_table(
        TableName=tablename,
        KeySchema=[
            {
                'AttributeName': 'pk',
                'KeyType': 'HASH'
            },
            {
                'AttributeName': 'sk',
                'KeyType': 'RANGE'
            },
        ],
        AttributeDefinitions=[
            {
                'AttributeName': 'pk',
                'AttributeType': 'S'
            },
            {
                'AttributeName': 'sk',
                'AttributeType': 'S'
            },
            {
                'AttributeName': 'data',
                'AttributeType': 'S'
            }
        ],
        GlobalSecondaryIndexes=[
            {
                'IndexName': 'gsi_1',
                'KeySchema': [
                    {
                        'AttributeName': 'sk',
                        'KeyType': 'HASH'
                    },
                    {
                        'AttributeName': 'data',
                        'KeyType': 'RANGE'
                    },
                ],
                'Projection': {
                    'ProjectionType': 'ALL'
                }
            },
        ],
        BillingMode='PAY_PER_REQUEST'
    )
    # Use the boto3 waiter instead of a fixed sleep(60): it polls until the
    # table is ACTIVE, so we neither return too early nor wait longer than needed.
    print("waiting for table to create...")
    table.wait_until_exists()
def teardown():
    """Delete the Northwind DynamoDB table. All stored data is lost."""
    dynamodb.Table(tablename).delete()
def ddb_batch_write(items):
    """Write one batch of put requests (at most 25) to the table.

    Args:
        items: list of {'PutRequest': {'Item': ...}} request dicts.

    DynamoDB may return UnprocessedItems when throttled; the original code
    ignored that field and silently dropped those writes. Retry them here
    until the batch is fully written.
    """
    response = dynamodb.batch_write_item(
        RequestItems={tablename: items}
    )
    while response.get('UnprocessedItems'):
        # brief pause before retrying to let provisioned throughput recover
        time.sleep(1)
        response = dynamodb.batch_write_item(
            RequestItems=response['UnprocessedItems']
        )
def load_dynamo_data(data):
    """Write all items to DynamoDB in batches of 25.

    Args:
        data: list of {'PutRequest': ...} dicts (may be empty; nothing is
            written in that case).

    The original sliced 24 items per batch even though batch_write_item
    accepts 25 (and the loop guard compared against 25) — an off-by-one that
    cost one extra request per ~24 items. Use the full 25-item limit.
    """
    for start in range(0, len(data), 25):
        ddb_batch_write(data[start:start + 25])
def load_csv(filename):
    """Read a CSV file and return its rows as a list of dicts keyed by header.

    Args:
        filename: path to a CSV file with a header row.

    Returns:
        list of dicts, one per data row; empty list for a header-only file.
    """
    # newline='' is required by the csv module docs so quoted fields that
    # contain newlines are parsed correctly.
    with open(filename, 'r', newline='') as f:
        return list(csv.DictReader(f))
def load_csvs():
    """Load the eight Northwind CSV files and namespace their ID columns.

    Numeric IDs from different entity types would collide in a single table,
    so each ID is prefixed with its entity name (e.g. '7' -> 'products#7').

    Returns:
        Tuple of row lists: (categories, customers, employees, orders,
        order_details, products, shippers, suppliers).
    """
    categories = load_csv("csv/categories.csv")
    customers = load_csv("csv/customers.csv")
    employees = load_csv("csv/employees.csv")
    orders = load_csv("csv/orders.csv")
    order_details = load_csv("csv/order_details.csv")
    products = load_csv("csv/products.csv")
    shippers = load_csv("csv/shippers.csv")
    suppliers = load_csv("csv/suppliers.csv")

    def _prefix(rows, column, label):
        # In-place: prepend the entity label to every value in the column.
        for record in rows:
            record[column] = label + record[column]

    _prefix(categories, "categoryID", "categories#")
    _prefix(suppliers, "supplierID", "suppliers#")
    _prefix(shippers, "shipperID", "shippers#")
    _prefix(products, "productID", "products#")
    _prefix(products, "supplierID", "suppliers#")
    _prefix(products, "categoryID", "categories#")
    _prefix(orders, "employeeID", "employees#")
    _prefix(orders, "shipVia", "shippers#")
    _prefix(order_details, "productID", "products#")
    _prefix(employees, "employeeID", "employees#")
    for record in employees:
        # reportsTo is optional (top of the org chart) — only prefix real refs.
        if record["reportsTo"] and record["reportsTo"] != "NULL":
            record["reportsTo"] = "employees#" + record["reportsTo"]
    return categories, customers, employees, orders, order_details, products, shippers, suppliers
def build_node_list(node_rows, pk, sk, gs1_sk):
    """Convert raw CSV rows into DynamoDB PutRequest items for this entity.

    Args:
        node_rows: list of row dicts (not mutated; each row is deep-copied).
        pk: column name promoted to the table partition key 'pk'.
        sk: column name promoted to the table sort key 'sk'.
        gs1_sk: '#'-delimited column spec for the 'data' attribute (gsi_1 range key).

    Returns:
        list of {'PutRequest': {'Item': ...}} dicts, one per input row.

    If a named column is absent from a row, the column name itself is used as
    the value — this is how literal markers like 'PRODUCT' or 'ORDER' become
    the sort key.
    """
    requests = []
    for source_row in node_rows:
        item = copy.deepcopy(source_row)
        item["pk"] = item.pop(pk, pk)
        item["sk"] = item.pop(sk, sk)
        item["data"] = build_composite_sort_key(item, gs1_sk)
        requests.append({'PutRequest': {'Item': item}})
    return requests
def build_composite_sort_key(row, keyname):
    """Build a '#'-joined composite key from the columns named in keyname.

    Args:
        row: dict of column values; the named columns are popped (removed)
            from it as a side effect.
        keyname: '#'-delimited column names, e.g. 'country#region#city'.

    Returns:
        The popped values joined with '#'; a column missing from row
        contributes its own name as a literal segment.
    """
    parts = []
    for field in keyname.split("#"):
        parts.append(row.pop(field, field))
    return "#".join(parts)
def build_adjacency_lists(categories, customers, employees, orders, order_details, products, shippers, suppliers):
    """Build the full list of PutRequest items for every Northwind entity.

    Each tuple below is (rows, pk column, sk column-or-literal, gsi_1 sort-key
    spec); literals like 'PRODUCT'/'ORDER'/'SUPPLIER' become constant sort
    keys because build_node_list falls back to the name when the column is
    missing from the row.

    Returns:
        One combined list of {'PutRequest': ...} dicts, in entity order.
    """
    specs = (
        (categories, "categoryID", "categoryName", "description"),
        (customers, "customerID", "contactName", "country#region#city#address"),
        (shippers, "shipperID", "companyName", "phone"),
        (suppliers, "supplierID", "SUPPLIER", "country#region#city#address"),
        (employees, "employeeID", "reportsTo", "hireDate"),
        (orders, "orderID", "ORDER", "orderDate"),
        (products, "productID", "PRODUCT", "discontinued"),
        (order_details, "orderID", "productID", "unitPrice"),
    )
    adjacency_lists = []
    for rows, pk, sk, gsi_sk in specs:
        adjacency_lists += build_node_list(rows, pk, sk, gsi_sk)
    return adjacency_lists
def handler():
    """Command-line entry point.

    --teardown deletes the table and exits; otherwise optionally creates the
    table (--setup), then loads all CSV data into DynamoDB.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--setup", help="create DynamoDB table before loading data", action='store_true')
    parser.add_argument("--teardown", help="delete DynamoDB table", action='store_true')
    args = parser.parse_args()

    if args.teardown:
        # teardown is exclusive: never load data in the same run
        teardown()
        return
    if args.setup:
        setup()
    loaded = load_csvs()
    load_dynamo_data(build_adjacency_lists(*loaded))


if __name__ == "__main__":
    handler()