Commit 55aa5ab

Merge pull request #5 from becksteadn/build
change s3 upload process
scriptingislife authored Apr 6, 2019
2 parents da14714 + 4058963 commit 55aa5ab
Showing 2 changed files with 9 additions and 22 deletions.
src/lambda_function.py: 17 changes (7 additions & 10 deletions)
@@ -48,11 +48,10 @@ def lambda_handler(event, context):
DB_TABLE = os.environ.get('GLIMPSE_DB_TABLE')
pp = pprint.PrettyPrinter(indent=4)

print('*'*STAR_LEN)
print('[!] Getting Environment Variables')
print(f'Using S3 bucket "{BUCKET_NAME}"')
print(f' with path "{SCREENSHOT_DIR}""')
print(f'Using DynamoDB table "{DB_TABLE}""')
print('*'*STAR_LEN)

now = datetime.now()
timestamp = now.strftime("%Y-%m-%d %H:%M:%S")
@@ -75,24 +74,23 @@ def lambda_handler(event, context):
exists = True
db_data['timescanned'] = timestamp

s3 = S3(BUCKET_NAME)
s3_key = s3.get_key(remote_path)

# Don't update if update==false or the parameter doesn't exist
if 'update' not in event.keys() or str(event['update']).lower() != 'true':
# Don't force an update
if exists:
print('*'*STAR_LEN)
print('[!] Existing Data')
pp.pprint(return_data)
print('*'*STAR_LEN)
return return_data


glimpse = gd.GlimpseDriver()
try:
glimpse.driver.get(url)
glimpse.screenshot(local_path)
s3.upload_file(s3_key, local_path)

s3 = S3(BUCKET_NAME)
#s3_key = s3.get_key(remote_path)
s3.upload_file(local_path, remote_path)

db_data['effectiveurl'] = glimpse.driver.current_url
db_data['title'] = glimpse.driver.title
@@ -107,9 +105,8 @@ def lambda_handler(event, context):
# db_data['numscans'] = 1
db_data['numscans'] += 1

print('*'*STAR_LEN)
print('[!] Adding New Data')
pp.pprint(db_data)
print('*'*STAR_LEN)

db.put(db_data)

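Taken together, the handler-side change is that the S3 helper is now constructed after the screenshot has been written, and the local file path plus the remote key are passed straight to upload_file(), rather than resolving an Object handle via get_key() first (that call is left commented out). A minimal sketch of the new call site, using the S3 helper from src/s3_help.py shown below; the bucket name and paths here are placeholders, not values from the repository:

    # Hypothetical values for illustration only.
    BUCKET_NAME = 'example-screenshot-bucket'
    local_path = '/tmp/example.com.png'          # screenshot written locally by the driver
    remote_path = 'screenshots/example.com.png'  # destination key in the bucket

    s3 = S3(BUCKET_NAME)                         # helper class from src/s3_help.py
    s3.upload_file(local_path, remote_path)      # local file path first, then remote key
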
src/s3_help.py: 14 changes (2 additions & 12 deletions)
@@ -9,15 +9,5 @@ def __init__(self, bucket):
def get_key(self, remote_path):
return self.resource.Object(bucket_name=self.bucket_name, key=remote_path)

def upload_file(self, key, local_path):
key.upload_file(local_path)

def check_exists(self, folder, filename):
try:
self.resource.Object(self.bucket_name, folder + filename).load()
return {'screenshot': 'https://{}.s3.amazonaws.com/{}'.format(self.bucket_name, folder) + filename }
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "404" or e.response['Error']['Code'] == "403":
return False
else:
raise
def upload_file(self, local_path, remote_path):
self.resource.meta.client.upload_file(local_path,self.bucket_name, remote_path)
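
The rewritten upload_file delegates to the low-level boto3 client through resource.meta.client; client.upload_file takes the local filename, the bucket name, and the destination key, in that order, and uses boto3's managed (multipart-capable) transfer under the hood. Below is a self-contained sketch of the revised helper; the constructor body is not part of the hunk, so the boto3.resource('s3') line is an assumption, and the usage values are placeholders:

    import boto3

    class S3:
        """Thin wrapper around a boto3 S3 resource (sketch based on the diff above)."""

        def __init__(self, bucket):
            # Assumed constructor: the diff only shows its signature.
            self.resource = boto3.resource('s3')
            self.bucket_name = bucket

        def get_key(self, remote_path):
            # Returns an s3.Object handle for the given key (unchanged by this commit).
            return self.resource.Object(bucket_name=self.bucket_name, key=remote_path)

        def upload_file(self, local_path, remote_path):
            # Managed transfer via the client: upload_file(Filename, Bucket, Key).
            self.resource.meta.client.upload_file(local_path, self.bucket_name, remote_path)

    # Hypothetical usage; bucket and paths are placeholders, not repository values.
    # S3('example-screenshot-bucket').upload_file('/tmp/example.com.png', 'screenshots/example.com.png')

Uploading by path through the client avoids creating an Object handle for a file that may not exist yet, which fits the call-site change above where the get_key() call is now commented out.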
