diff --git a/src/lambda_function.py b/src/lambda_function.py
index 4e2fdd2..bd878db 100644
--- a/src/lambda_function.py
+++ b/src/lambda_function.py
@@ -48,11 +48,10 @@ def lambda_handler(event, context):
     DB_TABLE = os.environ.get('GLIMPSE_DB_TABLE')
 
     pp = pprint.PrettyPrinter(indent=4)
-    print('*'*STAR_LEN)
+    print('[!] Getting Environment Variables')
     print(f'Using S3 bucket "{BUCKET_NAME}"')
     print(f'    with path "{SCREENSHOT_DIR}""')
     print(f'Using DynamoDB table "{DB_TABLE}""')
-    print('*'*STAR_LEN)
 
     now = datetime.now()
     timestamp = now.strftime("%Y-%m-%d %H:%M:%S")
@@ -75,16 +74,12 @@ def lambda_handler(event, context):
         exists = True
         db_data['timescanned'] = timestamp
 
-    s3 = S3(BUCKET_NAME)
-    s3_key = s3.get_key(remote_path)
-
     # Don't update if update==false or the parameter doesn't exist
     if 'update' not in event.keys() or str(event['update']).lower() != 'true':
         # Don't force an update
         if exists:
-            print('*'*STAR_LEN)
+            print('[!] Existing Data')
             pp.pprint(return_data)
-            print('*'*STAR_LEN)
 
             return return_data
 
@@ -92,7 +87,10 @@ def lambda_handler(event, context):
     try:
         glimpse.driver.get(url)
         glimpse.screenshot(local_path)
-        s3.upload_file(s3_key, local_path)
+
+        s3 = S3(BUCKET_NAME)
+        #s3_key = s3.get_key(remote_path)
+        s3.upload_file(local_path, remote_path)
 
         db_data['effectiveurl'] = glimpse.driver.current_url
         db_data['title'] = glimpse.driver.title
@@ -107,9 +105,8 @@ def lambda_handler(event, context):
         # db_data['numscans'] = 1
         db_data['numscans'] += 1
 
-        print('*'*STAR_LEN)
+        print('[!] Adding New Data')
         pp.pprint(db_data)
-        print('*'*STAR_LEN)
 
         db.put(db_data)
 
diff --git a/src/s3_help.py b/src/s3_help.py
index bba0b70..885953d 100644
--- a/src/s3_help.py
+++ b/src/s3_help.py
@@ -9,15 +9,5 @@ def __init__(self, bucket):
     def get_key(self, remote_path):
         return self.resource.Object(bucket_name=self.bucket_name, key=remote_path)
 
-    def upload_file(self, key, local_path):
-        key.upload_file(local_path)
-
-    def check_exists(self, folder, filename):
-        try:
-            self.resource.Object(self.bucket_name, folder + filename).load()
-            return {'screenshot': 'https://{}.s3.amazonaws.com/{}'.format(self.bucket_name, folder) + filename }
-        except botocore.exceptions.ClientError as e:
-            if e.response['Error']['Code'] == "404" or e.response['Error']['Code'] == "403":
-                return False
-            else:
-                raise
+    def upload_file(self, local_path, remote_path):
+        self.resource.meta.client.upload_file(local_path, self.bucket_name, remote_path)
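
For context on the s3_help.py change: the helper now hands boto3 a filename, bucket, and key directly instead of building an Object first. Below is a minimal sketch of how the refactored helper is intended to be called after this patch; it assumes __init__ sets up self.resource = boto3.resource('s3') (that part of s3_help.py is not shown in the hunks above), and the bucket name and paths are hypothetical values for illustration.

    import boto3

    class S3:
        def __init__(self, bucket):
            # Assumed shape of the existing constructor (not shown in the diff).
            self.bucket_name = bucket
            self.resource = boto3.resource('s3')

        def upload_file(self, local_path, remote_path):
            # Managed transfer via the underlying client:
            # upload_file(Filename, Bucket, Key)
            self.resource.meta.client.upload_file(local_path, self.bucket_name, remote_path)

    # Call site, mirroring the new flow in lambda_handler: the S3 wrapper is
    # only constructed once a screenshot actually needs to be uploaded.
    s3 = S3('example-glimpse-bucket')                         # hypothetical bucket name
    s3.upload_file('/tmp/page.png', 'screenshots/page.png')   # hypothetical paths

Both the old Object.upload_file(local_path) call and the new meta.client.upload_file(Filename, Bucket, Key) call use boto3's managed transfer; the practical difference is that no key object has to be created first, and moving S3(BUCKET_NAME) inside the try block means nothing S3-related is set up on the early-return ("existing data") path.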