Skip to content

Commit

Permalink
style: 💄 Apply formatting changes
Browse files Browse the repository at this point in the history
  • Loading branch information
glima91 authored and actions-user committed Sep 26, 2022
1 parent 724734a commit 793250a
Show file tree
Hide file tree
Showing 2 changed files with 72 additions and 69 deletions.
113 changes: 58 additions & 55 deletions README-CLOUD-FUNCTION.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,75 +128,78 @@ As informações provenientes da URL são organizadas em um dicionário após a
```javascript
// Import the Google Cloud client library and create a single shared client.
const { BigQuery } = require('@google-cloud/bigquery');
const bigquery = new BigQuery();

// Request origins allowed to call this Cloud Function.
// REQUEST_ORIGIN is a comma-separated list set in the function's environment;
// guard against it being unset so startup does not throw on `.split`.
const request_origin = (process.env.REQUEST_ORIGIN || '').split(',');

// Select what kind of data req.body contains. If the data
// comes from the sendPixel method (used on the GTM custom template) use 'url', else use 'json'.
const input_option = 'json'; // 'url' or 'json'

/**
 * Streams one GTM monitoring hit into the BigQuery raw table.
 *
 * @param {Object} request - Express-style HTTP request from the Cloud Function.
 * @param {string} input_option - 'url' to parse fields from the query string
 *   (sendPixel payloads) or 'json' to take them from the request body.
 * @returns {Promise<void>} resolves when the row has been inserted.
 * @throws if a required query parameter is missing in 'url' mode (the
 *   `.match(...)[1]` access throws on a null match), or if the BigQuery
 *   insert fails.
 */
async function insertRowsAsStream(request, input_option) {
  const datasetId = 'dp6_media_quality';
  const tableId = 'media-quality-raw';
  let json_data;
  let json_data_raw;

  if (input_option === 'url') {
    // Rebuild the full request URL so the query parameters can be extracted.
    const url = decodeURI(request.protocol + '://' + request.get('host') + request.originalUrl);

    // NOTE(review): each match throws (TypeError on null) if the parameter is
    // absent from the URL — presumably all fields are always sent by the GTM
    // template; confirm against the sendPixel caller.
    json_data = {
      client_id: url.match('client_id=([^&]+)')[1],
      media_name: url.match('media_name=([^&]+)')[1],
      tracking_id: url.match('tracking_id=([^&]+)')[1],
      media_event: url.match('media_event=([^&]+)')[1],
      tag_id: url.match('tag_id=([^&]+)')[1],
      tag_name: url.match('tag_name=([^&]+)')[1],
      status: url.match('status=([^&]+)')[1],
      datalayer_event: url.match('datalayer_event=([^&]+)')[1],
      timestamp: Date.now() / 1000, // epoch seconds, matching the table schema
      page: url.match('page=([^&]+)')[1],
      container_version: url.match('container_version=([^&]+)')[1],
    };
  }

  if (input_option === 'json') {
    try {
      // Body may arrive as a JSON string or already parsed by the framework.
      json_data_raw = JSON.parse(request.body);
    } catch (e) {
      json_data_raw = request.body;
    }

    // Stamp server-side receive time in epoch seconds.
    json_data_raw['timestamp'] = Date.now() / 1000;

    // Whitelist of columns accepted by the raw table; anything else in the
    // payload is dropped before insert. (Declared const — the original
    // leaked this as an implicit global.)
    const lst_allowed_fields = [
      'client_id',
      'media_name',
      'tracking_id',
      'media_event',
      'tag_id',
      'tag_name',
      'status',
      'datalayer_event',
      'timestamp',
      'page',
      'container_version',
    ];
    json_data = Object.fromEntries(Object.entries(json_data_raw).filter(([key]) => lst_allowed_fields.includes(key)));
  }

  // Insert data into a table
  await bigquery.dataset(datasetId).table(tableId).insert(json_data);
}
exports.gtm_monitor = (req, res) => {
if (req.body && request_origin.includes(req.headers.origin)) {
insertRowsAsStream(req, input_option);
res.sendStatus(200);
} else {
console.log('Requisição inválida. Verifique o payload ou a variável REQUEST_ORIGIN...');
res.sendStatus(403);
}

exports.gtm_monitor = (req, res) =>{

if(req.body && request_origin.includes(req.headers.origin)){
insertRowsAsStream(req, input_option);
res.sendStatus(200);
} else
{
console.log("Requisição inválida. Verifique o payload ou a variável REQUEST_ORIGIN...");
res.sendStatus(403);
}
};
```
Expand Down
28 changes: 14 additions & 14 deletions README-GTM-API.md
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ def bq_insert_to_table(data, table_id, client) -> None:
data (list of JSON): data to be inserted into table
table_id (string): table id from Big Query in format <projectId>.<datasetId>.<tableName>
"""

table_obj = client.get_table(table_id)
errors = client.insert_rows(table=table_obj, rows=data)
if errors == []:
Expand All @@ -147,11 +147,11 @@ def bq_insert_to_table(data, table_id, client) -> None:


def _get_credentials():
r""" Get credentials from GCP.
r""" Get credentials from GCP.
If constant RUN_AS_CLOUD_FUNCTION is true the credential will be acquired from GCP credential's default.
If constant RUN_AS_CLOUD_FUNCTION is false the credential will be acquired from JSON file.
"""
credentials = None
# Creates a Credentials instance from a service account json file
Expand All @@ -169,7 +169,7 @@ def _get_credentials():

def list_tags(gtm_account, gtm_container, gtm_workspace, api_key, token):
r""" List all GTM tags
Args:
gtm_account (string): Google Tag Manager account number
gtm_container (string): Google Tag Manager container number
Expand All @@ -194,21 +194,21 @@ def list_tags(gtm_account, gtm_container, gtm_workspace, api_key, token):

def _parse_media_tags(list_of_tags):
r"""Filter media tags and parse data
Args:
list_of_tags (json): dictionary with all tags
Output:
json with parsed data for media tags
"""

media_json_list = []
current_date = datetime.datetime.now()
current_date_formatted = current_date.strftime("%Y-%m-%d")
for tag in list_of_tags["tag"]:
add_to_list = False
tracking_id = "undefined"

json_sanity_check = ("monitoringMetadata" in tag) and ("map" in tag["monitoringMetadata"])

if json_sanity_check == True:
Expand All @@ -217,8 +217,8 @@ def _parse_media_tags(list_of_tags):
add_to_list = True
if param.get("key") == "tracking_id":
tracking_id = param["value"]
if add_to_list:

if add_to_list:
reduced_json = { "account_id": tag["accountId"],
"container_id": tag["containerId"],
"firing_trigger_id": tag["firingTriggerId"][0],
Expand All @@ -227,16 +227,16 @@ def _parse_media_tags(list_of_tags):
"tracking_id": tracking_id,
"tag_id": tag["tagId"],
"tag_type": tag["type"],
"snapshot_date": current_date_formatted,
"snapshot_date": current_date_formatted,
"timestamp": current_date }
media_json_list.append(reduced_json)
return media_json_list




def main(request):

# Get credentials and token
credentials = _get_credentials()
token = credentials.token
Expand Down

0 comments on commit 793250a

Please sign in to comment.