From b58d97c4b2599d7a3cd8b91047ac662a4572afda Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 11:36:32 -0500 Subject: [PATCH 1/9] Add GTFS object --- gtfs.js | 283 ++++++++++++ helpers/csv.js | 100 +++++ helpers/export.js | 186 ++++++++ helpers/getters.js | 132 ++++++ helpers/import.js | 142 ++++++ helpers/logging_iterator_wrapper.js | 35 ++ helpers/schema.js | 167 +++++++ index.js | 7 + package.json | 43 ++ sample/agency.txt | 2 + sample/calendar.txt | 2 + sample/calendar_dates.txt | 3 + sample/feed_info.txt | 2 + sample/frequencies.txt | 3 + sample/routes.txt | 2 + sample/shapes.txt | 3 + sample/stop_times.txt | 3 + sample/stops.txt | 3 + sample/transfers.txt | 3 + sample/trips.txt | 2 + tests.js | 650 ++++++++++++++++++++++++++++ 21 files changed, 1773 insertions(+) create mode 100644 gtfs.js create mode 100644 helpers/csv.js create mode 100644 helpers/export.js create mode 100644 helpers/getters.js create mode 100644 helpers/import.js create mode 100644 helpers/logging_iterator_wrapper.js create mode 100644 helpers/schema.js create mode 100644 index.js create mode 100644 package.json create mode 100644 sample/agency.txt create mode 100644 sample/calendar.txt create mode 100644 sample/calendar_dates.txt create mode 100644 sample/feed_info.txt create mode 100644 sample/frequencies.txt create mode 100644 sample/routes.txt create mode 100644 sample/shapes.txt create mode 100644 sample/stop_times.txt create mode 100644 sample/stops.txt create mode 100644 sample/transfers.txt create mode 100644 sample/trips.txt create mode 100644 tests.js diff --git a/gtfs.js b/gtfs.js new file mode 100644 index 0000000..496f67d --- /dev/null +++ b/gtfs.js @@ -0,0 +1,283 @@ +'use strict'; + +/* eslint-disable no-underscore-dangle */ + +const fs = require('fs-extra'); + +const forEachWithLog = require('./helpers/logging_iterator_wrapper'); +const { exportGtfs } = require('./helpers/export'); +const getters = require('./helpers/getters'); +const { importTable } = 
require('./helpers/import'); +const schema = require('./helpers/schema'); + +function addItems(items, tableName, gtfs) { + if (items instanceof Array === false) { + throw new Error(`items must be an array instead of: ${items}`); + } + + const indexedTable = gtfs.getIndexedTable(tableName); + const indexKeys = schema.indexKeysByTableName[tableName]; + + if (indexKeys.indexKey) { + items.forEach(item => indexedTable.set(item[indexKeys.indexKey], item)); + return; + } + + if (indexKeys.firstIndexKey && indexKeys.secondIndexKey) { + items.forEach((item) => { + if (indexedTable.has(item[indexKeys.firstIndexKey]) === false) { + indexedTable.set(item[indexKeys.firstIndexKey], new Map()); + } + + indexedTable.get(item[indexKeys.firstIndexKey]).set(item[indexKeys.secondIndexKey], item); + }); + } +} + +function getIndexedTableOfGtfs(tableName, gtfs, options) { + if (gtfs._tables.has(tableName) === false) { + importTable(gtfs, tableName, options); + console.log(`[Importation] Table ${tableName} has been imported.`); + } + + return gtfs._tables.get(tableName); +} + +function forEachItem(iterator, tableName, gtfs) { + if (typeof iterator !== 'function') { + throw new Error(`iterator mulst be a function, instead of a ${typeof iterator}.`); + } + + const indexedTable = gtfs.getIndexedTable(tableName); + const deepness = schema.deepnessByTableName[tableName]; + + if (deepness === 1) { + forEachWithLog(`Iterating:${tableName}`, indexedTable, (item) => { + iterator(item); + }); + return; + } + + if (deepness === 2) { + forEachWithLog(`Iterating:${tableName}`, indexedTable, (indexedSubTable) => { + indexedSubTable.forEach(iterator); + }); + } +} + +function removeItems(items, tableName, gtfs) { + if (items instanceof Array === false) { + throw new Error(`items must be an array instead of: ${items}`); + } + + const indexedTable = gtfs.getIndexedTable(tableName); + const indexKeys = schema.indexKeysByTableName[tableName]; + + if (indexKeys.indexKey) { + items.forEach(item => 
indexedTable.delete(item[indexKeys.indexKey])); + return; + } + + if (indexKeys.firstIndexKey && indexKeys.secondIndexKey) { + items.forEach((item) => { + if (indexedTable.has(item[indexKeys.firstIndexKey]) === true) { + indexedTable.get(item[indexKeys.firstIndexKey]).delete(item[indexKeys.secondIndexKey]); + } + + if (indexedTable.get(item[indexKeys.firstIndexKey]).size === 0) { + indexedTable.delete(item[indexKeys.firstIndexKey]); + } + }); + } +} + +function setIndexedItems(indexedItems, tableName, gtfs) { + if (indexedItems instanceof Map === false && schema.deepnessByTableName[tableName] !== 0) { + throw new Error(`indexedItems must be a Map instead of: ${indexedItems}`); + } + + gtfs._tables.set(tableName, indexedItems); +} + +class Gtfs { + constructor(path, regexPatternObjectsByTableName) { + if (typeof path !== 'string' || path.length === 0) { + throw new Error(`Gtfs need a valid input path as string, instead of: "${path}".`); + } + + path = (path[path.length - 1] === '/') ? path : `${path}/`; + + if (fs.existsSync(path) === false) { + throw new Error(`inputPath: "${path}" is not a valid folder.`); + } + + this.isGtfs = true; + + this._path = path; + this._regexPatternObjectsByTableName = regexPatternObjectsByTableName || {}; + this._tables = new Map(); + } + + /* io */ + exportAtPath(path, callback) { exportGtfs(this, path, callback); } + getPath() { return this._path; } + + /* Generic table & item manipulation */ + addItemInTable(item, tableName) { addItems([item], tableName, this); } + addItemsInTable(items, tableName) { addItems(items, tableName, this); } + forEachItemInTable(tableName, iterator) { forEachItem(iterator, tableName, this); } + forEachTableName(iterator) { this.getTableNames().forEach(iterator); } + getIndexedTable(tableName, forcedValuesByKeys) { return getIndexedTableOfGtfs(tableName, this, forcedValuesByKeys); } + getItemWithIndexInTable(index, tableName) { return getters.getItemWithIndex(index, tableName, this); } + getTableNames() { 
return new Set([...schema.tableNames, ...this._tables.keys()]); } + getParentItem(item, tableName) { return getters.getParentItem(item, tableName, this); } + removeItemInTable(item, tableName) { removeItems([item], tableName, this); } + removeItemsInTable(items, tableName) { removeItems(items, tableName, this); } + setIndexedItemsAsTable(indexedItems, tableName) { setIndexedItems(indexedItems, tableName, this); } + + /* agency.txt */ + addAgency(agency) { addItems([agency], 'agency', this); } + addAgencies(agencies) { addItems(agencies, 'agency', this); } + forEachAgency(iterator) { forEachItem(iterator, 'agency', this); } + getAgencyOfRoute(route) { return getters.getParentItem(route, 'agency', this); } + getAgencyWithId(agencyId) { return getters.getItemWithIndex(agencyId, 'agency', this); } + getIndexedAgencies() { return getIndexedTableOfGtfs('agency', this); } + removeAgency(agency) { removeItems([agency], 'agency', this); } + removeAgencies(agencies) { removeItems(agencies, 'agency', this); } + setIndexedAgencies(indexedAgencies) { setIndexedItems(indexedAgencies, 'agency', this); } + + /* stops.txt */ + addStop(stop) { addItems([stop], 'stops', this); } + addStops(stops) { addItems(stops, 'stops', this); } + forEachStop(iterator) { forEachItem(iterator, 'stops', this); } + getIndexedStops() { return getIndexedTableOfGtfs('stops', this); } + getStopOfStopTime(stopTime) { return getters.getParentItem(stopTime, 'stops', this); } + getStopWithId(stopId) { return getters.getItemWithIndex(stopId, 'stops', this); } + removeStop(stop) { removeItems([stop], 'stops', this); } + removeStops(stops) { removeItems(stops, 'stops', this); } + setIndexedStops(indexedStops) { setIndexedItems(indexedStops, 'stops', this); } + + /* routes.txt */ + addRoute(route) { addItems([route], 'routes', this); } + addRoutes(routes) { addItems(routes, 'routes', this); } + forEachRoute(iterator) { forEachItem(iterator, 'routes', this); } + getIndexedRoutes() { return 
getIndexedTableOfGtfs('routes', this); } + getRouteOfStopTime(stopTime) { return getters.getGrandParentItem(stopTime, 'trips', 'routes', this); } + getRouteOfTrip(trip) { return getters.getParentItem(trip, 'routes', this); } + getRouteWithId(routeId) { return getters.getItemWithIndex(routeId, 'routes', this); } + removeRoute(route) { removeItems([route], 'routes', this); } + removeRoutes(routes) { removeItems(routes, 'routes', this); } + setIndexedRoutes(indexedRoutes) { setIndexedItems(indexedRoutes, 'routes', this); } + + /* trips.txt */ + addTrip(trip) { addItems([trip], 'trips', this); } + addTrips(trips) { addItems(trips, 'trips', this); } + forEachTrip(iterator) { forEachItem(iterator, 'trips', this); } + getIndexedTrips() { return getIndexedTableOfGtfs('trips', this); } + getTripOfStopTime(stopTime) { return getters.getParentItem(stopTime, 'trips', this); } + getTripWithId(tripId) { return getters.getItemWithIndex(tripId, 'trips', this); } + removeTrip(trip) { removeItems([trip], 'trips', this); } + removeTrips(trips) { removeItems(trips, 'trips', this); } + setIndexedTrips(indexedTrips) { setIndexedItems(indexedTrips, 'trips', this); } + + /* stop_times.txt */ + addStopTime(stopTime) { addItems([stopTime], 'stop_times', this); } + addStopTimes(stopTimes) { addItems(stopTimes, 'stop_times', this); } + forEachStopTime(iterator) { forEachItem(iterator, 'stop_times', this); } + forEachStopTimeOfTrip(trip, iterator) { + const stopTimeByStopSequence = this.getStopTimeByStopSequenceOfTrip(trip); + if (stopTimeByStopSequence instanceof Map) { + stopTimeByStopSequence.forEach(iterator); + } + } + getIndexedStopTimes() { return getIndexedTableOfGtfs('stop_times', this); } + getStopTimeByStopSequenceOfTrip(trip) { return getters.getIndexedItemsWithParent(trip, 'stop_times', this); } + getStopTimeWithTripIdAndStopSequence(tripId, stopSequence) { + return getters.getItemWithIndexes(tripId, stopSequence, 'stop_times', this); + } + removeStopTime(stopTime) { 
removeItems([stopTime], 'stop_times', this); } + removeStopTimes(stopTimes) { removeItems(stopTimes, 'stop_times', this); } + setIndexedStopTimes(indexedStopTimes) { setIndexedItems(indexedStopTimes, 'stop_times', this); } + + /* calendar.txt */ + addCalendar(calendar) { addItems([calendar], 'calendar', this); } + addCalendars(calendars) { addItems(calendars, 'calendar', this); } + forEachCalendar(iterator) { forEachItem(iterator, 'calendar', this); } + getCalendarOfTrip(trip) { return getters.getParentItem(trip, 'calendar', this); } + getCalendarOfStopTime(stopTime) { + return getters.getGrandParentItem(stopTime, 'trips', 'calendar', this); + } + getCalendarWithServiceId(serviceId) { return getters.getItemWithIndex(serviceId, 'calendar', this); } + getIndexedCalendars() { return getIndexedTableOfGtfs('calendar', this); } + removeCalendar(calendar) { removeItems([calendar], 'calendar', this); } + removeCalendars(calendars) { removeItems(calendars, 'calendar', this); } + setIndexedCalendars(indexedCalendars) { setIndexedItems(indexedCalendars, 'calendar', this); } + + /* calendar_dates.txt */ + addCalendarDate(calendarDate) { addItems([calendarDate], 'calendar_dates', this); } + addCalendarDates(calendarDates) { addItems(calendarDates, 'calendar_dates', this); } + forEachCalendarDate(iterator) { forEachItem(iterator, 'calendar_dates', this); } + getCalendarDateByDateOfServiceId(serviceId) { + return getters.getIndexedItemsWithParentIndex(serviceId, 'calendar_dates', this); + } + getCalendarDateByDateOfTrip(trip) { return getters.getIndexedItemsWithParent(trip, 'calendar_dates', this); } + getCalendarDateWithServiceIdAndDate(serviceId, date) { + return getters.getItemWithIndexes(serviceId, date, 'calendar_dates', this); + } + getIndexedCalendarDates() { return getIndexedTableOfGtfs('calendar_dates', this); } + removeCalendarDate(calendarDate) { removeItems([calendarDate], 'calendar_dates', this); } + removeCalendarDates(calendarDates) { removeItems(calendarDates, 
'calendar_dates', this); } + setIndexedCalendarDates(indexedCalendarDates) { setIndexedItems(indexedCalendarDates, 'calendar_dates', this); } + + /* fare_attributes.txt */ + // Not used, therefore not implemented + + /* fare_rules.txt */ + // Not used, therefore not implemented + + /* shapes.txt */ + addShapePoint(shapePoint) { addItems([shapePoint], 'shapes', this); } + addShapePoints(shapePoints) { addItems(shapePoints, 'shapes', this); } + forEachShapePoint(iterator) { forEachItem(iterator, 'shapes', this); } + getIndexedShapePoints() { return getIndexedTableOfGtfs('shapes', this); } + getShapePointByShapePointSequenceOfShapeId(shapeId) { + return getters.getIndexedItemsWithParentIndex(shapeId, 'shapes', this); + } + getShapePointByShapePointSequenceOfTrip(trip) { return getters.getIndexedItemsWithParent(trip, 'shapes', this); } + getShapePointWithTripIdAndShapePointSequence(tripId, shapePointSequence) { + return getters.getItemWithIndexes(tripId, shapePointSequence, 'shapes', this); + } + removeShapePoint(shapePoint) { removeItems([shapePoint], 'shapes', this); } + removeShapePoints(shapePoints) { removeItems(shapePoints, 'shapes', this); } + setIndexedShapePoints(indexedShapes) { setIndexedItems(indexedShapes, 'shapes', this); } + + /* frequencies.txt */ + addFrequency(frequency) { addItems([frequency], 'frequencies', this); } + addFrequencies(frequencies) { addItems(frequencies, 'frequencies', this); } + forEachFrequency(iterator) { forEachItem(iterator, 'frequencies', this); } + getIndexedFrequencies() { return getIndexedTableOfGtfs('frequencies', this); } + getFrequencyWithTripIdAndStartTime(tripId, startTime) { + return getters.getItemWithIndexes(tripId, startTime, 'frequencies', this); + } + removeFrequency(frequency) { removeItems([frequency], 'frequencies', this); } + removeFrequencies(frequencies) { removeItems(frequencies, 'frequencies', this); } + setIndexedFrequencies(indexedFrequencies) { setIndexedItems(indexedFrequencies, 'frequencies', this); } 
+ + /* transfers.txt */ + addTransfer(transfer) { addItems([transfer], 'transfers', this); } + addTransfers(transfers) { addItems(transfers, 'transfers', this); } + forEachTransfer(iterator) { forEachItem(iterator, 'transfers', this); } + getIndexedTransfers() { return getIndexedTableOfGtfs('transfers', this); } + getTransfertWithFromStopIdAndToStopId(fromStopId, toStopId) { + return getters.getItemWithIndexes(fromStopId, toStopId, 'transfers', this); + } + removeTransfer(transfer) { removeItems([transfer], 'transfers', this); } + removeTransfers(transfers) { removeItems(transfers, 'transfers', this); } + setIndexedTransfers(indexedTransfers) { setIndexedItems(indexedTransfers, 'transfers', this); } + + /* feed_info.txt */ + getFeedInfo() { return getIndexedTableOfGtfs('feed_info', this); } + setFeedInfo(feedInfo) { setIndexedItems(feedInfo, 'feed_info', this); } +} + +module.exports = Gtfs; diff --git a/helpers/csv.js b/helpers/csv.js new file mode 100644 index 0000000..d59b34a --- /dev/null +++ b/helpers/csv.js @@ -0,0 +1,100 @@ +'use strict'; + +/** + * Private functions + */ + +const SPECIAL_CHARACTERS_REGEX = /[",\\]/; + +function formatRegularValue(value) { + if (value === undefined || value === null) { + return ''; + } + + value = (typeof value === 'string') ? value : String(value); + + if (value.match(SPECIAL_CHARACTERS_REGEX)) { + return `"${value.replace(/"/g, '""')}"`; + } + + return value; +} + +/** + * Public functions + */ + +function fromObjectToCsvString(object, sortedKeys) { + return `${sortedKeys.map(key => formatRegularValue(object[key], key)).join(',')}\n`; +} + +/* + Source: http://stackoverflow.com/questions/8493195/how-can-i-parse-a-csv-string-with-javascript + Explaination: + (?!\s*$) # Don't match empty last value. + \s* # Strip whitespace before value. + (?: # Group for value alternatives. 
+ '([^'\\]*(?:\\[\S\s][^'\\]*)*)' # Either $1: Single quoted string, + | "([^"\\]*(?:\\[\S\s][^"\\]*)*)" # or $2: Double quoted string, + | ([^,'"\s\\]*(?:\s+[^,'"\s\\]+)*) # or $3: Non-comma, non-quote stuff. + ) # End group of value alternatives. + \s* # Strip whitespace after value. + (?:,|$) # Field ends on comma or EOS. + + For practical reason, we remove the case with single quoted string. +*/ + +// eslint-disable-next-line max-len +// var re_valid_original = /^\s*(?:'[^'\\]*(?:\\[\S\s][^'\\]*)*'|"[^"\\]*(?:\\[\S\s][^"\\]*)*"|[^,'"\s\\]*(?:\s+[^,'"\s\\]+)*)\s*(?:,\s*(?:'[^'\\]*(?:\\[\S\s][^'\\]*)*'|"[^"\\]*(?:\\[\S\s][^"\\]*)*"|[^,'"\s\\]*(?:\s+[^,'"\s\\]+)*)\s*)*$/; + +// eslint-disable-next-line max-len +// var re_value_original = /(?!\s*$)\s*(?:'([^'\\]*(?:\\[\S\s][^'\\]*)*)'|"([^"\\]*(?:\\[\S\s][^"\\]*)*)"|([^,'"\s\\]*(?:\s+[^,'"\s\\]+)*))\s*(?:,|$)/g; + +// eslint-disable-next-line max-len +const reValid = /^\s*(?:"[^"\\]*(?:\\[\S\s][^"\\]*)*"|[^,"\s\\]*(?:\s+[^,"\s\\]+)*)\s*(?:,\s*(?:"[^"\\]*(?:\\[\S\s][^"\\]*)*"|[^,"\s\\]*(?:\s+[^,"\s\\]+)*)\s*)*$/; + +// eslint-disable-next-line max-len +const reValue = /(?!\s*$)\s*(?:"([^"\\]*(?:\\[\S\s][^"\\]*)*)"|([^,"\s\\]*(?:\s+[^,"\s\\]+)*))\s*(?:,|$)/g; + +function fromCsvStringToArray(string, tableName) { + string = string.trim(); + + if (string.length === 0) { + return null; + } + + if (string.includes('"') === false) { + return string.split(','); + } + + if (!reValid.test(string)) { + if (string.match(/""/)) { + string = string.replace(/""/g, '\\"'); + return fromCsvStringToArray(string, tableName); + } + process.notices.addWarning(`Row not valid in table ${tableName}: ${string}`); + return null; + } + + const a = []; // Initialize array to receive values. + // "Walk" the string using replace with callback. + string.replace(reValue, (m0, /* m1, */ m2, m3) => { + // Remove backslash from \' in single quoted values. 
+ /* if (m1 !== undefined) a.push(m1.replace(/\\'/g, "'")); */ + // Remove backslash from \" in double quoted values. + /* else */ + if (m2 !== undefined) a.push(m2.replace(/\\"/g, '"')); + else if (m3 !== undefined) a.push(m3); + return ''; // Return empty string. + }); + // Handle special case of empty last value. + if (/,\s*$/.test(string)) { + a.push(''); + } + return a; +} + +module.exports = { + fromCsvStringToArray, + fromObjectToCsvString, +}; diff --git a/helpers/export.js b/helpers/export.js new file mode 100644 index 0000000..a94984f --- /dev/null +++ b/helpers/export.js @@ -0,0 +1,186 @@ +'use strict'; + +/* eslint-disable no-underscore-dangle */ + +const acomb = require('acomb'); +const async = require('async'); +const fs = require('fs-extra'); + +const { fromObjectToCsvString } = require('./csv'); +const schema = require('./schema'); + +/** + * Private functions + */ + +function getHHmmss() { + const date = new Date(); + return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; +} + +function resetOutputFolder(outputPath, callback) { + fs.remove(outputPath, (removeError) => { + if (removeError) { + callback(removeError); + return; + } + + fs.mkdirp(outputPath, (makeDirectoryError) => { + if (makeDirectoryError) { + callback(makeDirectoryError); + return; + } + + callback(); + }); + }); +} + +function copyUntouchedTable(inputPath, outputPath, tableName, callback) { + const fullPathToInputFile = `${inputPath + tableName}.txt`; + const fullPathToOutputFile = `${outputPath + tableName}.txt`; + + fs.open(fullPathToInputFile, 'r', (err) => { + if (err && err.code === 'ENOENT') { + console.log(`[${getHHmmss()}] Table doesn't exist and won't be added: ${tableName}`); + callback(); + return; + } + if (err) { + console.log(err); + callback(); + return; + } + + fs.copy(fullPathToInputFile, fullPathToOutputFile, (copyError) => { + if (copyError) { + console.log(copyError); + } + + console.log(`[${getHHmmss()}] Table has been copied: ${tableName}`); + 
callback(); + }); + }); +} + +function getActualKeysForTable(gtfs, tableName) { + const deepness = schema.deepnessByTableName[tableName]; + let sampleItem; + + if (deepness === 0) { + sampleItem = gtfs.getIndexedTable(tableName); + } else if (deepness === 1) { + sampleItem = gtfs.getIndexedTable(tableName).values().next().value; + } else if (deepness === 2) { + sampleItem = gtfs.getIndexedTable(tableName).values().next().value.values().next().value; + } + + const keys = [...schema.keysByTableName[tableName]]; + + if (sampleItem) { + Object.keys(sampleItem).forEach((key) => { + if (schema.keysByTableName[tableName].includes(key) === false) { + keys.push(key); + } + }); + } + + if (keys.length === 0) { + throw new Error(`No keys found for table ${tableName}`); + } + + return keys; +} + +function exportTable(tableName, gtfs, outputPath, callback) { + const keys = getActualKeysForTable(gtfs, tableName); + const outputFullPath = `${outputPath + tableName}.txt`; + const firstRow = `${keys.join(',')}\n`; + + fs.writeFile(outputFullPath, firstRow, (err) => { + if (err) { throw err; } + /* About acomb.ensureAsync: + If the function async.eachSeries run without doing anything, just calling the callback (which + happens when there is a lot of empty object), it crashes. It is a known bug of async. + The acomb.ensureAsync fonction prevent that. It should be removed when the async module + will be fixed. 
+ 2015-03-10 + */ + const deepness = schema.deepnessByTableName[tableName]; + + if (deepness === 0) { + const row = fromObjectToCsvString(gtfs.getIndexedTable(tableName), keys); + fs.appendFile(outputFullPath, row, callback); + return; + } + + let rowsBuffer = []; + + async.eachSeries(gtfs.getIndexedTable(tableName), acomb.ensureAsync(([key, object], subDone) => { + if (deepness === 1) { + rowsBuffer.push(fromObjectToCsvString(object, keys)); + } else if (deepness === 2) { + object.forEach((subObject) => { + rowsBuffer.push(fromObjectToCsvString(subObject, keys)); + }); + } + + if (rowsBuffer.length < 100) { + subDone(); + return; + } + + fs.appendFile(outputFullPath, rowsBuffer.join(''), (appendingError) => { + if (appendingError) { throw appendingError; } + + rowsBuffer = []; + subDone(); + }); + }), () => { + if (rowsBuffer.length === 0) { + console.log(`[${getHHmmss()}] Table has been exported: ${tableName}`); + callback(); + return; + } + + fs.appendFile(outputFullPath, rowsBuffer.join(''), (appendingError) => { + if (appendingError) { throw appendingError; } + + console.log(`[${getHHmmss()}] Table has been exported: ${tableName}`); + callback(); + }); + }); + }); +} + +/** + * Public function + */ + +exports.exportGtfs = (gtfs, outputPath, callback) => { + if (typeof outputPath !== 'string') { + throw new Error(`Gtfs need a valid output path as string, instead of: "${outputPath}".`); + } + if (outputPath.match(/\/$/) === null) { + outputPath += '/'; + } + + resetOutputFolder(outputPath, (resetOutputFolderError) => { + if (resetOutputFolderError) { + callback(resetOutputFolderError); + return; + } + + console.log(`Will start exportation of tables: ${Array.from(gtfs.getTableNames()).join(', ')}`); + + async.eachSeries(gtfs.getTableNames(), (tableName, done) => { + if (gtfs._tables.has(tableName) === true) { + console.log(`[${getHHmmss()}] Table will be exported: ${tableName}`); + exportTable(tableName, gtfs, outputPath, done); + } else { + 
console.log(`[${getHHmmss()}] Table will be copied: ${tableName}`); + copyUntouchedTable(gtfs.getPath(), outputPath, tableName, done); + } + }, callback); + }); +}; diff --git a/helpers/getters.js b/helpers/getters.js new file mode 100644 index 0000000..9540b97 --- /dev/null +++ b/helpers/getters.js @@ -0,0 +1,132 @@ +'use strict'; + +const schema = require('./schema'); + +function getGrandParentItem(itemWithForeignIndexId, parentTableName, grandParentTableName, gtfs) { + if ( + itemWithForeignIndexId === undefined || + itemWithForeignIndexId === null || + typeof itemWithForeignIndexId !== 'object' + ) { + throw new Error(`itemWithForeignIndexId must be a plain object, instead of an "${typeof itemWithForeignIndexId}"`); + } + if (schema.tableNames.includes(parentTableName) === false) { + throw new Error(`Cannot find table with name "${parentTableName}"`); + } + if (schema.tableNames.includes(grandParentTableName) === false) { + throw new Error(`Cannot find table with name "${grandParentTableName}"`); + } + + /* Reach parent item */ + const parentIndexKey = schema.indexKeysByTableName[parentTableName].indexKey; + + if (itemWithForeignIndexId[parentIndexKey] === undefined) { + throw new Error(`itemWithForeignIndexId should contain the foreign index key "${parentIndexKey}"`); + } + + const parentItem = gtfs.getItemWithIndexInTable(itemWithForeignIndexId[parentIndexKey], parentTableName); + + if (!parentItem) { + return null; + } + + /* Reach grandparent item */ + const grandParentIndexKey = schema.indexKeysByTableName[grandParentTableName].indexKey; + + if (!parentItem[grandParentIndexKey]) { + throw new Error(`parentItem should contain the foreign index key "${grandParentIndexKey}"${parentItem}`); + } + + return gtfs.getItemWithIndexInTable(parentItem[grandParentIndexKey], grandParentTableName); +} + +function getIndexedItemsWithParent(parentItem, tableName, gtfs) { + if (schema.deepnessByTableName[tableName] !== 2) { + throw new Error(`Table "${tableName}" is not of 
deepness 2.`); + } + if (parentItem === undefined || parentItem === null || typeof parentItem !== 'object') { + throw new Error(`Parent item should be a plain object, instead of an "${typeof parentItem}"`); + } + + const firstIndexKey = schema.indexKeysByTableName[tableName].firstIndexKey; + + if (parentItem[firstIndexKey] === undefined) { + throw new Error(`Parent item should contain the foreign index key "${firstIndexKey}"`); + } + + const indexedTable = gtfs.getIndexedTable(tableName); + + return indexedTable.get(parentItem[firstIndexKey]); +} + +function getIndexedItemsWithParentIndex(parentIndex, tableName, gtfs) { + if (schema.deepnessByTableName[tableName] !== 2) { + throw new Error(`Table "${tableName}" is not of deepness 2.`); + } + if (typeof parentIndex !== 'string') { + throw new Error(`Parent item index should be a string, instead of an "${typeof parentIndex}"`); + } + + const indexedTable = gtfs.getIndexedTable(tableName); + + return indexedTable.get(parentIndex); +} + +function getItemWithIndex(index, tableName, gtfs) { + if (schema.deepnessByTableName[tableName] !== 1) { + throw new Error(`Cannot access item with only one index in "${tableName}", since the deepness is not 1.`); + } + if (typeof index !== 'string') { + throw new Error(`Index should be a string, instead of an "${typeof index}": ${JSON.stringify(index)}`); + } + + const indexedTable = gtfs.getIndexedTable(tableName); + + return indexedTable.get(index); +} + +function getItemWithIndexes(firstIndex, secondIndex, tableName, gtfs) { + if (schema.deepnessByTableName[tableName] !== 2) { + throw new Error(`Cannot access item with two indexes in "${tableName}", since the deep is not 2.`); + } + if (firstIndex === undefined || firstIndex === null || typeof firstIndex !== 'string') { + throw new Error(`First index should be a string, instead of an "${typeof firstIndex}"`); + } + if (secondIndex === undefined || secondIndex === null || typeof secondIndex !== 'string') { + throw new Error(`Second 
index should be a string, instead of an "${typeof secondIndex}"`); + } + + const indexedTable = gtfs.getIndexedTable(tableName); + + return (indexedTable.has(firstIndex)) ? indexedTable.get(firstIndex).get(secondIndex) : null; +} + +function getParentItem(itemWithForeignIndexId, tableName, gtfs) { + if ( + itemWithForeignIndexId === undefined || + itemWithForeignIndexId === null || + typeof itemWithForeignIndexId !== 'object' + ) { + throw new Error(`itemWithForeignIndexId must be a plain object, instead of an "${typeof itemWithForeignIndexId}"`); + } + + const indexKey = schema.indexKeysByTableName[tableName].indexKey; + + if (itemWithForeignIndexId[indexKey] === undefined) { + throw new Error( + `itemWithForeignIndexId should contain the foreign index key "${indexKey}", ` + + `but is: ${JSON.stringify(itemWithForeignIndexId)}` + ); + } + + return gtfs.getItemWithIndexInTable(itemWithForeignIndexId[indexKey], tableName); +} + +module.exports = { + getGrandParentItem, + getIndexedItemsWithParent, + getIndexedItemsWithParentIndex, + getItemWithIndex, + getItemWithIndexes, + getParentItem, +}; diff --git a/helpers/import.js b/helpers/import.js new file mode 100644 index 0000000..230af85 --- /dev/null +++ b/helpers/import.js @@ -0,0 +1,142 @@ +'use strict'; + +/* eslint-disable no-underscore-dangle */ + +const fs = require('fs-extra'); + +const eachWithLog = require('./logging_iterator_wrapper'); +const { fromCsvStringToArray } = require('./csv'); +const schema = require('./schema'); + +exports.importTable = (gtfs, tableName, options) => { + options = options || {}; + const indexKeys = options.indexKeys || schema.indexKeysByTableName[tableName]; + const fullPath = `${gtfs.getPath() + tableName}.txt`; + + if (fs.existsSync(fullPath)) { + const fileContent = fs.readFileSync(fullPath); + const rows = getRows(fileContent, gtfs._regexPatternObjectsByTableName, tableName); + + gtfs._tables.set(tableName, processRows(gtfs, tableName, indexKeys, rows)); + return; + } + + 
/**
 * Private functions
 */

/**
 * Split a file buffer into rows, decoding it in 50kB slices so very large
 * tables do not need one giant string. A row cut in half at a slice boundary
 * is merged with the first row of the next slice. Optional regex fix-ups are
 * applied to each slice before splitting.
 *
 * @param {Buffer} buffer  Raw file content.
 * @param {Object} regexPatternObjectsByTableName Fix-ups keyed by table name.
 * @param {string} tableName Name of the table being read.
 * @returns {Array<string>} The rows (may contain empty strings, filtered later).
 */
function getRows(buffer, regexPatternObjectsByTableName, tableName) {
  const rows = [];
  let rowsSlice;
  let position = 0;
  const length = 50000;
  let merge;
  const regexPatternObjects = regexPatternObjectsByTableName[tableName];

  while (position < buffer.length) {
    rowsSlice = buffer.toString('utf8', position, Math.min(buffer.length, position + length));

    if (regexPatternObjects) {
      regexPatternObjects.forEach((regexPatternObject) => {
        const modifiedRowsSlice = rowsSlice.replace(regexPatternObject.regex, regexPatternObject.pattern || '');
        if (modifiedRowsSlice !== rowsSlice) {
          if (process.notices && process.notices.addInfo) {
            process.notices.addInfo(
              __filename, `Applying regex replace to table: "${tableName}". regex: "${regexPatternObject.regex}".`
            );
          }
          rowsSlice = modifiedRowsSlice;
        }
      });
    }

    rowsSlice.split('\n').forEach((row, i) => {
      if (i === 0 && merge) {
        rows[rows.length - 1] += row;
      } else {
        rows.push(row);
      }
    });

    /*
     Bug fix: the original tested rowsSlice[rowsSlice.length], which is always
     undefined, so `merge` was always true. The intent is to merge only when
     the slice does NOT end exactly on a newline; test the LAST character.
    */
    merge = rowsSlice[rowsSlice.length - 1] !== '\n';
    position += length;
  }

  return rows;
}

/**
 * Turn raw CSV rows into an indexed table: a Map keyed by one index key
 * (deepness 1), a Map of Maps keyed by two index keys (deepness 2), or a
 * single plain object for singleton tables. Row slots are cleared as they are
 * consumed to release memory.
 *
 * @returns {Map|Object} The indexed table.
 */
function processRows(gtfs, tableName, indexKeys, rows) {
  let table = new Map();

  if (rows === undefined || rows === null || rows.length === 0) {
    return table;
  }

  const sortedKeys = fromCsvStringToArray(rows[0], tableName).map(key => key.trim());

  checkThatKeysIncludeIndexKeys(sortedKeys, indexKeys, tableName);

  eachWithLog(`Importation:${tableName}`, rows, (row, index) => {
    if (index !== 0 && row && row.length > 0) {
      /*
       Bug fix: .map was previously called BEFORE the null check, so an
       invalid row (for which fromCsvStringToArray returns null) threw a
       TypeError instead of being skipped.
      */
      const parsedValues = fromCsvStringToArray(row, tableName);

      if (parsedValues !== null) {
        const arrayOfValues = parsedValues.map(value => value.trim());
        const item = sortedKeys.reduce((accumulator, key, i) => {
          accumulator[key] = arrayOfValues[i];
          return accumulator;
        }, {});

        if (sortedKeys.length !== arrayOfValues.length) {
          if (process.notices && process.notices.addWarning) {
            process.notices.addWarning(`Row not valid in table: ${JSON.stringify(item)}`);
          }
          return;
        }

        if (indexKeys.indexKey) {
          table.set(item[indexKeys.indexKey], item);
        } else if (indexKeys.firstIndexKey && indexKeys.secondIndexKey) {
          if (table.has(item[indexKeys.firstIndexKey]) === false) {
            table.set(item[indexKeys.firstIndexKey], new Map());
          }

          table.get(item[indexKeys.firstIndexKey]).set(item[indexKeys.secondIndexKey], item);
        } else if (indexKeys.singleton) {
          table = item;
        }
      }
    }

    rows[index] = undefined;
  });

  return table;
}

/**
 * Validate that the header row of a table contains the schema's index key(s).
 *
 * @throws {Error} If a required index key is missing from the header.
 */
function checkThatKeysIncludeIndexKeys(sortedKeys, indexKeys, tableName) {
  /*
   Bug fix: deepness was computed as `indexKeys.indexKey ? 1 : 0`, which made
   the deepness-2 branch below unreachable, so tables indexed by a pair of
   keys (stop_times, calendar_dates, ...) were never validated.
  */
  let deepness = 0;
  if (indexKeys.indexKey) {
    deepness = 1;
  } else if (indexKeys.firstIndexKey && indexKeys.secondIndexKey) {
    deepness = 2;
  }

  if (deepness === 1 && sortedKeys.includes(indexKeys.indexKey) === false && indexKeys.indexKey !== 'agency_id') {
    /* Field agency_id is optional in table agency.txt according to the specification. */
    throw new Error(
      `Keys of table ${tableName} do not contain the index key: ${indexKeys.indexKey}.\n` +
      // Fixed: the message previously stringified the (known) index key
      // instead of the actual header keys.
      ` The values are: ${JSON.stringify(sortedKeys)}`
    );
  }

  if (
    deepness === 2 &&
    (sortedKeys.includes(indexKeys.firstIndexKey) === false || sortedKeys.includes(indexKeys.secondIndexKey) === false)
  ) {
    throw new Error(
      `Keys of table ${tableName} do not contain the index keys: ` +
      `${indexKeys.firstIndexKey} and ${indexKeys.secondIndexKey}.\n` +
      ` The values are: ${JSON.stringify(sortedKeys)}`
    );
  }
}
a Set.'); + } + + let lastLogAt = Date.now(); + let numberOfKeysDone = 0; + let interval = 2000; + let oneProgressionLogHasBeenPrinted = false; + + valueByKey.forEach((value, key) => { + iteratee(value, key); + + numberOfKeysDone += 1; + + if (Date.now() - lastLogAt > interval && process.env.TEST === undefined) { + const percentageDone = (numberOfKeysDone / valueByKey.size()) * 100; + console.log(`[${prefix}] ${percentageDone.toPrecision(2)}% done`); + + lastLogAt = Date.now(); + oneProgressionLogHasBeenPrinted = true; + interval = (interval < 10000) ? interval + 2000 : 10000; + } + }); + + if (oneProgressionLogHasBeenPrinted && process.env.TEST === undefined) { + console.log(`[${prefix}] Done`); + } +}; diff --git a/helpers/schema.js b/helpers/schema.js new file mode 100644 index 0000000..8452910 --- /dev/null +++ b/helpers/schema.js @@ -0,0 +1,167 @@ +'use strict'; + +const version = '2017.12.11'; + +const keysByTableName = { + agency: [ + 'agency_id', + 'agency_name', + 'agency_url', + 'agency_timezone', + 'agency_lang', + 'agency_phone', + 'agency_fare_url', + 'agency_email', + ], + stops: [ + 'stop_id', + 'stop_code', + 'stop_name', + 'stop_desc', + 'stop_lat', + 'stop_lon', + 'zone_id', + 'stop_url', + 'location_type', + 'parent_station', + 'stop_timezone', + 'wheelchair_boarding', + ], + routes: [ + 'route_id', + 'agency_id', + 'route_short_name', + 'route_long_name', + 'route_desc', + 'route_type', + 'route_url', + 'route_color', + 'route_text_color', + 'route_sort_order', + ], + trips: [ + 'route_id', + 'service_id', + 'trip_id', + 'trip_headsign', + 'trip_short_name', + 'direction_id', + 'block_id', + 'shape_id', + 'wheelchair_accessible', + 'bikes_allowed', + ], + stop_times: [ + 'trip_id', + 'arrival_time', + 'departure_time', + 'stop_id', + 'stop_sequence', + 'stop_headsign', + 'pickup_type', + 'drop_off_type', + 'shape_dist_traveled', + 'timepoint', + ], + calendar: [ + 'service_id', + 'monday', + 'tuesday', + 'wednesday', + 'thursday', + 'friday', + 
'saturday', + 'sunday', + 'start_date', + 'end_date', + ], + calendar_dates: [ + 'service_id', + 'date', + 'exception_type', + ], + fare_attributes: [ + 'fare_id', + 'price', + 'currency_type', + 'payment_method', + 'transfers', + 'agency_id', + 'transfer_duration', + ], + fare_rules: [ + 'fare_id', + 'route_id', + 'origin_id', + 'destination_id', + 'contains_id', + ], + shapes: [ + 'shape_id', + 'shape_pt_lat', + 'shape_pt_lon', + 'shape_pt_sequence', + 'shape_dist_traveled', + ], + frequencies: [ + 'trip_id', + 'start_time', + 'end_time', + 'headway_secs', + 'exact_times', + ], + transfers: [ + 'from_stop_id', + 'to_stop_id', + 'transfer_type', + 'min_transfer_time', + ], + feed_info: [ + 'feed_publisher_name', + 'feed_publisher_url', + 'feed_lang', + 'feed_start_date', + 'feed_end_date', + 'feed_version', + ], +}; + +const indexKeysByTableName = { + agency: { indexKey: 'agency_id' }, + calendar: { indexKey: 'service_id' }, + calendar_dates: { firstIndexKey: 'service_id', secondIndexKey: 'date' }, + fare_attributes: { indexKey: 'fare_id' }, + frequencies: { firstIndexKey: 'trip_id', secondIndexKey: 'start_time' }, + routes: { indexKey: 'route_id' }, + stop_times: { firstIndexKey: 'trip_id', secondIndexKey: 'stop_sequence' }, + stops: { indexKey: 'stop_id' }, + trips: { indexKey: 'trip_id' }, + shapes: { firstIndexKey: 'shape_id', secondIndexKey: 'shape_pt_sequence' }, + transfers: { firstIndexKey: 'from_stop_id', secondIndexKey: 'to_stop_id' }, + feed_info: { singleton: true }, +}; + +const tableNames = Object.keys(indexKeysByTableName); + +const deepnessByTableName = tableNames.reduce((accumulator, tableName) => { + if (indexKeysByTableName[tableName].singleton) { + accumulator[tableName] = 0; + } else if (indexKeysByTableName[tableName].indexKey) { + accumulator[tableName] = 1; + } else if ( + indexKeysByTableName[tableName].firstIndexKey && + indexKeysByTableName[tableName].secondIndexKey + ) { + accumulator[tableName] = 2; + } + + return accumulator; +}, {}); 
+
+module.exports = {
+  deepnessByTableName,
+  indexKeysByTableName,
+  keysByTableName,
+  tableNames,
+  version,
+};
diff --git a/index.js b/index.js
new file mode 100644
index 0000000..31a7e51
--- /dev/null
+++ b/index.js
@@ -0,0 +1,7 @@
+'use strict';
+
+const Gtfs = require('gtfs');
+
+module.exports = {
+  Gtfs,
+};
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..de44ab7
--- /dev/null
+++ b/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "@transit/gtfs",
+  "version": "1.0.0",
+  "description": "A Node.js library for GTFS",
+  "main": "index.js",
+  "scripts": {
+    "test": "./node_modules/.bin/mocha tests.js"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/TransitApp/gtfsNodeLib.git"
+  },
+  "keywords": [
+    "GTFS",
+    "Node",
+    "Transit"
+  ],
+  "author": "Transit Inc.",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/TransitApp/gtfsNodeLib/issues"
+  },
+  "homepage": "https://github.com/TransitApp/gtfsNodeLib#readme",
+  "dependencies": {
+    "acomb": "1.2.2",
+    "async": "2.6.0",
+    "fs-extra": "5.0.0"
+  },
+  "jshintConfig": {
+    "esversion": 6,
+    "node": true,
+    "globals": {
+      "__rootname": false
+    }
+  },
+  "devDependencies": {
+    "chai": "^3.5.0",
+    "eslint": "^3.16.0",
+    "eslint-config-transit": "^1.0.3",
+    "eslint-plugin-import": "^2.2.0",
+    "mocha": "^3.5.3"
+  }
+}
diff --git a/sample/agency.txt b/sample/agency.txt
new file mode 100644
index 0000000..358f415
--- /dev/null
+++ b/sample/agency.txt
@@ -0,0 +1,2 @@
+agency_id,agency_name,agency_url,agency_timezone,agency_lang,agency_phone,agency_fare_url,agency_email
+agency_0,Agency 0,http://google.com,America/New_York,en,(310) 555-0222,http://google.com,contact@google.com
diff --git a/sample/calendar.txt b/sample/calendar.txt
new file mode 100644
index 0000000..af1b136
--- /dev/null
+++ b/sample/calendar.txt
@@ -0,0 +1,2 @@
+service_id,monday,tuesday,wednesday,thursday,friday,saturday,sunday,start_date,end_date
+service_0,1,1,1,1,1,1,1,20000101,21001231 \ No newline at end of file diff --git a/sample/calendar_dates.txt b/sample/calendar_dates.txt new file mode 100644 index 0000000..d0634d8 --- /dev/null +++ b/sample/calendar_dates.txt @@ -0,0 +1,3 @@ +service_id,date,exception_type +service_0,20171228,1 +service_0,20171231,2 diff --git a/sample/feed_info.txt b/sample/feed_info.txt new file mode 100644 index 0000000..97c46ff --- /dev/null +++ b/sample/feed_info.txt @@ -0,0 +1,2 @@ +feed_publisher_name,feed_publisher_url,feed_lang,feed_start_date,feed_end_date,feed_version +Publisher Name,http://google.com,en,20000101,21001231,42 diff --git a/sample/frequencies.txt b/sample/frequencies.txt new file mode 100644 index 0000000..885b1d5 --- /dev/null +++ b/sample/frequencies.txt @@ -0,0 +1,3 @@ +trip_id,start_time,end_time,headway_secs,exact_times +trip_0,10:00:00,15:00:00,600, +trip_0,15:00:00,20:00:00,1200, diff --git a/sample/routes.txt b/sample/routes.txt new file mode 100644 index 0000000..68b771e --- /dev/null +++ b/sample/routes.txt @@ -0,0 +1,2 @@ +agency_id,route_id,route_short_name,route_long_name,route_type +agency_0,route_0,R0,Route 0,3 \ No newline at end of file diff --git a/sample/shapes.txt b/sample/shapes.txt new file mode 100644 index 0000000..86bae15 --- /dev/null +++ b/sample/shapes.txt @@ -0,0 +1,3 @@ +shape_id,shape_pt_lat,shape_pt_lon,shape_pt_sequence,shape_dist_traveled +shape_0,37.728631,-122.431282,1,0 +shape_0,37.74103,-122.422482,2,10 diff --git a/sample/stop_times.txt b/sample/stop_times.txt new file mode 100644 index 0000000..2812781 --- /dev/null +++ b/sample/stop_times.txt @@ -0,0 +1,3 @@ +trip_id,arrival_time,departure_time,stop_id,stop_sequence,pickup_type,drop_off_type,stop_headsign +trip_0,10:00:00,10:00:00,stop_0,0,,,Stop Headsign 0 +trip_0,20:00:00,20:00:00,stop_1,1,,,Stop Headsign 1 \ No newline at end of file diff --git a/sample/stops.txt b/sample/stops.txt new file mode 100644 index 0000000..e46ace6 --- /dev/null +++ b/sample/stops.txt 
@@ -0,0 +1,3 @@ +stop_id,stop_code,stop_name,stop_desc,stop_lat,stop_lon +stop_0,SC0,Stop 0,Some stop,37.728631,-122.431282 +stop_1,SC1,Stop 1,Some other stop,37.74103,-122.422482 diff --git a/sample/transfers.txt b/sample/transfers.txt new file mode 100644 index 0000000..030f40e --- /dev/null +++ b/sample/transfers.txt @@ -0,0 +1,3 @@ +from_stop_id,to_stop_id,transfer_type,min_transfer_time +stop_0,stop_1,0, +stop_1,stop_0,1, diff --git a/sample/trips.txt b/sample/trips.txt new file mode 100644 index 0000000..7b0c55c --- /dev/null +++ b/sample/trips.txt @@ -0,0 +1,2 @@ +route_id,service_id,trip_id,trip_headsign,shape_id +route_0,service_0,trip_0,Trip 0,shape_0 diff --git a/tests.js b/tests.js new file mode 100644 index 0000000..269d30a --- /dev/null +++ b/tests.js @@ -0,0 +1,650 @@ +'use strict'; +/* Run the tests with mocha: mocha tests.js */ + +// eslint-disable-next-line import/no-extraneous-dependencies + +const { expect } = require('chai'); +const fs = require('fs-extra'); + +const Gtfs = require('../properGtfsObject'); + +// eslint-disable-next-line no-undef +describe('Tests on GTFS', () => { + // eslint-disable-next-line no-undef + it('Test on meta functions', (done) => { + const path = `${__dirname}/gtfs_sample/`; + const gtfs = new Gtfs(path); + + expect(gtfs.isGtfs).to.equal(true); + expect(gtfs.getPath()).to.equal(path); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Test on generic table functions', (done) => { + const path = `${__dirname}/gtfs_sample/`; + const gtfs = new Gtfs(path); + + const indexedAgencies = gtfs.getIndexedTable('agency'); + expect(indexedAgencies.get('agency_0').agency_name).to.equal('Agency 0'); + + const agency0 = gtfs.getItemWithIndexInTable('agency_0', 'agency'); + expect(agency0.agency_name).to.equal('Agency 0'); + + const expectedTableNames = [ + 'agency', 'calendar', 'calendar_dates', 'fare_attributes', 'frequencies', + 'routes', 'stop_times', 'stops', 'trips', 'shapes', 'transfers', 'feed_info', + ]; + 
expect(Array.from(gtfs.getTableNames())).to.deep.equal(expectedTableNames); + + const tableNames = []; + gtfs.forEachTableName((tableName) => { + tableNames.push(tableName); + }); + expect(tableNames).to.deep.equal(expectedTableNames); + + const ROUTE_TABLE_NAME = 'routes'; + const route0 = gtfs.getRouteWithId('route_0'); + + gtfs.addItemInTable({ route_id: 'route_1', route_long_name: 'Route 1' }, ROUTE_TABLE_NAME); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0', 'route_1']); + + gtfs.addItemsInTable([ + { route_id: 'route_2', route_long_name: 'Route 2' }, + { route_id: 'route_3', route_long_name: 'Route 3' }, + ], ROUTE_TABLE_NAME); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0', 'route_1', 'route_2', 'route_3']); + + gtfs.removeItemInTable(gtfs.getRouteWithId('route_2'), ROUTE_TABLE_NAME); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0', 'route_1', 'route_3']); + + gtfs.removeItemsInTable([gtfs.getRouteWithId('route_0'), gtfs.getRouteWithId('route_3')], ROUTE_TABLE_NAME); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_1']); + + gtfs.setIndexedItemsAsTable(new Map([['route_0', route0]]), ROUTE_TABLE_NAME); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0']); + + const routeIds = []; + gtfs.forEachItemInTable(ROUTE_TABLE_NAME, (route) => { + routeIds.push(route.route_id); + }); + expect(routeIds).to.deep.equal(['route_0']); + + const trip0 = gtfs.getTripWithId('trip_0'); + const routeOfTrip0 = gtfs.getParentItem(trip0, ROUTE_TABLE_NAME); + expect(routeOfTrip0.route_long_name).to.equal('Route 0'); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on agencies', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedAgencies())).to.deep.equal(['agency_0']); + + const agency0 = gtfs.getAgencyWithId('agency_0'); + expect(agency0.agency_name).to.equal('Agency 0'); + + 
gtfs.addAgency({ agency_id: 'agency_1', agency_name: 'Agency 1' }); + expect(sortedKeys(gtfs.getIndexedAgencies())).to.deep.equal(['agency_0', 'agency_1']); + + gtfs.addAgencies([ + { agency_id: 'agency_2', agency_name: 'Agency 2' }, + { agency_id: 'agency_3', agency_name: 'Agency 3' }, + ]); + expect(sortedKeys(gtfs.getIndexedAgencies())).to.deep.equal(['agency_0', 'agency_1', 'agency_2', 'agency_3']); + + gtfs.removeAgency(gtfs.getAgencyWithId('agency_2')); + expect(sortedKeys(gtfs.getIndexedAgencies())).to.deep.equal(['agency_0', 'agency_1', 'agency_3']); + + gtfs.removeAgencies([gtfs.getAgencyWithId('agency_0'), gtfs.getAgencyWithId('agency_3')]); + expect(sortedKeys(gtfs.getIndexedAgencies())).to.deep.equal(['agency_1']); + + gtfs.setIndexedAgencies(new Map([['agency_0', agency0]])); + expect(sortedKeys(gtfs.getIndexedAgencies())).to.deep.equal(['agency_0']); + + const agencyIds = []; + gtfs.forEachAgency((agency) => { + agencyIds.push(agency.agency_id); + }); + expect(agencyIds).to.deep.equal(['agency_0']); + + const route0 = gtfs.getRouteWithId('route_0'); + const agencyOfRoute0 = gtfs.getAgencyOfRoute(route0); + expect(agencyOfRoute0.agency_name).to.deep.equal('Agency 0'); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on stops', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedStops())).to.deep.equal(['stop_0', 'stop_1']); + + const stop0 = gtfs.getStopWithId('stop_0'); + const stop1 = gtfs.getStopWithId('stop_1'); + expect(stop0.stop_name).to.equal('Stop 0'); + expect(stop1.stop_name).to.equal('Stop 1'); + + gtfs.addStop({ stop_id: 'stop_2', stop_name: 'Stop 2' }); + expect(sortedKeys(gtfs.getIndexedStops())).to.deep.equal(['stop_0', 'stop_1', 'stop_2']); + + gtfs.addStops([ + { stop_id: 'stop_3', stop_name: 'Stop 3' }, + { stop_id: 'stop_4', stop_name: 'Stop 4' }, + ]); + expect(sortedKeys(gtfs.getIndexedStops())).to.deep.equal(['stop_0', 'stop_1', 'stop_2', 
'stop_3', 'stop_4']); + + gtfs.removeStop(gtfs.getStopWithId('stop_2')); + expect(sortedKeys(gtfs.getIndexedStops())).to.deep.equal(['stop_0', 'stop_1', 'stop_3', 'stop_4']); + + gtfs.removeStops([gtfs.getStopWithId('stop_1'), gtfs.getStopWithId('stop_3')]); + expect(sortedKeys(gtfs.getIndexedStops())).to.deep.equal(['stop_0', 'stop_4']); + + gtfs.setIndexedStops(new Map([['stop_0', stop0], ['stop_1', stop1]])); + expect(sortedKeys(gtfs.getIndexedStops())).to.deep.equal(['stop_0', 'stop_1']); + + const stopIds = []; + gtfs.forEachStop((stop) => { + stopIds.push(stop.stop_id); + }); + expect(stopIds.sort()).to.deep.equal(['stop_0', 'stop_1']); + + const stopTime00 = gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '0'); + const stopOfStopTime00 = gtfs.getStopOfStopTime(stopTime00); + expect(stopOfStopTime00.stop_name).to.equal('Stop 0'); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on routes', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0']); + + const route0 = gtfs.getRouteWithId('route_0'); + expect(route0.route_long_name).to.equal('Route 0'); + + gtfs.addRoute({ route_id: 'route_1', route_long_name: 'Route 1' }); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0', 'route_1']); + + gtfs.addRoutes([ + { route_id: 'route_2', route_long_name: 'Route 2' }, + { route_id: 'route_3', route_long_name: 'Route 3' }, + ]); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0', 'route_1', 'route_2', 'route_3']); + + gtfs.removeRoute(gtfs.getRouteWithId('route_2')); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0', 'route_1', 'route_3']); + + gtfs.removeRoutes([gtfs.getRouteWithId('route_0'), gtfs.getRouteWithId('route_3')]); + expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_1']); + + gtfs.setIndexedRoutes(new Map([['route_0', route0]])); + 
expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0']); + + const routeIds = []; + gtfs.forEachRoute((route) => { + routeIds.push(route.route_id); + }); + expect(routeIds).to.deep.equal(['route_0']); + + const trip0 = gtfs.getTripWithId('trip_0'); + const routeOfTrip0 = gtfs.getRouteOfTrip(trip0); + expect(routeOfTrip0.route_long_name).to.equal('Route 0'); + + const stopTime00 = gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '0'); + const routeOfStopTime00 = gtfs.getRouteOfStopTime(stopTime00); + expect(routeOfStopTime00.route_long_name).to.equal('Route 0'); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on trips', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedTrips())).to.deep.equal(['trip_0']); + + const trip0 = gtfs.getTripWithId('trip_0'); + expect(trip0.trip_headsign).to.equal('Trip 0'); + + gtfs.addTrip({ trip_id: 'trip_1', trip_headsign: 'Trip 1' }); + expect(sortedKeys(gtfs.getIndexedTrips())).to.deep.equal(['trip_0', 'trip_1']); + + gtfs.addTrips([ + { trip_id: 'trip_2', trip_headsign: 'Trip 2' }, + { trip_id: 'trip_3', trip_headsign: 'Trip 3' }, + ]); + expect(sortedKeys(gtfs.getIndexedTrips())).to.deep.equal(['trip_0', 'trip_1', 'trip_2', 'trip_3']); + + gtfs.removeTrip(gtfs.getTripWithId('trip_2')); + expect(sortedKeys(gtfs.getIndexedTrips())).to.deep.equal(['trip_0', 'trip_1', 'trip_3']); + + gtfs.removeTrips([gtfs.getTripWithId('trip_0'), gtfs.getTripWithId('trip_3')]); + expect(sortedKeys(gtfs.getIndexedTrips())).to.deep.equal(['trip_1']); + + gtfs.setIndexedTrips(new Map([['trip_0', trip0]])); + expect(sortedKeys(gtfs.getIndexedTrips())).to.deep.equal(['trip_0']); + + const tripIds = []; + gtfs.forEachTrip((trip) => { + tripIds.push(trip.trip_id); + }); + expect(tripIds).to.deep.equal(['trip_0']); + + const stopTime00 = gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '0'); + const tripOfStopTime00 = 
gtfs.getTripOfStopTime(stopTime00); + expect(tripOfStopTime00.trip_headsign).to.equal('Trip 0'); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on stop times', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedStopTimes())).to.deep.equal(['trip_0']); + + const trip0 = gtfs.getTripWithId('trip_0'); + expect(sortedKeys(gtfs.getStopTimeByStopSequenceOfTrip(trip0))).to.deep.equal(['0', '1']); + + const stopTime0 = gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '0'); + const stopTime1 = gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '1'); + expect(stopTime0.stop_headsign).to.equal('Stop Headsign 0'); + expect(stopTime1.stop_headsign).to.equal('Stop Headsign 1'); + + gtfs.addStopTime({ trip_id: 'trip_0', stop_id: 'stop_0', stop_sequence: '2', stop_headsign: 'Stop Headsign 2' }); + expect(sortedKeys(gtfs.getStopTimeByStopSequenceOfTrip(trip0))).to.deep.equal(['0', '1', '2']); + + gtfs.addStopTimes([ + { trip_id: 'trip_0', stop_id: 'stop_1', stop_sequence: '3', stop_headsign: 'Stop Headsign 3' }, + { trip_id: 'trip_0', stop_id: 'stop_0', stop_sequence: '4', stop_headsign: 'Stop Headsign 4' }, + ]); + expect(sortedKeys(gtfs.getStopTimeByStopSequenceOfTrip(trip0))).to.deep.equal(['0', '1', '2', '3', '4']); + + gtfs.removeStopTime(gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '2')); + expect(sortedKeys(gtfs.getStopTimeByStopSequenceOfTrip(trip0))).to.deep.equal(['0', '1', '3', '4']); + + gtfs.removeStopTimes([ + gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '0'), + gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '3'), + ]); + expect(sortedKeys(gtfs.getStopTimeByStopSequenceOfTrip(trip0))).to.deep.equal(['1', '4']); + + gtfs.setIndexedStopTimes(new Map([ + ['trip_0', new Map([ + ['0', { trip_id: 'trip_0', stop_id: 'stop_0', stop_sequence: '0', stop_headsign: 'Stop Headsign 000' }], + ['1', { trip_id: 'trip_0', stop_id: 'stop_1', stop_sequence: '1', 
stop_headsign: 'Stop Headsign 011' }], + ])], + ['trip_1', new Map([ + ['5', { trip_id: 'trip_1', stop_id: 'stop_1', stop_sequence: '5', stop_headsign: 'Stop Headsign 115' }], + ['6', { trip_id: 'trip_1', stop_id: 'stop_0', stop_sequence: '6', stop_headsign: 'Stop Headsign 106' }], + ])], + ])); + expect(sortedKeys(gtfs.getIndexedStopTimes())).to.deep.equal(['trip_0', 'trip_1']); + expect(sortedKeys(gtfs.getStopTimeByStopSequenceOfTrip({ trip_id: 'trip_0' }))).to.deep.equal(['0', '1']); + expect(sortedKeys(gtfs.getStopTimeByStopSequenceOfTrip({ trip_id: 'trip_1' }))).to.deep.equal(['5', '6']); + + const stopHeadsigns = []; + gtfs.forEachStopTime((stopTime) => { + stopHeadsigns.push(stopTime.stop_headsign); + }); + const expectedStopHeadsigns = ['Stop Headsign 000', 'Stop Headsign 011', 'Stop Headsign 106', 'Stop Headsign 115']; + expect(stopHeadsigns.sort()).to.deep.equal(expectedStopHeadsigns); + + const stopHeadsignsOfTrip0 = []; + gtfs.forEachStopTimeOfTrip({ trip_id: 'trip_0' }, (stopTime) => { + stopHeadsignsOfTrip0.push(stopTime.stop_headsign); + }); + const expectedStopHeadsignsOfTrip0 = ['Stop Headsign 000', 'Stop Headsign 011']; + expect(stopHeadsignsOfTrip0.sort()).to.deep.equal(expectedStopHeadsignsOfTrip0); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on calendars', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedCalendars())).to.deep.equal(['service_0']); + + const calendar0 = gtfs.getCalendarWithServiceId('service_0'); + expect(calendar0.start_date).to.equal('20000101'); + + gtfs.addCalendar({ service_id: 'service_1', start_date: '20010101', end_date: '20010101' }); + expect(sortedKeys(gtfs.getIndexedCalendars())).to.deep.equal(['service_0', 'service_1']); + + gtfs.addCalendars([ + { service_id: 'service_2', start_date: '20020101', end_date: '20020101' }, + { service_id: 'service_3', start_date: '20030101', end_date: '20030101' }, + ]); + 
expect(sortedKeys(gtfs.getIndexedCalendars())).to.deep.equal(['service_0', 'service_1', 'service_2', 'service_3']); + + gtfs.removeCalendar(gtfs.getCalendarWithServiceId('service_2')); + expect(sortedKeys(gtfs.getIndexedCalendars())).to.deep.equal(['service_0', 'service_1', 'service_3']); + + gtfs.removeCalendars([gtfs.getCalendarWithServiceId('service_0'), gtfs.getCalendarWithServiceId('service_3')]); + expect(sortedKeys(gtfs.getIndexedCalendars())).to.deep.equal(['service_1']); + + gtfs.setIndexedCalendars(new Map([['service_0', calendar0]])); + expect(sortedKeys(gtfs.getIndexedCalendars())).to.deep.equal(['service_0']); + + const serviceIds = []; + gtfs.forEachCalendar((calendar) => { + serviceIds.push(calendar.service_id); + }); + expect(serviceIds).to.deep.equal(['service_0']); + + const trip0 = gtfs.getTripWithId('trip_0'); + const calendarOfTrip0 = gtfs.getCalendarOfTrip(trip0); + expect(calendarOfTrip0.start_date).to.equal('20000101'); + + const stopTime00 = gtfs.getStopTimeWithTripIdAndStopSequence('trip_0', '0'); + const calendarOfStopTime00 = gtfs.getCalendarOfStopTime(stopTime00); + expect(calendarOfStopTime00.start_date).to.equal('20000101'); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on calendar dates', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedCalendarDates())).to.deep.equal(['service_0']); + + const trip0 = gtfs.getTripWithId('trip_0'); + expect(sortedKeys(gtfs.getCalendarDateByDateOfTrip(trip0))).to.deep.equal(['20171228', '20171231']); + expect(sortedKeys(gtfs.getCalendarDateByDateOfServiceId('service_0'))).to.deep.equal(['20171228', '20171231']); + + const calendarDate28 = gtfs.getCalendarDateWithServiceIdAndDate('service_0', '20171228'); + const calendarDate31 = gtfs.getCalendarDateWithServiceIdAndDate('service_0', '20171231'); + expect(calendarDate28.exception_type).to.equal('1'); + expect(calendarDate31.exception_type).to.equal('2'); + 
+ gtfs.addCalendarDate({ service_id: 'service_0', date: '20180101', exception_type: '2' }); + expect(sortedKeys(gtfs.getCalendarDateByDateOfTrip(trip0))).to.deep.equal(['20171228', '20171231', '20180101']); + + gtfs.addCalendarDates([ + { service_id: 'service_0', date: '20180102', exception_type: '1' }, + { service_id: 'service_0', date: '20180103', exception_type: '1' }, + ]); + const expectedDates1 = ['20171228', '20171231', '20180101', '20180102', '20180103']; + expect(sortedKeys(gtfs.getCalendarDateByDateOfTrip(trip0))).to.deep.equal(expectedDates1); + + gtfs.removeCalendarDate(gtfs.getCalendarDateWithServiceIdAndDate('service_0', '20180101')); + const expectedDates2 = ['20171228', '20171231', '20180102', '20180103']; + expect(sortedKeys(gtfs.getCalendarDateByDateOfTrip(trip0))).to.deep.equal(expectedDates2); + + gtfs.removeCalendarDates([ + gtfs.getCalendarDateWithServiceIdAndDate('service_0', '20171228'), + gtfs.getCalendarDateWithServiceIdAndDate('service_0', '20180102'), + ]); + expect(sortedKeys(gtfs.getCalendarDateByDateOfTrip(trip0))).to.deep.equal(['20171231', '20180103']); + + gtfs.setIndexedCalendarDates(new Map([ + ['service_0', new Map([ + ['20171228', { trip_id: 'service_0', date: '20171228', exception_type: '1' }], + ['20171231', { trip_id: 'service_0', date: '20171231', exception_type: '2' }], + ])], + ['service_1', new Map([ + ['20180101', { trip_id: 'service_1', date: '20180101', exception_type: '2' }], + ['20180102', { trip_id: 'service_1', date: '20180102', exception_type: '1' }], + ])], + ])); + expect(sortedKeys(gtfs.getIndexedCalendarDates())).to.deep.equal(['service_0', 'service_1']); + const expectedDates3 = ['20171228', '20171231']; + expect(sortedKeys(gtfs.getCalendarDateByDateOfTrip({ service_id: 'service_0' }))).to.deep.equal(expectedDates3); + const expectedDates4 = ['20180101', '20180102']; + expect(sortedKeys(gtfs.getCalendarDateByDateOfTrip({ service_id: 'service_1' }))).to.deep.equal(expectedDates4); + + const exceptionsTypes = 
[]; + gtfs.forEachCalendarDate((calendarDate) => { + exceptionsTypes.push(calendarDate.exception_type); + }); + expect(exceptionsTypes.sort()).to.deep.equal(['1', '1', '2', '2']); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on shapes', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedShapePoints())).to.deep.equal(['shape_0']); + + const trip0 = gtfs.getTripWithId('trip_0'); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfTrip(trip0))).to.deep.equal(['1', '2']); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfShapeId('shape_0'))).to.deep.equal(['1', '2']); + + const shapePoint1 = gtfs.getShapePointWithTripIdAndShapePointSequence('shape_0', '1'); + const shapePoint2 = gtfs.getShapePointWithTripIdAndShapePointSequence('shape_0', '2'); + expect(shapePoint1.shape_dist_traveled).to.equal('0'); + expect(shapePoint2.shape_dist_traveled).to.equal('10'); + + gtfs.addShapePoint({ shape_id: 'shape_0', shape_pt_sequence: '3', shape_dist_traveled: '100' }); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfTrip(trip0))).to.deep.equal(['1', '2', '3']); + + gtfs.addShapePoints([ + { shape_id: 'shape_0', shape_pt_sequence: '4', shape_dist_traveled: '1000' }, + { shape_id: 'shape_0', shape_pt_sequence: '5', shape_dist_traveled: '10000' }, + ]); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfTrip(trip0))).to.deep.equal(['1', '2', '3', '4', '5']); + + gtfs.removeShapePoint(gtfs.getShapePointWithTripIdAndShapePointSequence('shape_0', '3')); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfTrip(trip0))).to.deep.equal(['1', '2', '4', '5']); + + gtfs.removeShapePoints([ + gtfs.getShapePointWithTripIdAndShapePointSequence('shape_0', '2'), + gtfs.getShapePointWithTripIdAndShapePointSequence('shape_0', '5'), + ]); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfTrip(trip0))).to.deep.equal(['1', '4']); + + 
gtfs.setIndexedShapePoints(new Map([ + ['shape_0', new Map([ + ['1', { shape_id: 'shape_0', shape_pt_sequence: '1', shape_dist_traveled: '0' }], + ['2', { shape_id: 'shape_0', shape_pt_sequence: '2', shape_dist_traveled: '20' }], + ])], + ['shape_1', new Map([ + ['6', { shape_id: 'shape_1', shape_pt_sequence: '6', shape_dist_traveled: '0' }], + ['7', { shape_id: 'shape_1', shape_pt_sequence: '7', shape_dist_traveled: '21' }], + ])], + ])); + expect(sortedKeys(gtfs.getIndexedShapePoints())).to.deep.equal(['shape_0', 'shape_1']); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfTrip({ shape_id: 'shape_0' }))).to.deep.equal(['1', '2']); + expect(sortedKeys(gtfs.getShapePointByShapePointSequenceOfTrip({ shape_id: 'shape_1' }))).to.deep.equal(['6', '7']); + + const shapeDistanceTraveled = []; + gtfs.forEachShapePoint((shapePoint) => { + shapeDistanceTraveled.push(shapePoint.shape_dist_traveled); + }); + expect(shapeDistanceTraveled.sort()).to.deep.equal(['0', '0', '20', '21']); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on frequencies', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedFrequencies())).to.deep.equal(['trip_0']); + + const frequency010h = gtfs.getFrequencyWithTripIdAndStartTime('trip_0', '10:00:00'); + expect(frequency010h.headway_secs).to.equal('600'); + + gtfs.addFrequency({ trip_id: 'trip_1', start_time: '20:00:00', end_time: '25:00:00' }); + expect(sortedKeys(gtfs.getIndexedFrequencies())).to.deep.equal(['trip_0', 'trip_1']); + + gtfs.addFrequencies([ + { trip_id: 'trip_2', start_time: '20:00:00', end_time: '25:00:00' }, + { trip_id: 'trip_3', start_time: '20:00:00', end_time: '25:00:00' }, + ]); + expect(sortedKeys(gtfs.getIndexedFrequencies())).to.deep.equal(['trip_0', 'trip_1', 'trip_2', 'trip_3']); + + gtfs.removeFrequency(gtfs.getFrequencyWithTripIdAndStartTime('trip_2', '20:00:00')); + 
expect(sortedKeys(gtfs.getIndexedFrequencies())).to.deep.equal(['trip_0', 'trip_1', 'trip_3']); + + gtfs.removeFrequencies([ + gtfs.getFrequencyWithTripIdAndStartTime('trip_0', '10:00:00'), + gtfs.getFrequencyWithTripIdAndStartTime('trip_0', '15:00:00'), + ]); + expect(sortedKeys(gtfs.getIndexedFrequencies())).to.deep.equal(['trip_1', 'trip_3']); + + gtfs.setIndexedFrequencies(new Map([ + ['trip_0', new Map([ + ['05:00:00', { trip_id: 'trip_0', start_time: '05:00:00', end_time: '10:00:00' }], + ['10:00:00', { trip_id: 'trip_0', start_time: '10:00:00', end_time: '15:00:00' }], + ])], + ['trip_1', new Map([ + ['05:00:00', { trip_id: 'trip_1', start_time: '05:00:00', end_time: '10:00:00' }], + ['10:00:00', { trip_id: 'trip_1', start_time: '10:00:00', end_time: '16:00:00' }], + ])], + ])); + expect(sortedKeys(gtfs.getIndexedFrequencies())).to.deep.equal(['trip_0', 'trip_1']); + const frequency110h = gtfs.getFrequencyWithTripIdAndStartTime('trip_1', '10:00:00'); + expect(frequency110h.end_time).to.equal('16:00:00'); + + const endTimes = []; + gtfs.forEachFrequency((frequency) => { + endTimes.push(frequency.end_time); + }); + expect(endTimes.sort()).to.deep.equal(['10:00:00', '10:00:00', '15:00:00', '16:00:00']); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on transfers', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(sortedKeys(gtfs.getIndexedTransfers())).to.deep.equal(['stop_0', 'stop_1']); + + const transfer01 = gtfs.getTransfertWithFromStopIdAndToStopId('stop_0', 'stop_1'); + expect(transfer01.transfer_type).to.equal('0'); + + gtfs.addTransfer({ from_stop_id: 'stop_2', to_stop_id: 'stop_0', transfer_type: '3' }); + expect(sortedKeys(gtfs.getIndexedTransfers())).to.deep.equal(['stop_0', 'stop_1', 'stop_2']); + + gtfs.addTransfers([ + { from_stop_id: 'stop_3', to_stop_id: 'stop_0', transfer_type: '3' }, + { from_stop_id: 'stop_4', to_stop_id: 'stop_0', transfer_type: '3' }, + ]); + 
expect(sortedKeys(gtfs.getIndexedTransfers())).to.deep.equal(['stop_0', 'stop_1', 'stop_2', 'stop_3', 'stop_4']); + + gtfs.removeTransfer(gtfs.getTransfertWithFromStopIdAndToStopId('stop_0', 'stop_1')); + expect(sortedKeys(gtfs.getIndexedTransfers())).to.deep.equal(['stop_1', 'stop_2', 'stop_3', 'stop_4']); + + gtfs.removeTransfers([ + gtfs.getTransfertWithFromStopIdAndToStopId('stop_1', 'stop_0'), + gtfs.getTransfertWithFromStopIdAndToStopId('stop_3', 'stop_0'), + ]); + expect(sortedKeys(gtfs.getIndexedTransfers())).to.deep.equal(['stop_2', 'stop_4']); + + gtfs.setIndexedTransfers(new Map([ + ['stop_0', new Map([ + ['stop_1', { from_stop_id: 'stop_0', to_stop_id: 'stop_1', transfer_type: '0' }], + ['stop_2', { from_stop_id: 'stop_0', to_stop_id: 'stop_2', transfer_type: '3' }], + ])], + ['stop_1', new Map([ + ['stop_0', { from_stop_id: 'stop_1', to_stop_id: 'stop_0', transfer_type: '1' }], + ['stop_3', { from_stop_id: 'stop_1', to_stop_id: 'stop_3', transfer_type: '3' }], + ])], + ])); + expect(sortedKeys(gtfs.getIndexedTransfers())).to.deep.equal(['stop_0', 'stop_1']); + const transfer02 = gtfs.getTransfertWithFromStopIdAndToStopId('stop_0', 'stop_2'); + expect(transfer02.transfer_type).to.equal('3'); + const transfer10 = gtfs.getTransfertWithFromStopIdAndToStopId('stop_1', 'stop_0'); + expect(transfer10.transfer_type).to.equal('1'); + + const transferTypes = []; + gtfs.forEachTransfer((transfer) => { + transferTypes.push(transfer.transfer_type); + }); + expect(transferTypes.sort()).to.deep.equal(['0', '1', '3', '3']); + + done(); + }); + + // eslint-disable-next-line no-undef + it('Tests on feed info', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + expect(gtfs.getFeedInfo().feed_lang).to.equal('en'); + + gtfs.setFeedInfo({ + feed_publisher_name: 'Some other name', + feed_publisher_url: 'http://google.ca', + feed_lang: 'en-CA', + }); + + expect(gtfs.getFeedInfo().feed_lang).to.equal('en-CA'); + + done(); + }); + + // 
eslint-disable-next-line no-undef + it('Tests on exporting', (done) => { + const path = `${__dirname}/gtfs_sample`; + const gtfs = new Gtfs(path); + + gtfs.getFeedInfo().feed_lang = 'fr'; + gtfs.getFeedInfo().some_extra_field = 'some_extra_value'; + + gtfs.forEachRoute((route) => { + route.route_desc = 'Some new description'; + route.some_extra_route_field = 'some_extra_route_value'; + }); + + const outputPath = `${__dirname}/temp_4865ce67d01f96a489fbd0e71ad8800b/`; + gtfs.exportAtPath(outputPath, (err) => { + if (err) { + console.log(err); + } + + fs.readFile(`${outputPath}routes.txt`, (readRoutesError, routesTxt) => { + if (readRoutesError) { throw readRoutesError; } + + expect(String(routesTxt)).to.equal( + 'route_id,agency_id,route_short_name,route_long_name,route_desc,route_type,route_url,route_color,' + + 'route_text_color,route_sort_order,some_extra_route_field\n' + + 'route_0,agency_0,R0,Route 0,Some new description,3,,,,,some_extra_route_value\n' + ); + + fs.readFile(`${outputPath}feed_info.txt`, (readFeedInfoError, feedInfoTxt) => { + if (readFeedInfoError) { throw readFeedInfoError; } + + expect(String(feedInfoTxt)).to.equal( + 'feed_publisher_name,feed_publisher_url,feed_lang,feed_start_date,feed_end_date,feed_version,' + + 'some_extra_field\n' + + 'Publisher Name,http://google.com,fr,20000101,21001231,42,some_extra_value\n' + ); + + fs.remove(outputPath, (removeError) => { + if (removeError) { throw removeError; } + + done(); + }); + }); + }); + }); + }); +}); + +function sortedKeys(map) { + return Array.from(map.keys()).sort(); +} From 4bd6805c3b78a01a889718664964dc017eeb7330 Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 11:43:44 -0500 Subject: [PATCH 2/9] Add engine --- package.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/package.json b/package.json index de44ab7..131abee 100644 --- a/package.json +++ b/package.json @@ -39,5 +39,8 @@ "eslint-config-transit": "^1.0.3", "eslint-plugin-import": "^2.2.0", "mocha": "^3.5.3" 
+ }, + "engines": { + "node": "^8.1.4" } } From 919dcd8186622b7401cab4b7d52c9ab4732afd17 Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 11:44:12 -0500 Subject: [PATCH 3/9] Update gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 00cbbdf..0b44364 100644 --- a/.gitignore +++ b/.gitignore @@ -57,3 +57,5 @@ typings/ # dotenv environment variables file .env +package-lock.json +.idea/* From a922794fe7624a92e3d12c1ca54555c51400666b Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 13:27:04 -0500 Subject: [PATCH 4/9] Style change according to review --- helpers/csv.js | 2 +- helpers/import.js | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/helpers/csv.js b/helpers/csv.js index d59b34a..7fea0ce 100644 --- a/helpers/csv.js +++ b/helpers/csv.js @@ -63,7 +63,7 @@ function fromCsvStringToArray(string, tableName) { return null; } - if (string.includes('"') === false) { + if (!string.includes('"')) { return string.split(','); } diff --git a/helpers/import.js b/helpers/import.js index 230af85..4bc73b7 100644 --- a/helpers/import.js +++ b/helpers/import.js @@ -34,12 +34,12 @@ function getRows(buffer, regexPatternObjectsByTableName, tableName) { const rows = []; let rowsSlice; let position = 0; - const length = 50000; + const batchLength = 50000; let merge; const regexPatternObjects = regexPatternObjectsByTableName[tableName]; while (position < buffer.length) { - rowsSlice = buffer.toString('utf8', position, Math.min(buffer.length, position + length)); + rowsSlice = buffer.toString('utf8', position, Math.min(buffer.length, position + batchLength)); if (regexPatternObjects) { regexPatternObjects.forEach((regexPatternObject) => { @@ -64,7 +64,7 @@ function getRows(buffer, regexPatternObjectsByTableName, tableName) { }); merge = rowsSlice[rowsSlice.length] !== '\n'; - position += length; + position += batchLength; } return rows; From 
b901f662ff71672c7e46003c38d4099d05cf0c98 Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 13:27:13 -0500 Subject: [PATCH 5/9] Clean access to mocha --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 131abee..5b9dd2c 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "description": "A Node.js librairie for GTFS", "main": "index.js", "scripts": { - "test": "./node_modules/.bin/mocha tests.js" + "test": "mocha tests.js" }, "repository": { "type": "git", From d5f9324a66bb11363598a1183ddc10e64d18db4a Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 13:44:44 -0500 Subject: [PATCH 6/9] Fix few paths --- helpers/csv.js | 2 +- helpers/import.js | 12 ++++-------- index.js | 11 ++++++++++- tests.js | 30 +++++++++++++++--------------- 4 files changed, 30 insertions(+), 25 deletions(-) diff --git a/helpers/csv.js b/helpers/csv.js index 7fea0ce..d9b2552 100644 --- a/helpers/csv.js +++ b/helpers/csv.js @@ -72,7 +72,7 @@ function fromCsvStringToArray(string, tableName) { string = string.replace(/""/g, '\\"'); return fromCsvStringToArray(string, tableName); } - process.notices.addWarning(`Row not valid in table ${tableName}: ${string}`); + process.notices.addWarning(__filename, `Row not valid in table ${tableName}: ${string}`); return null; } diff --git a/helpers/import.js b/helpers/import.js index 4bc73b7..bcdbeec 100644 --- a/helpers/import.js +++ b/helpers/import.js @@ -45,11 +45,9 @@ function getRows(buffer, regexPatternObjectsByTableName, tableName) { regexPatternObjects.forEach((regexPatternObject) => { const modifiedRowsSlice = rowsSlice.replace(regexPatternObject.regex, regexPatternObject.pattern || ''); if (modifiedRowsSlice !== rowsSlice) { - if (process.notices && process.notices.addInfo) { - process.notices.addInfo( - __filename, `Applying regex replace to table: "${tableName}". 
regex: "${regexPatternObject.regex}".` - ); - } + process.notices.addInfo( + __filename, `Applying regex replace to table: "${tableName}". regex: "${regexPatternObject.regex}".` + ); rowsSlice = modifiedRowsSlice; } }); @@ -92,9 +90,7 @@ function processRows(gtfs, tableName, indexKeys, rows) { }, {}); if (sortedKeys.length !== arrayOfValues.length) { - if (process.notices && process.notices.addWarning) { - process.notices.addWarning(`Row not valid in table: ${JSON.stringify(item)}`); - } + process.notices.addWarning(__filename, `Row not valid in table: ${JSON.stringify(item)}`); return; } diff --git a/index.js b/index.js index 31a7e51..c07254e 100644 --- a/index.js +++ b/index.js @@ -1,6 +1,15 @@ 'use strict'; -const Gtfs = require('gtfs'); +const Gtfs = require('./gtfs'); + +/* Fallback to replace Transit's internal notice system */ + +if (process.notices === undefined) { + process.notices = { + addInfo: (title, content) => { console.log(`[Info] ${title}:\n${content}`); }, + addWarning: (title, content) => { console.log(`[Warning] ${title}:\n${content}`); }, + } +} module.exports = { Gtfs, diff --git a/tests.js b/tests.js index 269d30a..ee5b683 100644 --- a/tests.js +++ b/tests.js @@ -6,13 +6,13 @@ const { expect } = require('chai'); const fs = require('fs-extra'); -const Gtfs = require('../properGtfsObject'); +const { Gtfs } = require('./index'); // eslint-disable-next-line no-undef describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Test on meta functions', (done) => { - const path = `${__dirname}/gtfs_sample/`; + const path = `${__dirname}/sample/`; const gtfs = new Gtfs(path); expect(gtfs.isGtfs).to.equal(true); @@ -23,7 +23,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Test on generic table functions', (done) => { - const path = `${__dirname}/gtfs_sample/`; + const path = `${__dirname}/sample/`; const gtfs = new Gtfs(path); const indexedAgencies = gtfs.getIndexedTable('agency'); @@ -80,7 +80,7 @@ 
describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on agencies', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedAgencies())).to.deep.equal(['agency_0']); @@ -121,7 +121,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on stops', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedStops())).to.deep.equal(['stop_0', 'stop_1']); @@ -164,7 +164,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on routes', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedRoutes())).to.deep.equal(['route_0']); @@ -209,7 +209,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on trips', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedTrips())).to.deep.equal(['trip_0']); @@ -250,7 +250,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on stop times', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedStopTimes())).to.deep.equal(['trip_0']); @@ -314,7 +314,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on calendars', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedCalendars())).to.deep.equal(['service_0']); @@ -359,7 +359,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on calendar dates', (done) => { - const path = 
`${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedCalendarDates())).to.deep.equal(['service_0']); @@ -420,7 +420,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on shapes', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedShapePoints())).to.deep.equal(['shape_0']); @@ -477,7 +477,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on frequencies', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedFrequencies())).to.deep.equal(['trip_0']); @@ -528,7 +528,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on transfers', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(sortedKeys(gtfs.getIndexedTransfers())).to.deep.equal(['stop_0', 'stop_1']); @@ -581,7 +581,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on feed info', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); expect(gtfs.getFeedInfo().feed_lang).to.equal('en'); @@ -599,7 +599,7 @@ describe('Tests on GTFS', () => { // eslint-disable-next-line no-undef it('Tests on exporting', (done) => { - const path = `${__dirname}/gtfs_sample`; + const path = `${__dirname}/sample`; const gtfs = new Gtfs(path); gtfs.getFeedInfo().feed_lang = 'fr'; From 2ad008f5f0616b961efc664e04b54c3fd39bfed4 Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 13:53:10 -0500 Subject: [PATCH 7/9] Use debug instead of the console log --- .gitignore | 2 +- gtfs.js | 3 ++- helpers/export.js | 21 ++++++++++++--------- helpers/import.js | 3 ++- 
helpers/logging_iterator_wrapper.js | 6 ++++-- index.js | 7 +++++-- package.json | 1 + tests.js | 6 ++---- 8 files changed, 29 insertions(+), 20 deletions(-) diff --git a/.gitignore b/.gitignore index 0b44364..420ef6c 100644 --- a/.gitignore +++ b/.gitignore @@ -58,4 +58,4 @@ typings/ .env package-lock.json -.idea/* +.idea/ diff --git a/gtfs.js b/gtfs.js index 496f67d..6621752 100644 --- a/gtfs.js +++ b/gtfs.js @@ -2,6 +2,7 @@ /* eslint-disable no-underscore-dangle */ +const infoLog = require('debug')('gtfsNodeLib:i'); const fs = require('fs-extra'); const forEachWithLog = require('./helpers/logging_iterator_wrapper'); @@ -37,7 +38,7 @@ function addItems(items, tableName, gtfs) { function getIndexedTableOfGtfs(tableName, gtfs, options) { if (gtfs._tables.has(tableName) === false) { importTable(gtfs, tableName, options); - console.log(`[Importation] Table ${tableName} has been imported.`); + infoLog(`[Importation] Table ${tableName} has been imported.`); } return gtfs._tables.get(tableName); diff --git a/helpers/export.js b/helpers/export.js index a94984f..1acd2b3 100644 --- a/helpers/export.js +++ b/helpers/export.js @@ -4,6 +4,9 @@ const acomb = require('acomb'); const async = require('async'); +const infoLog = require('debug')('gtfsNodeLib:i'); +const warningLog = require('debug')('gtfsNodeLib:w'); +const errorLog = require('debug')('gtfsNodeLib:e'); const fs = require('fs-extra'); const { fromObjectToCsvString } = require('./csv'); @@ -42,22 +45,22 @@ function copyUntouchedTable(inputPath, outputPath, tableName, callback) { fs.open(fullPathToInputFile, 'r', (err) => { if (err && err.code === 'ENOENT') { - console.log(`[${getHHmmss()}] Table doesn't exist and won't be added: ${tableName}`); + warningLog(`[${getHHmmss()}] Table doesn't exist and won't be added: ${tableName}`); callback(); return; } if (err) { - console.log(err); + errorLog(err); callback(); return; } fs.copy(fullPathToInputFile, fullPathToOutputFile, (copyError) => { if (copyError) { - 
console.log(copyError); + errorLog(copyError); } - console.log(`[${getHHmmss()}] Table has been copied: ${tableName}`); + infoLog(`[${getHHmmss()}] Table has been copied: ${tableName}`); callback(); }); }); @@ -138,7 +141,7 @@ function exportTable(tableName, gtfs, outputPath, callback) { }); }), () => { if (rowsBuffer.length === 0) { - console.log(`[${getHHmmss()}] Table has been exported: ${tableName}`); + infoLog(`[${getHHmmss()}] Table has been exported: ${tableName}`); callback(); return; } @@ -146,7 +149,7 @@ function exportTable(tableName, gtfs, outputPath, callback) { fs.appendFile(outputFullPath, rowsBuffer.join(''), (appendingError) => { if (appendingError) { throw appendingError; } - console.log(`[${getHHmmss()}] Table has been exported: ${tableName}`); + infoLog(`[${getHHmmss()}] Table has been exported: ${tableName}`); callback(); }); }); @@ -171,14 +174,14 @@ exports.exportGtfs = (gtfs, outputPath, callback) => { return; } - console.log(`Will start exportation of tables: ${Array.from(gtfs.getTableNames()).join(', ')}`); + infoLog(`Will start exportation of tables: ${Array.from(gtfs.getTableNames()).join(', ')}`); async.eachSeries(gtfs.getTableNames(), (tableName, done) => { if (gtfs._tables.has(tableName) === true) { - console.log(`[${getHHmmss()}] Table will be exported: ${tableName}`); + infoLog(`[${getHHmmss()}] Table will be exported: ${tableName}`); exportTable(tableName, gtfs, outputPath, done); } else { - console.log(`[${getHHmmss()}] Table will be copied: ${tableName}`); + infoLog(`[${getHHmmss()}] Table will be copied: ${tableName}`); copyUntouchedTable(gtfs.getPath(), outputPath, tableName, done); } }, callback); diff --git a/helpers/import.js b/helpers/import.js index bcdbeec..778dcce 100644 --- a/helpers/import.js +++ b/helpers/import.js @@ -2,6 +2,7 @@ /* eslint-disable no-underscore-dangle */ +const infoLog = require('debug')('gtfsNodeLib:i'); const fs = require('fs-extra'); const eachWithLog = require('./logging_iterator_wrapper'); @@ 
-21,7 +22,7 @@ exports.importTable = (gtfs, tableName, options) => { return; } - console.log(`Empty table will be set for table ${tableName} (no input file at path ${gtfs._path}).`); + infoLog(`Empty table will be set for table ${tableName} (no input file at path ${gtfs._path}).`); gtfs._tables.set(tableName, new Map()); }; diff --git a/helpers/logging_iterator_wrapper.js b/helpers/logging_iterator_wrapper.js index bf1ff1b..d9fe280 100644 --- a/helpers/logging_iterator_wrapper.js +++ b/helpers/logging_iterator_wrapper.js @@ -1,5 +1,7 @@ 'use strict'; +const infoLog = require('debug')('gtfsNodeLib:i'); + module.exports = (prefix, valueByKey, iteratee) => { if ( valueByKey instanceof Array === false && @@ -21,7 +23,7 @@ module.exports = (prefix, valueByKey, iteratee) => { if (Date.now() - lastLogAt > interval && process.env.TEST === undefined) { const percentageDone = (numberOfKeysDone / valueByKey.size()) * 100; - console.log(`[${prefix}] ${percentageDone.toPrecision(2)}% done`); + infoLog(`[${prefix}] ${percentageDone.toPrecision(2)}% done`); lastLogAt = Date.now(); oneProgressionLogHasBeenPrinted = true; @@ -30,6 +32,6 @@ module.exports = (prefix, valueByKey, iteratee) => { }); if (oneProgressionLogHasBeenPrinted && process.env.TEST === undefined) { - console.log(`[${prefix}] Done`); + infoLog(`[${prefix}] Done`); } }; diff --git a/index.js b/index.js index c07254e..341c616 100644 --- a/index.js +++ b/index.js @@ -1,13 +1,16 @@ 'use strict'; +const infoLog = require('debug')('gtfsNodeLib:i'); +const warningLog = require('debug')('gtfsNodeLib:w'); + const Gtfs = require('./gtfs'); /* Fallback to replace Transit's internal notice system */ if (process.notices === undefined) { process.notices = { - addInfo: (title, content) => { console.log(`[Info] ${title}:\n${content}`); }, - addWarning: (title, content) => { console.log(`[Warning] ${title}:\n${content}`); }, + addInfo: (title, content) => { infoLog(`[Info] ${title}:\n${content}`); }, + addWarning: (title, content) 
=> { warningLog(`[Warning] ${title}:\n${content}`); }, } } diff --git a/package.json b/package.json index 5b9dd2c..8eeceac 100644 --- a/package.json +++ b/package.json @@ -24,6 +24,7 @@ "dependencies": { "acomb": "1.2.2", "async": "2.6.0", + "debug": "3.1.0", "fs-extra": "5.0.0" }, "jshintConfig": { diff --git a/tests.js b/tests.js index ee5b683..51f0cd0 100644 --- a/tests.js +++ b/tests.js @@ -611,10 +611,8 @@ describe('Tests on GTFS', () => { }); const outputPath = `${__dirname}/temp_4865ce67d01f96a489fbd0e71ad8800b/`; - gtfs.exportAtPath(outputPath, (err) => { - if (err) { - console.log(err); - } + gtfs.exportAtPath(outputPath, (exportError) => { + if (exportError) { throw exportError; } fs.readFile(`${outputPath}routes.txt`, (readRoutesError, routesTxt) => { if (readRoutesError) { throw readRoutesError; } From 7ab29cf160122c5fe1295b58987266cff5f74dea Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 14:05:51 -0500 Subject: [PATCH 8/9] Handle case with empty tables --- helpers/export.js | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/helpers/export.js b/helpers/export.js index 1acd2b3..72bf96c 100644 --- a/helpers/export.js +++ b/helpers/export.js @@ -16,6 +16,10 @@ const schema = require('./schema'); * Private functions */ +function getSample(iterable) { + return (iterable && iterable.values().next()) ? 
iterable.values().next().value : undefined; +} + function getHHmmss() { const date = new Date(); return `${date.getHours()}:${date.getMinutes()}:${date.getSeconds()}`; @@ -67,19 +71,16 @@ function copyUntouchedTable(inputPath, outputPath, tableName, callback) { } function getActualKeysForTable(gtfs, tableName) { + const keys = [...schema.keysByTableName[tableName]]; const deepness = schema.deepnessByTableName[tableName]; - let sampleItem; + let sampleItem = gtfs.getIndexedTable(tableName); - if (deepness === 0) { - sampleItem = gtfs.getIndexedTable(tableName); - } else if (deepness === 1) { - sampleItem = gtfs.getIndexedTable(tableName).values().next().value; + if (deepness === 1) { + sampleItem = getSample(sampleItem); } else if (deepness === 2) { - sampleItem = gtfs.getIndexedTable(tableName).values().next().value.values().next().value; + sampleItem = getSample(getSample(sampleItem)); } - const keys = [...schema.keysByTableName[tableName]]; - if (sampleItem) { Object.keys(sampleItem).forEach((key) => { if (schema.keysByTableName[tableName].includes(key) === false) { From 2e2e1e4a03629680e00010e5ab7cd96ec1c45e49 Mon Sep 17 00:00:00 2001 From: LeoFrachet Date: Fri, 29 Dec 2017 14:19:14 -0500 Subject: [PATCH 9/9] Try to improve clarity in the code --- helpers/export.js | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/helpers/export.js b/helpers/export.js index 72bf96c..f03ea21 100644 --- a/helpers/export.js +++ b/helpers/export.js @@ -73,12 +73,15 @@ function copyUntouchedTable(inputPath, outputPath, tableName, callback) { function getActualKeysForTable(gtfs, tableName) { const keys = [...schema.keysByTableName[tableName]]; const deepness = schema.deepnessByTableName[tableName]; - let sampleItem = gtfs.getIndexedTable(tableName); + const table = gtfs.getIndexedTable(tableName); + let sampleItem; - if (deepness === 1) { - sampleItem = getSample(sampleItem); + if (deepness === 0) { + sampleItem = table; + } else if (deepness === 1) { + 
sampleItem = getSample(table); } else if (deepness === 2) { - sampleItem = getSample(getSample(sampleItem)); + sampleItem = getSample(getSample(table)); } if (sampleItem) {