diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..30f5812
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,18 @@
+# Ignore everything
+**
+
+# Allow files and directories
+!.env*
+!/package.json
+!/yarn.lock
+!/tsconfig.json
+!/src/**
+!/node_modules/arweave/**
+!/rds-combined-ca-bundle.pem
+
+
+# Ignore unnecessary files inside allowed directories
+# This should go after the allowed directories
+**/*.log
+**/.DS_Store
+**/Thumbs.db
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..63278e2
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,16 @@
+*#
+*~
+dist
+# package directories
+node_modules
+package-lock.json
+jspm_packages
+
+result
+
+# Serverless directories
+.serverless
+.terraform
+
+.DS_Store
+.idea
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..620795d
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,19 @@
+FROM node:16.3
+
+RUN apt-get update && apt-get install -y bash git python3
+
+WORKDIR /usr/app
+
+COPY ./package.json .
+COPY ./yarn.lock .
+COPY ./.env .
+COPY ./tsconfig.json .
+COPY ./src ./src
+COPY ./node_modules ./node_modules
+
+RUN yarn install
+RUN yarn build
+
+ENTRYPOINT ["node"]
+
+CMD ["dist/gateway/app.js"]
diff --git a/ec2/README.md b/ec2/README.md
new file mode 100644
index 0000000..057dd9e
--- /dev/null
+++ b/ec2/README.md
@@ -0,0 +1,3 @@
+### osx with linux nixos-builders
+
+`sudo nix build .#packages.x86_64-linux.import-blocks -j0`
diff --git a/ec2/base.nix b/ec2/base.nix
new file mode 100644
index 0000000..2fb5109
--- /dev/null
+++ b/ec2/base.nix
@@ -0,0 +1,43 @@
+{ config, lib, pkgs, ... }:
+
+{
+
+ imports = [ ];
+
+ config = {
+ nix.autoOptimiseStore = true;
+ users.users.root.openssh.authorizedKeys.keys = [];
+ services.tailscale.enable = true;
+ networking.firewall.trustedInterfaces = [ "tailscale0" ];
+
+ # Tell the firewall to implicitly trust packets routed over Tailscale:
+ # config.
+ security.auditd.enable = true;
+ security.audit.enable = true;
+ security.audit.rules = [
+ "-a exit,always -F arch=b64 -S execve"
+ ];
+
+ nix.trustedUsers = [ "root" "@wheel" ];
+ security.sudo.enable = true;
+ security.sudo.wheelNeedsPassword = false;
+ environment.defaultPackages = lib.mkForce [];
+
+ services.openssh = {
+ passwordAuthentication = false;
+ allowSFTP = false; # Don't set this if you need sftp
+ challengeResponseAuthentication = false;
+ extraConfig = ''
+ AllowTcpForwarding yes
+ X11Forwarding no
+ AllowAgentForwarding no
+ AllowStreamLocalForwarding no
+ AuthenticationMethods publickey
+ '';
+ };
+
+ # PCI compliance
+ environment.systemPackages = with pkgs; [ clamav ];
+
+ };
+}
diff --git a/ec2/flake.lock b/ec2/flake.lock
new file mode 100644
index 0000000..0070134
--- /dev/null
+++ b/ec2/flake.lock
@@ -0,0 +1,63 @@
+{
+ "nodes": {
+ "nixlib": {
+ "locked": {
+ "lastModified": 1636849918,
+ "narHash": "sha256-nzUK6dPcTmNVrgTAC1EOybSMsrcx+QrVPyqRdyKLkjA=",
+ "owner": "nix-community",
+ "repo": "nixpkgs.lib",
+ "rev": "28a5b0557f14124608db68d3ee1f77e9329e9dd5",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-community",
+ "repo": "nixpkgs.lib",
+ "type": "github"
+ }
+ },
+ "nixos-generators": {
+ "inputs": {
+ "nixlib": "nixlib",
+ "nixpkgs": [
+ "nixpkgs"
+ ]
+ },
+ "locked": {
+ "lastModified": 1637655461,
+ "narHash": "sha256-kXZPbclN3gKwjhp2/RYFDFpAsSBwzX1iLF4EcnHZsPQ=",
+ "owner": "nix-community",
+ "repo": "nixos-generators",
+ "rev": "05a3eb158a9c7746a5d463726d7f7cccf07500e4",
+ "type": "github"
+ },
+ "original": {
+ "owner": "nix-community",
+ "repo": "nixos-generators",
+ "type": "github"
+ }
+ },
+ "nixpkgs": {
+ "locked": {
+ "lastModified": 1639347265,
+ "narHash": "sha256-q5feWoC64+h6T6J89o2HJJ8DOnB/4vwMODBlZIgeIlA=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "b0bf5f888d377dd2f36d90340df6dc9f035aaada",
+ "type": "github"
+ },
+ "original": {
+ "id": "nixpkgs",
+ "ref": "nixos-unstable",
+ "type": "indirect"
+ }
+ },
+ "root": {
+ "inputs": {
+ "nixos-generators": "nixos-generators",
+ "nixpkgs": "nixpkgs"
+ }
+ }
+ },
+ "root": "root",
+ "version": 7
+}
diff --git a/ec2/flake.nix b/ec2/flake.nix
new file mode 100644
index 0000000..085a753
--- /dev/null
+++ b/ec2/flake.nix
@@ -0,0 +1,50 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+{
+ inputs = {
+ nixpkgs.url = "nixpkgs/nixos-unstable";
+ nixos-generators = {
+ url = "github:nix-community/nixos-generators";
+ inputs.nixpkgs.follows = "nixpkgs";
+ };
+ };
+
+ outputs = { self, nixpkgs, nixos-generators, ... }:
+
+ let
+ system = "x86_64-linux";
+ pkgs = (import nixpkgs {
+ inherit overlays system;
+ config = { allowUnfree = true; };
+ });
+ overlays = [ (import ./import-blocks/overlay.nix) ];
+
+ in {
+ packages.x86_64-linux = {
+ import-blocks = nixos-generators.nixosGenerate {
+ inherit pkgs;
+ modules = [
+ ./base.nix
+ ./import-blocks/module.nix
+ ];
+ format = "amazon";
+ };
+ };
+ };
+}
diff --git a/ec2/import-blocks/module.nix b/ec2/import-blocks/module.nix
new file mode 100644
index 0000000..3335cd2
--- /dev/null
+++ b/ec2/import-blocks/module.nix
@@ -0,0 +1,66 @@
+{ pkgs, lib, config, modulesPath, ... }:
+
+
+{
+ imports = [ "${modulesPath}/virtualisation/amazon-image.nix" ];
+
+ config = {
+ ec2.hvm = true;
+ time.timeZone = "Europe/Berlin";
+ networking.hostName = "import-blocks";
+ # services.tailscale.enable = true;
+ # networking.firewall.trustedInterfaces = [ "tailscale0" ];
+
+ security.auditd.enable = true;
+ security.audit.enable = true;
+ security.audit.rules = [
+ "-a exit,always -F arch=b64 -S execve"
+ ];
+
+ nix.trustedUsers = [ "root" "@wheel" ];
+ security.sudo.enable = true;
+ security.sudo.wheelNeedsPassword = false;
+
+ nix.autoOptimiseStore = true;
+
+ users.users.root.openssh.authorizedKeys.keys = [];
+
+
+ services.openssh = {
+ passwordAuthentication = false;
+ allowSFTP = false; # Don't set this if you need sftp
+ challengeResponseAuthentication = false;
+ extraConfig = ''
+ AllowTcpForwarding yes
+ X11Forwarding no
+ AllowAgentForwarding no
+ AllowStreamLocalForwarding no
+ AuthenticationMethods publickey
+ '';
+ };
+
+ # PCI compliance
+ environment.systemPackages = with pkgs; [ clamav ];
+
+ systemd.services.import-blocks = {
+ description = "import-block poller";
+ after = [ "network.target" ];
+ wantedBy = [ "multi-user.target" ];
+
+ environment = {
+ ARWEAVE_DOTENV_PATH = "/var/dotenv";
+ };
+
+ script = ''
+ ${pkgs.import-blocks-wrapped}/bin/import-blocks
+ '';
+
+ serviceConfig = {
+ Restart = "on-failure";
+ RestartSec = "5s";
+ TimeoutStartSec = 0;
+ KillSignal = "SIGINT";
+ };
+ };
+ };
+}
diff --git a/ec2/import-blocks/overlay.nix b/ec2/import-blocks/overlay.nix
new file mode 100644
index 0000000..c53f6d2
--- /dev/null
+++ b/ec2/import-blocks/overlay.nix
@@ -0,0 +1,9 @@
+final: prev: {
+ import-blocks = (import ./package-lock.nix prev)."@arweave/import-blocks";
+ import-blocks-wrapped = prev.writeShellScriptBin "import-blocks" ''
+ cd ${final.import-blocks}/lib/node_modules/@arweave/import-blocks
+ ${prev.bash}/bin/bash /etc/ec2-metadata/user-data
+ ${prev.nodejs_latest}/bin/node src/index.mjs
+ '';
+
+}
diff --git a/ec2/import-blocks/package-lock.nix b/ec2/import-blocks/package-lock.nix
new file mode 100644
index 0000000..15058c2
--- /dev/null
+++ b/ec2/import-blocks/package-lock.nix
@@ -0,0 +1,3865 @@
+{pkgs, stdenv, lib, nodejs, fetchurl, fetchgit, fetchFromGitHub, jq, makeWrapper, python3, runCommand, runCommandCC, xcodebuild, ... }:
+
+let
+ packageNix = import ./package.nix;
+ copyNodeModules = {dependencies ? [] }:
+ (lib.lists.foldr (dep: acc:
+ let pkgName = if (builtins.hasAttr "packageName" dep)
+ then dep.packageName else dep.name;
+ in
+ acc + ''
+ if [[ ! -f "node_modules/${pkgName}" && \
+ ! -d "node_modules/${pkgName}" && \
+ ! -L "node_modules/${pkgName}" && \
+ ! -e "node_modules/${pkgName}" ]]
+ then
+ mkdir -p "node_modules/${pkgName}"
+ cp -rLT "${dep}/lib/node_modules/${pkgName}" "node_modules/${pkgName}"
+ chmod -R +rw "node_modules/${pkgName}"
+ fi
+ '')
+ "" dependencies);
+ linkNodeModules = {dependencies ? [], extraDependencies ? []}:
+ (lib.lists.foldr (dep: acc:
+ let pkgName = if (builtins.hasAttr "packageName" dep)
+ then dep.packageName else dep.name;
+ in (acc + (lib.optionalString
+ ((lib.findSingle (px: px.packageName == dep.packageName) "none" "found" extraDependencies) == "none")
+ ''
+ if [[ ! -f "node_modules/${pkgName}" && \
+ ! -d "node_modules/${pkgName}" && \
+ ! -L "node_modules/${pkgName}" && \
+ ! -e "node_modules/${pkgName}" ]]
+ then
+ mkdir -p "node_modules/${pkgName}"
+ ln -s "${dep}/lib/node_modules/${pkgName}"/* "node_modules/${pkgName}"
+ ${lib.optionalString (builtins.hasAttr "dependencies" dep)
+ ''
+ rm -rf "node_modules/${pkgName}/node_modules"
+ (cd node_modules/${dep.packageName}; ${linkNodeModules { inherit (dep) dependencies; inherit extraDependencies;}})
+ ''}
+ fi
+ '')))
+ "" dependencies);
+ gitignoreSource =
+ (import (fetchFromGitHub {
+ owner = "hercules-ci";
+ repo = "gitignore.nix";
+ rev = "5b9e0ff9d3b551234b4f3eb3983744fa354b17f1";
+ sha256 = "o/BdVjNwcB6jOmzZjOH703BesSkkS5O7ej3xhyO8hAY=";
+ }) { inherit lib; }).gitignoreSource;
+ transitiveDepInstallPhase = {dependencies ? [], pkgName}: ''
+ export packageDir="$(pwd)"
+ mkdir -p $out/lib/node_modules/${pkgName}
+ cd $out/lib/node_modules/${pkgName}
+ cp -rfT "$packageDir" "$(pwd)"
+ ${copyNodeModules { inherit dependencies; }} '';
+ transitiveDepUnpackPhase = {dependencies ? [], pkgName}: ''
+ unpackFile "$src";
+ # not ideal, but some perms are fubar
+ chmod -R +777 . || true
+ packageDir="$(find . -maxdepth 1 -type d | tail -1)"
+ cd "$packageDir"
+ '';
+ getNodeDep = packageName: dependencies:
+ (let depList = if ((builtins.typeOf dependencies) == "set")
+ then (builtins.attrValues dependencies)
+ else dependencies;
+ in (builtins.head
+ (builtins.filter (p: p.packageName == packageName) depList)));
+ nodeSources = runCommand "node-sources" {} ''
+ tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
+ mv node-* $out
+ '';
+ linkBins = ''
+ ${goBinLink}/bin/bin-link
+'';
+ flattenScript = args: '' ${goFlatten}/bin/flatten ${args}'';
+ sanitizeName = nm: lib.strings.sanitizeDerivationName
+ (builtins.replaceStrings [ "@" "/" ] [ "_at_" "_" ] nm);
+ jsnixDrvOverrides = { drv_, jsnixDeps, dedupedDeps, isolateDeps }:
+ let drv = drv_ (pkgs // { inherit nodejs copyNodeModules gitignoreSource jsnixDeps nodeModules getNodeDep; });
+ skipUnpackFor = if (builtins.hasAttr "skipUnpackFor" drv)
+ then drv.skipUnpackFor else [];
+ copyUnpackFor = if (builtins.hasAttr "copyUnpackFor" drv)
+ then drv.copyUnpackFor else [];
+ pkgJsonFile = runCommand "package.json" { buildInputs = [jq]; } ''
+ echo ${toPackageJson { inherit jsnixDeps; extraDeps = (if (builtins.hasAttr "extraDependencies" drv) then drv.extraDependencies else []); }} > $out
+ cat <<< $(cat $out | jq) > $out
+ '';
+ copyDeps = builtins.attrValues jsnixDeps;
+ copyDepsStr = builtins.concatStringsSep " " (builtins.map (dep: if (builtins.hasAttr "packageName" dep) then dep.packageName else dep.name) copyDeps);
+ extraDeps = (builtins.map (dep: if (builtins.hasAttr "packageName" dep) then dep.packageName else dep.name)
+ (lib.optionals (builtins.hasAttr "extraDependencies" drv) drv.extraDependencies));
+ extraDepsStr = builtins.concatStringsSep " " extraDeps;
+ buildDepDep = lib.lists.unique (lib.lists.concatMap (d: d.buildInputs)
+ (copyDeps ++ (lib.optionals (builtins.hasAttr "extraDependencies" drv) drv.extraDependencies)));
+ nodeModules = runCommandCC "${sanitizeName packageNix.name}_node_modules"
+ { buildInputs = [nodejs] ++ buildDepDep;
+ fixupPhase = "true";
+ doCheck = false;
+ doInstallCheck = false;
+ version = builtins.hashString "sha512" (lib.strings.concatStrings copyDeps); }
+ ''
+ echo 'unpack dependencies...'
+ mkdir -p $out/lib/node_modules
+ cd $out/lib
+ ${linkNodeModules { dependencies = builtins.attrValues isolateDeps; }}
+ ${copyNodeModules {
+ dependencies = copyDeps;
+ }}
+ ${copyNodeModules {
+ dependencies = builtins.attrValues dedupedDeps;
+ }}
+ chmod -R +rw node_modules
+ ${copyNodeModules {
+ dependencies = (lib.optionals (builtins.hasAttr "extraDependencies" drv) drv.extraDependencies);
+ }}
+ ${lib.optionalString ((builtins.length extraDeps) > 0) "echo 'resolving incoming transient deps of ${extraDepsStr}...'"}
+ ${lib.optionalString ((builtins.length extraDeps) > 0) (flattenScript extraDepsStr)}
+ ${lib.optionalString (builtins.hasAttr "nodeModulesUnpack" drv) drv.nodeModulesUnpack}
+ echo 'link nodejs bins to out-dir...'
+ ${linkBins}
+ '';
+ in stdenv.mkDerivation (drv // {
+ passthru = { inherit nodeModules pkgJsonFile; };
+ version = packageNix.version;
+ name = sanitizeName packageNix.name;
+ preUnpackBan_ = mkPhaseBan "preUnpack" drv;
+ unpackBan_ = mkPhaseBan "unpackPhase" drv;
+ postUnpackBan_ = mkPhaseBan "postUnpack" drv;
+ preConfigureBan_ = mkPhaseBan "preConfigure" drv;
+ configureBan_ = mkPhaseBan "configurePhase" drv;
+ postConfigureBan_ = mkPhaseBan "postConfigure" drv;
+ src = if (builtins.hasAttr "src" packageNix) then packageNix.src else gitignoreSource ./.;
+ packageName = packageNix.name;
+ doStrip = false;
+ doFixup = false;
+ doUnpack = true;
+ NODE_PATH = "./node_modules";
+ buildInputs = [ nodejs jq ] ++ lib.optionals (builtins.hasAttr "buildInputs" drv) drv.buildInputs;
+
+ configurePhase = ''
+ ln -s ${nodeModules}/lib/node_modules node_modules
+ cat ${pkgJsonFile} > package.json
+ '';
+ buildPhase = ''
+ runHook preBuild
+ ${lib.optionalString (builtins.hasAttr "buildPhase" drv) drv.buildPhase}
+ runHook postBuild
+ '';
+ installPhase = ''
+ runHook preInstall
+ mkdir -p $out/lib/node_modules/${packageNix.name}
+ cp -rfT ./ $out/lib/node_modules/${packageNix.name}
+ runHook postInstall
+ '';
+ });
+ toPackageJson = { jsnixDeps ? {}, extraDeps ? [] }:
+ let
+ main = if (builtins.hasAttr "main" packageNix) then packageNix else throw "package.nix is missing main attribute";
+ pkgName = if (builtins.hasAttr "packageName" packageNix)
+ then packageNix.packageName else packageNix.name;
+ packageNixDeps = if (builtins.hasAttr "dependencies" packageNix)
+ then packageNix.dependencies
+ else {};
+ extraDeps_ = lib.lists.foldr (dep: acc: { "${dep.packageName}" = dep; } // acc) {} extraDeps;
+ allDeps = extraDeps_ // packageNixDeps;
+ prodDeps = lib.lists.foldr
+ (depName: acc: acc // {
+ "${depName}" = (if ((builtins.typeOf allDeps."${depName}") == "string")
+ then allDeps."${depName}"
+ else
+ if (((builtins.typeOf allDeps."${depName}") == "set") &&
+ ((builtins.typeOf allDeps."${depName}".version) == "string"))
+ then allDeps."${depName}".version
+ else "latest");}) {} (builtins.attrNames allDeps);
+ safePkgNix = lib.lists.foldr (key: acc:
+ if ((builtins.typeOf packageNix."${key}") != "lambda")
+ then (acc // { "${key}" = packageNix."${key}"; })
+ else acc)
+ {} (builtins.attrNames packageNix);
+ in lib.strings.escapeNixString
+ (builtins.toJSON (safePkgNix // { dependencies = prodDeps; name = pkgName; }));
+ mkPhaseBan = phaseName: usrDrv:
+ if (builtins.hasAttr phaseName usrDrv) then
+ throw "jsnix error: using ${phaseName} isn't supported at this time"
+ else "";
+ mkPhase = pkgs_: {phase, pkgName}:
+ lib.optionalString ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "${phase}" packageNix.dependencies."${pkgName}"))
+ (if builtins.typeOf packageNix.dependencies."${pkgName}"."${phase}" == "string"
+ then
+ packageNix.dependencies."${pkgName}"."${phase}"
+ else
+ (packageNix.dependencies."${pkgName}"."${phase}" (pkgs_ // { inherit getNodeDep; })));
+ mkExtraBuildInputs = pkgs_: {pkgName}:
+ lib.optionals ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "extraBuildInputs" packageNix.dependencies."${pkgName}"))
+ (if builtins.typeOf packageNix.dependencies."${pkgName}"."extraBuildInputs" == "list"
+ then
+ packageNix.dependencies."${pkgName}"."extraBuildInputs"
+ else
+ (packageNix.dependencies."${pkgName}"."extraBuildInputs" (pkgs_ // { inherit getNodeDep; })));
+ mkExtraDependencies = pkgs_: {pkgName}:
+ lib.optionals ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "extraDependencies" packageNix.dependencies."${pkgName}"))
+ (if builtins.typeOf packageNix.dependencies."${pkgName}"."extraDependencies" == "list"
+ then
+ packageNix.dependencies."${pkgName}"."extraDependencies"
+ else
+ (packageNix.dependencies."${pkgName}"."extraDependencies" (pkgs_ // { inherit getNodeDep; })));
+ mkUnpackScript = { dependencies ? [], extraDependencies ? [], pkgName }:
+ let copyNodeDependencies =
+ if ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "copyNodeDependencies" packageNix.dependencies."${pkgName}") &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}"."copyNodeDependencies" == "bool") &&
+ (packageNix.dependencies."${pkgName}"."copyNodeDependencies" == true))
+ then true else false;
+ in ''
+ ${copyNodeModules { dependencies = dependencies ++ extraDependencies; }}
+ chmod -R +rw $(pwd)
+ '';
+ mkBuildScript = { dependencies ? [], pkgName }:
+ let extraNpmFlags =
+ if ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "npmFlags" packageNix.dependencies."${pkgName}") &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}"."npmFlags" == "string"))
+ then packageNix.dependencies."${pkgName}"."npmFlags" else "";
+ in ''
+ runHook preBuild
+ export HOME=$TMPDIR
+ npm --offline config set node_gyp ${nodejs}/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js
+ npm --offline config set omit dev
+ NODE_PATH="$(pwd)/node_modules:$NODE_PATH" \
+ npm --offline --nodedir=${nodeSources} --location="$(pwd)" \
+ ${extraNpmFlags} "--production" "--preserve-symlinks" \
+ rebuild --build-from-source
+ runHook postBuild
+ '';
+ mkInstallScript = { pkgName }: ''
+ runHook preInstall
+ export packageDir="$(pwd)"
+ mkdir -p $out/lib/node_modules/${pkgName}
+ cd $out/lib/node_modules/${pkgName}
+ cp -rfT "$packageDir" "$(pwd)"
+ if [[ -d "$out/lib/node_modules/${pkgName}/bin" ]]
+ then
+ mkdir -p $out/bin
+ ln -s "$out/lib/node_modules/${pkgName}/bin"/* $out/bin
+ fi
+ cd $out/lib/node_modules/${pkgName}
+ runHook postInstall
+ '';
+ goBinLink = pkgs.buildGoModule {
+ pname = "bin-link";
+ version = "0.0.0";
+ vendorSha256 = null;
+ buildInputs = [ pkgs.nodejs ];
+ src = pkgs.fetchFromGitHub {
+ owner = "hlolli";
+ repo = "jsnix";
+ rev = "a66cf91ad49833ef3d84064c1037d942c97838bb";
+ sha256 = "AvDZXUSxuJa5lZ7zRdXWIDYTYfbH2VfpuHbvZBrT9f0=";
+ };
+ preBuild = ''
+ cd go/bin-link
+ '';
+};
+ goFlatten = pkgs.buildGoModule {
+ pname = "flatten";
+ version = "0.0.0";
+ vendorSha256 = null;
+ buildInputs = [ pkgs.nodejs ];
+ src = pkgs.fetchFromGitHub {
+ owner = "hlolli";
+ repo = "jsnix";
+ rev = "a66cf91ad49833ef3d84064c1037d942c97838bb";
+ sha256 = "AvDZXUSxuJa5lZ7zRdXWIDYTYfbH2VfpuHbvZBrT9f0=";
+ };
+ preBuild = ''
+ cd go/flatten
+ '';
+};
+ sources = rec {
+ "@sindresorhus/is-4.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_sindresorhus_slash_is";
+ packageName = "@sindresorhus/is";
+ version = "4.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@sindresorhus/is"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@sindresorhus/is"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@sindresorhus/is/-/is-4.2.0.tgz";
+ sha512 = "VkE3KLBmJwcCaVARtQpfuKcKv8gcBmUubrfHGF84dXuuW6jgsRYxPtzcIhPyK9WAPpRt2/xY6zkD9MnRaJzSyw==";
+ };
+ };
+ "@szmarczak/http-timer-5.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_szmarczak_slash_http-timer";
+ packageName = "@szmarczak/http-timer";
+ version = "5.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@szmarczak/http-timer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@szmarczak/http-timer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz";
+ sha512 = "+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==";
+ };
+ };
+ "@types/cacheable-request-6.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_cacheable-request";
+ packageName = "@types/cacheable-request";
+ version = "6.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/cacheable-request"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/cacheable-request"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz";
+ sha512 = "B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA==";
+ };
+ };
+ "@types/http-cache-semantics-4.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_http-cache-semantics";
+ packageName = "@types/http-cache-semantics";
+ version = "4.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/http-cache-semantics"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/http-cache-semantics"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz";
+ sha512 = "SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==";
+ };
+ };
+ "@types/keyv-3.1.3" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_keyv";
+ packageName = "@types/keyv";
+ version = "3.1.3";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/keyv"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/keyv"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.3.tgz";
+ sha512 = "FXCJgyyN3ivVgRoml4h94G/p3kY+u/B86La+QptcqJaWtBWtmc6TtkNfS40n9bIvyLteHh7zXOtgbobORKPbDg==";
+ };
+ };
+ "@types/node-16.11.12" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_node";
+ packageName = "@types/node";
+ version = "16.11.12";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/node"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/node"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/node/-/node-16.11.12.tgz";
+ sha512 = "+2Iggwg7PxoO5Kyhvsq9VarmPbIelXP070HMImEpbtGCoyWNINQj4wzjbQCXzdHTRXnqufutJb5KAURZANNBAw==";
+ };
+ };
+ "@types/responselike-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_responselike";
+ packageName = "@types/responselike";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/responselike"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/responselike"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz";
+ sha512 = "85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==";
+ };
+ };
+ "base64-js-1.5.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "base64-js";
+ packageName = "base64-js";
+ version = "1.5.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "base64-js"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "base64-js"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz";
+ sha512 = "AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==";
+ };
+ };
+ "buffer-4.9.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "buffer";
+ packageName = "buffer";
+ version = "4.9.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "buffer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "buffer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz";
+ sha512 = "xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==";
+ };
+ };
+ "buffer-writer-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "buffer-writer";
+ packageName = "buffer-writer";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "buffer-writer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "buffer-writer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz";
+ sha512 = "a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==";
+ };
+ };
+ "cacheable-lookup-6.0.4" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "cacheable-lookup";
+ packageName = "cacheable-lookup";
+ version = "6.0.4";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "cacheable-lookup"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "cacheable-lookup"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-6.0.4.tgz";
+ sha512 = "mbcDEZCkv2CZF4G01kr8eBd/5agkt9oCqz75tJMSIsquvRZ2sL6Hi5zGVKi/0OSC9oO1GHfJ2AV0ZIOY9vye0A==";
+ };
+ };
+ "cacheable-request-7.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "cacheable-request";
+ packageName = "cacheable-request";
+ version = "7.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "cacheable-request"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "cacheable-request"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz";
+ sha512 = "pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==";
+ };
+ };
+ "clone-response-1.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "clone-response";
+ packageName = "clone-response";
+ version = "1.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "clone-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "clone-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz";
+ sha1 = "d1dc973920314df67fbeb94223b4ee350239e96b";
+ };
+ };
+ "colorette-2.0.16" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "colorette";
+ packageName = "colorette";
+ version = "2.0.16";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "colorette"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "colorette"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz";
+ sha512 = "hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==";
+ };
+ };
+ "commander-7.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "commander";
+ packageName = "commander";
+ version = "7.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "commander"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "commander"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz";
+ sha512 = "QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==";
+ };
+ };
+ "debug-4.3.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "debug";
+ packageName = "debug";
+ version = "4.3.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "debug"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "debug"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz";
+ sha512 = "mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==";
+ };
+ };
+ "decompress-response-6.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "decompress-response";
+ packageName = "decompress-response";
+ version = "6.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "decompress-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "decompress-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz";
+ sha512 = "aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==";
+ };
+ };
+ "defer-to-connect-2.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "defer-to-connect";
+ packageName = "defer-to-connect";
+ version = "2.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "defer-to-connect"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "defer-to-connect"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz";
+ sha512 = "4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==";
+ };
+ };
+ "end-of-stream-1.4.4" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "end-of-stream";
+ packageName = "end-of-stream";
+ version = "1.4.4";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "end-of-stream"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "end-of-stream"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz";
+ sha512 = "+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==";
+ };
+ };
+ "escalade-3.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "escalade";
+ packageName = "escalade";
+ version = "3.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "escalade"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "escalade"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz";
+ sha512 = "k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==";
+ };
+ };
+ "esm-3.2.25" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "esm";
+ packageName = "esm";
+ version = "3.2.25";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "esm"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "esm"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz";
+ sha512 = "U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==";
+ };
+ };
+ "events-1.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "events";
+ packageName = "events";
+ version = "1.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "events"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "events"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/events/-/events-1.1.1.tgz";
+ sha1 = "9ebdb7635ad099c70dcc4c2a1f5004288e8bd924";
+ };
+ };
+ "form-data-encoder-1.7.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "form-data-encoder";
+ packageName = "form-data-encoder";
+ version = "1.7.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "form-data-encoder"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "form-data-encoder"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.1.tgz";
+ sha512 = "EFRDrsMm/kyqbTQocNvRXMLjc7Es2Vk+IQFx/YW7hkUH1eBl4J1fqiP34l74Yt0pFLCNpc06fkbVk00008mzjg==";
+ };
+ };
+ "function-bind-1.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "function-bind";
+ packageName = "function-bind";
+ version = "1.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "function-bind"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "function-bind"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz";
+ sha512 = "yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==";
+ };
+ };
+ "get-stream-5.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "get-stream";
+ packageName = "get-stream";
+ version = "5.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "get-stream"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "get-stream"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz";
+ sha512 = "nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==";
+ };
+ };
+ "get-stream-6.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "get-stream";
+ packageName = "get-stream";
+ version = "6.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "get-stream"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "get-stream"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz";
+ sha512 = "ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==";
+ };
+ };
+ "getopts-2.2.5" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "getopts";
+ packageName = "getopts";
+ version = "2.2.5";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "getopts"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "getopts"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/getopts/-/getopts-2.2.5.tgz";
+ sha512 = "9jb7AW5p3in+IiJWhQiZmmwkpLaR/ccTWdWQCtZM66HJcHHLegowh4q4tSD7gouUyeNvFWRavfK9GXosQHDpFA==";
+ };
+ };
+ "has-1.0.3" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "has";
+ packageName = "has";
+ version = "1.0.3";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "has"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "has"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/has/-/has-1.0.3.tgz";
+ sha512 = "f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==";
+ };
+ };
+ "http-cache-semantics-4.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "http-cache-semantics";
+ packageName = "http-cache-semantics";
+ version = "4.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "http-cache-semantics"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "http-cache-semantics"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz";
+ sha512 = "carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==";
+ };
+ };
+ "http2-wrapper-2.1.10" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "http2-wrapper";
+ packageName = "http2-wrapper";
+ version = "2.1.10";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "http2-wrapper"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "http2-wrapper"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.1.10.tgz";
+ sha512 = "QHgsdYkieKp+6JbXP25P+tepqiHYd+FVnDwXpxi/BlUcoIB0nsmTOymTNvETuTO+pDuwcSklPE72VR3DqV+Haw==";
+ };
+ };
+ "ieee754-1.1.13" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "ieee754";
+ packageName = "ieee754";
+ version = "1.1.13";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "ieee754"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "ieee754"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz";
+ sha512 = "4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==";
+ };
+ };
+ "interpret-2.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "interpret";
+ packageName = "interpret";
+ version = "2.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "interpret"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "interpret"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz";
+ sha512 = "Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==";
+ };
+ };
+ "is-core-module-2.8.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "is-core-module";
+ packageName = "is-core-module";
+ version = "2.8.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "is-core-module"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "is-core-module"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz";
+ sha512 = "vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==";
+ };
+ };
+ "isarray-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "isarray";
+ packageName = "isarray";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "isarray"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "isarray"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz";
+ sha1 = "bb935d48582cba168c06834957a54a3e07124f11";
+ };
+ };
+ "jmespath-0.15.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "jmespath";
+ packageName = "jmespath";
+ version = "0.15.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "jmespath"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "jmespath"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz";
+ sha1 = "a3f222a9aae9f966f5d27c796510e28091764217";
+ };
+ };
+ "json-buffer-3.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "json-buffer";
+ packageName = "json-buffer";
+ version = "3.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "json-buffer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "json-buffer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz";
+ sha512 = "4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==";
+ };
+ };
+ "keyv-4.0.4" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "keyv";
+ packageName = "keyv";
+ version = "4.0.4";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "keyv"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "keyv"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/keyv/-/keyv-4.0.4.tgz";
+ sha512 = "vqNHbAc8BBsxk+7QBYLW0Y219rWcClspR6WSeoHYKG5mnsSoOH+BL1pWq02DDCVdvvuUny5rkBlzMRzoqc+GIg==";
+ };
+ };
+ "lodash-4.17.21" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "lodash";
+ packageName = "lodash";
+ version = "4.17.21";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "lodash"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "lodash"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz";
+ sha512 = "v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==";
+ };
+ };
+ "lowercase-keys-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "lowercase-keys";
+ packageName = "lowercase-keys";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "lowercase-keys"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "lowercase-keys"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz";
+ sha512 = "tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==";
+ };
+ };
+ "lowercase-keys-3.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "lowercase-keys";
+ packageName = "lowercase-keys";
+ version = "3.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "lowercase-keys"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "lowercase-keys"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz";
+ sha512 = "ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==";
+ };
+ };
+ "mimic-response-1.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "mimic-response";
+ packageName = "mimic-response";
+ version = "1.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "mimic-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "mimic-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz";
+ sha512 = "j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==";
+ };
+ };
+ "mimic-response-3.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "mimic-response";
+ packageName = "mimic-response";
+ version = "3.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "mimic-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "mimic-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz";
+ sha512 = "z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==";
+ };
+ };
+ "ms-2.1.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "ms";
+ packageName = "ms";
+ version = "2.1.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "ms"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "ms"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz";
+ sha512 = "sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==";
+ };
+ };
+ "normalize-url-6.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "normalize-url";
+ packageName = "normalize-url";
+ version = "6.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "normalize-url"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "normalize-url"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz";
+ sha512 = "DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==";
+ };
+ };
+ "once-1.4.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "once";
+ packageName = "once";
+ version = "1.4.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "once"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "once"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/once/-/once-1.4.0.tgz";
+ sha1 = "583b1aa775961d4b113ac17d9c50baef9dd76bd1";
+ };
+ };
+ "p-cancelable-3.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "p-cancelable";
+ packageName = "p-cancelable";
+ version = "3.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "p-cancelable"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "p-cancelable"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz";
+ sha512 = "mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==";
+ };
+ };
+ "p-timeout-5.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "p-timeout";
+ packageName = "p-timeout";
+ version = "5.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "p-timeout"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "p-timeout"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-timeout/-/p-timeout-5.0.2.tgz";
+ sha512 = "sEmji9Yaq+Tw+STwsGAE56hf7gMy9p0tQfJojIAamB7WHJYJKf1qlsg9jqBWG8q9VCxKPhZaP/AcXwEoBcYQhQ==";
+ };
+ };
+ "packet-reader-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "packet-reader";
+ packageName = "packet-reader";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "packet-reader"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "packet-reader"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz";
+ sha512 = "HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==";
+ };
+ };
+ "path-parse-1.0.7" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "path-parse";
+ packageName = "path-parse";
+ version = "1.0.7";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "path-parse"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "path-parse"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz";
+ sha512 = "LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==";
+ };
+ };
+ "pg-connection-string-2.5.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-connection-string";
+ packageName = "pg-connection-string";
+ version = "2.5.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-connection-string"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-connection-string"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz";
+ sha512 = "r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==";
+ };
+ };
+ "pg-int8-1.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-int8";
+ packageName = "pg-int8";
+ version = "1.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-int8"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-int8"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz";
+ sha512 = "WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==";
+ };
+ };
+ "pg-pool-3.4.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-pool";
+ packageName = "pg-pool";
+ version = "3.4.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-pool"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-pool"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz";
+ sha512 = "TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==";
+ };
+ };
+ "pg-protocol-1.5.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-protocol";
+ packageName = "pg-protocol";
+ version = "1.5.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-protocol"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-protocol"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz";
+ sha512 = "muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==";
+ };
+ };
+ "pg-types-2.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-types";
+ packageName = "pg-types";
+ version = "2.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-types"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-types"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz";
+ sha512 = "qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==";
+ };
+ };
+ "pgpass-1.0.5" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pgpass";
+ packageName = "pgpass";
+ version = "1.0.5";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pgpass"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pgpass"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz";
+ sha512 = "FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==";
+ };
+ };
+ "postgres-array-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-array";
+ packageName = "postgres-array";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-array"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-array"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz";
+ sha512 = "VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==";
+ };
+ };
+ "postgres-bytea-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-bytea";
+ packageName = "postgres-bytea";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-bytea"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-bytea"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz";
+ sha1 = "027b533c0aa890e26d172d47cf9ccecc521acd35";
+ };
+ };
+ "postgres-date-1.0.7" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-date";
+ packageName = "postgres-date";
+ version = "1.0.7";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-date"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-date"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz";
+ sha512 = "suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==";
+ };
+ };
+ "postgres-interval-1.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-interval";
+ packageName = "postgres-interval";
+ version = "1.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-interval"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-interval"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz";
+ sha512 = "9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==";
+ };
+ };
+ "pump-3.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pump";
+ packageName = "pump";
+ version = "3.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pump"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pump"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz";
+ sha512 = "LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==";
+ };
+ };
+ "punycode-1.3.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "punycode";
+ packageName = "punycode";
+ version = "1.3.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "punycode"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "punycode"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz";
+ sha1 = "9653a036fb7c1ee42342f2325cceefea3926c48d";
+ };
+ };
+ "querystring-0.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "querystring";
+ packageName = "querystring";
+ version = "0.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "querystring"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "querystring"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz";
+ sha1 = "b209849203bb25df820da756e747005878521620";
+ };
+ };
+ "quick-lru-5.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "quick-lru";
+ packageName = "quick-lru";
+ version = "5.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "quick-lru"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "quick-lru"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz";
+ sha512 = "WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==";
+ };
+ };
+ "rechoir-0.7.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "rechoir";
+ packageName = "rechoir";
+ version = "0.7.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "rechoir"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "rechoir"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/rechoir/-/rechoir-0.7.0.tgz";
+ sha512 = "ADsDEH2bvbjltXEP+hTIAmeFekTFK0V2BTxMkok6qILyAJEXV0AFfoWcAq4yfll5VdIMd/RVXq0lR+wQi5ZU3Q==";
+ };
+ };
+ "resolve-1.20.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "resolve";
+ packageName = "resolve";
+ version = "1.20.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "resolve"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "resolve"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz";
+ sha512 = "wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==";
+ };
+ };
+ "resolve-alpn-1.2.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "resolve-alpn";
+ packageName = "resolve-alpn";
+ version = "1.2.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "resolve-alpn"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "resolve-alpn"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz";
+ sha512 = "0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==";
+ };
+ };
+ "resolve-from-5.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "resolve-from";
+ packageName = "resolve-from";
+ version = "5.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "resolve-from"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "resolve-from"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz";
+ sha512 = "qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==";
+ };
+ };
+ "responselike-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "responselike";
+ packageName = "responselike";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "responselike"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "responselike"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/responselike/-/responselike-2.0.0.tgz";
+ sha512 = "xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==";
+ };
+ };
+ "retry-0.13.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "retry";
+ packageName = "retry";
+ version = "0.13.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "retry"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "retry"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz";
+ sha512 = "XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==";
+ };
+ };
+ "sax-1.2.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "sax";
+ packageName = "sax";
+ version = "1.2.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "sax"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "sax"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz";
+ sha1 = "7b8e656190b228e81a66aea748480d828cd2d37a";
+ };
+ };
+ "split2-4.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "split2";
+ packageName = "split2";
+ version = "4.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "split2"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "split2"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz";
+ sha512 = "VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==";
+ };
+ };
+ "tarn-3.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "tarn";
+ packageName = "tarn";
+ version = "3.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "tarn"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "tarn"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz";
+ sha512 = "51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==";
+ };
+ };
+ "tildify-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "tildify";
+ packageName = "tildify";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "tildify"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "tildify"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz";
+ sha512 = "Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==";
+ };
+ };
+ "url-0.10.3" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "url";
+ packageName = "url";
+ version = "0.10.3";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "url"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "url"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/url/-/url-0.10.3.tgz";
+ sha1 = "021e4d9c7705f21bbf37d03ceb58767402774c64";
+ };
+ };
+ "uuid-3.3.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "uuid";
+ packageName = "uuid";
+ version = "3.3.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "uuid"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "uuid"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz";
+ sha512 = "yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==";
+ };
+ };
+ "wrappy-1.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "wrappy";
+ packageName = "wrappy";
+ version = "1.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "wrappy"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "wrappy"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz";
+ sha1 = "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f";
+ };
+ };
+ "xml2js-0.4.19" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "xml2js";
+ packageName = "xml2js";
+ version = "0.4.19";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "xml2js"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "xml2js"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz";
+ sha512 = "esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==";
+ };
+ };
+ "xmlbuilder-9.0.7" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "xmlbuilder";
+ packageName = "xmlbuilder";
+ version = "9.0.7";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "xmlbuilder"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "xmlbuilder"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz";
+ sha1 = "132ee63d2ec5565c557e20f4c22df9aca686b10d";
+ };
+ };
+ "xtend-4.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "xtend";
+ packageName = "xtend";
+ version = "4.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "xtend"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "xtend"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz";
+ sha512 = "LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==";
+ };
+ };
+ "yocto-queue-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "yocto-queue";
+ packageName = "yocto-queue";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "yocto-queue"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "yocto-queue"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz";
+ sha512 = "9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==";
+ };
+ };
+ "yoctodelay-1.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "yoctodelay";
+ packageName = "yoctodelay";
+ version = "1.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "yoctodelay"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "yoctodelay"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/yoctodelay/-/yoctodelay-1.2.0.tgz";
+ sha512 = "12y/P9MSig9/5BEhBgylss+fkHiCRZCvYR81eH35NW9uw801cvJt31EAV+WOLcwZRZbLiIQl/hxcdXXXFmGvXg==";
+ };
+ };
+ };
+ jsnixDeps = {
+ async-retry = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "async-retry"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "async-retry";
+ packageName = "async-retry";
+ version = "1.3.3";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz";
+ sha512 = "wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "async-retry"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "async-retry"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "async-retry"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "async-retry"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "async-retry"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "async-retry"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "async-retry"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "async-retry"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "async-retry"; });
+ meta = {
+ description = "Retrying made simple, easy and async";
+ license = "MIT";
+ homepage = "https://github.com/vercel/async-retry#readme";
+ };
+ };
+ aws-sdk = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "aws-sdk"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "aws-sdk";
+ packageName = "aws-sdk";
+ version = "2.1046.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1046.0.tgz";
+ sha512 = "ocwHclMXdIA+NWocUyvp9Ild3/zy2vr5mHp3mTyodf0WU5lzBE8PocCVLSWhMAXLxyia83xv2y5f5AzAcetbqA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "aws-sdk"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "aws-sdk"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "aws-sdk"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "aws-sdk"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "aws-sdk"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "aws-sdk"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "aws-sdk"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "aws-sdk"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "aws-sdk"; });
+ meta = {
+ description = "AWS SDK for JavaScript";
+ license = "Apache-2.0";
+ homepage = "https://github.com/aws/aws-sdk-js";
+ };
+ };
+ dotenv = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "dotenv"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "dotenv";
+ packageName = "dotenv";
+ version = "10.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz";
+ sha512 = "rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "dotenv"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "dotenv"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "dotenv"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "dotenv"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "dotenv"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "dotenv"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "dotenv"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "dotenv"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "dotenv"; });
+ meta = {
+ description = "Loads environment variables from .env file";
+ license = "BSD-2-Clause";
+ homepage = "https://github.com/motdotla/dotenv#readme";
+ };
+ };
+ exit-hook = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "exit-hook"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "exit-hook";
+ packageName = "exit-hook";
+ version = "3.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/exit-hook/-/exit-hook-3.0.0.tgz";
+ sha512 = "ElRvnoj3dvOc5WjnQx0CF66rS0xehV6eZdcmqZX17uOLPy3me43frl8UD73Frkx5Aq5kgziMDECjDJR2X1oBFQ==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "exit-hook"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "exit-hook"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "exit-hook"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "exit-hook"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "exit-hook"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "exit-hook"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "exit-hook"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "exit-hook"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "exit-hook"; });
+ meta = {
+ description = "Run some code when the process exits";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/exit-hook#readme";
+ };
+ };
+ got = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "got"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "got";
+ packageName = "got";
+ version = "12.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/got/-/got-12.0.0.tgz";
+ sha512 = "gNNNghQ1yw0hyzie1FLK6gY90BQlXU9zSByyRygnbomHPruKQ6hAKKbpO1RfNZp8b+qNzNipGeRG3tUelKcVsA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "got"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "got"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "got"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "got"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "got"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "got"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "got"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "got"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "got"; });
+ meta = {
+ description = "Human-friendly and powerful HTTP request library for Node.js";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/got#readme";
+ };
+ };
+ knex = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "knex"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "knex";
+ packageName = "knex";
+ version = "0.95.14";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/knex/-/knex-0.95.14.tgz";
+ sha512 = "j4qLjWySrC/JRRVtOpoR2LcS1yBOsd7Krc6mEukPvmTDX/w11pD52Pq9FYR56/kLXGeAV8jFdWBjsZFi1mscWg==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "knex"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "knex"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "knex"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "knex"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "knex"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "knex"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "knex"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "knex"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "knex"; });
+ meta = {
+ description = "A batteries-included SQL query & schema builder for PostgresSQL, MySQL, CockroachDB, MSSQL and SQLite3";
+ license = "MIT";
+ homepage = "https://knexjs.org";
+ };
+ };
+ moment = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "moment"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "moment";
+ packageName = "moment";
+ version = "2.29.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz";
+ sha512 = "kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "moment"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "moment"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "moment"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "moment"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "moment"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "moment"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "moment"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "moment"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "moment"; });
+ meta = {
+ description = "Parse, validate, manipulate, and display dates";
+ license = "MIT";
+ homepage = "https://momentjs.com";
+ };
+ };
+ p-limit = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-limit"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-limit";
+ packageName = "p-limit";
+ version = "4.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz";
+ sha512 = "5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-limit"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-limit"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-limit"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-limit"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-limit"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-limit"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-limit"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-limit"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-limit"; });
+ meta = {
+ description = "Run multiple promise-returning & async functions with limited concurrency";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-limit#readme";
+ };
+ };
+ p-min-delay = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-min-delay"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-min-delay";
+ packageName = "p-min-delay";
+ version = "4.0.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-min-delay/-/p-min-delay-4.0.1.tgz";
+ sha512 = "Tgkn+fy2VYNWw9bLy4BwiF+1ZMIgTDBIpaIChi1HC3N4nwRpandJnG1jAEXiYCcrTZKYQJdBWzLJauAeYDXsBg==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-min-delay"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-min-delay"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-min-delay"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-min-delay"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-min-delay"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-min-delay"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-min-delay"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-min-delay"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-min-delay"; });
+ meta = {
+ description = "Delay a promise a minimum amount of time";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-min-delay#readme";
+ };
+ };
+ p-wait-for = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-wait-for"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-wait-for";
+ packageName = "p-wait-for";
+ version = "4.1.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-wait-for/-/p-wait-for-4.1.0.tgz";
+ sha512 = "i8nE5q++9h8oaQHWltS1Tnnv4IoMDOlqN7C0KFG2OdbK0iFJIt6CROZ8wfBM+K4Pxqfnq4C4lkkpXqTEpB5DZw==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-wait-for"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-wait-for"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-wait-for"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-wait-for"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-wait-for"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-wait-for"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-wait-for"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-wait-for"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-wait-for"; });
+ meta = {
+ description = "Wait for a condition to be true";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-wait-for#readme";
+ };
+ };
+ p-whilst = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-whilst"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-whilst";
+ packageName = "p-whilst";
+ version = "3.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-whilst/-/p-whilst-3.0.0.tgz";
+ sha512 = "vaiNNmeIUGtMzf121RTb3CCC0Nl4WNeHjbmPjRcwPo6vQiHEJRpHbeOcyLBZspuyz2yG+G2xwzVIiULd1Mk6MA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-whilst"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-whilst"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-whilst"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-whilst"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-whilst"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-whilst"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-whilst"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-whilst"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-whilst"; });
+ meta = {
+ description = "While a condition returns true, calls a function repeatedly, and then resolves the promise";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-whilst#readme";
+ };
+ };
+ pg = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "pg"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "pg";
+ packageName = "pg";
+ version = "8.7.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz";
+ sha512 = "7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "pg"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "pg"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "pg"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "pg"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "pg"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "pg"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "pg"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "pg"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "pg"; });
+ meta = {
+ description = "PostgreSQL client - pure javascript & libpq with the same API";
+ license = "MIT";
+ homepage = "https://github.com/brianc/node-postgres";
+ };
+ };
+ ramda = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "ramda"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "ramda";
+ packageName = "ramda";
+ version = "0.27.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/ramda/-/ramda-0.27.1.tgz";
+ sha512 = "PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "ramda"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "ramda"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "ramda"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "ramda"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "ramda"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "ramda"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "ramda"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "ramda"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "ramda"; });
+ meta = {
+ description = "A practical functional library for JavaScript programmers.";
+ license = "MIT";
+ homepage = "https://ramdajs.com/";
+ };
+ };
+ };
+ dedupedDeps = {
+ retry = sources."retry-0.13.1" {
+ dependencies = [];
+ };
+ base64-js = sources."base64-js-1.5.1" {
+ dependencies = [];
+ };
+ buffer = sources."buffer-4.9.2" {
+ dependencies = [];
+ };
+ events = sources."events-1.1.1" {
+ dependencies = [];
+ };
+ ieee754 = sources."ieee754-1.1.13" {
+ dependencies = [];
+ };
+ isarray = sources."isarray-1.0.0" {
+ dependencies = [];
+ };
+ jmespath = sources."jmespath-0.15.0" {
+ dependencies = [];
+ };
+ punycode = sources."punycode-1.3.2" {
+ dependencies = [];
+ };
+ querystring = sources."querystring-0.2.0" {
+ dependencies = [];
+ };
+ sax = sources."sax-1.2.1" {
+ dependencies = [];
+ };
+ url = sources."url-0.10.3" {
+ dependencies = [];
+ };
+ uuid = sources."uuid-3.3.2" {
+ dependencies = [];
+ };
+ xml2js = sources."xml2js-0.4.19" {
+ dependencies = [];
+ };
+ xmlbuilder = sources."xmlbuilder-9.0.7" {
+ dependencies = [];
+ };
+ "@sindresorhus/is" = sources."@sindresorhus/is-4.2.0" {
+ dependencies = [];
+ };
+ "@szmarczak/http-timer" = sources."@szmarczak/http-timer-5.0.1" {
+ dependencies = [];
+ };
+ "@types/cacheable-request" = sources."@types/cacheable-request-6.0.2" {
+ dependencies = [];
+ };
+ "@types/http-cache-semantics" = sources."@types/http-cache-semantics-4.0.1" {
+ dependencies = [];
+ };
+ "@types/keyv" = sources."@types/keyv-3.1.3" {
+ dependencies = [];
+ };
+ "@types/node" = sources."@types/node-16.11.12" {
+ dependencies = [];
+ };
+ "@types/responselike" = sources."@types/responselike-1.0.0" {
+ dependencies = [];
+ };
+ cacheable-lookup = sources."cacheable-lookup-6.0.4" {
+ dependencies = [];
+ };
+ cacheable-request = sources."cacheable-request-7.0.2" {
+ dependencies = [
+ (sources."get-stream-5.2.0" {
+ dependencies = [];
+ })
+ (sources."lowercase-keys-2.0.0" {
+ dependencies = [];
+ })
+ ];
+ };
+ clone-response = sources."clone-response-1.0.2" {
+ dependencies = [];
+ };
+ decompress-response = sources."decompress-response-6.0.0" {
+ dependencies = [
+ (sources."mimic-response-3.1.0" {
+ dependencies = [];
+ })
+ ];
+ };
+ defer-to-connect = sources."defer-to-connect-2.0.1" {
+ dependencies = [];
+ };
+ end-of-stream = sources."end-of-stream-1.4.4" {
+ dependencies = [];
+ };
+ form-data-encoder = sources."form-data-encoder-1.7.1" {
+ dependencies = [];
+ };
+ get-stream = sources."get-stream-6.0.1" {
+ dependencies = [];
+ };
+ http-cache-semantics = sources."http-cache-semantics-4.1.0" {
+ dependencies = [];
+ };
+ http2-wrapper = sources."http2-wrapper-2.1.10" {
+ dependencies = [];
+ };
+ json-buffer = sources."json-buffer-3.0.1" {
+ dependencies = [];
+ };
+ keyv = sources."keyv-4.0.4" {
+ dependencies = [];
+ };
+ lowercase-keys = sources."lowercase-keys-3.0.0" {
+ dependencies = [];
+ };
+ mimic-response = sources."mimic-response-1.0.1" {
+ dependencies = [];
+ };
+ normalize-url = sources."normalize-url-6.1.0" {
+ dependencies = [];
+ };
+ once = sources."once-1.4.0" {
+ dependencies = [];
+ };
+ p-cancelable = sources."p-cancelable-3.0.0" {
+ dependencies = [];
+ };
+ pump = sources."pump-3.0.0" {
+ dependencies = [];
+ };
+ quick-lru = sources."quick-lru-5.1.1" {
+ dependencies = [];
+ };
+ resolve-alpn = sources."resolve-alpn-1.2.1" {
+ dependencies = [];
+ };
+ responselike = sources."responselike-2.0.0" {
+ dependencies = [
+ (sources."lowercase-keys-2.0.0" {
+ dependencies = [];
+ })
+ ];
+ };
+ wrappy = sources."wrappy-1.0.2" {
+ dependencies = [];
+ };
+ colorette = sources."colorette-2.0.16" {
+ dependencies = [];
+ };
+ commander = sources."commander-7.2.0" {
+ dependencies = [];
+ };
+ debug = sources."debug-4.3.2" {
+ dependencies = [];
+ };
+ escalade = sources."escalade-3.1.1" {
+ dependencies = [];
+ };
+ esm = sources."esm-3.2.25" {
+ dependencies = [];
+ };
+ function-bind = sources."function-bind-1.1.1" {
+ dependencies = [];
+ };
+ getopts = sources."getopts-2.2.5" {
+ dependencies = [];
+ };
+ has = sources."has-1.0.3" {
+ dependencies = [];
+ };
+ interpret = sources."interpret-2.2.0" {
+ dependencies = [];
+ };
+ is-core-module = sources."is-core-module-2.8.0" {
+ dependencies = [];
+ };
+ lodash = sources."lodash-4.17.21" {
+ dependencies = [];
+ };
+ ms = sources."ms-2.1.2" {
+ dependencies = [];
+ };
+ path-parse = sources."path-parse-1.0.7" {
+ dependencies = [];
+ };
+ pg-connection-string = sources."pg-connection-string-2.5.0" {
+ dependencies = [];
+ };
+ rechoir = sources."rechoir-0.7.0" {
+ dependencies = [];
+ };
+ resolve = sources."resolve-1.20.0" {
+ dependencies = [];
+ };
+ resolve-from = sources."resolve-from-5.0.0" {
+ dependencies = [];
+ };
+ tarn = sources."tarn-3.0.2" {
+ dependencies = [];
+ };
+ tildify = sources."tildify-2.0.0" {
+ dependencies = [];
+ };
+ yocto-queue = sources."yocto-queue-1.0.0" {
+ dependencies = [];
+ };
+ yoctodelay = sources."yoctodelay-1.2.0" {
+ dependencies = [];
+ };
+ p-timeout = sources."p-timeout-5.0.2" {
+ dependencies = [];
+ };
+ buffer-writer = sources."buffer-writer-2.0.0" {
+ dependencies = [];
+ };
+ packet-reader = sources."packet-reader-1.0.0" {
+ dependencies = [];
+ };
+ pg-int8 = sources."pg-int8-1.0.1" {
+ dependencies = [];
+ };
+ pg-pool = sources."pg-pool-3.4.1" {
+ dependencies = [];
+ };
+ pg-protocol = sources."pg-protocol-1.5.0" {
+ dependencies = [];
+ };
+ pg-types = sources."pg-types-2.2.0" {
+ dependencies = [];
+ };
+ pgpass = sources."pgpass-1.0.5" {
+ dependencies = [];
+ };
+ postgres-array = sources."postgres-array-2.0.0" {
+ dependencies = [];
+ };
+ postgres-bytea = sources."postgres-bytea-1.0.0" {
+ dependencies = [];
+ };
+ postgres-date = sources."postgres-date-1.0.7" {
+ dependencies = [];
+ };
+ postgres-interval = sources."postgres-interval-1.2.0" {
+ dependencies = [];
+ };
+ split2 = sources."split2-4.1.0" {
+ dependencies = [];
+ };
+ xtend = sources."xtend-4.0.2" {
+ dependencies = [];
+ };
+ };
+ isolateDeps = {};
+in
+jsnixDeps // (if builtins.hasAttr "packageDerivation" packageNix then {
+ "${packageNix.name}" = jsnixDrvOverrides {
+ inherit dedupedDeps jsnixDeps isolateDeps;
+ drv_ = packageNix.packageDerivation;
+ };
+} else {})
\ No newline at end of file
diff --git a/ec2/import-blocks/package.nix b/ec2/import-blocks/package.nix
new file mode 100644
index 0000000..82737aa
--- /dev/null
+++ b/ec2/import-blocks/package.nix
@@ -0,0 +1,43 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
# jsnix package manifest for the import-blocks service; consumed by the
# sibling package-lock.nix to produce a Nix build of the npm package.
{
  name = "@ar.io/import-blocks";
  version = "1.0.0";
  main = "src/index.mjs";

  # Runtime npm dependencies.
  # NOTE(review): `moment = "latest"` is unpinned and makes the lockfile
  # regeneration non-reproducible — consider pinning a version range.
  dependencies = {
    async-retry = "^1.3.3";
    aws-sdk = "^2.1046.0";
    dotenv = "^10.0.0";
    exit-hook = "^3.0.0";
    got = "^12.0.0";
    knex = "^0.95.14";
    moment = "latest";
    pg = "^8.7.1";
    p-limit = "^4.0.0";
    p-wait-for = "^4.1.0";
    p-whilst = "^3.0.0";
    p-min-delay = "^4.0.1";
    ramda = "^0.27.1";
  };
  # Derivation overrides picked up by package-lock.nix's jsnixDrvOverrides.
  packageDerivation = { jsnixDeps, ... }@pkgs: {
    buildInputs = [ ];
    # buildPhase = "tsc --build tsconfig.json";
  };
}
diff --git a/ec2/import-blocks/src/block-db.mjs b/ec2/import-blocks/src/block-db.mjs
new file mode 100644
index 0000000..ebbfa59
--- /dev/null
+++ b/ec2/import-blocks/src/block-db.mjs
@@ -0,0 +1,192 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import R from "ramda";
+import pLimit from "p-limit";
+import moment from "moment";
+import { enqueue } from "./sqs.mjs";
+
// The pg driver and knex are unaware of the destination column types and
// do not serialize json/jsonb fields correctly, so object-valued fields
// (objects and arrays alike) are stringified by hand before insertion.
const serialize = (row) => {
  const result = {};
  for (const [key, value] of Object.entries(row)) {
    result[key] =
      value && typeof value === "object" ? JSON.stringify(value) : value;
  }
  return result;
};
+
// Upsert `rows` into `table` using PostgreSQL "ON CONFLICT ... DO UPDATE":
// knex (at this version) has no native upsert, so the INSERT is rendered
// to SQL via toSQL() and the conflict clause is appended by hand.
// NOTE(review): non-conflict column names are interpolated unquoted into
// `updateFields` — assumed to come from trusted, code-built row objects.
const upsert = async (
  connection,
  { table, conflictKeys, rows, transaction }
) => {
  // Every column not in the conflict key is refreshed from the incoming
  // row ("excluded" is PostgreSQL's alias for the proposed insert row).
  const updateFields = Object.keys(rows[0])
    .filter((field) => !conflictKeys.includes(field))
    .map((field) => `${field} = excluded.${field}`)
    .join(",");

  const query = connection.insert(rows).into(table);

  if (transaction) {
    query.transacting(transaction);
  }

  const { sql, bindings } = query.toSQL();

  const upsertSql = sql.concat(
    ` ON CONFLICT (${conflictKeys
      .map((key) => `"${key}"`)
      .join(",")}) DO UPDATE SET ${updateFields};`
  );

  return await connection.raw(upsertSql, bindings);
};
+
const txImportQueueUrl = process.env.ARWEAVE_SQS_IMPORT_TXS_URL;

// Queue one import message per transaction id, with at most 10 SQS sends
// in flight at a time.
const enqueueTxImports = async (queueUrl, txIds) => {
  const limit = pLimit(10);
  console.log(`[import-blocks] queuing block txs`);
  const pending = txIds.map((txid) =>
    limit(() => enqueue(queueUrl, { id: txid, message: { id: txid } }))
  );
  await Promise.all(pending);
};
+
// Persist a list of DB-shaped blocks. Blocks are processed in reverse
// input order; each block's header row and its per-tx height stubs are
// upserted inside a single transaction, then the block's tx ids are
// queued for (re-)import on SQS.
export const saveBlocks = async (connection, blocks) => {
  // Pair every block with { height, id } stubs for its transactions.
  const blockTxMappings = blocks.reduce((map, block) => {
    return map.concat({
      block,
      txs: block.txs.map((tx_id) => {
        return { height: block.height, id: tx_id };
      }),
    });
  }, []);

  console.log(`[block-db] inserting block headers into blocks table`);

  for (const map of R.reverse(blockTxMappings)) {
    const { block, txs } = map;
    await connection.transaction(async (knexTransaction) => {
      console.log(`[block-db] saving block`, {
        height: block.height,
        id: block.id,
      });

      // Block header keyed on height: a fork at the same height
      // overwrites the previous header.
      await upsert(knexTransaction, {
        table: "blocks",
        conflictKeys: ["height"],
        rows: [serialize(block)],
        transaction: knexTransaction,
      });

      // Stamp each tx with its block height, max 10 upserts in flight.
      const parallelize = pLimit(10);
      console.log(`[block-db] setting tx block heights`);
      await Promise.all(
        txs.map((item) => {
          return parallelize(() => {
            return upsert(knexTransaction, {
              table: "transactions",
              conflictKeys: ["id"],
              rows: [item],
              transaction: knexTransaction,
            });
          });
        })
      );
    });

    console.log(`[block-db] enqueue-ing tx-imports`);

    // requeue *all* transactions involved in blocks that have forked.
    // Some of them may have been imported already and purged, so we
    // reimport everything to make sure there are no gaps.
    // NOTE(review): this intentionally runs after the transaction commits
    // and uses the raw `block.txs` ids rather than the mapped `txs` stubs.
    await enqueueTxImports(txImportQueueUrl, block.txs);
  }
};
+
// Columns read back from the `blocks` table.
const blockFields = [
  "id",
  "height",
  "mined_at",
  "previous_block",
  "txs",
  "extended",
];

// Keys copied from a full Arweave block into the `extended` json column.
const extendedFields = [
  "diff",
  "hash",
  "reward_addr",
  "last_retarget",
  "tx_root",
  "tx_tree",
  "reward_pool",
  "weave_size",
  "block_size",
  "cumulative_diff",
  "hash_list_merkle",
  "tags",
];

// Return the block with the greatest height, or a genesis placeholder
// (height 0) when the table is empty/unreadable.
export const getHighestBlock = async (connection) => {
  const row = await connection
    .select(blockFields)
    .from("blocks")
    .orderBy("height", "desc")
    .first();

  if (!row) {
    console.error(
      "Failed to get latest block from the block database, assuming 0"
    );
    return {
      id: "7wIU7KolICAjClMlcZ38LZzshhI7xGkm2tDCJR7Wvhe3ESUo2-Z4-y0x1uaglRJE",
      height: 0,
    };
  }

  return row;
};

// Fetch the newest 400 block headers, highest first.
export const getRecentBlocks = async (connection) =>
  connection
    .select(blockFields)
    .from("blocks")
    .orderBy("height", "desc")
    .limit(400);
+
/**
 * Strip a full Arweave block down to the row shape stored in postgres.
 * Extended chain metadata is collected under the `extended` key.
 */
export const fullBlockToDbBlock = (block) => {
  // Equivalent of R.pick(extendedFields, block): copy only the keys that
  // are actually present on the block.
  const extended = {};
  for (const field of extendedFields) {
    if (field in block) {
      extended[field] = block[field];
    }
  }

  return {
    id: block.indep_hash,
    height: block.height,
    previous_block: block.previous_block,
    txs: block.txs,
    mined_at: moment(block.timestamp * 1000).format(),
    extended,
  };
};
diff --git a/ec2/import-blocks/src/env.mjs b/ec2/import-blocks/src/env.mjs
new file mode 100644
index 0000000..cbed791
--- /dev/null
+++ b/ec2/import-blocks/src/env.mjs
@@ -0,0 +1,28 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
import dotenv from "dotenv";

// Load environment variables from a .env file. An explicit file can be
// supplied via ARWEAVE_DOTENV_PATH; otherwise dotenv's default (./.env)
// is used.
const dotenvOptions = { silent: true };
if (process.env.ARWEAVE_DOTENV_PATH) {
  dotenvOptions.path = process.env.ARWEAVE_DOTENV_PATH;
}
dotenv.config(dotenvOptions);
diff --git a/ec2/import-blocks/src/index.mjs b/ec2/import-blocks/src/index.mjs
new file mode 100644
index 0000000..a0911d3
--- /dev/null
+++ b/ec2/import-blocks/src/index.mjs
@@ -0,0 +1,244 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import "./env.mjs";
+import R from "ramda";
+import retry from "async-retry";
+import pWaitFor from "p-wait-for";
+import pMinDelay from "p-min-delay";
+import pWhilst from "p-whilst";
+import exitHook from "exit-hook";
+import got from "got";
+import { createDbClient } from "./postgres.mjs";
+import { shuffle } from "./utils.mjs";
+import {
+ fullBlockToDbBlock,
+ getHighestBlock,
+ getRecentBlocks,
+ saveBlocks,
+} from "./block-db.mjs";
+
// Flag checked by the polling loop below; flipped when the process
// receives an exit signal so the loop can wind down gracefully.
let exitSignaled = false;

exitHook(() => {
  exitSignaled = true;
});
+
// Healthy arweave.net origin endpoints, maintained by refreshNodes().
const nodes = new Set();

// Refresh `nodes` from https://arweave.net/health (retrying up to 5
// times): origins reporting status 200 are added, all others removed.
// On total failure the error is logged and the set is left unchanged.
async function refreshNodes() {
  let jsonResponse;
  try {
    await retry(
      async () => {
        jsonResponse = await got("https://arweave.net/health").json();
      },
      {
        retries: 5,
      }
    );
  } catch (error) {
    console.error(error);
  }

  if (typeof jsonResponse === "object" && Array.isArray(jsonResponse.origins)) {
    for (const origin of jsonResponse.origins) {
      if (origin.status === 200) {
        nodes.add(origin.endpoint);
      } else {
        // BUGFIX: Set has no `remove` method — the original called
        // `nodes.remove(...)`, which threw a TypeError whenever any
        // origin was unhealthy. `delete` is the correct Set API.
        nodes.delete(origin.endpoint);
      }
    }
  }
}
+
// Chain cursors: `lastBlock` tracks the highest block in our DB,
// `latestBlock` the newest block seen on the network.
let latestBlock;
let lastBlock;

// Ask each known node for its current block and return the first
// response that looks like a block (object with a numeric height).
// Returns undefined when no node gives a usable answer.
export const getNewestBlock = async () => {
  for (const node of nodes.values()) {
    let response;
    try {
      response = await got(node + "/block/current").json();
      // const response = await got(node + "/block/height/705101").json();
    } catch {}
    if (typeof response === "object" && typeof response.height === "number") {
      return response;
    }
  }
};
+
// Look up a block by its indep_hash, trying each known node in turn.
// Returns the first response whose indep_hash matches, else undefined.
export const getSpecificBlock = async (hash) => {
  for (const node of nodes.values()) {
    let found;
    try {
      const response = await got(node + "/block/hash/" + hash).json();
      if (typeof response === "object" && response.indep_hash === hash) {
        found = response;
      }
    } catch (error) {
      console.error(error);
    }
    if (found) {
      return found;
    }
  }
  return undefined;
};
+
// Look up a block by height, trying each known node in turn. The first
// object-shaped response wins; undefined when every node fails.
export const getSpecificBlockHeight = async (height) => {
  for (const node of nodes.values()) {
    let found;
    try {
      const response = await got(node + "/block/height/" + height).json();
      if (typeof response === "object") {
        found = response;
      }
    } catch (error) {
      console.error(error);
    }
    if (found) {
      return found;
    }
  }
  return undefined;
};
+
// Main entry point: connect to the DB, then poll the network every 5s,
// importing new blocks and resolving forks/gaps as they appear.
(async () => {
  console.log("starting import-blocks...");
  // NOTE(review): this dumps the full environment (including any DB
  // credentials) into the logs — consider removing for production.
  console.log(process.env);
  await refreshNodes();

  const dbRead = await createDbClient({
    user: "read",
  });
  const dbWrite = await createDbClient({
    user: "write",
  });

  lastBlock = await getHighestBlock(dbRead);
  latestBlock = await getNewestBlock();

  pWhilst(
    () => !exitSignaled,
    async () => {
      console.log("Polling for new block...");
      try {
        // await pMinDelay(getNewestBlock(), 5 * 1000);
        await new Promise((resolve) => setTimeout(resolve, 5 * 1000));
        latestBlock = await getNewestBlock();
        // mega-gap scenario
        // dont import more than 100 block in single go
        // due do crazy memory needed to do so.
        if (
          lastBlock &&
          latestBlock &&
          latestBlock.height - lastBlock.height > 100
        ) {
          console.log("far behind: resolving next 100 blocks");
          latestBlock = await getSpecificBlockHeight(lastBlock.height + 100);
        }

        // BUGFIX: getNewestBlock()/getSpecificBlockHeight() return
        // undefined when no node answers; the original logged
        // `latestBlock.height` unconditionally here and crashed the tick
        // with a TypeError. Only compare when both cursors exist.
        if (lastBlock && latestBlock) {
          console.log("Comparing", lastBlock.height, latestBlock.height);
        }
        if (lastBlock && latestBlock && latestBlock.height > lastBlock.height) {
          console.log("New block detected: ", latestBlock.height);
          if (
            typeof latestBlock === "object" &&
            (latestBlock.height === 0 ||
              latestBlock.previous_block === lastBlock.id)
          ) {
            // Fast path: the new block extends our current tip directly.
            console.log("current block matches new block's previous block");
            await saveBlocks(dbWrite, [fullBlockToDbBlock(latestBlock)]);
            console.log("[import-blocks] saveBlock completed!");
            lastBlock = latestBlock;
          } else {
            // Slow path: walk back from the new block until it meets a
            // block id we already have, then import the whole diff.
            console.log("gap detected...");
            const gapDiff_ = await resolveGap(
              (await getRecentBlocks(dbRead)).map((block) => block.id),
              [latestBlock],
              {
                maxDepth: 3000,
              }
            );
            const gapDiff = gapDiff_.map(fullBlockToDbBlock);

            console.log(`[import-blocks] resolved fork/gap`, {
              length: gapDiff.length,
            });

            await saveBlocks(dbWrite, gapDiff);
            console.log("[import-blocks] saveBlocks completed!");
            lastBlock = latestBlock;
          }
        }
      } catch (error) {
        console.error(error);
      }
    }
  );
})();
+
/**
 * Walk backwards from a forked/new block until its previous_block hash
 * intersects the block ids already in our database. The returned array
 * holds the missing blocks sorted by height descending (newest first).
 * Throws when no branch point is found within `maxDepth` steps.
 */
export const resolveGap = async (
  mainChainIds,
  fork,
  { currentDepth = 0, maxDepth = 10 }
) => {
  let chain = fork;
  let depth = currentDepth;

  for (;;) {
    // The newest unresolved block sits at the end of the accumulator.
    const block = chain[chain.length - 1];

    // Genesis (or a missing block) terminates the walk.
    if (!block || block.height === 0) {
      return chain;
    }

    console.log(`[import-blocks] resolving fork/gap`, {
      id: block.indep_hash,
      height: block.height,
    });

    // Branch point found: `chain` is now the complete diff between the
    // two chains.
    if (mainChainIds.includes(block.previous_block)) {
      console.log(`[import-blocks] resolved fork`, {
        id: block.indep_hash,
        height: block.height,
      });

      return chain;
    }

    if (depth >= maxDepth) {
      throw new Error(`Couldn't resolve fork within maxDepth of ${maxDepth}`);
    }

    // Still descending the forked chain: fetch the parent and continue.
    const previousBlock = await getSpecificBlock(block.previous_block);
    chain = [...chain, previousBlock];
    depth += 1;
  }
};
diff --git a/ec2/import-blocks/src/postgres.mjs b/ec2/import-blocks/src/postgres.mjs
new file mode 100644
index 0000000..e0ee341
--- /dev/null
+++ b/ec2/import-blocks/src/postgres.mjs
@@ -0,0 +1,109 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import "./env.mjs";
+import AWS from "aws-sdk";
+import knex from "knex";
+
// NOTE(review): `rds` is constructed but never used in this module —
// presumably left over from an earlier IAM-auth approach; confirm before
// removing.
const rds = new AWS.RDS();

// Secrets Manager client used to fetch the DB role credentials below.
const awsSM = new AWS.SecretsManager({
  region: process.env.AWS_REGION,
});
+
// Promisified SecretsManager.getSecretValue. Every AWS error code
// (DecryptionFailureException, InternalServiceErrorException,
// InvalidParameterException, InvalidRequestException,
// ResourceNotFoundException, ...) is propagated to the caller unchanged;
// on success the raw SecretString is resolved.
function getSecretValue(secretName) {
  return new Promise((resolve, reject) => {
    awsSM.getSecretValue({ SecretId: secretName }, (err, data) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(data.SecretString);
    });
  });
}
+
// Build a knex/pg client for either the "read" or "write" role.
// Credentials come from Secrets Manager (secret names "read"/"write");
// hosts come from ARWEAVE_DB_READ_HOST / ARWEAVE_DB_WRITE_HOST. If a
// secret lookup fails, the error is logged and empty credentials are
// used — the connection then fails at query time rather than here.
export async function createDbClient({ user }) {
  const rdsReadRoleSecret = {
    username: "",
    password: "",
    url: process.env.ARWEAVE_DB_READ_HOST,
  };
  const rdsWriteRoleSecret = {
    username: "",
    password: "",
    url: process.env.ARWEAVE_DB_WRITE_HOST,
  };

  try {
    const rdsProxySecretRead = JSON.parse(await getSecretValue("read"));
    rdsReadRoleSecret.username = rdsProxySecretRead.username;
    rdsReadRoleSecret.password = rdsProxySecretRead.password;
  } catch (error) {
    console.error(error);
  }

  try {
    const rdsProxySecretWrite = JSON.parse(await getSecretValue("write"));
    rdsWriteRoleSecret.username = rdsProxySecretWrite.username;
    rdsWriteRoleSecret.password = rdsProxySecretWrite.password;
  } catch (error) {
    console.error(error);
  }

  const roleSecret = user === "read" ? rdsReadRoleSecret : rdsWriteRoleSecret;

  // NOTE(review): ssl is disabled even though an RDS CA bundle ships with
  // the image — confirm TLS is intentionally terminated elsewhere (proxy).
  // NOTE(review): `expirationChecker: () => true` appears to mark the
  // credentials as always stale so they are re-evaluated — verify against
  // the pg/knex versions in use.
  return await knex({
    client: "pg",
    pool: {
      min: 1,
      max: 2,
      acquireTimeoutMillis: 20000,
      idleTimeoutMillis: 30000,
      reapIntervalMillis: 40000,
    },
    connection: {
      host: roleSecret.url,
      user: roleSecret.username,
      database: "arweave",
      ssl: false,
      password: roleSecret.password,
      expirationChecker: () => true,
    },
  });
}
diff --git a/ec2/import-blocks/src/sqs.mjs b/ec2/import-blocks/src/sqs.mjs
new file mode 100644
index 0000000..ac6a5ef
--- /dev/null
+++ b/ec2/import-blocks/src/sqs.mjs
@@ -0,0 +1,64 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import AWS from "aws-sdk";
+
// Shared SQS client: fail fast (5s connect/request timeouts) and let the
// SDK retry up to 3 times before surfacing an error.
const sqs = new AWS.SQS({
  maxRetries: 3,
  httpOptions: { timeout: 5000, connectTimeout: 5000 },
});
+
// Yield successive slices of `arr`, each at most `n` elements long.
function* chunk(arr, n) {
  let start = 0;
  while (start < arr.length) {
    yield arr.slice(start, start + n);
    start += n;
  }
}
+
// Send a single message to `queueUrl`. The FIFO-only attributes are left
// undefined when `options` is absent, which the AWS SDK simply omits.
export const enqueue = async (queueUrl, message, options) => {
  if (!queueUrl) {
    throw new Error(`Queue URL undefined`);
  }

  const params = {
    QueueUrl: queueUrl,
    MessageBody: JSON.stringify(message),
    MessageGroupId: options?.messagegroup,
    MessageDeduplicationId: options?.deduplicationId,
    DelaySeconds: options?.delaySeconds,
  };

  await sqs.sendMessage(params).promise();
};
+
// Send `messages` to `queueUrl` in batches of 10 (the SQS batch maximum).
export const enqueueBatch = async (queueUrl, messages) => {
  for (const batch of chunk(messages, 10)) {
    const entries = batch.map((message) => ({
      Id: message.id,
      MessageBody: JSON.stringify(message),
      MessageGroupId: message.messagegroup,
      MessageDeduplicationId: message.deduplicationId,
    }));

    await sqs
      .sendMessageBatch({
        QueueUrl: queueUrl,
        Entries: entries,
      })
      .promise();
  }
};
diff --git a/ec2/import-blocks/src/utils.mjs b/ec2/import-blocks/src/utils.mjs
new file mode 100644
index 0000000..eb7ec8f
--- /dev/null
+++ b/ec2/import-blocks/src/utils.mjs
@@ -0,0 +1,34 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import R from "ramda";
+
// "Inside-out" Fisher-Yates shuffle, parameterised on the RNG so it can
// be made deterministic for tests. `shuffler` itself stays private;
// `shuffle` binds it to Math.random.
const shuffler = (random) => (list) => {
  const result = [];
  for (let idx = 0; idx < list.length; idx += 1) {
    const position = Math.floor((idx + 1) * random());
    result[idx] = result[position];
    result[position] = list[idx];
  }
  return result;
};

export const shuffle = shuffler(Math.random);
diff --git a/ecs/README.md b/ecs/README.md
new file mode 100644
index 0000000..39e3e1b
--- /dev/null
+++ b/ecs/README.md
@@ -0,0 +1,8 @@
+## osx develop
+
+When building docker image locally on osx (rarely necessary), you'll need a remote nix builder.
+Build the docker image from osx with this command
+
+```console
+sudo nix -L build .#packages.x86_64-linux.import-bundles -j0
+```
diff --git a/ecs/flake.lock b/ecs/flake.lock
new file mode 100644
index 0000000..49c2500
--- /dev/null
+++ b/ecs/flake.lock
@@ -0,0 +1,26 @@
+{
+ "nodes": {
+ "nixpkgs": {
+ "locked": {
+ "lastModified": 1643524588,
+ "narHash": "sha256-Qh5AazxdOQRORbGkkvpKoovDl6ej/4PhDabFsqnueqw=",
+ "owner": "NixOS",
+ "repo": "nixpkgs",
+ "rev": "efeefb2af1469a5d1f0ae7ca8f0dfd9bb87d5cfb",
+ "type": "github"
+ },
+ "original": {
+ "id": "nixpkgs",
+ "ref": "nixos-unstable",
+ "type": "indirect"
+ }
+ },
+ "root": {
+ "inputs": {
+ "nixpkgs": "nixpkgs"
+ }
+ }
+ },
+ "root": "root",
+ "version": 7
+}
diff --git a/ecs/flake.nix b/ecs/flake.nix
new file mode 100644
index 0000000..59cb39f
--- /dev/null
+++ b/ecs/flake.nix
@@ -0,0 +1,39 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
{
  inputs = {
    nixpkgs.url = "nixpkgs/nixos-unstable";
  };

  outputs = { self, nixpkgs, ... }:

    let
      system = "x86_64-linux";
      # `overlays` is defined below; Nix let-bindings are mutually
      # recursive and lazy, so the forward reference is fine.
      pkgs = (import nixpkgs {
        inherit overlays system;
        config = { allowUnfree = true; };
      });
      overlays = [ (import ./import-bundles/overlay.nix) ];

    in {
      # Docker image for the import-bundles service (built by the overlay).
      packages.x86_64-linux = {
        import-bundles = pkgs.importBundlesDocker;
      };
    };
}
diff --git a/ecs/import-bundles/overlay.nix b/ecs/import-bundles/overlay.nix
new file mode 100644
index 0000000..fa5e897
--- /dev/null
+++ b/ecs/import-bundles/overlay.nix
@@ -0,0 +1,20 @@
final: prev:

# Overlay exposing the import-bundles npm package (from package-lock.nix)
# plus a layered Docker image that runs its start script.
let packageLock = (import ./package-lock.nix prev);
in {
  inherit (packageLock) "@arweave/import-bundles";

  importBundlesDocker = prev.dockerTools.buildLayeredImage {
    name = "import-bundles";
    tag = "latest";
    # "now" deliberately stamps the build time (non-reproducible image).
    created = "now";
    # Provide a world-writable /tmp inside the container.
    extraCommands = "mkdir -m 0777 tmp";
    config = {
      # Run as an unprivileged uid:gid.
      User = "1000:1000";
      Cmd = [ "${final."@arweave/import-bundles"}/bin/import-bundles-start" ];
      ExposedPorts = {
        "3000" = {};
      };
    };
  };
}
diff --git a/ecs/import-bundles/package-lock.nix b/ecs/import-bundles/package-lock.nix
new file mode 100644
index 0000000..25c3939
--- /dev/null
+++ b/ecs/import-bundles/package-lock.nix
@@ -0,0 +1,3963 @@
+{pkgs, stdenv, lib, nodejs, fetchurl, fetchgit, fetchFromGitHub, jq, makeWrapper, python3, runCommand, runCommandCC, xcodebuild, ... }:
+
+let
+ packageNix = import ./package.nix;
+ copyNodeModules = {dependencies ? [] }:
+ (lib.lists.foldr (dep: acc:
+ let pkgName = if (builtins.hasAttr "packageName" dep)
+ then dep.packageName else dep.name;
+ in
+ acc + ''
+ if [[ ! -f "node_modules/${pkgName}" && \
+ ! -d "node_modules/${pkgName}" && \
+ ! -L "node_modules/${pkgName}" && \
+ ! -e "node_modules/${pkgName}" ]]
+ then
+ mkdir -p "node_modules/${pkgName}"
+ cp -rLT "${dep}/lib/node_modules/${pkgName}" "node_modules/${pkgName}"
+ chmod -R +rw "node_modules/${pkgName}"
+ fi
+ '')
+ "" dependencies);
+ linkNodeModules = {dependencies ? [], extraDependencies ? []}:
+ (lib.lists.foldr (dep: acc:
+ let pkgName = if (builtins.hasAttr "packageName" dep)
+ then dep.packageName else dep.name;
+ in (acc + (lib.optionalString
+ ((lib.findSingle (px: px.packageName == dep.packageName) "none" "found" extraDependencies) == "none")
+ ''
+ if [[ ! -f "node_modules/${pkgName}" && \
+ ! -d "node_modules/${pkgName}" && \
+ ! -L "node_modules/${pkgName}" && \
+ ! -e "node_modules/${pkgName}" ]]
+ then
+ mkdir -p "node_modules/${pkgName}"
+ ln -s "${dep}/lib/node_modules/${pkgName}"/* "node_modules/${pkgName}"
+ ${lib.optionalString (builtins.hasAttr "dependencies" dep)
+ ''
+ rm -rf "node_modules/${pkgName}/node_modules"
+ (cd node_modules/${dep.packageName}; ${linkNodeModules { inherit (dep) dependencies; inherit extraDependencies;}})
+ ''}
+ fi
+ '')))
+ "" dependencies);
+ gitignoreSource =
+ (import (fetchFromGitHub {
+ owner = "hercules-ci";
+ repo = "gitignore.nix";
+ rev = "5b9e0ff9d3b551234b4f3eb3983744fa354b17f1";
+ sha256 = "o/BdVjNwcB6jOmzZjOH703BesSkkS5O7ej3xhyO8hAY=";
+ }) { inherit lib; }).gitignoreSource;
+ transitiveDepInstallPhase = {dependencies ? [], pkgName}: ''
+ export packageDir="$(pwd)"
+ mkdir -p $out/lib/node_modules/${pkgName}
+ cd $out/lib/node_modules/${pkgName}
+ cp -rfT "$packageDir" "$(pwd)"
+ ${copyNodeModules { inherit dependencies; }} '';
+ transitiveDepUnpackPhase = {dependencies ? [], pkgName}: ''
+ unpackFile "$src";
+ # not ideal, but some perms are fubar
+ chmod -R +777 . || true
+ packageDir="$(find . -maxdepth 1 -type d | tail -1)"
+ cd "$packageDir"
+ '';
+ getNodeDep = packageName: dependencies:
+ (let depList = if ((builtins.typeOf dependencies) == "set")
+ then (builtins.attrValues dependencies)
+ else dependencies;
+ in (builtins.head
+ (builtins.filter (p: p.packageName == packageName) depList)));
+ nodeSources = runCommand "node-sources" {} ''
+ tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
+ mv node-* $out
+ '';
+ linkBins = ''
+ ${goBinLink}/bin/bin-link
+'';
+ flattenScript = args: '' ${goFlatten}/bin/flatten ${args}'';
+ sanitizeName = nm: lib.strings.sanitizeDerivationName
+ (builtins.replaceStrings [ "@" "/" ] [ "_at_" "_" ] nm);
+ jsnixDrvOverrides = { drv_, jsnixDeps, dedupedDeps, isolateDeps }:
+ let drv = drv_ (pkgs // { inherit nodejs copyNodeModules gitignoreSource jsnixDeps nodeModules getNodeDep; });
+ skipUnpackFor = if (builtins.hasAttr "skipUnpackFor" drv)
+ then drv.skipUnpackFor else [];
+ copyUnpackFor = if (builtins.hasAttr "copyUnpackFor" drv)
+ then drv.copyUnpackFor else [];
+ pkgJsonFile = runCommand "package.json" { buildInputs = [jq]; } ''
+ echo ${toPackageJson { inherit jsnixDeps; extraDeps = (if (builtins.hasAttr "extraDependencies" drv) then drv.extraDependencies else []); }} > $out
+ cat <<< $(cat $out | jq) > $out
+ '';
+ copyDeps = builtins.attrValues jsnixDeps;
+ copyDepsStr = builtins.concatStringsSep " " (builtins.map (dep: if (builtins.hasAttr "packageName" dep) then dep.packageName else dep.name) copyDeps);
+ extraDeps = (builtins.map (dep: if (builtins.hasAttr "packageName" dep) then dep.packageName else dep.name)
+ (lib.optionals (builtins.hasAttr "extraDependencies" drv) drv.extraDependencies));
+ extraDepsStr = builtins.concatStringsSep " " extraDeps;
+ buildDepDep = lib.lists.unique (lib.lists.concatMap (d: d.buildInputs)
+ (copyDeps ++ (lib.optionals (builtins.hasAttr "extraDependencies" drv) drv.extraDependencies)));
+ nodeModules = runCommandCC "${sanitizeName packageNix.name}_node_modules"
+ { buildInputs = [nodejs] ++ buildDepDep;
+ fixupPhase = "true";
+ doCheck = false;
+ doInstallCheck = false;
+ version = builtins.hashString "sha512" (lib.strings.concatStrings copyDeps); }
+ ''
+ echo 'unpack dependencies...'
+ mkdir -p $out/lib/node_modules
+ cd $out/lib
+ ${linkNodeModules { dependencies = builtins.attrValues isolateDeps; }}
+ ${copyNodeModules {
+ dependencies = copyDeps;
+ }}
+ ${copyNodeModules {
+ dependencies = builtins.attrValues dedupedDeps;
+ }}
+ chmod -R +rw node_modules
+ ${copyNodeModules {
+ dependencies = (lib.optionals (builtins.hasAttr "extraDependencies" drv) drv.extraDependencies);
+ }}
+ ${lib.optionalString ((builtins.length extraDeps) > 0) "echo 'resolving incoming transient deps of ${extraDepsStr}...'"}
+ ${lib.optionalString ((builtins.length extraDeps) > 0) (flattenScript extraDepsStr)}
+ ${lib.optionalString (builtins.hasAttr "nodeModulesUnpack" drv) drv.nodeModulesUnpack}
+ echo 'link nodejs bins to out-dir...'
+ ${linkBins}
+ '';
+ in stdenv.mkDerivation (drv // {
+ passthru = { inherit nodeModules pkgJsonFile; };
+ version = packageNix.version;
+ name = sanitizeName packageNix.name;
+ preUnpackBan_ = mkPhaseBan "preUnpack" drv;
+ unpackBan_ = mkPhaseBan "unpackPhase" drv;
+ postUnpackBan_ = mkPhaseBan "postUnpack" drv;
+ preConfigureBan_ = mkPhaseBan "preConfigure" drv;
+ configureBan_ = mkPhaseBan "configurePhase" drv;
+ postConfigureBan_ = mkPhaseBan "postConfigure" drv;
+ src = if (builtins.hasAttr "src" packageNix) then packageNix.src else gitignoreSource ./.;
+ packageName = packageNix.name;
+ doStrip = false;
+ doFixup = false;
+ doUnpack = true;
+ NODE_PATH = "./node_modules";
+ buildInputs = [ nodejs jq ] ++ lib.optionals (builtins.hasAttr "buildInputs" drv) drv.buildInputs;
+
+ configurePhase = ''
+ ln -s ${nodeModules}/lib/node_modules node_modules
+ cat ${pkgJsonFile} > package.json
+ '';
+ buildPhase = ''
+ runHook preBuild
+ ${lib.optionalString (builtins.hasAttr "buildPhase" drv) drv.buildPhase}
+ runHook postBuild
+ '';
+ installPhase = ''
+ runHook preInstall
+ mkdir -p $out/lib/node_modules/${packageNix.name}
+ cp -rfT ./ $out/lib/node_modules/${packageNix.name}
+ runHook postInstall
+ '';
+ });
+ toPackageJson = { jsnixDeps ? {}, extraDeps ? [] }:
+ let
+ main = if (builtins.hasAttr "main" packageNix) then packageNix else throw "package.nix is missing main attribute";
+ pkgName = if (builtins.hasAttr "packageName" packageNix)
+ then packageNix.packageName else packageNix.name;
+ packageNixDeps = if (builtins.hasAttr "dependencies" packageNix)
+ then packageNix.dependencies
+ else {};
+ extraDeps_ = lib.lists.foldr (dep: acc: { "${dep.packageName}" = dep; } // acc) {} extraDeps;
+ allDeps = extraDeps_ // packageNixDeps;
+ prodDeps = lib.lists.foldr
+ (depName: acc: acc // {
+ "${depName}" = (if ((builtins.typeOf allDeps."${depName}") == "string")
+ then allDeps."${depName}"
+ else
+ if (((builtins.typeOf allDeps."${depName}") == "set") &&
+ ((builtins.typeOf allDeps."${depName}".version) == "string"))
+ then allDeps."${depName}".version
+ else "latest");}) {} (builtins.attrNames allDeps);
+ safePkgNix = lib.lists.foldr (key: acc:
+ if ((builtins.typeOf packageNix."${key}") != "lambda")
+ then (acc // { "${key}" = packageNix."${key}"; })
+ else acc)
+ {} (builtins.attrNames packageNix);
+ in lib.strings.escapeNixString
+ (builtins.toJSON (safePkgNix // { dependencies = prodDeps; name = pkgName; }));
+ mkPhaseBan = phaseName: usrDrv:
+ if (builtins.hasAttr phaseName usrDrv) then
+ throw "jsnix error: using ${phaseName} isn't supported at this time"
+ else "";
+ mkPhase = pkgs_: {phase, pkgName}:
+ lib.optionalString ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "${phase}" packageNix.dependencies."${pkgName}"))
+ (if builtins.typeOf packageNix.dependencies."${pkgName}"."${phase}" == "string"
+ then
+ packageNix.dependencies."${pkgName}"."${phase}"
+ else
+ (packageNix.dependencies."${pkgName}"."${phase}" (pkgs_ // { inherit getNodeDep; })));
+ mkExtraBuildInputs = pkgs_: {pkgName}:
+ lib.optionals ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "extraBuildInputs" packageNix.dependencies."${pkgName}"))
+ (if builtins.typeOf packageNix.dependencies."${pkgName}"."extraBuildInputs" == "list"
+ then
+ packageNix.dependencies."${pkgName}"."extraBuildInputs"
+ else
+ (packageNix.dependencies."${pkgName}"."extraBuildInputs" (pkgs_ // { inherit getNodeDep; })));
+ mkExtraDependencies = pkgs_: {pkgName}:
+ lib.optionals ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "extraDependencies" packageNix.dependencies."${pkgName}"))
+ (if builtins.typeOf packageNix.dependencies."${pkgName}"."extraDependencies" == "list"
+ then
+ packageNix.dependencies."${pkgName}"."extraDependencies"
+ else
+ (packageNix.dependencies."${pkgName}"."extraDependencies" (pkgs_ // { inherit getNodeDep; })));
+ mkUnpackScript = { dependencies ? [], extraDependencies ? [], pkgName }:
+ let copyNodeDependencies =
+ if ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "copyNodeDependencies" packageNix.dependencies."${pkgName}") &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}"."copyNodeDependencies" == "bool") &&
+ (packageNix.dependencies."${pkgName}"."copyNodeDependencies" == true))
+ then true else false;
+ in ''
+ ${copyNodeModules { dependencies = dependencies ++ extraDependencies; }}
+ chmod -R +rw $(pwd)
+ '';
+ mkBuildScript = { dependencies ? [], pkgName }:
+ let extraNpmFlags =
+ if ((builtins.hasAttr "${pkgName}" packageNix.dependencies) &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}" == "set") &&
+ (builtins.hasAttr "npmFlags" packageNix.dependencies."${pkgName}") &&
+ (builtins.typeOf packageNix.dependencies."${pkgName}"."npmFlags" == "string"))
+ then packageNix.dependencies."${pkgName}"."npmFlags" else "";
+ in ''
+ runHook preBuild
+ export HOME=$TMPDIR
+ npm --offline config set node_gyp ${nodejs}/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js
+ npm --offline config set omit dev
+ NODE_PATH="$(pwd)/node_modules:$NODE_PATH" \
+ npm --offline --nodedir=${nodeSources} --location="$(pwd)" \
+ ${extraNpmFlags} "--production" "--preserve-symlinks" \
+ rebuild --build-from-source
+ runHook postBuild
+ '';
+ mkInstallScript = { pkgName }: ''
+ runHook preInstall
+ export packageDir="$(pwd)"
+ mkdir -p $out/lib/node_modules/${pkgName}
+ cd $out/lib/node_modules/${pkgName}
+ cp -rfT "$packageDir" "$(pwd)"
+ if [[ -d "$out/lib/node_modules/${pkgName}/bin" ]]
+ then
+ mkdir -p $out/bin
+ ln -s "$out/lib/node_modules/${pkgName}/bin"/* $out/bin
+ fi
+ cd $out/lib/node_modules/${pkgName}
+ runHook postInstall
+ '';
+ goBinLink = pkgs.buildGoModule {
+ pname = "bin-link";
+ version = "0.0.0";
+ vendorSha256 = null;
+ buildInputs = [ pkgs.nodejs ];
+ src = pkgs.fetchFromGitHub {
+ owner = "hlolli";
+ repo = "jsnix";
+ rev = "a66cf91ad49833ef3d84064c1037d942c97838bb";
+ sha256 = "AvDZXUSxuJa5lZ7zRdXWIDYTYfbH2VfpuHbvZBrT9f0=";
+ };
+ preBuild = ''
+ cd go/bin-link
+ '';
+};
+ goFlatten = pkgs.buildGoModule {
+ pname = "flatten";
+ version = "0.0.0";
+ vendorSha256 = null;
+ buildInputs = [ pkgs.nodejs ];
+ src = pkgs.fetchFromGitHub {
+ owner = "hlolli";
+ repo = "jsnix";
+ rev = "a66cf91ad49833ef3d84064c1037d942c97838bb";
+ sha256 = "AvDZXUSxuJa5lZ7zRdXWIDYTYfbH2VfpuHbvZBrT9f0=";
+ };
+ preBuild = ''
+ cd go/flatten
+ '';
+};
+ sources = rec {
+ "@sindresorhus/is-4.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_sindresorhus_slash_is";
+ packageName = "@sindresorhus/is";
+ version = "4.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@sindresorhus/is"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@sindresorhus/is"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@sindresorhus/is/-/is-4.2.0.tgz";
+ sha512 = "VkE3KLBmJwcCaVARtQpfuKcKv8gcBmUubrfHGF84dXuuW6jgsRYxPtzcIhPyK9WAPpRt2/xY6zkD9MnRaJzSyw==";
+ };
+ };
+ "@szmarczak/http-timer-5.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_szmarczak_slash_http-timer";
+ packageName = "@szmarczak/http-timer";
+ version = "5.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@szmarczak/http-timer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@szmarczak/http-timer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz";
+ sha512 = "+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==";
+ };
+ };
+ "@types/cacheable-request-6.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_cacheable-request";
+ packageName = "@types/cacheable-request";
+ version = "6.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/cacheable-request"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/cacheable-request"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz";
+ sha512 = "B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA==";
+ };
+ };
+ "@types/http-cache-semantics-4.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_http-cache-semantics";
+ packageName = "@types/http-cache-semantics";
+ version = "4.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/http-cache-semantics"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/http-cache-semantics"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz";
+ sha512 = "SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==";
+ };
+ };
+ "@types/keyv-3.1.3" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_keyv";
+ packageName = "@types/keyv";
+ version = "3.1.3";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/keyv"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/keyv"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.3.tgz";
+ sha512 = "FXCJgyyN3ivVgRoml4h94G/p3kY+u/B86La+QptcqJaWtBWtmc6TtkNfS40n9bIvyLteHh7zXOtgbobORKPbDg==";
+ };
+ };
+ "@types/node-17.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_node";
+ packageName = "@types/node";
+ version = "17.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/node"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/node"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/node/-/node-17.0.2.tgz";
+ sha512 = "JepeIUPFDARgIs0zD/SKPgFsJEAF0X5/qO80llx59gOxFTboS9Amv3S+QfB7lqBId5sFXJ99BN0J6zFRvL9dDA==";
+ };
+ };
+ "@types/responselike-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "_at_types_slash_responselike";
+ packageName = "@types/responselike";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "@types/responselike"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "@types/responselike"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz";
+ sha512 = "85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==";
+ };
+ };
+ "base64-js-1.5.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "base64-js";
+ packageName = "base64-js";
+ version = "1.5.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "base64-js"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "base64-js"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz";
+ sha512 = "AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==";
+ };
+ };
+ "buffer-4.9.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "buffer";
+ packageName = "buffer";
+ version = "4.9.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "buffer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "buffer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz";
+ sha512 = "xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==";
+ };
+ };
+ "buffer-writer-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "buffer-writer";
+ packageName = "buffer-writer";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "buffer-writer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "buffer-writer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz";
+ sha512 = "a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==";
+ };
+ };
+ "cacheable-lookup-6.0.4" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "cacheable-lookup";
+ packageName = "cacheable-lookup";
+ version = "6.0.4";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "cacheable-lookup"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "cacheable-lookup"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-6.0.4.tgz";
+ sha512 = "mbcDEZCkv2CZF4G01kr8eBd/5agkt9oCqz75tJMSIsquvRZ2sL6Hi5zGVKi/0OSC9oO1GHfJ2AV0ZIOY9vye0A==";
+ };
+ };
+ "cacheable-request-7.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "cacheable-request";
+ packageName = "cacheable-request";
+ version = "7.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "cacheable-request"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "cacheable-request"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz";
+ sha512 = "pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==";
+ };
+ };
+ "clone-response-1.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "clone-response";
+ packageName = "clone-response";
+ version = "1.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "clone-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "clone-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz";
+ sha1 = "d1dc973920314df67fbeb94223b4ee350239e96b";
+ };
+ };
+ "colorette-2.0.16" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "colorette";
+ packageName = "colorette";
+ version = "2.0.16";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "colorette"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "colorette"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz";
+ sha512 = "hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==";
+ };
+ };
+ "commander-7.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "commander";
+ packageName = "commander";
+ version = "7.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "commander"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "commander"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz";
+ sha512 = "QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==";
+ };
+ };
+ "debug-4.3.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "debug";
+ packageName = "debug";
+ version = "4.3.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "debug"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "debug"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz";
+ sha512 = "mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==";
+ };
+ };
+ "debug-4.3.3" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "debug";
+ packageName = "debug";
+ version = "4.3.3";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "debug"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "debug"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz";
+ sha512 = "/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==";
+ };
+ };
+ "decompress-response-6.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "decompress-response";
+ packageName = "decompress-response";
+ version = "6.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "decompress-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "decompress-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz";
+ sha512 = "aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==";
+ };
+ };
+ "defer-to-connect-2.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "defer-to-connect";
+ packageName = "defer-to-connect";
+ version = "2.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "defer-to-connect"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "defer-to-connect"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz";
+ sha512 = "4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==";
+ };
+ };
+ "end-of-stream-1.4.4" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "end-of-stream";
+ packageName = "end-of-stream";
+ version = "1.4.4";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "end-of-stream"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "end-of-stream"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz";
+ sha512 = "+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==";
+ };
+ };
+ "escalade-3.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "escalade";
+ packageName = "escalade";
+ version = "3.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "escalade"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "escalade"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz";
+ sha512 = "k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==";
+ };
+ };
+ "esm-3.2.25" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "esm";
+ packageName = "esm";
+ version = "3.2.25";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "esm"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "esm"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz";
+ sha512 = "U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==";
+ };
+ };
+ "events-1.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "events";
+ packageName = "events";
+ version = "1.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "events"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "events"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/events/-/events-1.1.1.tgz";
+ sha1 = "9ebdb7635ad099c70dcc4c2a1f5004288e8bd924";
+ };
+ };
+ "form-data-encoder-1.7.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "form-data-encoder";
+ packageName = "form-data-encoder";
+ version = "1.7.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "form-data-encoder"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "form-data-encoder"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-1.7.1.tgz";
+ sha512 = "EFRDrsMm/kyqbTQocNvRXMLjc7Es2Vk+IQFx/YW7hkUH1eBl4J1fqiP34l74Yt0pFLCNpc06fkbVk00008mzjg==";
+ };
+ };
+ "function-bind-1.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "function-bind";
+ packageName = "function-bind";
+ version = "1.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "function-bind"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "function-bind"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz";
+ sha512 = "yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==";
+ };
+ };
+ "get-stream-5.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "get-stream";
+ packageName = "get-stream";
+ version = "5.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "get-stream"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "get-stream"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz";
+ sha512 = "nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==";
+ };
+ };
+ "get-stream-6.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "get-stream";
+ packageName = "get-stream";
+ version = "6.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "get-stream"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "get-stream"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz";
+ sha512 = "ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==";
+ };
+ };
+ "getopts-2.2.5" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "getopts";
+ packageName = "getopts";
+ version = "2.2.5";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "getopts"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "getopts"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/getopts/-/getopts-2.2.5.tgz";
+ sha512 = "9jb7AW5p3in+IiJWhQiZmmwkpLaR/ccTWdWQCtZM66HJcHHLegowh4q4tSD7gouUyeNvFWRavfK9GXosQHDpFA==";
+ };
+ };
+ "has-1.0.3" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "has";
+ packageName = "has";
+ version = "1.0.3";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "has"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "has"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/has/-/has-1.0.3.tgz";
+ sha512 = "f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==";
+ };
+ };
+ "http-cache-semantics-4.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "http-cache-semantics";
+ packageName = "http-cache-semantics";
+ version = "4.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "http-cache-semantics"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "http-cache-semantics"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz";
+ sha512 = "carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==";
+ };
+ };
+ "http2-wrapper-2.1.10" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "http2-wrapper";
+ packageName = "http2-wrapper";
+ version = "2.1.10";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "http2-wrapper"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "http2-wrapper"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.1.10.tgz";
+ sha512 = "QHgsdYkieKp+6JbXP25P+tepqiHYd+FVnDwXpxi/BlUcoIB0nsmTOymTNvETuTO+pDuwcSklPE72VR3DqV+Haw==";
+ };
+ };
+ "ieee754-1.1.13" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "ieee754";
+ packageName = "ieee754";
+ version = "1.1.13";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "ieee754"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "ieee754"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz";
+ sha512 = "4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==";
+ };
+ };
+ "interpret-2.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "interpret";
+ packageName = "interpret";
+ version = "2.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "interpret"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "interpret"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz";
+ sha512 = "Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==";
+ };
+ };
+ "is-core-module-2.8.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "is-core-module";
+ packageName = "is-core-module";
+ version = "2.8.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "is-core-module"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "is-core-module"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz";
+ sha512 = "vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==";
+ };
+ };
+ "isarray-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "isarray";
+ packageName = "isarray";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "isarray"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "isarray"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz";
+ sha1 = "bb935d48582cba168c06834957a54a3e07124f11";
+ };
+ };
+ "jmespath-0.15.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "jmespath";
+ packageName = "jmespath";
+ version = "0.15.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "jmespath"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "jmespath"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz";
+ sha1 = "a3f222a9aae9f966f5d27c796510e28091764217";
+ };
+ };
+ "json-buffer-3.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "json-buffer";
+ packageName = "json-buffer";
+ version = "3.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "json-buffer"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "json-buffer"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz";
+ sha512 = "4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==";
+ };
+ };
+ "keyv-4.0.4" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "keyv";
+ packageName = "keyv";
+ version = "4.0.4";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "keyv"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "keyv"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/keyv/-/keyv-4.0.4.tgz";
+ sha512 = "vqNHbAc8BBsxk+7QBYLW0Y219rWcClspR6WSeoHYKG5mnsSoOH+BL1pWq02DDCVdvvuUny5rkBlzMRzoqc+GIg==";
+ };
+ };
+ "lodash-4.17.21" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "lodash";
+ packageName = "lodash";
+ version = "4.17.21";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "lodash"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "lodash"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz";
+ sha512 = "v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==";
+ };
+ };
+ "lowercase-keys-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "lowercase-keys";
+ packageName = "lowercase-keys";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "lowercase-keys"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "lowercase-keys"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz";
+ sha512 = "tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==";
+ };
+ };
+ "lowercase-keys-3.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "lowercase-keys";
+ packageName = "lowercase-keys";
+ version = "3.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "lowercase-keys"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "lowercase-keys"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz";
+ sha512 = "ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==";
+ };
+ };
+ "mimic-response-1.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "mimic-response";
+ packageName = "mimic-response";
+ version = "1.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "mimic-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "mimic-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz";
+ sha512 = "j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==";
+ };
+ };
+ "mimic-response-3.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "mimic-response";
+ packageName = "mimic-response";
+ version = "3.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "mimic-response"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "mimic-response"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz";
+ sha512 = "z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==";
+ };
+ };
+ "ms-2.1.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "ms";
+ packageName = "ms";
+ version = "2.1.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "ms"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "ms"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz";
+ sha512 = "sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==";
+ };
+ };
+ "normalize-url-6.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "normalize-url";
+ packageName = "normalize-url";
+ version = "6.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "normalize-url"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "normalize-url"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz";
+ sha512 = "DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==";
+ };
+ };
+ "once-1.4.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "once";
+ packageName = "once";
+ version = "1.4.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "once"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "once"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/once/-/once-1.4.0.tgz";
+ sha1 = "583b1aa775961d4b113ac17d9c50baef9dd76bd1";
+ };
+ };
+ "p-cancelable-3.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "p-cancelable";
+ packageName = "p-cancelable";
+ version = "3.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "p-cancelable"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "p-cancelable"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-cancelable/-/p-cancelable-3.0.0.tgz";
+ sha512 = "mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==";
+ };
+ };
+ "p-timeout-5.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "p-timeout";
+ packageName = "p-timeout";
+ version = "5.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "p-timeout"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "p-timeout"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-timeout/-/p-timeout-5.0.2.tgz";
+ sha512 = "sEmji9Yaq+Tw+STwsGAE56hf7gMy9p0tQfJojIAamB7WHJYJKf1qlsg9jqBWG8q9VCxKPhZaP/AcXwEoBcYQhQ==";
+ };
+ };
+ "packet-reader-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "packet-reader";
+ packageName = "packet-reader";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "packet-reader"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "packet-reader"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz";
+ sha512 = "HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==";
+ };
+ };
+ "path-parse-1.0.7" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "path-parse";
+ packageName = "path-parse";
+ version = "1.0.7";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "path-parse"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "path-parse"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz";
+ sha512 = "LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==";
+ };
+ };
+ "pg-connection-string-2.5.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-connection-string";
+ packageName = "pg-connection-string";
+ version = "2.5.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-connection-string"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-connection-string"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz";
+ sha512 = "r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==";
+ };
+ };
+ "pg-int8-1.0.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-int8";
+ packageName = "pg-int8";
+ version = "1.0.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-int8"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-int8"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz";
+ sha512 = "WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==";
+ };
+ };
+ "pg-pool-3.4.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-pool";
+ packageName = "pg-pool";
+ version = "3.4.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-pool"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-pool"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz";
+ sha512 = "TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==";
+ };
+ };
+ "pg-protocol-1.5.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-protocol";
+ packageName = "pg-protocol";
+ version = "1.5.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-protocol"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-protocol"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz";
+ sha512 = "muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==";
+ };
+ };
+ "pg-types-2.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pg-types";
+ packageName = "pg-types";
+ version = "2.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pg-types"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pg-types"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz";
+ sha512 = "qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==";
+ };
+ };
+ "pgpass-1.0.5" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pgpass";
+ packageName = "pgpass";
+ version = "1.0.5";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pgpass"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pgpass"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz";
+ sha512 = "FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==";
+ };
+ };
+ "postgres-array-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-array";
+ packageName = "postgres-array";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-array"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-array"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz";
+ sha512 = "VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==";
+ };
+ };
+ "postgres-bytea-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-bytea";
+ packageName = "postgres-bytea";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-bytea"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-bytea"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz";
+ sha1 = "027b533c0aa890e26d172d47cf9ccecc521acd35";
+ };
+ };
+ "postgres-date-1.0.7" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-date";
+ packageName = "postgres-date";
+ version = "1.0.7";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-date"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-date"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz";
+ sha512 = "suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==";
+ };
+ };
+ "postgres-interval-1.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "postgres-interval";
+ packageName = "postgres-interval";
+ version = "1.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "postgres-interval"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "postgres-interval"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz";
+ sha512 = "9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==";
+ };
+ };
+ "pump-3.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "pump";
+ packageName = "pump";
+ version = "3.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "pump"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "pump"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz";
+ sha512 = "LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==";
+ };
+ };
+ "punycode-1.3.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "punycode";
+ packageName = "punycode";
+ version = "1.3.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "punycode"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "punycode"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz";
+ sha1 = "9653a036fb7c1ee42342f2325cceefea3926c48d";
+ };
+ };
+ "querystring-0.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "querystring";
+ packageName = "querystring";
+ version = "0.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "querystring"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "querystring"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz";
+ sha1 = "b209849203bb25df820da756e747005878521620";
+ };
+ };
+ "quick-lru-5.1.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "quick-lru";
+ packageName = "quick-lru";
+ version = "5.1.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "quick-lru"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "quick-lru"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz";
+ sha512 = "WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==";
+ };
+ };
+ "rechoir-0.7.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "rechoir";
+ packageName = "rechoir";
+ version = "0.7.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "rechoir"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "rechoir"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/rechoir/-/rechoir-0.7.0.tgz";
+ sha512 = "ADsDEH2bvbjltXEP+hTIAmeFekTFK0V2BTxMkok6qILyAJEXV0AFfoWcAq4yfll5VdIMd/RVXq0lR+wQi5ZU3Q==";
+ };
+ };
+ "resolve-1.20.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "resolve";
+ packageName = "resolve";
+ version = "1.20.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "resolve"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "resolve"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz";
+ sha512 = "wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==";
+ };
+ };
+ "resolve-alpn-1.2.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "resolve-alpn";
+ packageName = "resolve-alpn";
+ version = "1.2.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "resolve-alpn"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "resolve-alpn"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz";
+ sha512 = "0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==";
+ };
+ };
+ "resolve-from-5.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "resolve-from";
+ packageName = "resolve-from";
+ version = "5.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "resolve-from"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "resolve-from"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz";
+ sha512 = "qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==";
+ };
+ };
+ "responselike-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "responselike";
+ packageName = "responselike";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "responselike"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "responselike"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/responselike/-/responselike-2.0.0.tgz";
+ sha512 = "xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==";
+ };
+ };
+ "retry-0.13.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "retry";
+ packageName = "retry";
+ version = "0.13.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "retry"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "retry"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz";
+ sha512 = "XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==";
+ };
+ };
+ "sax-1.2.1" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "sax";
+ packageName = "sax";
+ version = "1.2.1";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "sax"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "sax"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz";
+ sha1 = "7b8e656190b228e81a66aea748480d828cd2d37a";
+ };
+ };
+ "split2-4.1.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "split2";
+ packageName = "split2";
+ version = "4.1.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "split2"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "split2"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz";
+ sha512 = "VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==";
+ };
+ };
+ "tarn-3.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "tarn";
+ packageName = "tarn";
+ version = "3.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "tarn"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "tarn"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz";
+ sha512 = "51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==";
+ };
+ };
+ "tildify-2.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "tildify";
+ packageName = "tildify";
+ version = "2.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "tildify"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "tildify"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz";
+ sha512 = "Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==";
+ };
+ };
+ "url-0.10.3" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "url";
+ packageName = "url";
+ version = "0.10.3";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "url"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "url"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/url/-/url-0.10.3.tgz";
+ sha1 = "021e4d9c7705f21bbf37d03ceb58767402774c64";
+ };
+ };
+ "uuid-3.3.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "uuid";
+ packageName = "uuid";
+ version = "3.3.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "uuid"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "uuid"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz";
+ sha512 = "yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==";
+ };
+ };
+ "wrappy-1.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "wrappy";
+ packageName = "wrappy";
+ version = "1.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "wrappy"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "wrappy"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz";
+ sha1 = "b5243d8f3ec1aa35f1364605bc0d1036e30ab69f";
+ };
+ };
+ "xml2js-0.4.19" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "xml2js";
+ packageName = "xml2js";
+ version = "0.4.19";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "xml2js"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "xml2js"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz";
+ sha512 = "esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==";
+ };
+ };
+ "xmlbuilder-9.0.7" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "xmlbuilder";
+ packageName = "xmlbuilder";
+ version = "9.0.7";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "xmlbuilder"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "xmlbuilder"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz";
+ sha1 = "132ee63d2ec5565c557e20f4c22df9aca686b10d";
+ };
+ };
+ "xtend-4.0.2" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "xtend";
+ packageName = "xtend";
+ version = "4.0.2";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "xtend"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "xtend"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz";
+ sha512 = "LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==";
+ };
+ };
+ "yocto-queue-1.0.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "yocto-queue";
+ packageName = "yocto-queue";
+ version = "1.0.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "yocto-queue"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "yocto-queue"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz";
+ sha512 = "9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==";
+ };
+ };
+ "yoctodelay-1.2.0" = {dependencies ? []}:
+
+ stdenv.mkDerivation {
+ name = "yoctodelay";
+ packageName = "yoctodelay";
+ version = "1.2.0";
+ extraDependencies = [];
+ buildInputs = [
+ jq
+ nodejs
+ ];
+ NODE_OPTIONS = "--preserve-symlinks";
+ unpackPhase = transitiveDepUnpackPhase { inherit dependencies; pkgName = "yoctodelay"; } + '''';
+ patchPhase = ''
+ if [ -f "package.json" ]; then
+ cat <<< $(jq 'del(.scripts)' package.json) > package.json
+ fi
+
+ '';
+ configurePhase = "true";
+ buildPhase = "true";
+ fixupPhase = "true";
+ installPhase = transitiveDepInstallPhase { inherit dependencies; pkgName = "yoctodelay"; };
+ doCheck = false;
+ doInstallCheck = false;
+ src = fetchurl {
+ url = "https://registry.npmjs.org/yoctodelay/-/yoctodelay-1.2.0.tgz";
+ sha512 = "12y/P9MSig9/5BEhBgylss+fkHiCRZCvYR81eH35NW9uw801cvJt31EAV+WOLcwZRZbLiIQl/hxcdXXXFmGvXg==";
+ };
+ };
+ };
+ jsnixDeps = {
+ async-retry = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "async-retry"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "async-retry";
+ packageName = "async-retry";
+ version = "1.3.3";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz";
+ sha512 = "wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "async-retry"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "async-retry"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "async-retry"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "async-retry"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "async-retry"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "async-retry"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "async-retry"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "async-retry"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "async-retry"; });
+ meta = {
+ description = "Retrying made simple, easy and async";
+ license = "MIT";
+ homepage = "https://github.com/vercel/async-retry#readme";
+ };
+ };
+ aws-sdk = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "aws-sdk"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "aws-sdk";
+ packageName = "aws-sdk";
+ version = "2.1047.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1047.0.tgz";
+ sha512 = "aZg6HzcwgRpXLi8HnpwBwK+NTXlWPjLSChvdeJ+/IE9912aoAKyaV+Ydo+9h6XH0cQhkvZ2u3pFINWZVbwo+TA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "aws-sdk"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "aws-sdk"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "aws-sdk"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "aws-sdk"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "aws-sdk"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "aws-sdk"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "aws-sdk"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "aws-sdk"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "aws-sdk"; });
+ meta = {
+ description = "AWS SDK for JavaScript";
+ license = "Apache-2.0";
+ homepage = "https://github.com/aws/aws-sdk-js";
+ };
+ };
+ dotenv = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "dotenv"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "dotenv";
+ packageName = "dotenv";
+ version = "10.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/dotenv/-/dotenv-10.0.0.tgz";
+ sha512 = "rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "dotenv"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "dotenv"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "dotenv"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "dotenv"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "dotenv"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "dotenv"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "dotenv"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "dotenv"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "dotenv"; });
+ meta = {
+ description = "Loads environment variables from .env file";
+ license = "BSD-2-Clause";
+ homepage = "https://github.com/motdotla/dotenv#readme";
+ };
+ };
+ exit-hook = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "exit-hook"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "exit-hook";
+ packageName = "exit-hook";
+ version = "3.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/exit-hook/-/exit-hook-3.0.0.tgz";
+ sha512 = "ElRvnoj3dvOc5WjnQx0CF66rS0xehV6eZdcmqZX17uOLPy3me43frl8UD73Frkx5Aq5kgziMDECjDJR2X1oBFQ==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "exit-hook"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "exit-hook"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "exit-hook"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "exit-hook"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "exit-hook"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "exit-hook"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "exit-hook"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "exit-hook"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "exit-hook"; });
+ meta = {
+ description = "Run some code when the process exits";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/exit-hook#readme";
+ };
+ };
+ got = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "got"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "got";
+ packageName = "got";
+ version = "12.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/got/-/got-12.0.0.tgz";
+ sha512 = "gNNNghQ1yw0hyzie1FLK6gY90BQlXU9zSByyRygnbomHPruKQ6hAKKbpO1RfNZp8b+qNzNipGeRG3tUelKcVsA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "got"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "got"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "got"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "got"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "got"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "got"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "got"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "got"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "got"; });
+ meta = {
+ description = "Human-friendly and powerful HTTP request library for Node.js";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/got#readme";
+ };
+ };
+ knex = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "knex"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "knex";
+ packageName = "knex";
+ version = "0.95.15";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/knex/-/knex-0.95.15.tgz";
+ sha512 = "Loq6WgHaWlmL2bfZGWPsy4l8xw4pOE+tmLGkPG0auBppxpI0UcK+GYCycJcqz9W54f2LiGewkCVLBm3Wq4ur/w==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "knex"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "knex"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "knex"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "knex"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "knex"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "knex"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "knex"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "knex"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "knex"; });
+ meta = {
+ description = "A batteries-included SQL query & schema builder for PostgresSQL, MySQL, CockroachDB, MSSQL and SQLite3";
+ license = "MIT";
+ homepage = "https://knexjs.org";
+ };
+ };
+ moment = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "moment"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "moment";
+ packageName = "moment";
+ version = "2.29.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz";
+ sha512 = "kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "moment"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "moment"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "moment"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "moment"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "moment"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "moment"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "moment"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "moment"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "moment"; });
+ meta = {
+ description = "Parse, validate, manipulate, and display dates";
+ license = "MIT";
+ homepage = "https://momentjs.com";
+ };
+ };
+ p-limit = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-limit"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-limit";
+ packageName = "p-limit";
+ version = "4.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz";
+ sha512 = "5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-limit"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-limit"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-limit"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-limit"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-limit"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-limit"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-limit"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-limit"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-limit"; });
+ meta = {
+ description = "Run multiple promise-returning & async functions with limited concurrency";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-limit#readme";
+ };
+ };
+ p-min-delay = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-min-delay"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-min-delay";
+ packageName = "p-min-delay";
+ version = "4.0.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-min-delay/-/p-min-delay-4.0.1.tgz";
+ sha512 = "Tgkn+fy2VYNWw9bLy4BwiF+1ZMIgTDBIpaIChi1HC3N4nwRpandJnG1jAEXiYCcrTZKYQJdBWzLJauAeYDXsBg==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-min-delay"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-min-delay"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-min-delay"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-min-delay"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-min-delay"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-min-delay"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-min-delay"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-min-delay"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-min-delay"; });
+ meta = {
+ description = "Delay a promise a minimum amount of time";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-min-delay#readme";
+ };
+ };
+ p-wait-for = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-wait-for"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-wait-for";
+ packageName = "p-wait-for";
+ version = "4.1.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-wait-for/-/p-wait-for-4.1.0.tgz";
+ sha512 = "i8nE5q++9h8oaQHWltS1Tnnv4IoMDOlqN7C0KFG2OdbK0iFJIt6CROZ8wfBM+K4Pxqfnq4C4lkkpXqTEpB5DZw==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-wait-for"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-wait-for"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-wait-for"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-wait-for"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-wait-for"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-wait-for"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-wait-for"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-wait-for"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-wait-for"; });
+ meta = {
+ description = "Wait for a condition to be true";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-wait-for#readme";
+ };
+ };
+ p-whilst = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "p-whilst"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "p-whilst";
+ packageName = "p-whilst";
+ version = "3.0.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/p-whilst/-/p-whilst-3.0.0.tgz";
+ sha512 = "vaiNNmeIUGtMzf121RTb3CCC0Nl4WNeHjbmPjRcwPo6vQiHEJRpHbeOcyLBZspuyz2yG+G2xwzVIiULd1Mk6MA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "p-whilst"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "p-whilst"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "p-whilst"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "p-whilst"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "p-whilst"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "p-whilst"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "p-whilst"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "p-whilst"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "p-whilst"; });
+ meta = {
+ description = "While a condition returns true, calls a function repeatedly, and then resolves the promise";
+ license = "MIT";
+ homepage = "https://github.com/sindresorhus/p-whilst#readme";
+ };
+ };
+ pg = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "pg"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "pg";
+ packageName = "pg";
+ version = "8.7.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz";
+ sha512 = "7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "pg"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "pg"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "pg"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "pg"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "pg"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "pg"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "pg"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "pg"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "pg"; });
+ meta = {
+ description = "PostgreSQL client - pure javascript & libpq with the same API";
+ license = "MIT";
+ homepage = "https://github.com/brianc/node-postgres";
+ };
+ };
+ ramda = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "ramda"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "ramda";
+ packageName = "ramda";
+ version = "0.27.1";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/ramda/-/ramda-0.27.1.tgz";
+ sha512 = "PgIdVpn5y5Yns8vqb8FzBUEYn98V3xcPgawAkkgj0YJ0qDsnHCiNmZYfOGMgOvoB0eWFLpYbhxUR3mxfDIMvpw==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "ramda"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "ramda"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "ramda"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "ramda"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "ramda"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "ramda"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "ramda"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "ramda"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "ramda"; });
+ meta = {
+ description = "A practical functional library for JavaScript programmers.";
+ license = "MIT";
+ homepage = "https://ramdajs.com/";
+ };
+ };
+ sqs-consumer = let
+ dependencies = [];
+ extraDependencies = [] ++
+ mkExtraDependencies
+ (pkgs // { inherit jsnixDeps dependencies; })
+ { pkgName = "sqs-consumer"; };
+ in
+ stdenv.mkDerivation {
+ inherit dependencies extraDependencies;
+ name = "sqs-consumer";
+ packageName = "sqs-consumer";
+ version = "5.6.0";
+ src = fetchurl {
+ url = "https://registry.npmjs.org/sqs-consumer/-/sqs-consumer-5.6.0.tgz";
+ sha512 = "p+K3UV8GwF1//Nfq7swbm/Un137IwxewzxapfTyyEVpdmzPKEDYrAzuGJvP87YWVSWzbkvxQ0By0vhamouGdxg==";
+ };
+ buildInputs = [ nodejs python3 makeWrapper jq ] ++
+ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.xcodebuild ]) ++
+ (mkExtraBuildInputs (pkgs // { inherit jsnixDeps dependencies; }) { pkgName = "sqs-consumer"; });
+ doFixup = false;
+ doStrip = false;
+ NODE_OPTIONS = "--preserve-symlinks";
+ passAsFile = [ "unpackScript" "buildScript" "installScript" ];
+ unpackScript = mkUnpackScript { dependencies = dependencies ++ extraDependencies;
+ pkgName = "sqs-consumer"; };
+ buildScript = mkBuildScript { inherit dependencies; pkgName = "sqs-consumer"; };
+ buildPhase = ''
+ source $unpackScriptPath
+ runHook preBuild
+ if [ -z "$preBuild" ]; then
+ runHook preInstall
+ fi
+ source $buildScriptPath
+ if [ -z "$postBuild" ]; then
+ runHook postBuild
+ fi
+ '';
+ patchPhase = ''
+ if [ -z "$prePatch" ]; then
+ runHook prePatch
+ fi
+
+ if [ -z "$postPatch" ]; then
+ runHook postPatch
+ fi
+ '';
+ installScript = mkInstallScript { pkgName = "sqs-consumer"; };
+ installPhase = ''
+ if [ -z "$preInstall" ]; then
+ runHook preInstall
+ fi
+ source $installScriptPath
+ if [ -z "$postInstall" ]; then
+ runHook postInstall
+ fi
+ '';
+ preInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preInstall"; pkgName = "sqs-consumer"; });
+ postInstall = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postInstall"; pkgName = "sqs-consumer"; });
+ preBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "preBuild"; pkgName = "sqs-consumer"; });
+ postBuild = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "postBuild"; pkgName = "sqs-consumer"; });
+ fixupPhase = "true";
+ installCheckPhase = (mkPhase (pkgs // { inherit jsnixDeps nodejs dependencies; }) { phase = "installCheckPhase"; pkgName = "sqs-consumer"; });
+ meta = {
+ description = "Build SQS-based Node applications without the boilerplate";
+ license = "Apache-2.0";
+ homepage = "https://github.com/BBC/sqs-consumer";
+ };
+ };
+ };
+ dedupedDeps = {
+ retry = sources."retry-0.13.1" {
+ dependencies = [];
+ };
+ base64-js = sources."base64-js-1.5.1" {
+ dependencies = [];
+ };
+ buffer = sources."buffer-4.9.2" {
+ dependencies = [];
+ };
+ events = sources."events-1.1.1" {
+ dependencies = [];
+ };
+ ieee754 = sources."ieee754-1.1.13" {
+ dependencies = [];
+ };
+ isarray = sources."isarray-1.0.0" {
+ dependencies = [];
+ };
+ jmespath = sources."jmespath-0.15.0" {
+ dependencies = [];
+ };
+ punycode = sources."punycode-1.3.2" {
+ dependencies = [];
+ };
+ querystring = sources."querystring-0.2.0" {
+ dependencies = [];
+ };
+ sax = sources."sax-1.2.1" {
+ dependencies = [];
+ };
+ url = sources."url-0.10.3" {
+ dependencies = [];
+ };
+ uuid = sources."uuid-3.3.2" {
+ dependencies = [];
+ };
+ xml2js = sources."xml2js-0.4.19" {
+ dependencies = [];
+ };
+ xmlbuilder = sources."xmlbuilder-9.0.7" {
+ dependencies = [];
+ };
+ "@sindresorhus/is" = sources."@sindresorhus/is-4.2.0" {
+ dependencies = [];
+ };
+ "@szmarczak/http-timer" = sources."@szmarczak/http-timer-5.0.1" {
+ dependencies = [];
+ };
+ "@types/cacheable-request" = sources."@types/cacheable-request-6.0.2" {
+ dependencies = [];
+ };
+ "@types/http-cache-semantics" = sources."@types/http-cache-semantics-4.0.1" {
+ dependencies = [];
+ };
+ "@types/keyv" = sources."@types/keyv-3.1.3" {
+ dependencies = [];
+ };
+ "@types/node" = sources."@types/node-17.0.2" {
+ dependencies = [];
+ };
+ "@types/responselike" = sources."@types/responselike-1.0.0" {
+ dependencies = [];
+ };
+ cacheable-lookup = sources."cacheable-lookup-6.0.4" {
+ dependencies = [];
+ };
+ cacheable-request = sources."cacheable-request-7.0.2" {
+ dependencies = [
+ (sources."get-stream-5.2.0" {
+ dependencies = [];
+ })
+ (sources."lowercase-keys-2.0.0" {
+ dependencies = [];
+ })
+ ];
+ };
+ clone-response = sources."clone-response-1.0.2" {
+ dependencies = [];
+ };
+ decompress-response = sources."decompress-response-6.0.0" {
+ dependencies = [
+ (sources."mimic-response-3.1.0" {
+ dependencies = [];
+ })
+ ];
+ };
+ defer-to-connect = sources."defer-to-connect-2.0.1" {
+ dependencies = [];
+ };
+ end-of-stream = sources."end-of-stream-1.4.4" {
+ dependencies = [];
+ };
+ form-data-encoder = sources."form-data-encoder-1.7.1" {
+ dependencies = [];
+ };
+ get-stream = sources."get-stream-6.0.1" {
+ dependencies = [];
+ };
+ http-cache-semantics = sources."http-cache-semantics-4.1.0" {
+ dependencies = [];
+ };
+ http2-wrapper = sources."http2-wrapper-2.1.10" {
+ dependencies = [];
+ };
+ json-buffer = sources."json-buffer-3.0.1" {
+ dependencies = [];
+ };
+ keyv = sources."keyv-4.0.4" {
+ dependencies = [];
+ };
+ lowercase-keys = sources."lowercase-keys-3.0.0" {
+ dependencies = [];
+ };
+ mimic-response = sources."mimic-response-1.0.1" {
+ dependencies = [];
+ };
+ normalize-url = sources."normalize-url-6.1.0" {
+ dependencies = [];
+ };
+ once = sources."once-1.4.0" {
+ dependencies = [];
+ };
+ p-cancelable = sources."p-cancelable-3.0.0" {
+ dependencies = [];
+ };
+ pump = sources."pump-3.0.0" {
+ dependencies = [];
+ };
+ quick-lru = sources."quick-lru-5.1.1" {
+ dependencies = [];
+ };
+ resolve-alpn = sources."resolve-alpn-1.2.1" {
+ dependencies = [];
+ };
+ responselike = sources."responselike-2.0.0" {
+ dependencies = [
+ (sources."lowercase-keys-2.0.0" {
+ dependencies = [];
+ })
+ ];
+ };
+ wrappy = sources."wrappy-1.0.2" {
+ dependencies = [];
+ };
+ colorette = sources."colorette-2.0.16" {
+ dependencies = [];
+ };
+ commander = sources."commander-7.2.0" {
+ dependencies = [];
+ };
+ debug = sources."debug-4.3.2" {
+ dependencies = [];
+ };
+ escalade = sources."escalade-3.1.1" {
+ dependencies = [];
+ };
+ esm = sources."esm-3.2.25" {
+ dependencies = [];
+ };
+ function-bind = sources."function-bind-1.1.1" {
+ dependencies = [];
+ };
+ getopts = sources."getopts-2.2.5" {
+ dependencies = [];
+ };
+ has = sources."has-1.0.3" {
+ dependencies = [];
+ };
+ interpret = sources."interpret-2.2.0" {
+ dependencies = [];
+ };
+ is-core-module = sources."is-core-module-2.8.0" {
+ dependencies = [];
+ };
+ lodash = sources."lodash-4.17.21" {
+ dependencies = [];
+ };
+ ms = sources."ms-2.1.2" {
+ dependencies = [];
+ };
+ path-parse = sources."path-parse-1.0.7" {
+ dependencies = [];
+ };
+ pg-connection-string = sources."pg-connection-string-2.5.0" {
+ dependencies = [];
+ };
+ rechoir = sources."rechoir-0.7.0" {
+ dependencies = [];
+ };
+ resolve = sources."resolve-1.20.0" {
+ dependencies = [];
+ };
+ resolve-from = sources."resolve-from-5.0.0" {
+ dependencies = [];
+ };
+ tarn = sources."tarn-3.0.2" {
+ dependencies = [];
+ };
+ tildify = sources."tildify-2.0.0" {
+ dependencies = [];
+ };
+ yocto-queue = sources."yocto-queue-1.0.0" {
+ dependencies = [];
+ };
+ yoctodelay = sources."yoctodelay-1.2.0" {
+ dependencies = [];
+ };
+ p-timeout = sources."p-timeout-5.0.2" {
+ dependencies = [];
+ };
+ buffer-writer = sources."buffer-writer-2.0.0" {
+ dependencies = [];
+ };
+ packet-reader = sources."packet-reader-1.0.0" {
+ dependencies = [];
+ };
+ pg-int8 = sources."pg-int8-1.0.1" {
+ dependencies = [];
+ };
+ pg-pool = sources."pg-pool-3.4.1" {
+ dependencies = [];
+ };
+ pg-protocol = sources."pg-protocol-1.5.0" {
+ dependencies = [];
+ };
+ pg-types = sources."pg-types-2.2.0" {
+ dependencies = [];
+ };
+ pgpass = sources."pgpass-1.0.5" {
+ dependencies = [];
+ };
+ postgres-array = sources."postgres-array-2.0.0" {
+ dependencies = [];
+ };
+ postgres-bytea = sources."postgres-bytea-1.0.0" {
+ dependencies = [];
+ };
+ postgres-date = sources."postgres-date-1.0.7" {
+ dependencies = [];
+ };
+ postgres-interval = sources."postgres-interval-1.2.0" {
+ dependencies = [];
+ };
+ split2 = sources."split2-4.1.0" {
+ dependencies = [];
+ };
+ xtend = sources."xtend-4.0.2" {
+ dependencies = [];
+ };
+ };
+ isolateDeps = {};
+in
+jsnixDeps // (if builtins.hasAttr "packageDerivation" packageNix then {
+ "${packageNix.name}" = jsnixDrvOverrides {
+ inherit dedupedDeps jsnixDeps isolateDeps;
+ drv_ = packageNix.packageDerivation;
+ };
+} else {})
\ No newline at end of file
diff --git a/ecs/import-bundles/package.nix b/ecs/import-bundles/package.nix
new file mode 100644
index 0000000..73d5e70
--- /dev/null
+++ b/ecs/import-bundles/package.nix
@@ -0,0 +1,59 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+{
+ name = "@ar.io/import-bundles";
+ version = "1.0.0";
+ main = "src/index.mjs";
+
+ dependencies = {
+ arbundles = "^0.6.13";
+ async-retry = "^1.3.3";
+ aws-sdk = "^2.1046.0";
+ dotenv = "^10.0.0";
+ exit-hook = "^3.0.0";
+ got = "^12.0.0";
+ knex = "^0.95.14";
+ moment = "latest";
+ pg = "^8.7.1";
+ p-limit = "^4.0.0";
+ p-wait-for = "^4.1.0";
+ p-whilst = "^3.0.0";
+ p-min-delay = "^4.0.1";
+ ramda = "^0.27.1";
+ sqs-consumer = "^5.6.0";
+ };
+
+ packageDerivation = { jsnixDeps, ... }@pkgs: {
+ buildInputs = [
+ pkgs.openssl
+ pkgs.makeWrapper
+ ];
+
+ postInstall = ''
+ mkdir -p $out/bin
+ mkdir -p $out/lib/node_modules/@arweave/import-bundles
+
+ cp -rT $(pwd) $out/lib/node_modules/@arweave/import-bundles
+ makeWrapper ${pkgs.nodejs_latest}/bin/node $out/bin/import-bundles-start \
+ --run "cd $out/lib/node_modules/@arweave/import-bundles" \
+ --set NODE_ENV production \
+ --prefix NODE_PATH : "./node_modules"
+ '';
+ };
+}
diff --git a/ecs/import-bundles/src/bundle.mjs b/ecs/import-bundles/src/bundle.mjs
new file mode 100644
index 0000000..c270616
--- /dev/null
+++ b/ecs/import-bundles/src/bundle.mjs
@@ -0,0 +1,26 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { getTagValue } from "./utils.mjs";
+
+export const isTxAns104 = (tx) => {
+ return (
+ getTagValue(tx.tags, "bundle-format") == "binary" &&
+ getTagValue(tx.tags, "bundle-version") == "2.0.0"
+ );
+};
diff --git a/ecs/import-bundles/src/index.mjs b/ecs/import-bundles/src/index.mjs
new file mode 100644
index 0000000..2c9b2f4
--- /dev/null
+++ b/ecs/import-bundles/src/index.mjs
@@ -0,0 +1,134 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
import https from "https";

import AWS from "aws-sdk";
import exitHook from "exit-hook";
import got from "got";
import R from "ramda";
import { Bundle, DataItem } from "arbundles";
import verifyAndIndexStream from "arbundles/stream";
import { Consumer } from "sqs-consumer";

import { createDbClient } from "./postgres.mjs";
import { shuffle } from "./utils.mjs";
+
// Set when the process receives a termination signal so long-running
// work can check it and drain gracefully.
// NOTE(review): nothing in this module reads exitSignaled yet — confirm
// it is consumed elsewhere before relying on it.
let exitSignaled = false;

exitHook(() => {
  exitSignaled = true;
});

// Endpoints of arweave.net origin nodes that last reported healthy.
const nodes = new Set();
+
/**
 * Refresh the set of healthy arweave.net origin nodes from the public
 * health endpoint. Origins reporting HTTP 200 are added; everything
 * else is dropped from the set. On total failure the current set is
 * left untouched.
 */
async function refreshNodes() {
  let jsonResponse;
  // NOTE: the original called an undefined `retry` helper (async-retry
  // was never imported); this inline loop keeps the intended 5 retries
  // without adding a dependency.
  for (let attempt = 0; attempt < 5; attempt++) {
    try {
      jsonResponse = await got("https://arweave.net/health").json();
      break;
    } catch (error) {
      console.error(error);
    }
  }

  if (typeof jsonResponse === "object" && Array.isArray(jsonResponse.origins)) {
    for (const origin of jsonResponse.origins) {
      if (origin.status === 200) {
        nodes.add(origin.endpoint);
      } else {
        // Fixed: Set has no `remove` method — the original call threw a
        // TypeError on the first unhealthy origin.
        nodes.delete(origin.endpoint);
      }
    }
  }
}
+
// Look up a transaction header by id, trying each known node in turn
// until one responds with a matching header. Per-node failures are
// logged and skipped; returns undefined when no node has the tx.
const getSpecificTxHeader = async (id) => {
  for (const node of nodes.values()) {
    let header;
    try {
      header = await got(node + "/tx/" + id).json();
    } catch (error) {
      console.error(error);
      continue;
    }
    if (typeof header === "object" && header.id === id) {
      return header;
    }
  }
  return undefined;
};
+
// knex clients, opened in the startup IIFE below before polling begins.
let dbRead;
let dbWrite;

// SQS consumer that drives bundle imports: one message per bundle tx id.
const app = Consumer.create({
  queueUrl: process.env.ARWEAVE_SQS_IMPORT_BUNDLES_URL,
  handleMessage: async (message) => {
    console.log("MESSAGE", message);
    // Fixed: the tx-header lookup is async and the original missed the
    // `await`, so `tx` was a pending Promise and `data_size` was always
    // undefined (parseInt -> NaN).
    const tx = await getSpecificTxHeader(message.tx_id);
    const txDataSize = parseInt(tx["data_size"]);
    // TODO(review): the original then called `getData(tx.id, { log })`,
    // but neither `getData` nor `log` is defined or imported anywhere in
    // this module, so every message crashed with a ReferenceError. The
    // call is removed until the data-fetch helper actually exists;
    // txDataSize is kept for that follow-up.
  },
  sqs: new AWS.SQS({
    httpOptions: {
      // Reuse connections across SQS requests.
      agent: new https.Agent({
        keepAlive: true,
      }),
    },
  }),
});
+
// Transport-level SQS failures (connection, credentials, receive errors).
app.on("error", (err) => {
  console.error("[SQS] ERROR", err.message);
});

// Errors thrown from handleMessage while processing an individual message.
app.on("processing_error", (err) => {
  console.error("[SQS] PROCESSING ERROR", err.message);
});
+
// Entry point: discover healthy nodes, open DB connections, then start
// polling SQS for bundle-import messages.
(async () => {
  console.log("Starting import-bundles job..");
  await refreshNodes();

  // Keep the healthy-node set fresh in the background (hourly).
  setInterval(async () => {
    try {
      await refreshNodes();
    } catch (error) {
      console.error("Failed to refresh nodes", error);
    }
  }, 1000 * 60 * 60);

  console.log("opening new dbWrite connection");
  dbWrite = await createDbClient({
    user: "write",
  });
  console.log("opening new dbRead connection");
  dbRead = await createDbClient({
    user: "read",
  });
  console.log("start polling sqs messages...");
  app.start();
})();
diff --git a/ecs/import-bundles/src/postgres.mjs b/ecs/import-bundles/src/postgres.mjs
new file mode 100644
index 0000000..fab34ed
--- /dev/null
+++ b/ecs/import-bundles/src/postgres.mjs
@@ -0,0 +1,147 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import "./env.mjs";
+import AWS from "aws-sdk";
+import knex from "knex";
+
// NOTE(review): this RDS client is never used anywhere in this module —
// confirm it is not needed before removing.
const rds = new AWS.RDS();

// Secrets Manager client used to fetch the DB proxy role credentials.
const awsSM = new AWS.SecretsManager({
  region: process.env.AWS_REGION,
});
+
/**
 * Fetch a secret string from AWS Secrets Manager.
 *
 * The original branched on every documented error code
 * (DecryptionFailureException, InternalServiceErrorException,
 * InvalidParameterException, ...) but every branch did exactly the same
 * thing — reject(err) — so the chain was dead weight copied from the
 * AWS sample code. All errors reject; the caller decides how to react.
 *
 * @param secretName SecretId to look up.
 * @returns the SecretString payload.
 */
function getSecretValue(secretName) {
  return new Promise((resolve, reject) => {
    awsSM.getSecretValue({ SecretId: secretName }, function (err, data) {
      if (err) {
        reject(err);
      } else {
        resolve(data.SecretString);
      }
    });
  });
}
+
/**
 * Build a knex/pg client for the gateway database.
 *
 * Credentials for both proxy roles are fetched from Secrets Manager
 * (secrets named "read" and "write"); hosts come from
 * ARWEAVE_DB_READ_HOST / ARWEAVE_DB_WRITE_HOST. A failed secret fetch
 * is logged and the corresponding credentials stay empty.
 *
 * @param {{ user: string }} opts - "read" selects the read role;
 *   anything else selects the write role.
 */
export async function createDbClient({ user }) {
  const secrets = {
    read: {
      username: "",
      password: "",
      url: process.env.ARWEAVE_DB_READ_HOST,
    },
    write: {
      username: "",
      password: "",
      url: process.env.ARWEAVE_DB_WRITE_HOST,
    },
  };

  // Fetch read first, then write — same order and logging as before.
  for (const role of ["read", "write"]) {
    try {
      const proxySecret = JSON.parse(await getSecretValue(role));
      secrets[role].username = proxySecret.username;
      secrets[role].password = proxySecret.password;
    } catch (error) {
      console.error(error);
    }
  }

  const roleSecret = user === "read" ? secrets.read : secrets.write;

  return await knex({
    client: "pg",
    pool: {
      min: 1,
      max: 2,
      acquireTimeoutMillis: 20000,
      idleTimeoutMillis: 30000,
      reapIntervalMillis: 40000,
    },
    connection: {
      host: roleSecret.url,
      user: roleSecret.username,
      database: "arweave",
      ssl: false,
      password: roleSecret.password,
      expirationChecker: () => true,
    },
  });
}
+
// The pg driver and knex don't know the destination column types,
// and they don't correctly serialize json fields, so this needs
// to be done manually.
//
// Fixed: the original reduced via `R`, but ramda is never imported in
// this module, so every call threw a ReferenceError. Plain
// Object.entries performs the same transform with no dependency.
// Objects/arrays are JSON-stringified; null/undefined and primitives
// pass through unchanged.
const serialize = (row) => {
  const result = {};
  for (const [key, value] of Object.entries(row)) {
    result[key] =
      value && typeof value == "object" ? JSON.stringify(value) : value;
  }
  return result;
};
+
/**
 * Insert `rows` into `table`, updating all non-conflict columns on
 * conflict with `conflictKeys` (PostgreSQL ON CONFLICT ... DO UPDATE).
 * Optionally runs inside `transaction`.
 */
const upsert = async (
  connection,
  { table, conflictKeys, rows, transaction }
) => {
  // Columns not part of the conflict target get refreshed from the
  // incoming (excluded) row.
  const assignments = [];
  for (const field of Object.keys(rows[0])) {
    if (!conflictKeys.includes(field)) {
      assignments.push(`${field} = excluded.${field}`);
    }
  }
  const updateFields = assignments.join(",");

  const query = connection.insert(rows).into(table);
  if (transaction) {
    query.transacting(transaction);
  }

  // Let knex build the INSERT, then append the conflict clause by hand.
  const { sql, bindings } = query.toSQL();
  const conflictColumns = conflictKeys.map((key) => `"${key}"`).join(",");
  const upsertSql = `${sql} ON CONFLICT (${conflictColumns}) DO UPDATE SET ${updateFields};`;

  return await connection.raw(upsertSql, bindings);
};
diff --git a/ecs/import-bundles/src/sqs.mjs b/ecs/import-bundles/src/sqs.mjs
new file mode 100644
index 0000000..ac6a5ef
--- /dev/null
+++ b/ecs/import-bundles/src/sqs.mjs
@@ -0,0 +1,64 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import AWS from "aws-sdk";
+
// Shared SQS client: up to 3 retries, 5s connect/read timeouts.
const sqs = new AWS.SQS({
  maxRetries: 3,
  httpOptions: { timeout: 5000, connectTimeout: 5000 },
});
+
// Yield successive slices of `arr` holding at most `n` elements each;
// the final slice may be shorter.
function* chunk(arr, n) {
  let start = 0;
  while (start < arr.length) {
    yield arr.slice(start, start + n);
    start += n;
  }
}
+
// Send a single JSON-encoded message to `queueUrl`. FIFO fields
// (group/dedup id) and DelaySeconds are passed through from `options`
// when provided.
export const enqueue = async (queueUrl, message, options) => {
  if (!queueUrl) {
    throw new Error(`Queue URL undefined`);
  }

  const params = {
    QueueUrl: queueUrl,
    MessageBody: JSON.stringify(message),
    MessageGroupId: options && options.messagegroup,
    MessageDeduplicationId: options && options.deduplicationId,
    DelaySeconds: options && options.delaySeconds,
  };

  await sqs.sendMessage(params).promise();
};
+
// Send `messages` to `queueUrl` in batches of 10 (the SQS batch limit),
// sequentially to keep ordering between batches.
export const enqueueBatch = async (queueUrl, messages) => {
  for (const batch of chunk(messages, 10)) {
    const entries = batch.map((message) => ({
      Id: message.id,
      MessageBody: JSON.stringify(message),
      MessageGroupId: message.messagegroup,
      MessageDeduplicationId: message.deduplicationId,
    }));
    await sqs
      .sendMessageBatch({ QueueUrl: queueUrl, Entries: entries })
      .promise();
  }
};
diff --git a/ecs/import-bundles/src/utils.mjs b/ecs/import-bundles/src/utils.mjs
new file mode 100644
index 0000000..acd1032
--- /dev/null
+++ b/ecs/import-bundles/src/utils.mjs
@@ -0,0 +1,64 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import R from "ramda";
+
// Inside-out Fisher–Yates shuffle, curried on the RNG so a seeded
// generator can be injected (e.g. for deterministic tests). Returns a
// new array; the input list is not mutated.
const shuffler = R.curry(function (random, list) {
  var idx = -1;
  var len = list.length;
  var position;
  var result = [];
  while (++idx < len) {
    // Pick a slot in [0, idx]; move its previous occupant up to idx and
    // place the new element there.
    position = Math.floor((idx + 1) * random());
    result[idx] = result[position];
    result[position] = list[idx];
  }
  return result;
});

// Default shuffler backed by Math.random.
export const shuffle = shuffler(Math.random);
+
// Decode a base64url string (RFC 4648 §5) into a Buffer: restore the
// standard base64 alphabet ('-' -> '+', '_' -> '/') and '=' padding
// before decoding.
export function fromB64Url(input) {
  const remainder = input.length % 4;
  const padding = remainder === 0 ? "" : "=".repeat(4 - remainder);

  const base64 = input.replace(/-/g, "+").replace(/_/g, "/") + padding;

  return Buffer.from(base64, "base64");
}
+
// Find the tag whose base64url-decoded name equals `name`
// (case-insensitive) and return its decoded value. Returns undefined
// when the tag is absent or any decode fails.
export const getTagValue = (tags, name) => {
  const wanted = name.toLowerCase();

  const match = tags.find((tag) => {
    try {
      return fromB64Url(tag.name).toString().toLowerCase() == wanted;
    } catch (error) {
      return undefined;
    }
  });

  if (!match) {
    return undefined;
  }

  try {
    return fromB64Url(match.value).toString();
  } catch (error) {
    return undefined;
  }
};
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..f55b70b
--- /dev/null
+++ b/package.json
@@ -0,0 +1,119 @@
+{
+ "name": "@ar.io/arweave-gateway",
+ "version": "1.0.0",
+ "description": "The code and infra for arweave.net",
+ "main": "dist/gateway/app.js",
+ "module": true,
+ "scripts": {
+ "build": "rimraf ./dist; tsc && cp ./src/gateway/routes/graphql-v2/schema/types.graphql ./dist/gateway/routes/graphql-v2/schema/types.graphql",
+ "build:lambda": "yarn run build && yarn run bundle:jobs",
+ "bundle:jobs": "npm run bundle:dispatch-txs && npm run bundle:export-chunks && npm run bundle:import-bundles && npm run bundle:import-txs && npm run bundle:import-chunks",
+ "bundle:dispatch-txs": "cross-env NODE_PATH=./node_modules node --max-old-space-size=4096 ./scripts/bundle-jobs.cjs ./dist/jobs/dispatch-txs.js",
+ "bundle:export-chunks": "cross-env NODE_PATH=./node_modules node --max-old-space-size=4096 ./scripts/bundle-jobs.cjs ./dist/jobs/export-chunks.js",
+ "bundle:import-bundles": "cross-env NODE_PATH=./node_modules node --max-old-space-size=4096 ./scripts/bundle-jobs.cjs ./dist/jobs/import-bundles.js",
+ "bundle:import-txs": "cross-env NODE_PATH=./node_modules node --max-old-space-size=4096 ./scripts/bundle-jobs.cjs ./dist/jobs/import-txs.js",
+ "bundle:import-chunks": "cross-env NODE_PATH=./node_modules node --max-old-space-size=4096 ./scripts/bundle-jobs.cjs ./dist/jobs/import-chunks.js",
+ "graphql-types": "graphql-codegen --config codegen.yml",
+ "postinstall": "patch-package"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/ar-io/arweave-gateway.git"
+ },
+ "keywords": [],
+ "author": "ArweaveTeam",
+ "license": "GPL-3.0",
+ "bugs": {
+ "url": "https://github.com/ar-io/arweave-gateway/issues"
+ },
+ "homepage": "https://github.com/ar-io/arweave-gateway#readme",
+ "resolutions": {
+ "**/ethers": "^5.5.4",
+ "**/lodash": "4.17.21",
+ "**/uuid": "7.x",
+ "**/@szmarczak/http-timer": "4.x"
+ },
+ "dependencies": {
+ "@hapi/joi": "^17.1.1",
+ "@types/node-cron": "^3.0.1",
+ "abort-controller": "^3.0.0",
+ "apollo-server-express": "^3.6.2",
+ "arbundles": "^0.6.13",
+ "arweave": "^1.10.23",
+ "asn1.js": "^5.4.1",
+ "async-retry": "^1.3.3",
+ "aws-sdk": "^2.1063.0",
+ "base64url": "^3.0.1",
+ "body-parser": "^1.19.1",
+ "browserify": "^17.0.0",
+ "bufferutil": "^4.0.6",
+ "cloneable-readable": "^2.1.0",
+ "crypto-js": "^4.1.1",
+ "crypto-random-string": "^4.0.0",
+ "dotenv": "^14.3.2",
+ "encoding": "^0.1.13",
+ "express": "^4.17.2",
+ "express-async-errors": "^3.1.1",
+ "express-async-handler": "^1.2.0",
+ "express-promise-router": "^4.1.1",
+ "express-validator": "^6.14.0",
+ "got": "11.x",
+ "graphql": "^16.2.0",
+ "graphql-fields": "^2.0.3",
+ "graphql-tools": "^8.2.0",
+ "helmet": "^5.0.2",
+ "http-errors": "^2.0.0",
+ "js-sha256": "^0.9.0",
+ "knex": "^1.0.1",
+ "lodash": "^4.17.21",
+ "mime": "^3.0.0",
+ "moment": "^2.29.1",
+ "morgan": "^1.10.0",
+ "node-cron": "^3.0.0",
+ "node-fetch": "2.x",
+ "patch-package": "^6.4.7",
+ "path": "^0.12.7",
+ "pg": "^8.7.1",
+ "pg-query-stream": "^4.2.1",
+ "postinstall-postinstall": "^2.1.0",
+ "pump": "^3.0.0",
+ "ramda": "^0.28.0",
+ "rfc4648": "^1.5.1",
+ "sha256": "^0.2.0",
+ "shortid": "^2.2.16",
+ "tar": "^6.1.11",
+ "utf-8-validate": "^5.0.8",
+ "webidl-conversions": "^7.0.0",
+ "winston": "^3.4.0"
+ },
+ "devDependencies": {
+ "@graphql-codegen/cli": "2.4.0",
+ "@graphql-codegen/typescript": "2.4.2",
+ "@graphql-codegen/typescript-resolvers": "2.4.3",
+ "@types/async-retry": "^1.4.3",
+ "@types/aws-lambda": "^8.10.92",
+ "@types/chai": "^4.3.0",
+ "@types/express": "^4.17.13",
+ "@types/express-validator": "^3.0.0",
+ "@types/graphql-fields": "^1.3.4",
+ "@types/hapi__joi": "^17.1.8",
+ "@types/helmet": "4.0.0",
+ "@types/http-errors": "^1.8.2",
+ "@types/mocha": "^9.1.0",
+ "@types/node": "^17.0.12",
+ "@types/node-fetch": "^3.0.3",
+ "@types/pg": "^8.6.4",
+ "@types/pump": "^1.1.1",
+ "@types/shortid": "0.0.29",
+ "chai": "^4.3.6",
+ "cross-env": "^7.0.3",
+ "esmify": "^2.1.1",
+ "mocha": "^9.2.0",
+ "nanoid": "^3.2.0",
+ "rimraf": "^3.0.2",
+ "semver": "*",
+ "ts-node": "^10.4.0",
+ "ts-node-dev": "^1.1.8",
+ "typescript": "4.x"
+ }
+}
diff --git a/patches/@szmarczak+http-timer+4.0.6.patch b/patches/@szmarczak+http-timer+4.0.6.patch
new file mode 100644
index 0000000..5dac369
--- /dev/null
+++ b/patches/@szmarczak+http-timer+4.0.6.patch
@@ -0,0 +1,13 @@
+diff --git a/node_modules/@szmarczak/http-timer/dist/source/index.js b/node_modules/@szmarczak/http-timer/dist/source/index.js
+index 6f07245..1fe63a9 100644
+--- a/node_modules/@szmarczak/http-timer/dist/source/index.js
++++ b/node_modules/@szmarczak/http-timer/dist/source/index.js
+@@ -2,7 +2,7 @@
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const defer_to_connect_1 = require("defer-to-connect");
+ const util_1 = require("util");
+-const nodejsMajorVersion = Number(process.versions.node.split('.')[0]);
++const nodejsMajorVersion = 16;
+ const timer = (request) => {
+ if (request.timings) {
+ return request.timings;
diff --git a/patches/knex+1.0.1.patch b/patches/knex+1.0.1.patch
new file mode 100644
index 0000000..08e356f
--- /dev/null
+++ b/patches/knex+1.0.1.patch
@@ -0,0 +1,39 @@
+diff --git a/node_modules/knex/lib/knex-builder/internal/config-resolver.js b/node_modules/knex/lib/knex-builder/internal/config-resolver.js
+index dc2f322..8c9c380 100644
+--- a/node_modules/knex/lib/knex-builder/internal/config-resolver.js
++++ b/node_modules/knex/lib/knex-builder/internal/config-resolver.js
+@@ -1,3 +1,4 @@
++const Dialect = require('../../dialects/postgres/index.js');
+ const Client = require('../../client');
+ const { SUPPORTED_CLIENTS } = require('../../constants');
+
+@@ -5,8 +6,6 @@ const parseConnection = require('./parse-connection');
+ const { resolveClientNameWithAliases } = require('../../util/helpers');
+
+ function resolveConfig(config) {
+- let Dialect;
+- let resolvedConfig;
+
+ // If config is a string, try to parse it
+ const parsedConfig =
+@@ -19,11 +18,9 @@ function resolveConfig(config) {
+ arguments.length === 0 ||
+ (!parsedConfig.client && !parsedConfig.dialect)
+ ) {
+- Dialect = Client;
+ }
+ // If user provided Client constructor as a parameter, use it
+ else if (typeof parsedConfig.client === 'function') {
+- Dialect = parsedConfig.client;
+ }
+ // If neither applies, let's assume user specified name of a client or dialect as a string
+ else {
+@@ -34,8 +31,6 @@ function resolveConfig(config) {
+ );
+ }
+
+- const resolvedClientName = resolveClientNameWithAliases(clientName);
+- Dialect = require(`../../dialects/${resolvedClientName}/index.js`);
+ }
+
+ // If config connection parameter is passed as string, try to parse it
diff --git a/scripts/bundle-jobs.cjs b/scripts/bundle-jobs.cjs
new file mode 100644
index 0000000..51b63db
--- /dev/null
+++ b/scripts/bundle-jobs.cjs
@@ -0,0 +1,47 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
const fs = require("fs");
const path = require("path");
const browserify = require("browserify");

// Bundle a compiled job entry point (dist/jobs/<name>.js) into a single
// self-contained dist/jobs/<name>-min.js for deployment.
const jobJs = process.argv[2];

// Fixed: the original crashed with an unhelpful path error when the
// argument was omitted.
if (!jobJs) {
  console.error("usage: node bundle-jobs.cjs <dist/jobs/entry.js>");
  process.exit(1);
}

const entry = process.cwd() + "/dist/jobs/" + path.basename(jobJs);
console.log(entry);

const b = browserify([entry], {
  bare: true,
  node: true,
  noBuiltins: true,
  standalone: "global",
});

// pg-native is an optional native addon; keep it out of the bundle.
b.exclude("pg-native");

const outFile =
  path.resolve(process.cwd(), "./dist/jobs/") +
  "/" +
  path.basename(jobJs, ".js") +
  "-min.js";
const stream = fs.createWriteStream(outFile);

// Fixed: the original created an unused Promise resolved on the stream's
// "end" event — writable streams emit "finish", not "end", so it could
// never settle and was dead code. Report write failures instead.
stream.on("error", (error) => {
  console.error(error);
  process.exitCode = 1;
});

b.bundle().pipe(stream);
diff --git a/sql/gateway-schema.sql b/sql/gateway-schema.sql
new file mode 100644
index 0000000..49306d0
--- /dev/null
+++ b/sql/gateway-schema.sql
@@ -0,0 +1,421 @@
+ -- Arweave Gateway
+ -- Copyright (C) 2022 Permanent Data Solutions, Inc
+
+ -- This program is free software: you can redistribute it and/or modify
+ -- it under the terms of the GNU General Public License as published by
+ -- the Free Software Foundation, either version 3 of the License, or
+ -- (at your option) any later version.
+
+ -- This program is distributed in the hope that it will be useful,
+ -- but WITHOUT ANY WARRANTY; without even the implied warranty of
+ -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ -- GNU General Public License for more details.
+
+ -- You should have received a copy of the GNU General Public License
 -- along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+
+--
+-- Name: blocks; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.blocks (
+ id character(64) NOT NULL,
+ height integer NOT NULL,
+ mined_at timestamp without time zone NOT NULL,
+ txs jsonb NOT NULL,
+ created_at timestamp without time zone DEFAULT now() NOT NULL,
+ extended jsonb,
+ previous_block character varying NOT NULL
+);
+
+
+ALTER TABLE public.blocks OWNER TO root;
+
+--
+-- Name: blocks_tx_map; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.blocks_tx_map (
+ tx_id character(43) NOT NULL,
+ block_id character(64)
+);
+
+
+ALTER TABLE public.blocks_tx_map OWNER TO root;
+
+--
+-- Name: bundle_status; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.bundle_status (
+ id character(43) NOT NULL,
+ created_at timestamp without time zone DEFAULT now() NOT NULL,
+ updated_at timestamp without time zone,
+ attempts smallint DEFAULT 0 NOT NULL,
+ status character varying,
+ error character varying,
+ bundle_meta text
+);
+
+
+ALTER TABLE public.bundle_status OWNER TO root;
+
+--
+-- Name: chunks; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.chunks (
+ data_root character varying NOT NULL,
+ data_size bigint NOT NULL,
+ "offset" bigint NOT NULL,
+ data_path character varying NOT NULL,
+ chunk_size integer NOT NULL,
+ exported_started_at timestamp without time zone,
+ exported_completed_at timestamp without time zone,
+ created_at timestamp without time zone DEFAULT now() NOT NULL
+);
+
+
+ALTER TABLE public.chunks OWNER TO root;
+
+--
+-- Name: hash_list; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.hash_list (
+ indep_hash character varying
+);
+
+
+ALTER TABLE public.hash_list OWNER TO root;
+
+--
+-- Name: tags; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.tags (
+ tx_id character(43) NOT NULL,
+ index integer NOT NULL,
+ name character varying NOT NULL,
+ value character varying NOT NULL,
+ created_at timestamp without time zone DEFAULT now() NOT NULL
+);
+
+
+ALTER TABLE public.tags OWNER TO root;
+
+--
+-- Name: tags_grouped; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.tags_grouped (
+ tx_id character(43) NOT NULL,
+ tags jsonb
+);
+
+
+ALTER TABLE public.tags_grouped OWNER TO root;
+
+--
+-- Name: transactions; Type: TABLE; Schema: public; Owner: root
+--
+
+CREATE TABLE public.transactions (
+ id character(43) NOT NULL,
+ owner character varying,
+ tags jsonb,
+ target character(43),
+ quantity character varying,
+ reward character varying,
+ signature character varying,
+ last_tx character varying,
+ data_size bigint,
+ content_type character varying,
+ format smallint,
+ created_at timestamp without time zone DEFAULT now() NOT NULL,
+ deleted_at timestamp without time zone,
+ height integer,
+ owner_address character(43),
+ data_root character(43),
+ parent character(43)
+);
+
+
+ALTER TABLE public.transactions OWNER TO root;
+
+--
+-- Name: blocks blocks_pkey; Type: CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.blocks
+ ADD CONSTRAINT blocks_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: blocks_tx_map blocks_tx_map_pkey; Type: CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.blocks_tx_map
+ ADD CONSTRAINT blocks_tx_map_pkey PRIMARY KEY (tx_id);
+
+
+--
+-- Name: bundle_status bundle_status_pkey; Type: CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.bundle_status
+ ADD CONSTRAINT bundle_status_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: chunks chunks_pkey; Type: CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.chunks
+ ADD CONSTRAINT chunks_pkey PRIMARY KEY (data_root, data_size, "offset");
+
+
+--
+-- Name: tags_grouped tags_grouped_pkey; Type: CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.tags_grouped
+ ADD CONSTRAINT tags_grouped_pkey PRIMARY KEY (tx_id);
+
+
+--
+-- Name: tags tags_pkey; Type: CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.tags
+ ADD CONSTRAINT tags_pkey PRIMARY KEY (tx_id, index);
+
+
+--
+-- Name: transactions transactions_pkey; Type: CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.transactions
+ ADD CONSTRAINT transactions_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: blocks_created_at; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX blocks_created_at ON public.blocks USING btree (created_at);
+
+
+--
+-- Name: blocks_height; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE UNIQUE INDEX blocks_height ON public.blocks USING btree (height);
+
+
+--
+-- Name: blocks_height_sorted; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE UNIQUE INDEX blocks_height_sorted ON public.blocks USING btree (height DESC);
+
+
+--
+-- Name: blocks_id_hash; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX blocks_id_hash ON public.blocks USING hash (id);
+
+
+--
+-- Name: blocks_tx_block_id_hash; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX blocks_tx_block_id_hash ON public.blocks_tx_map USING hash (block_id);
+
+
+--
+-- Name: chunks_created_at; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX chunks_created_at ON public.chunks USING btree (created_at);
+
+
+--
+-- Name: chunks_data_root; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX chunks_data_root ON public.chunks USING hash (data_root);
+
+
+--
+-- Name: chunks_data_root_data_size; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX chunks_data_root_data_size ON public.chunks USING btree (data_root, data_size);
+
+
+--
+-- Name: chunks_exported_completed_at; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX chunks_exported_completed_at ON public.chunks USING btree (exported_completed_at);
+
+
+--
+-- Name: chunks_exported_started_at; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX chunks_exported_started_at ON public.chunks USING btree (exported_started_at);
+
+
+--
+-- Name: index_created_at; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX index_created_at ON public.bundle_status USING btree (created_at);
+
+
+--
+-- Name: index_updated_at; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX index_updated_at ON public.bundle_status USING btree (updated_at);
+
+
+--
+-- Name: tags_name; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX tags_name ON public.tags USING hash (name);
+
+
+--
+-- Name: tags_name_txid; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX tags_name_txid ON public.tags USING btree (name, tx_id);
+
+
+--
+-- Name: tags_name_value; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX tags_name_value ON public.tags USING btree (name, value);
+
+
+--
+-- Name: tags_tx_id; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX tags_tx_id ON public.tags USING hash (tx_id);
+
+
+--
+-- Name: tags_value; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX tags_value ON public.tags USING hash (value);
+
+
+--
+-- Name: transactions_created_at; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_created_at ON public.transactions USING btree (created_at DESC);
+
+
+--
+-- Name: transactions_height; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_height ON public.transactions USING btree (height);
+
+
+--
+-- Name: transactions_height_id_sorted; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_height_id_sorted ON public.transactions USING btree (height DESC, id);
+
+
+--
+-- Name: transactions_height_sorted; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_height_sorted ON public.transactions USING btree (height DESC);
+
+
+--
+-- Name: transactions_owner_address_hash; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_owner_address_hash ON public.transactions USING hash (owner_address);
+
+
+--
+-- Name: transactions_owner_hash; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_owner_hash ON public.transactions USING hash (id);
+
+
+--
+-- Name: transactions_parent; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_parent ON public.transactions USING hash (parent);
+
+
+--
+-- Name: transactions_target; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX transactions_target ON public.transactions USING hash (target);
+
+
+--
+-- Name: tx_id_hash; Type: INDEX; Schema: public; Owner: root
+--
+
+CREATE INDEX tx_id_hash ON public.blocks_tx_map USING hash (tx_id);
+
+--
+-- Name: tags tags_tx_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.tags
+ ADD CONSTRAINT tags_tx_id_fkey FOREIGN KEY (tx_id) REFERENCES public.transactions(id) ON UPDATE CASCADE ON DELETE CASCADE;
+
+
+--
+-- Name: transactions transactions_height_fkey; Type: FK CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.transactions
+ ADD CONSTRAINT transactions_height_fkey FOREIGN KEY (height) REFERENCES public.blocks(height) ON UPDATE SET NULL ON DELETE SET NULL DEFERRABLE;
+
+
+--
+-- Name: blocks_tx_map transactions_map_block_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: root
+--
+
+ALTER TABLE ONLY public.blocks_tx_map
+ ADD CONSTRAINT transactions_map_block_id_fkey FOREIGN KEY (block_id) REFERENCES public.blocks(id) ON UPDATE CASCADE ON DELETE CASCADE;
+
+
--
-- Name: transactions transactions_parent_fkey; Type: FK CONSTRAINT; Schema: public; Owner: root
--

ALTER TABLE ONLY public.transactions
    ADD CONSTRAINT transactions_parent_fkey FOREIGN KEY (parent) REFERENCES public.transactions(id);

-- NOTE(review): the dump declared a second, byte-identical constraint
-- (transactions_parent_fkey1) on the same column referencing the same
-- target. It added no integrity guarantee and doubled FK validation
-- work on every write, so it is dropped here. On a live database run:
--   ALTER TABLE public.transactions DROP CONSTRAINT transactions_parent_fkey1;
diff --git a/src/cli/queue.ts b/src/cli/queue.ts
new file mode 100644
index 0000000..c2d34b5
--- /dev/null
+++ b/src/cli/queue.ts
@@ -0,0 +1,100 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { SQS } from "aws-sdk";
+import { readFileSync } from "fs";
+import { sequentialBatch } from "../lib/helpers";
+import { enqueueBatch } from "../lib/queues";
+import { ImportTx } from "../interfaces/messages";
+
+handler();
+
+/**
+ * CLI entrypoint: reads a file of transaction ids (one per line) and
+ * enqueues them onto the given SQS queue.
+ *
+ * Usage: node queue.js <csv-path> <queue-url>
+ *
+ * Rows are consumed in sequential batches of 50, each fanned out as up to
+ * five parallel enqueueBatch calls of 10 messages (the SQS batch limit).
+ * The original hand-copied the five slice/enqueue calls and also enqueued
+ * empty slices when the final batch was short; both fixed here.
+ */
+export async function handler(): Promise<void> {
+  const args = process.argv.slice(2);
+  const csvPath = args[0];
+  const queueUrl = args[1];
+
+  const rows = readFileSync(csvPath, "utf8").split("\n");
+
+  let count = 0;
+  const total = rows.length;
+
+  console.log(`queueUrl: ${queueUrl}\ninputData: ${total} rows`);
+
+  await sequentialBatch(rows, 50, async (batch: string[]) => {
+    const enqueues = [];
+
+    for (let start = 0; start < batch.length; start += 10) {
+      const slice = batch.slice(start, start + 10);
+      if (slice.length > 0) {
+        enqueues.push(
+          enqueueBatch(
+            queueUrl,
+            slice.map((id) => {
+              return { id, message: { id } };
+            })
+          )
+        );
+      }
+    }
+
+    await Promise.all(enqueues);
+
+    count = count + batch.length;
+    console.log(`${count}/${total}`);
+  });
+}
diff --git a/src/data/transactions.ts b/src/data/transactions.ts
new file mode 100644
index 0000000..32a54e9
--- /dev/null
+++ b/src/data/transactions.ts
@@ -0,0 +1,243 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { Logger } from "winston";
+import fetch from "node-fetch";
+import got from "got";
+import {
+ fetchTransactionData,
+ getTagValue,
+ Tag,
+ DataBundleWrapper,
+} from "../lib/arweave";
+import { Readable } from "stream";
+import { getStream, putStream, put, get, objectHeader } from "../lib/buckets";
+import { query as queryChunks } from "../database/chunk-db";
+import { query as transactionsQuery } from "../database/transaction-db";
+import { getConnectionPool } from "../database/postgres";
+import { NotFound } from "http-errors";
+import Knex from "knex";
+import { streamToJson, bufferToStream, fromB64Url } from "../lib/encoding";
+import { b64UrlDecode } from "arweave/node/lib/utils";
+
+// Shape of a resolved transaction data payload, regardless of which
+// backing source (S3 tx cache, chunk cache, parent bundle, or an Arweave
+// node) produced it.
+interface DataStream {
+  status?: number | undefined;
+  stream?: Readable;
+  contentType?: string;
+  contentLength?: number;
+  cached?: boolean; // true only when served from the S3 tx cache
+  tags?: Tag[];
+}
+
+/**
+ * Resolve the data payload for a transaction id, trying sources in order:
+ *   1. the S3 tx-data cache,
+ *   2. (for bundle data items) the parent bundle's cached data,
+ *   3. the S3 chunk cache, only when every chunk is present,
+ *   4. the Arweave nodes directly.
+ *
+ * @throws NotFound when no source can produce the data.
+ */
+export const getData = async (
+  txid: string,
+  { log }: { log: Logger }
+): Promise<DataStream> => {
+  log.info("[get-data] searching for tx: s3 tx cache", { txid });
+  const s3CacheResponse = await streamCachedData({ txid, log });
+
+  if (s3CacheResponse) {
+    return s3CacheResponse;
+  }
+
+  const connection = getConnectionPool("read");
+
+  const [txHeader] = await transactionsQuery(connection, {
+    id: txid,
+    limit: 1,
+    select: ["data_root", "data_size", "content_type", "parent"],
+  });
+
+  try {
+    // A non-null parent means this tx is a data item inside a bundle:
+    // fetch the parent bundle and extract the item from its JSON payload.
+    if (txHeader && txHeader.data_size > 0 && txHeader.parent) {
+      log.info(`[get-data] item is data bundle item, searching for parent tx`, {
+        txid,
+        // fix: the original logged an undefined `bundlerDataPath` variable
+        // here, which threw a ReferenceError on this code path.
+        parent: txHeader.parent,
+      });
+      const parent = await getData(txHeader.parent, { log });
+      if (parent.stream) {
+        log.info(`[get-data] item is data bundle item, found parent tx`, {
+          txid,
+          parent: txHeader.parent,
+        });
+        const parentData = await streamToJson(parent.stream);
+        const item = parentData.items.find((item) => {
+          return item.id == txid;
+        });
+
+        if (item) {
+          const data = fromB64Url(item.data);
+          return {
+            stream: bufferToStream(data),
+            contentType: getTagValue(item.tags, "content-type"),
+            contentLength: data.byteLength,
+            status: parent.status,
+          };
+        }
+      }
+    }
+
+    log.info("[get-data] searching for tx: s3 chunk cache", { txid });
+
+    if (txHeader && txHeader.data_root && txHeader.data_size > 0) {
+      const contentType = txHeader.content_type || undefined;
+      const contentLength = parseInt(txHeader.data_size);
+
+      const chunks = (await queryChunks(connection, {
+        select: ["offset", "chunk_size"],
+        order: "asc",
+      }).where({ data_root: txHeader.data_root })) as {
+        offset: number;
+        chunk_size: number;
+      }[];
+
+      const cachedChunksSum = chunks.reduce(
+        (carry, { chunk_size }) => carry + (chunk_size || 0),
+        0
+      );
+
+      // Only stream from the chunk cache when the cached chunks cover the
+      // entire tx data.
+      const hasAllChunks = cachedChunksSum == contentLength;
+
+      if (!hasAllChunks) {
+        // fix: the original emitted this warning unconditionally, even
+        // when the cache was complete.
+        log.warn(`[get-data] cached chunks do not equal tx data_size`, {
+          txid,
+          cachedChunksSum,
+          contentLength,
+          hasAllChunks,
+        });
+      }
+
+      if (hasAllChunks) {
+        const { stream } = await streamCachedChunks({
+          offsets: chunks.map((chunk) => chunk.offset),
+          root: txHeader.data_root,
+        });
+
+        if (stream) {
+          return {
+            stream,
+            contentType,
+            contentLength,
+          };
+        }
+      }
+    }
+  } catch (error) {
+    log.error(error);
+  }
+
+  // Last resort: fetch directly from the Arweave nodes.
+  try {
+    log.info("[get-data] searching for tx: arweave nodes", { txid });
+    const { stream, contentType, contentLength, tags } =
+      await fetchTransactionData(txid);
+
+    if (stream) {
+      return {
+        contentType,
+        contentLength,
+        stream,
+        tags,
+      };
+    }
+  } catch (error) {
+    log.error(error);
+  }
+
+  throw new NotFound();
+};
+
+/**
+ * Attempt to stream a cached tx payload from the "tx-data" bucket
+ * (key: tx/<txid>). Resolves undefined on a plain cache miss.
+ */
+export const streamCachedData = async ({
+  txid,
+  log,
+}: {
+  log?: Logger;
+  txid: string;
+}): Promise => {
+  if (log) {
+    log.info(`[get-data] begin streamCachedData`, { txid });
+  }
+  try {
+    const maybeStream = await getStream("tx-data", `tx/${txid}`);
+
+    if (maybeStream) {
+      const { stream, contentType, contentLength, tags } = maybeStream;
+      return {
+        stream,
+        contentType,
+        contentLength,
+        tags,
+        cached: true,
+      };
+    }
+  } catch (error: any) {
+    // A NotFound error code is an ordinary cache miss and falls through to
+    // resolve undefined. NOTE(review): any *other* S3 failure is logged and
+    // then rethrown as NotFound — confirm masking non-404 errors as 404 is
+    // intentional.
+    if (error.code != "NotFound") {
+      if (log) {
+        log.info("[get-data] error in streamCachedData", error || "");
+      } else {
+        console.error("[get-data] error in streamCachedData", error || "");
+      }
+
+      throw new NotFound();
+    }
+  }
+};
+
+/**
+ * Stream a tx's data by concatenating its cached chunks from S3, in the
+ * order given by `offsets`.
+ *
+ * @throws when the first chunk object does not exist (fail fast before
+ *   returning a stream), or emits an "error" on the stream for later
+ *   chunk failures.
+ */
+export const streamCachedChunks = async ({
+  root,
+  offsets,
+}: {
+  root: string;
+  offsets: number[];
+}): Promise<{
+  stream: Readable;
+}> => {
+  let index = 0;
+
+  // will throw an error if the first chunk doesn't exist
+  await objectHeader("tx-data", `chunks/${root}/${offsets[0]}`);
+
+  const stream = new Readable({
+    autoDestroy: true,
+    read: async function () {
+      try {
+        const offset = offsets[index];
+
+        // fix: use an explicit undefined check — the original's falsy
+        // check (`!offset`) would terminate the stream early on a
+        // legitimate offset of 0.
+        if (offset === undefined) {
+          this.push(null);
+          return;
+        }
+
+        const { Body } = await get("tx-data", `chunks/${root}/${offset}`);
+
+        if (Body) {
+          index = index + 1;
+          this.push(Body);
+          return;
+        }
+
+        throw new NotFound();
+      } catch (error) {
+        this.emit("error", error);
+        this.destroy();
+      }
+    },
+  });
+
+  return {
+    stream,
+  };
+};
diff --git a/src/database/block-db.ts b/src/database/block-db.ts
new file mode 100644
index 0000000..4ec02bb
--- /dev/null
+++ b/src/database/block-db.ts
@@ -0,0 +1,282 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import * as R from "ramda";
+import { Knex } from "knex";
+import { ImportTx } from "../interfaces/messages";
+import { Block } from "../lib/arweave";
+import { upsert, DBConnection } from "./postgres";
+import moment from "moment";
+import { pick, transform } from "lodash";
+import { sequentialBatch } from "../lib/helpers";
+import log from "../lib/log";
+import { ISO8601DateTimeString } from "../lib/encoding";
+import { enqueueBatch, getQueueUrl } from "../lib/queues";
+
+// Stripped-down block representation stored in the `blocks` table.
+export interface DatabaseBlock {
+  id: string; // block indep_hash (see fullBlockToDbBlock)
+  previous_block: string;
+  mined_at: string; // formatted from block.timestamp (seconds)
+  height: number;
+  txs: string[];
+  extended: object; // extra header fields, JSON-serialized on write
+}
+
+// Row shape for the blocks_tx_map join table.
+export interface DatabaseBlockTxMap {
+  block_id: string;
+  tx_id: string;
+}
+
+// Columns selected/written for block rows.
+const blockFields = [
+  "id",
+  "height",
+  "mined_at",
+  "previous_block",
+  "txs",
+  "extended",
+];
+
+// Header fields folded into the `extended` JSON column (picked in
+// fullBlockToDbBlock).
+const extendedFields = [
+  "diff",
+  "hash",
+  "reward_addr",
+  "last_retarget",
+  "tx_root",
+  "tx_tree",
+  "reward_pool",
+  "weave_size",
+  "block_size",
+  "cumulative_diff",
+  "hash_list_merkle",
+  "tags",
+];
+
+/** Fetch the highest block in the DB; throws when the table is empty. */
+export const getLatestBlock = async (
+  connection: Knex
+): Promise<DatabaseBlock> => {
+  const latest = await connection
+    .from("blocks")
+    .select(blockFields)
+    .orderBy("height", "desc")
+    .first();
+
+  if (!latest) {
+    throw new Error("Failed to get latest block from the block database");
+  }
+
+  return latest;
+};
+
+/** Fetch a single block by height or id; resolves undefined when absent. */
+export const getBlock = async (
+  connection: Knex,
+  predicate: { height: number } | { id: string }
+): Promise<DatabaseBlock | undefined> => {
+  const row = connection
+    .from("blocks")
+    .where(predicate)
+    .select(blockFields)
+    .first();
+  return row;
+};
+
+/** The 400 most recent blocks, newest first. */
+export const getRecentBlocks = async (
+  connection: Knex
+): Promise<DatabaseBlock[]> => {
+  const recent = connection
+    .from("blocks")
+    .select(blockFields)
+    .orderBy("height", "desc")
+    .limit(400);
+  return recent;
+};
+
+type ITxMapping = { txs: TxBlockHeight[]; block: DatabaseBlock };
+
+/** Queue tx ids onto the import queue, 10 per SQS batch request. */
+const enqueueTxImports = async (queueUrl: string, txIds: string[]) => {
+  await sequentialBatch(txIds, 10, async (ids: string[]) => {
+    log.info(`[import-blocks] queuing block txs`, { ids });
+
+    const messages = ids.map((id) => ({ id, message: { id } }));
+    await enqueueBatch(queueUrl, messages);
+  });
+};
+
+/**
+ * Persist blocks and set each tx's block height, then queue every tx in
+ * each block for (re)import.
+ *
+ * Blocks are processed via R.reverse of the input (presumably the input is
+ * newest-first so this commits lowest-height-first — TODO confirm against
+ * callers). Each block gets its own DB transaction, so a mid-run failure
+ * leaves earlier blocks fully committed.
+ */
+export const saveBlocks = async (
+  connection: DBConnection,
+  blocks: DatabaseBlock[]
+) => {
+  const txImportQueueUrl = await getQueueUrl("import-txs");
+  // Pair each block with { height, id } records for all its txs.
+  const blockTxMappings: ITxMapping[] = blocks.reduce((map, block) => {
+    return map.concat({
+      block,
+      txs: block.txs.map((tx_id: string) => {
+        return { height: block.height, id: tx_id };
+      }),
+    });
+  }, [] as ITxMapping[]);
+
+  for (const map of R.reverse(blockTxMappings)) {
+    const { block, txs } = map;
+    await connection.transaction(async (knexTransaction) => {
+      log.info(`[block-db] saving block`, {
+        height: block.height,
+        id: block.id,
+      });
+
+      // Upsert on height so a fork at the same height replaces the row.
+      await upsert(knexTransaction, {
+        table: "blocks",
+        conflictKeys: ["height"],
+        rows: [serialize(block)],
+        transaction: knexTransaction,
+      });
+
+      // Stamp the block height onto each tx row, 10 at a time.
+      await sequentialBatch(txs, 10, async (batch: TxBlockHeight[]) => {
+        log.info(`[block-db] setting tx block heights`, {
+          txs: batch.map((item) => {
+            return { id: item.id, height: item.height };
+          }),
+        });
+
+        await upsert(knexTransaction, {
+          table: "transactions",
+          conflictKeys: ["id"],
+          rows: batch,
+          transaction: knexTransaction,
+        });
+      });
+    });
+    // log.info(`[block-db] setting bundle data item heights`);
+
+    // for (const items_ of R.splitEvery(10, txs)) {
+    //   await Promise.all(
+    //     items_.map((item: TxBlockHeight) =>
+    //       connection.raw(
+    //         `UPDATE transactions SET height = ? WHERE parent = ? AND height IS NULL`,
+    //         [item.height, item.id]
+    //       )
+    //     )
+    //   );
+    // }
+
+    log.info(`[block-db] enqueue-ing tx-imports`);
+
+    // requeue *all* transactions involved in blocks that have forked.
+    // Some of them may have been imported already and purged, so we
+    // reimport everything to make sure there are no gaps.
+    await enqueueTxImports(txImportQueueUrl, block.txs);
+  }
+};
+
+interface TxBlockHeight {
+ id: string;
+ height: number;
+}
+
+/** Convert an array of full network blocks to their DB representations. */
+export const fullBlocksToDbBlocks = (blocks: Block[]): DatabaseBlock[] => {
+  return blocks.map((block) => fullBlockToDbBlock(block));
+};
+/**
+ * Format a full block into a stripped down version for storage in the
+ * postgres DB. The block timestamp (seconds) becomes a formatted
+ * mined_at string; extra header fields are picked into `extended`.
+ */
+export const fullBlockToDbBlock = (block: Block): DatabaseBlock => {
+  const minedAt = moment(block.timestamp * 1000).format();
+
+  return {
+    id: block.indep_hash,
+    previous_block: block.previous_block,
+    height: block.height,
+    txs: block.txs,
+    mined_at: minedAt,
+    extended: pick(block, extendedFields),
+  };
+};
+
+// The pg driver and knex don't know the destination column types,
+// and they don't correctly serialize json fields, so this needs
+// to be done manually.
+// (lodash `transform` copies each own property; arrays and objects —
+// e.g. `txs` and `extended` — are JSON-stringified, primitives pass
+// through unchanged.)
+const serialize = (row: DatabaseBlock): object => {
+  return transform(row, (result: any, value: any, key: string) => {
+    result[key] =
+      value && typeof value == "object" ? JSON.stringify(value) : value;
+  });
+};
+
+type BlockSortOrder = "HEIGHT_ASC" | "HEIGHT_DESC";
+
+// Raw ORDER BY fragments keyed by sort order; NULLS placement keeps
+// height-less rows at the end of either ordering.
+const orderByClauses: { [key in BlockSortOrder]: string } = {
+  HEIGHT_ASC: "blocks.height ASC NULLS LAST, id ASC",
+  HEIGHT_DESC: "blocks.height DESC NULLS FIRST, id ASC",
+};
+// Filter options for queryBlocks; a negative min/maxHeight disables the
+// corresponding bound (see queryBlocks).
+interface BlockQuery {
+  id?: string;
+  ids?: string[];
+  limit?: number;
+  offset?: number;
+  select?: any;
+  before?: ISO8601DateTimeString;
+  sortOrder?: BlockSortOrder;
+  minHeight?: number;
+  maxHeight?: number;
+}
+
+/**
+ * Build a knex query over the blocks table from the given filter options.
+ * With no filters this still selects up to `limit` (default 100000) rows.
+ */
+export const queryBlocks = (
+  connection: Knex,
+  {
+    limit = 100000,
+    select,
+    offset = 0,
+    before,
+    id,
+    ids,
+    sortOrder = "HEIGHT_DESC",
+    minHeight = -1,
+    maxHeight = -1,
+  }: BlockQuery
+): Knex.QueryInterface => {
+  const builder = connection.queryBuilder().select(select).from("blocks");
+
+  if (id) {
+    builder.where("blocks.id", id);
+  }
+  if (ids) {
+    builder.whereIn("blocks.id", ids);
+  }
+  if (before) {
+    builder.where("blocks.created_at", "<", before);
+  }
+  // Negative bounds mean "no bound".
+  if (minHeight >= 0) {
+    builder.where("blocks.height", ">=", minHeight);
+  }
+  if (maxHeight >= 0) {
+    builder.where("blocks.height", "<=", maxHeight);
+  }
+
+  builder.limit(limit).offset(offset);
+
+  const orderClause = orderByClauses[sortOrder];
+  if (orderClause) {
+    builder.orderByRaw(orderClause);
+  }
+
+  return builder;
+};
diff --git a/src/database/bundle-import-db.ts b/src/database/bundle-import-db.ts
new file mode 100644
index 0000000..3354da6
--- /dev/null
+++ b/src/database/bundle-import-db.ts
@@ -0,0 +1,66 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { upsert } from "./postgres";
+import { Knex } from "knex";
+import { String } from "aws-sdk/clients/acm";
+
+// Import status row for a data bundle tx (table: bundle_status).
+export interface DataBundleStatus {
+  id: string;
+  status: "pending" | "complete" | "error" | "invalid";
+  attempts: number; // number of import attempts so far
+  error: string | null;
+  bundle_meta?: string;
+}
+
+const table = "bundle_status";
+
+// Columns read back by getBundleImport (bundle_meta is not selected).
+const fields = ["id", "status", "attempts", "error"];
+
+/** Upsert bundle import status rows, keyed on id. */
+export const saveBundleStatus = async (
+  connection: Knex,
+  rows: Partial<DataBundleStatus>[]
+) => {
+  return upsert(connection, { table, conflictKeys: ["id"], rows });
+};
+
+/**
+ * Look up the import status for a bundle tx.
+ *
+ * @returns the stored status fields, or {} when the id has never been
+ *   recorded. (Restores the Promise<Partial<DataBundleStatus>> return
+ *   type whose argument was missing, making the signature invalid TS.)
+ */
+export const getBundleImport = async (
+  connection: Knex,
+  id: string
+): Promise<Partial<DataBundleStatus>> => {
+  const result = await connection
+    .select(fields)
+    .from("bundle_status")
+    .where({ id })
+    .first();
+
+  if (result) {
+    return {
+      id: result.id,
+      status: result.status,
+      attempts: result.attempts,
+      error: result.error,
+    };
+  }
+
+  return {};
+};
diff --git a/src/database/chunk-db.ts b/src/database/chunk-db.ts
new file mode 100644
index 0000000..f9038cf
--- /dev/null
+++ b/src/database/chunk-db.ts
@@ -0,0 +1,147 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { upsert } from "./postgres";
+import { Knex } from "knex";
+import moment from "moment";
+// Row shape for the `chunks` table.
+export interface DatabaseChunk {
+  data_root: string;
+  data_size: number; // total size of the tx data (sum of chunk_size when complete)
+  data_path: string;
+  offset: number; // chunk position within the tx data — presumably bytes; confirm
+  chunk_size: number;
+}
+
+// Columns selected for export queries.
+const chunkFields = [
+  "data_root",
+  "data_size",
+  "data_path",
+  "offset",
+  "chunk_size",
+];
+
+/** Upsert one chunk row, keyed on (data_root, data_size, offset). */
+export const saveChunk = async (connection: Knex, chunk: DatabaseChunk) => {
+  await upsert(connection, {
+    table: "chunks",
+    rows: [chunk],
+    conflictKeys: ["data_root", "data_size", "offset"],
+  });
+};
+
+interface ChunkQuery {
+  limit?: number;
+  offset?: number;
+  root?: string;
+  select?: (keyof DatabaseChunk)[];
+  order?: "asc" | "desc";
+}
+
+/**
+ * Build a query over the chunks table, ordered by offset.
+ *
+ * @param root - optional data_root filter. The original accepted this but
+ *   silently ignored it, forcing callers to append their own .where();
+ *   applying it here is backward compatible (existing callers omit it).
+ */
+export const query = (
+  connection: Knex,
+  { select, order = "asc", root }: ChunkQuery
+): Knex.QueryBuilder => {
+  const query = connection
+    .queryBuilder()
+    .select(select || "*")
+    .from("chunks");
+
+  if (root) {
+    query.where("data_root", root);
+  }
+
+  query.orderBy("offset", order);
+
+  return query;
+};
+
+/**
+ * Find chunks that are complete but not yet exported: the inner query
+ * keeps only data_roots whose cached chunk sizes sum to the full
+ * data_size, and the outer filters to rows where export has not started,
+ * oldest first.
+ */
+export const getPendingExports = async (
+  connection: Knex,
+  { limit = 100 }: { limit: number }
+): Promise => {
+  // select * from chunks where data_root in
+  // (select data_root from chunks group by data_root, data_size having sum(chunk_size) = data_size)
+  // and exported_started_at is null order by created_at asc
+  const query = connection
+    .select(chunkFields)
+    .from("chunks")
+    .whereIn("data_root", (query) => {
+      query
+        .select("data_root")
+        .from("chunks")
+        .groupBy(["data_root", "data_size"])
+        .havingRaw("sum(chunk_size) = data_size");
+    })
+    .whereNull("exported_started_at")
+    .orderBy("created_at", "asc");
+
+  if (limit) {
+    query.limit(limit);
+  }
+
+  return query;
+};
+
+/** Stamp exported_started_at (now) on the matching chunk row. */
+export const startedExport = async (
+  connection: Knex,
+  chunk: {
+    data_root: string;
+    data_size: number;
+    offset: number;
+  }
+) => {
+  await connection
+    .from("chunks")
+    .where(chunk)
+    .update({ exported_started_at: moment().format() });
+};
+
+/** Stamp exported_completed_at (now) on the matching chunk row. */
+export const completedExport = async (
+  connection: Knex,
+  chunk: {
+    data_root: string;
+    data_size: number;
+    offset: number;
+  }
+) => {
+  await connection
+    .from("chunks")
+    .where(chunk)
+    .update({ exported_completed_at: moment().format() });
+};
+
+/** All chunk rows for a (data_root, data_size) pair, ascending by offset. */
+export const queryRecentChunks = async (
+  connection: Knex,
+  {
+    root,
+    size,
+  }: {
+    root: string;
+    size: number;
+  }
+) => {
+  const rows = connection
+    .from("chunks")
+    .where({ data_root: root, data_size: size })
+    .select(["data_root", "data_size", "offset"])
+    .orderBy("offset", "asc");
+  return rows;
+};
diff --git a/src/database/postgres.ts b/src/database/postgres.ts
new file mode 100644
index 0000000..511be16
--- /dev/null
+++ b/src/database/postgres.ts
@@ -0,0 +1,155 @@
+import AWS from "aws-sdk";
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import knex, { Knex } from "knex";
+import log from "../lib/log";
+import { wait } from "../lib/helpers";
+
+// "read" and "write" pools point at different DB endpoints (see the host
+// selection in createConnectionPool).
+export type ConnectionMode = "read" | "write";
+
+export type DBConnection = Knex | Knex.Transaction;
+
+// Module-level singleton cache: one pool per mode, created lazily by
+// initConnectionPool and torn down by releaseConnectionPool.
+let poolCache: {
+  read: null | Knex;
+  write: null | Knex;
+} = {
+  read: null,
+  write: null,
+};
+
+/** Lazily create and memoize the pool for the given mode (idempotent). */
+export const initConnectionPool = (
+  mode: ConnectionMode,
+  config?: PoolConfig
+) => {
+  if (poolCache[mode]) {
+    return;
+  }
+  log.info(`[postgres] creating connection: ${mode}`);
+  poolCache[mode] = createConnectionPool(mode, config);
+};
+
+/**
+ * Return the memoized pool for the given mode.
+ *
+ * @throws Error when initConnectionPool has not been called for the mode.
+ *   The original's non-null assertion (`poolCache[mode]!`) handed back
+ *   undefined in that case and failed obscurely at the first query.
+ */
+export const getConnectionPool = (mode: ConnectionMode): Knex => {
+  const pool = poolCache[mode];
+  if (!pool) {
+    throw new Error(`[postgres] connection pool not initialized: ${mode}`);
+  }
+  log.info(`[postgres] reusing connection: ${mode}`);
+  return pool;
+};
+
+/**
+ * Destroy one pool, or — when no mode is given — both pools followed by a
+ * short settle delay.
+ */
+export const releaseConnectionPool = async (
+  mode?: ConnectionMode
+): Promise<void> => {
+  if (!mode) {
+    await Promise.all([
+      releaseConnectionPool("read"),
+      releaseConnectionPool("write"),
+    ]);
+    await wait(200);
+    return;
+  }
+
+  const pool = poolCache[mode];
+  if (pool) {
+    log.info(`[postgres] destroying connection: ${mode}`);
+    await pool.destroy();
+    poolCache[mode] = null;
+  }
+};
+
+// Pool sizing bounds passed through to knex's tarn pool.
+interface PoolConfig {
+  min: number;
+  max: number;
+}
+
+/**
+ * Create a new knex/pg pool for the given mode. Host and password come
+ * from mode-specific environment variables; the DB user name is the mode
+ * itself ("read" or "write").
+ */
+export const createConnectionPool = (
+  mode: ConnectionMode = "write",
+  { min, max }: PoolConfig = { min: 1, max: 10 }
+): Knex => {
+  // Pick endpoint + credentials by mode.
+  const host = {
+    read: process.env.ARWEAVE_DB_READ_HOST,
+    write: process.env.ARWEAVE_DB_WRITE_HOST,
+  }[mode];
+
+  const password = {
+    read: process.env.PSQL_READ_PASSWORD,
+    write: process.env.PSQL_WRITE_PASSWORD,
+  }[mode];
+
+  const hostDisplayName = `${process.env.AWS_REGION} ${mode}@${host}:${5432}`;
+
+  log.info(`[postgres] connecting to db: ${hostDisplayName}`);
+
+  const client = knex({
+    acquireConnectionTimeout: 120000,
+    client: "pg",
+    pool: {
+      min,
+      max,
+      acquireTimeoutMillis: 120000,
+      idleTimeoutMillis: 30000,
+      reapIntervalMillis: 40000,
+    },
+    connection: {
+      host,
+      user: mode,
+      database: "arweave",
+      // NOTE(review): TLS is used but certificate verification is disabled;
+      // confirm this is acceptable (e.g. RDS CA bundle could be pinned).
+      ssl: {
+        rejectUnauthorized: false,
+      },
+      password,
+      // NOTE(review): expirationChecker always returns true — presumably to
+      // force credential re-evaluation on each acquire; verify intent.
+      expirationChecker: () => true,
+      connectTimeout: 90000,
+    },
+  });
+
+  return client;
+};
+
+interface UpsertOptions {
+ table: string;
+ conflictKeys: string[];
+ rows: T;
+ transaction?: Knex.Transaction;
+}
+
+/**
+ * Generate a postgres upsert statement. This manually appends a raw section to the query.
+ *
+ * INSERT (col, col, col) VALUES (val, val, val) ON CONFLICT (id,index) SO UPDATE SET x = excluded.x...
+ */
+export const upsert = (
+ connection: DBConnection,
+ { table, conflictKeys, rows, transaction }: UpsertOptions
+) => {
+ const updateFields = Object.keys(rows[0])
+ .filter((field) => !conflictKeys.includes(field))
+ .map((field) => `${field} = excluded.${field}`)
+ .join(",");
+
+ const query = connection.insert(rows).into(table);
+
+ if (transaction) {
+ query.transacting(transaction);
+ }
+
+ const { sql, bindings } = query.toSQL();
+
+ const upsertSql = sql.concat(
+ ` ON CONFLICT (${conflictKeys
+ .map((key) => `"${key}"`)
+ .join(",")}) DO UPDATE SET ${updateFields};`
+ );
+
+ return connection.raw(upsertSql, bindings);
+};
diff --git a/src/database/transaction-db.ts b/src/database/transaction-db.ts
new file mode 100644
index 0000000..2a213ec
--- /dev/null
+++ b/src/database/transaction-db.ts
@@ -0,0 +1,429 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { upsert } from "./postgres";
+import log from "../lib/log";
+import { Knex } from "knex";
+import {
+ TransactionHeader,
+ getTagValue,
+ Tag,
+ utf8DecodeTag,
+ DataBundleItem,
+} from "../lib/arweave";
+import {
+ fromB64Url,
+ sha256B64Url,
+ ISO8601DateTimeString,
+} from "../lib/encoding";
+import { pick, uniqBy } from "lodash";
+import moment from "moment";
+import { TagFilter } from "../gateway/routes/graphql-v2/schema/types";
+import { sequentialBatch } from "../lib/helpers";
+import { DataItem } from "arbundles";
+
+// Row shape for the `tags` table; (tx_id, index) is the upsert conflict key.
+interface DatabaseTag {
+  tx_id: string;
+  index: number; // position of the tag within the tx's tag list
+  name: string | undefined;
+  value: string | undefined;
+  // value_numeric: string | undefined;
+}
+
+// Columns written to the transactions table (used by lodash pick in
+// txToRow). fix: "quantity" appeared twice in the original list — harmless
+// to pick, but a duplicate nonetheless.
+const txFields = [
+  "format",
+  "id",
+  "signature",
+  "owner",
+  "owner_address",
+  "target",
+  "reward",
+  "last_tx",
+  "tags",
+  "quantity",
+  "content_type",
+  "data_size",
+  "data_root",
+];
+
+/** Pluck the ids of all transactions matching the given predicates. */
+export const getTxIds = async (
+  connection: Knex,
+  predicates: object
+): Promise<string[]> => {
+  const ids = await connection
+    .from("transactions")
+    .where(predicates)
+    .pluck("id");
+  return ids;
+};
+
+/** Fetch the first transaction row matching the predicates, if any. */
+export const getTx = async (
+  connection: Knex,
+  predicates: object
+): Promise<any | undefined> => {
+  const row = connection
+    .from("transactions")
+    .where(predicates)
+    .select()
+    .first();
+  return row;
+};
+
+type TxSortOrder = "HEIGHT_ASC" | "HEIGHT_DESC";
+
+// Raw ORDER BY fragments; NULLS placement keeps pending (height-less)
+// txs last when ascending / first when descending.
+const orderByClauses: { [key in TxSortOrder]: string } = {
+  HEIGHT_ASC: "transactions.height ASC NULLS LAST, id ASC",
+  HEIGHT_DESC: "transactions.height DESC NULLS FIRST, id ASC",
+};
+
+// Filter options for the tx query builder; negative min/maxHeight
+// disables that bound, pendingMinutes < 0 disables orphan filtering.
+interface TxQuery {
+  to?: string[];
+  from?: string[];
+  id?: string;
+  ids?: string[];
+  tags?: TagFilter[];
+  parents?: string[];
+  limit?: number;
+  offset?: number;
+  select?: any;
+  blocks?: boolean;
+  before?: ISO8601DateTimeString;
+  sortOrder?: TxSortOrder;
+  status?: "any" | "confirmed" | "pending";
+  pendingMinutes?: number;
+  minHeight?: number;
+  maxHeight?: number;
+}
+
+/**
+ * Build the main transactions query from GraphQL-style filter options.
+ * Tag filters become one self-join per filter against the tags table.
+ *
+ * fix: corrected the log tag typo ("grqphql" -> "graphql").
+ */
+export const query = (
+  connection: Knex,
+  {
+    to,
+    from,
+    tags,
+    parents,
+    limit = 100000,
+    offset = 0,
+    id,
+    ids,
+    status,
+    select,
+    before,
+    blocks = false,
+    sortOrder = "HEIGHT_DESC",
+    pendingMinutes = 60,
+    minHeight = -1,
+    maxHeight = -1,
+  }: TxQuery
+): Knex.QueryBuilder => {
+  const query = connection
+    .queryBuilder()
+    .select(
+      select || {
+        id: "transactions.id",
+        height: "transactions.height",
+        tags: "transactions.tags",
+      }
+    )
+    .from("transactions");
+
+  if (blocks) {
+    query.leftJoin("blocks", "transactions.height", "blocks.height");
+  }
+
+  if (pendingMinutes >= 0) {
+    query.where((query) => {
+      // Include recent pending transactions up to pendingMinutes old.
+      // After this threshold they will be considered orphaned so not included in results.
+      query.whereNotNull("transactions.height");
+
+      query.orWhere(
+        "transactions.created_at",
+        ">",
+        moment().subtract(pendingMinutes, "minutes").toISOString()
+      );
+    });
+  }
+
+  if (status == "confirmed") {
+    query.whereNotNull("transactions.height");
+  }
+
+  if (before) {
+    query.where("transactions.created_at", "<", before);
+  }
+
+  if (id) {
+    query.where("transactions.id", id);
+  }
+
+  if (ids) {
+    query.whereIn("transactions.id", ids);
+  }
+
+  if (parents) {
+    query.whereIn("transactions.parent", parents);
+  }
+
+  if (to) {
+    query.whereIn("transactions.target", to);
+  }
+
+  if (from) {
+    query.whereIn("transactions.owner_address", from);
+  }
+
+  if (tags) {
+    // One tags-table join per filter; alias is derived from the filter index.
+    tags.forEach((tag, index) => {
+      const tagAlias = `${index}_${index}`;
+
+      query.join(`tags as ${tagAlias}`, (join) => {
+        join.on("transactions.id", `${tagAlias}.tx_id`);
+
+        join.andOnIn(`${tagAlias}.name`, [tag.name]);
+
+        if (tag.op == "EQ") {
+          join.andOnIn(`${tagAlias}.value`, tag.values);
+        }
+
+        if (tag.op == "NEQ") {
+          join.andOnNotIn(`${tagAlias}.value`, tag.values);
+        }
+      });
+    });
+  }
+
+  if (minHeight >= 0) {
+    query.where("transactions.height", ">=", minHeight);
+  }
+
+  if (maxHeight >= 0) {
+    query.where("transactions.height", "<=", maxHeight);
+  }
+
+  query.limit(limit).offset(offset);
+
+  if (Object.keys(orderByClauses).includes(sortOrder)) {
+    query.orderByRaw(orderByClauses[sortOrder]);
+  }
+
+  log.info("[graphql/v2/104] RAW", { queryRaw: query.toString() });
+  return query;
+};
+
+/** True when the tx exists in the DB with a non-null owner. */
+export const hasTx = async (connection: Knex, id: string): Promise<boolean> => {
+  const row = await connection
+    .first("id")
+    .from("transactions")
+    .where({ id })
+    .whereNotNull("owner");
+
+  return Boolean(row && row.id);
+};
+
+/** Return the subset of the given ids already present in transactions. */
+export const hasTxs = async (
+  connection: Knex,
+  ids: string[]
+): Promise<string[]> => {
+  const found = await connection
+    .pluck("id")
+    .from("transactions")
+    .whereIn("id", ids);
+  return found;
+};
+
+/**
+ * Upsert a transaction header and its tags inside a single DB transaction.
+ */
+export const saveTx = async (connection: Knex, tx: TransactionHeader) => {
+  return await connection.transaction(async (knexTransaction) => {
+    await upsert(knexTransaction, {
+      table: "transactions",
+      conflictKeys: ["id"],
+      rows: [
+        txToRow({
+          tx,
+        }),
+      ],
+    });
+
+    // Tag rows conflict on (tx_id, index), so re-saving is idempotent.
+    if (tx.tags.length > 0) {
+      await upsert(knexTransaction, {
+        table: "tags",
+        conflictKeys: ["tx_id", "index"],
+        rows: txTagsToRows(tx.id, tx.tags),
+      });
+    }
+  });
+};
+
+/**
+ * Import a single bundle data item into the transactions/tags tables,
+ * linked to its parent bundle tx and inheriting the parent's block height
+ * (when the parent row has one).
+ */
+export const saveBundleDataItem = async (
+  connection: Knex,
+  tx: DataBundleItem,
+  { parent }: { parent: string }
+) => {
+  // Look up the parent bundle's height so the item appears at the same
+  // height; an empty result leaves height unset.
+  const maybeHeight = await connection
+    .select("height")
+    .from("transactions")
+    .where({ id: parent });
+
+  return await connection.transaction(async (knexTransaction: any) => {
+    await upsert(knexTransaction, {
+      table: "transactions",
+      conflictKeys: ["id"],
+      rows: [
+        {
+          parent,
+          // NOTE(review): format is hard-coded to 1 for bundle items —
+          // confirm this matches the schema's expectations.
+          format: 1,
+          id: tx.id,
+          signature: tx.signature,
+          owner: tx.owner,
+          owner_address: sha256B64Url(fromB64Url(tx.owner)),
+          target: tx.target,
+          reward: 0,
+          last_tx: tx.nonce,
+          tags: JSON.stringify(tx.tags),
+          quantity: 0,
+          // Fall back to the decoded inline data length when dataSize is absent.
+          data_size: tx.dataSize || fromB64Url((tx as any).data).byteLength,
+          ...(maybeHeight && maybeHeight.length > 0
+            ? maybeHeight[0]
+            : undefined),
+        },
+      ],
+    });
+
+    if (tx.tags.length > 0) {
+      await upsert(knexTransaction, {
+        table: "tags",
+        conflictKeys: ["tx_id", "index"],
+        rows: txTagsToRows(tx.id, tx.tags),
+      });
+    }
+  });
+};
+
+/**
+ * Import all data items of a bundle into the transactions/tags tables.
+ * Items inherit the bundle tx's block height (when the bundle row has one)
+ * so they become queryable at the same height as their parent.
+ *
+ * Fixes over the original:
+ *  - removed a leftover `console.error({ item })` debug statement;
+ *  - removed a per-item log.info that duplicated the entire row payload;
+ *  - the tag upsert inside the 500-row batching loop now writes only the
+ *    current batch — the original passed the FULL `tags` array on every
+ *    iteration, re-upserting every tag once per batch.
+ */
+export const saveBundleDataItems = async (
+  connection: Knex,
+  bundleId: string,
+  items: DataBundleItem[]
+) => {
+  const maybeHeight = await connection
+    .select("height")
+    .from("transactions")
+    .where({ id: bundleId });
+
+  return await connection.transaction(async (knexTransaction: any) => {
+    log.info(`[import-bundles] importing tx bundle items to gql db`, {
+      bundle: bundleId,
+      batchSize: items.length,
+    });
+
+    const tags: DatabaseTag[] = [];
+
+    const rows = uniqBy(items, "id").map((item) => {
+      if (item.tags.length > 0) {
+        tags.push(...txTagsToRows(item.id, item.tags));
+      }
+
+      return {
+        parent: bundleId,
+        format: 1,
+        id: item.id,
+        signature: item.signature,
+        owner: item.owner,
+        owner_address: sha256B64Url(fromB64Url(item.owner)),
+        target: item.target || "",
+        reward: 0,
+        // @ts-ignore
+        last_tx: item.nonce || item.anchor || "",
+        tags: JSON.stringify(item.tags || []),
+        quantity: 0,
+        data_size: item.dataSize ?? fromB64Url((item as any).data).byteLength,
+        ...(maybeHeight && maybeHeight.length > 0 ? maybeHeight[0] : undefined),
+      };
+    });
+
+    await upsert(knexTransaction, {
+      table: "transactions",
+      conflictKeys: ["id"],
+      rows: rows,
+    });
+
+    if (tags.length > 0) {
+      await sequentialBatch(tags, 500, async (batch: DatabaseTag[]) => {
+        log.info(`[import-bundles] importing tx bundle tags to gql db`, {
+          bundle: bundleId,
+          batchSize: batch.length,
+        });
+
+        await upsert(knexTransaction, {
+          table: "tags",
+          conflictKeys: ["tx_id", "index"],
+          rows: batch,
+        });
+      });
+    }
+  });
+};
+
+// Map a tx header (or bundle item) onto a transactions-table row:
+// content_type is pulled from the tags, data_size falls back to the
+// decoded inline data length, owner_address is derived via sha256B64Url
+// of the decoded owner, and only txFields survive the final pick.
+const txToRow = ({ tx }: { tx: TransactionHeader | DataBundleItem }) => {
+  return pick(
+    {
+      ...tx,
+      content_type: getTagValue(tx.tags, "content-type"),
+      format: (tx as any).format || 0,
+      data_size:
+        (tx as any).data_size ||
+        ((tx as any).data
+          ? fromB64Url((tx as any).data).byteLength
+          : undefined),
+      tags: JSON.stringify(tx.tags),
+      owner_address: sha256B64Url(fromB64Url(tx.owner)),
+    },
+    txFields
+  );
+};
+
+/**
+ * Decode a tx's tags into DB rows, dropping any tag whose decoded name +
+ * value would exceed the postgres index row size limit.
+ */
+const txTagsToRows = (tx_id: string, tags: Tag[]): DatabaseTag[] => {
+  const rows: DatabaseTag[] = [];
+
+  tags.forEach((tag, index) => {
+    const { name, value } = utf8DecodeTag(tag);
+
+    // The name and values columns are indexed, so ignore any values that
+    // are too large. Postgres will throw an error otherwise: index row
+    // size 5088 exceeds maximum 2712 for index "tags_name_value"
+    const decodedSize = (name?.length || 0) + (value?.length || 0);
+    if (decodedSize < 2712) {
+      rows.push({ tx_id, index, name, value });
+    }
+  });
+
+  return rows;
+};
diff --git a/src/gateway/app.ts b/src/gateway/app.ts
new file mode 100644
index 0000000..155d980
--- /dev/null
+++ b/src/gateway/app.ts
@@ -0,0 +1,138 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import "./env";
+import express from "express";
+import helmet from "helmet";
+import {
+ initConnectionPool,
+ releaseConnectionPool,
+} from "../database/postgres";
+import log from "../lib/log";
+import { handler as corsMiddleware } from "./middleware/cors";
+import { handler as jsonBodyMiddleware } from "./middleware/json-body";
+import {
+ configureRequestLogging,
+ handler as requestLoggingMiddleware,
+} from "./middleware/request-log";
+import { handler as sandboxMiddleware } from "./middleware/sandbox";
+import { handler as arqlHandler } from "./routes/arql";
+import { handler as dataHandler } from "./routes/data";
+import { apolloServer } from "./routes/graphql";
+import { apolloServer as apolloServerV2 } from "./routes/graphql-v2";
+import { handler as healthHandler } from "./routes/health";
+import { handler as newTxHandler } from "./routes/new-tx";
+import { handler as newChunkHandler } from "./routes/new-chunk";
+import { handler as proxyHandler } from "./routes/proxy";
+import { handler as webhookHandler } from "./routes/webhooks";
+
+import { logMiddleware } from "./middleware/log.middleware";
+
+require("express-async-errors");
+
// Open the read-only Postgres pool used by the route handlers below.
initConnectionPool("read", { min: 1, max: 100 });

const app = express();

// Matches "/<43-char txid>" or "/<43-char txid>/<subpath>" request paths.
const dataPathRegex =
  /^\/?([a-zA-Z0-9-_]{43})\/?$|^\/?([a-zA-Z0-9-_]{43})\/(.*)$/i;

// NOTE(review): undefined unless APP_PORT is set — confirm deployment env.
const port = process.env.APP_PORT;

// Trust the first proxy hop so X-Forwarded-* headers are honoured.
app.set("trust proxy", 1);

// Global middleware

// Attach a per-request trace id and child logger (req.id / req.log).
app.use(configureRequestLogging);

// app.use(requestLoggingMiddleware);

app.use(helmet.hidePoweredBy());

app.use(corsMiddleware);

// Redirect tx data requests to their per-tx sandbox subdomain.
app.use(sandboxMiddleware);
app.use(logMiddleware);
+
// Avoid 404 noise from browsers requesting a favicon.
app.get("/favicon.ico", (req, res) => {
  res.status(204).end();
});

// CORS preflight for transaction posts.
app.options("/tx", (req, res) => {
  res.send("OK").end();
});

// New transaction headers and data chunks arrive as JSON bodies.
app.post("/tx", jsonBodyMiddleware, newTxHandler);

app.post("/chunk", jsonBodyMiddleware, newChunkHandler);

app.options("/chunk", (req, res) => {
  res.send("OK").end();
});

app.post("/webhook", jsonBodyMiddleware, webhookHandler);

app.post("/arql", jsonBodyMiddleware, arqlHandler);

app.get("/health", healthHandler);

// Serve raw transaction data / manifests for "/<txid>[/subpath]" paths.
app.get(dataPathRegex, dataHandler);
+
// The arql Apollo server handles GraphQL posted to /arql; the v2 server
// (with introspection enabled) serves /graphql.
const apolloServerInstanceArql = apolloServer();

const apolloServerInstanceGql = apolloServerV2({ introspection: true });

// Apollo 3 requires start() before applyMiddleware(); only after both
// servers are mounted is the HTTP listener opened.
Promise.all([
  apolloServerInstanceArql.start(),
  apolloServerInstanceGql.start(),
]).then(() => {
  apolloServerInstanceArql.applyMiddleware({ app, path: "/arql" });
  apolloServerInstanceGql.applyMiddleware({
    app,
    path: "/graphql",
  });
  // NOTE(review): logged before listen() has actually succeeded.
  log.info(`[app] Started on http://localhost:${port}`);
  const server = app.listen(port, () => {
    try {
      log.info(
        `[${new Date().toLocaleString()}] Using version `,
        require("../../package.json").version
      );
    } catch (e) {
      log.info(`'Unable to retrieve the package version.'`);
    }

    // The apollo middleware *must* be applied after the standard arql handler
    // as arql is the default behaviour. If the graphql handler
    // is invoked first it will emit an error if it received an arql request.
  });

  // Long keep-alive/header timeouts so slow data streams aren't cut off by
  // Node's defaults.
  server.keepAliveTimeout = 120 * 1000;
  server.headersTimeout = 120 * 1000;
  // Catch-all: anything unmatched is proxied onward.
  app.get("*", proxyHandler);
});
+
+// console.log([server.headersTimeout]);
+
+process.on("SIGINT", function () {
+ log.info("\nGracefully shutting down from SIGINT");
+ releaseConnectionPool().then(() => {
+ log.info("[app] DB connections closed");
+ process.exit(1);
+ });
+});
diff --git a/src/gateway/env.ts b/src/gateway/env.ts
new file mode 100644
index 0000000..5eae13b
--- /dev/null
+++ b/src/gateway/env.ts
@@ -0,0 +1,22 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
// Load environment variables from a local .env file into process.env
// before any other module reads configuration (app.ts imports this first).
import { config as dotenvConfig } from "dotenv";
// NOTE(review): `silent` is not a documented dotenv option (hence the
// `as any` cast) — presumably meant to suppress missing-file warnings;
// confirm against the installed dotenv version.
dotenvConfig({
  silent: true,
} as any);
diff --git a/src/gateway/express.d.ts b/src/gateway/express.d.ts
new file mode 100644
index 0000000..6a16464
--- /dev/null
+++ b/src/gateway/express.d.ts
@@ -0,0 +1,28 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+declare global {
+ declare module "express-serve-static-core" {
+ export interface Request {
+ id: string;
+ log: import("winston").Logger;
+ }
+
+ export interface Response {}
+ }
+}
diff --git a/src/gateway/middleware/cors.ts b/src/gateway/middleware/cors.ts
new file mode 100644
index 0000000..498ea79
--- /dev/null
+++ b/src/gateway/middleware/cors.ts
@@ -0,0 +1,26 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { RequestHandler } from "express";
+
+export const handler: RequestHandler = (req, res, next) => {
+ res.header("Access-Control-Allow-Origin", "*");
+ res.header("Access-Control-Allow-Methods", req.method);
+ res.header("Access-Control-Allow-Headers", "Content-Type");
+ next();
+};
diff --git a/src/gateway/middleware/json-body.ts b/src/gateway/middleware/json-body.ts
new file mode 100644
index 0000000..32f1ca0
--- /dev/null
+++ b/src/gateway/middleware/json-body.ts
@@ -0,0 +1,21 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { json } from "body-parser";
+
+export const handler = json({ limit: "15mb", type: () => true });
diff --git a/src/gateway/middleware/log.middleware.ts b/src/gateway/middleware/log.middleware.ts
new file mode 100644
index 0000000..9859028
--- /dev/null
+++ b/src/gateway/middleware/log.middleware.ts
@@ -0,0 +1,39 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import morgan from "morgan";
+import id from "shortid";
+import { Request, Response, NextFunction } from "express";
+
+export function logConfigurationMiddleware(
+ req: Request,
+ res: Response,
+ next: NextFunction
+) {
+ const trace = id.generate();
+ req.id = trace;
+ res.header("X-Trace", trace);
+ return next();
+}
// Expose the request's trace id (set by logConfigurationMiddleware above)
// as a morgan token.
// NOTE(review): morgan hands tokens Node's IncomingMessage, not the
// Express Request this callback is typed as — confirm the typing.
morgan.token("trace", (req: Request) => {
  return req.id || "UNKNOWN";
});

// Apache-style access-log line with response time and a [trace=...] suffix.
export const logMiddleware = morgan(
  '[http] :remote-addr - :remote-user [:date] ":method :url HTTP/:http-version" :status :res[content-length] :response-time ms ":referrer" ":user-agent" [trace=:trace]'
);
diff --git a/src/gateway/middleware/request-log.ts b/src/gateway/middleware/request-log.ts
new file mode 100644
index 0000000..cf7a742
--- /dev/null
+++ b/src/gateway/middleware/request-log.ts
@@ -0,0 +1,55 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import morgan, { token as morganToken } from "morgan";
+import { RequestHandler, Request } from "express";
+import shortId from "shortid";
+import log from "../../lib/log";
+import { createLogger, transports, format } from "winston";
+
+export const configureRequestLogging: RequestHandler = (req, res, next) => {
+ const traceId = shortId.generate();
+ req.id = traceId;
+ res.header("X-Trace", traceId);
+ req.log = log.child({
+ trace: traceId,
+ });
+ next();
+};
+
// Morgan tokens for the internal trace id and the AWS X-Amzn-Trace-Id
// header (set by ALB/CloudFront).
morganToken("trace", (req) => {
  return getTraceId(req);
});

morganToken("aws_trace", (req) => {
  return getAwsTraceId(req);
});

// NOTE(review): morgan's documented signature is morgan(format, options);
// passing only an options object relies on deprecated behaviour — confirm
// the intended log format.
export const handler = morgan({
  stream: { write: (str: string) => log.log("info", str) },
});
+
+const getTraceId = (req: any): string => {
+ return req.id || "";
+};
+
+const getAwsTraceId = (req: any): string => {
+ return req.headers["x-amzn-trace-id"]
+ ? (req.headers["x-amzn-trace-id"] as string)
+ : "";
+};
diff --git a/src/gateway/middleware/sandbox.ts b/src/gateway/middleware/sandbox.ts
new file mode 100644
index 0000000..81131c3
--- /dev/null
+++ b/src/gateway/middleware/sandbox.ts
@@ -0,0 +1,75 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { fromB64Url, toB32 } from "../../lib/encoding";
+import { RequestHandler, Request } from "express";
+import querystring from "querystring";
+
+const getTxIdFromPath = (path: string): string | undefined => {
+ const matches = path.match(/^\/?([a-z0-9-_]{43})/i) || [];
+ return matches[1];
+};
+
/**
 * Sandbox middleware: tx data must be served from a subdomain derived from
 * the txid (base32 of the decoded id) so untrusted HTML/JS is origin-
 * isolated. Requests for a txid arriving on the wrong subdomain get a 302
 * redirect to the expected sandbox host.
 */
export const handler: RequestHandler = (req, res, next) => {
  const txid = getTxIdFromPath(req.path);

  // Skip requests carrying a CloudFront id header — presumably those were
  // already sandboxed upstream; TODO confirm.
  if (txid && !req.headers["x-amz-cf-id"]) {
    const currentSandbox = getRequestSandbox(req);
    const expectedSandbox = expectedTxSandbox(txid);
    let queryString = "";

    // Re-serialize the query string so it survives the redirect.
    if (
      req &&
      typeof req === "object" &&
      req.path &&
      typeof req.query === "object" &&
      Object.keys(req.query).length > 0
    ) {
      try {
        queryString = (
          ((req.path || "").endsWith("/") ? "?" : "/?") +
          querystring.stringify(req.query as any)
        ).replace(/\/\//i, "/"); // fix double slash
      } catch (error) {
        req.log.info("[sandbox] error making queryString", error as any);
        queryString = "";
      }
    }

    if (currentSandbox !== expectedSandbox) {
      // The replace() only sees the host+path fragment (second template),
      // so the protocol's "://" in the first string is unaffected.
      return res.redirect(
        302,
        `${process.env.SANDBOX_PROTOCOL}://${expectedSandbox}.` +
          `${process.env.SANDBOX_HOST}${req.path}${queryString || ""}`.replace(
            /\/\//i,
            "/"
          )
      );
    }
  }

  next();
};
+
// A txid's sandbox subdomain: the base32 encoding of its decoded bytes.
const expectedTxSandbox = (id: string): string => {
  return toB32(fromB64Url(id));
};
+
+const getRequestSandbox = (req: Request) => {
+ return req.headers.host!.split(".")[0].toLowerCase();
+};
diff --git a/src/gateway/middleware/validate-body.ts b/src/gateway/middleware/validate-body.ts
new file mode 100644
index 0000000..42c796d
--- /dev/null
+++ b/src/gateway/middleware/validate-body.ts
@@ -0,0 +1,43 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import Joi, { Schema, ValidationError } from "@hapi/joi";
+import { BadRequest } from "http-errors";
+
+export const parseInput = (
+ schema: Schema,
+ payload: any,
+ options: { transform?: (validatedPayload: any) => T } = {}
+): T => {
+ const { transform } = options;
+ try {
+ const validatedPayload = Joi.attempt(payload, schema, {
+ abortEarly: false,
+ });
+ return transform ? transform(validatedPayload) : validatedPayload;
+ } catch (error: any) {
+ const report: ValidationError = error as ValidationError;
+ throw new BadRequest({
+ // We only want to expose the message and path, so ignore the other fields
+ validation: report.details.map(({ message, path }) => ({
+ message,
+ path,
+ })),
+ } as any);
+ }
+};
diff --git a/src/gateway/routes/arql/index.ts b/src/gateway/routes/arql/index.ts
new file mode 100644
index 0000000..1200deb
--- /dev/null
+++ b/src/gateway/routes/arql/index.ts
@@ -0,0 +1,199 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { getConnectionPool } from "../../../database/postgres";
+import knex, { Knex } from "knex";
+import { query as txQuery } from "../../../database/transaction-db";
+import { RequestHandler } from "express";
+import createError from "http-errors";
+import { Logger } from "winston";
+
// An arql expression: a tag/field match or a boolean combination of two.
type ArqlQuery = ArqlBooleanQuery | ArqlTagMatch;

// Matches transactions where tag/field `expr1` equals `expr2`.
interface ArqlTagMatch {
  op: "equals";
  expr1: string;
  expr2: string;
}

// NOTE(review): not part of ArqlQuery and never constructed in this file —
// presumably a planned comparison operator; confirm before removal.
interface ArqlTagCompare {
  op: "compare";
  expr1: string;
  expr2: {
    type: ArqlTagMatchQueryType;
    op: ArqlTagMatchQueryOp;
    value: number | string;
  };
}

type ArqlTagMatchQueryType = "string" | "numeric";
type ArqlTagMatchQueryOp = "eq" | "gt" | "lt" | "gte" | "lte";

// NOTE(review): unused in this file — see ArqlTagCompare above.
interface ArqlTagMatchQuery {
  type: ArqlTagMatchQueryType;
  op: ArqlTagMatchQueryOp;
}
// "and"/"or" combination of two sub-queries.
interface ArqlBooleanQuery {
  op: "and" | "or";
  expr1: ArqlQuery;
  expr2: ArqlQuery;
}

// Query results are a list of transaction ids.
type ArqlResultSet = string[];
+
+export const defaultMaxResults = 5000;
+
/**
 * POST /arql — executes a legacy arql query against the read pool and
 * responds with an array of matching transaction ids.
 *
 * Bodies carrying a `query` field are GraphQL, not arql, and are handed
 * to the next handler (the Apollo middleware mounted on this path).
 */
export const handler: RequestHandler = async (req, res, next: Function) => {
  if (req.body && req.body.query) {
    req.log.info(`[graphql] resolving arql using graphql`);
    return next();
  }

  const pool = getConnectionPool("read");

  // Reject malformed queries (throws BadRequest) before touching the DB.
  try {
    validateQuery(req.body);
  } catch (error) {
    req.log.info(`[arql] invalid query`, { query: req.body });
    throw error;
  }

  // Client-supplied ?limit=..., clamped to defaultMaxResults.
  const limit = Math.min(
    Number.isInteger(parseInt(req.query.limit! as string))
      ? parseInt(req.query.limit! as string)
      : defaultMaxResults,
    defaultMaxResults
  );

  req.log.info(`[arql] valid query`, { query: req.body, limit });

  const results = await executeQuery(pool, req.body, { limit });

  req.log.info(`[arql] results: ${results.length}`);

  res.send(results);

  res.end();
};
+
+const executeQuery = async (
+ connection: Knex,
+ arqlQuery: ArqlQuery,
+ {
+ limit = defaultMaxResults,
+ offset = 0,
+ log = undefined,
+ }: { limit?: number; offset?: number; log?: Logger }
+): Promise => {
+ const sqlQuery = arqlToSqlQuery(txQuery(connection, {}), arqlQuery)
+ .limit(limit)
+ .offset(offset);
+
+ if (log) {
+ log.info(`[arql] execute sql`, {
+ sql: sqlQuery.toSQL(),
+ });
+ }
+
+ return await sqlQuery.pluck("transactions.id");
+};
+
+const validateQuery = (arqlQuery: ArqlQuery): boolean => {
+ try {
+ if (arqlQuery.op == "equals") {
+ if (typeof arqlQuery.expr1 != "string") {
+ throw new createError.BadRequest(
+ `Invalid value supplied for expr1: '${
+ arqlQuery.expr1
+ }', expected string got ${typeof arqlQuery.expr1}`
+ );
+ }
+
+ if (typeof arqlQuery.expr2 != "string") {
+ throw new createError.BadRequest(
+ `Invalid value supplied for expr2: '${
+ arqlQuery.expr2
+ }', expected string got ${typeof arqlQuery.expr2}`
+ );
+ }
+ //
+ return true;
+ }
+ if (["and", "or"].includes(arqlQuery.op)) {
+ return validateQuery(arqlQuery.expr1) && validateQuery(arqlQuery.expr2);
+ }
+
+ throw new createError.BadRequest(
+ `Invalid value supplied for op: '${arqlQuery.op}', expected 'equals', 'and', 'or'.`
+ );
+ } catch (error) {
+ if (error instanceof createError.BadRequest) {
+ throw error;
+ }
+ throw new createError.BadRequest(`Failed to parse arql query`);
+ }
+};
+
/**
 * Recursively translates an arql query tree into knex WHERE clauses on
 * the base transactions query.
 *
 * "equals" on the special fields "to"/"from" filters transactions.target /
 * transactions.owner_address; any other expr1 is treated as a tag name and
 * matched via a subquery on the tags table (LIKE when expr2 contains a '%'
 * wildcard, exact match otherwise). "and"/"or" nest the second branch
 * inside andWhere/orWhere groups.
 */
const arqlToSqlQuery = (
  sqlQuery: Knex.QueryInterface,
  arqlQuery: ArqlQuery
): Knex.QueryInterface => {
  switch (arqlQuery.op) {
    case "equals":
      return sqlQuery.where((sqlQuery) => {
        switch (arqlQuery.expr1) {
          case "to":
            sqlQuery.whereIn("transactions.target", [arqlQuery.expr2]);
            break;
          case "from":
            sqlQuery.whereIn("transactions.owner_address", [arqlQuery.expr2]);
            break;
          default:
            // Tag match: transactions whose id appears in the tags table
            // under this name/value.
            sqlQuery.whereIn("transactions.id", (query: any) => {
              query.select("tx_id").from("tags");
              if (arqlQuery.expr2.includes("%")) {
                query
                  .where("tags.name", "=", arqlQuery.expr1)
                  .where("tags.value", "LIKE", arqlQuery.expr2);
              } else {
                query.where({
                  "tags.name": arqlQuery.expr1,
                  "tags.value": arqlQuery.expr2,
                });
              }
            });
            break;
        }
      });

    case "and":
      return arqlToSqlQuery(sqlQuery, arqlQuery.expr1).andWhere(
        (sqlQuery: any) => {
          arqlToSqlQuery(sqlQuery, arqlQuery.expr2);
        }
      );
    case "or":
      return arqlToSqlQuery(sqlQuery, arqlQuery.expr1).orWhere(
        (sqlQuery: any) => {
          arqlToSqlQuery(sqlQuery, arqlQuery.expr2);
        }
      );
    default:
      throw new createError.BadRequest();
  }
};
diff --git a/src/gateway/routes/data/index.ts b/src/gateway/routes/data/index.ts
new file mode 100644
index 0000000..bc69dad
--- /dev/null
+++ b/src/gateway/routes/data/index.ts
@@ -0,0 +1,329 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import fetch from "node-fetch";
+import { fetchTransactionData, getTagValue, Tag } from "../../../lib/arweave";
+import {
+ resolveManifestPath,
+ PathManifest,
+} from "../../../lib/arweave-path-manifest";
+import { getStream, putStream, put, get } from "../../../lib/buckets";
+import { RequestHandler, Request, Response } from "express";
+import { streamToJson, jsonToBuffer, fromB64Url } from "../../../lib/encoding";
+import { Readable } from "stream";
+import { NotFound } from "http-errors";
+import { query } from "../../../database/transaction-db";
+import { StreamTap } from "../../../lib/stream-tap";
+import pump from "pump";
+import { getData } from "../../../data/transactions";
+import { concat } from "lodash";
+import { arweaveNodesGet } from "../../../lib/hosts";
+
// Fallback content type when a tx specifies none.
const DEFAULT_TYPE = "text/html";

// NOTE(review): unused in this file — confirm before removal.
interface Bundle {
  items: { id: string; data: string; tags: Tag[] }[];
}
+
+export const handler: RequestHandler = async (req, res) => {
+ const index: number = Math.floor(Math.random() * arweaveNodesGet.length);
+ const host: string = arweaveNodesGet[index];
+ const txid = getTxIdFromPath(req.path);
+
+ if (txid) {
+ const { stream, contentType, contentLength, tags, cached, status } =
+ await getData(txid, req);
+
+ req.log.info("tx stream", {
+ stream: stream && stream?.readable,
+ contentType,
+ contentLength,
+ cached,
+ tags,
+ });
+
+ if (status) {
+ res.status(status);
+ }
+
+ if (contentLength == 0) {
+ setDataHeaders({ contentType, etag: txid, res });
+
+ res.end();
+ }
+
+ const blacklistHosts = arweaveNodesGet;
+
+ let blhost;
+ if (blacklistHosts && blacklistHosts.length > 0) {
+ try {
+ blhost =
+ blacklistHosts[Math.floor(Math.random() * blacklistHosts.length)];
+ } catch (error) {
+ req.log.info(`[is_tx_blacklisted] ERROR getting the list of nodes`, {
+ error,
+ });
+ }
+ } else {
+ req.log.info("[is_tx_blacklisted] No hosts available, skipping.");
+ }
+
+ let response;
+ if (blhost) {
+ try {
+ req.log.info(`[is_tx_blacklisted] checking if ${txid} is blacklisted`, {
+ host: blhost,
+ txid,
+ link: `${blhost}/is_tx_blacklisted/${txid}`,
+ });
+ response = await fetch(`${blhost}/is_tx_blacklisted/${txid}`);
+ } catch (error) {
+ req.log.info(
+ `[is_tx_blacklisted] no/failed response host ${blhost} for ${txid} `,
+ { error }
+ );
+ }
+
+ if (response) {
+ if (response.status !== 200) {
+ req.log.info(
+ `[is_tx_blacklisted] failed on host ${blhost} for ${txid}`
+ );
+ }
+
+ if ((await response.text()) === "true") {
+ res.status(451).send("Transaction blacklisted.");
+ return;
+ }
+ }
+ }
+
+ if (stream && contentLength) {
+ if (contentType == "application/x.arweave-manifest+json") {
+ req.log.info("[get-data] manifest content-type detected", { txid });
+
+ const manifest = await streamToJson(stream);
+
+ let cacheRequest: any = null;
+
+ if (!cached) {
+ cacheRequest = put("tx-data", `tx/${txid}`, jsonToBuffer(manifest), {
+ contentType,
+ tags,
+ });
+ }
+
+ return await Promise.all([
+ cacheRequest,
+ handleManifest(req, res, manifest, txid),
+ ]);
+ }
+
+ setDataHeaders({ contentType, contentLength, etag: txid, res });
+
+ if (cached) {
+ stream.pipe(res);
+ } else {
+ await sendAndCache({
+ txid,
+ req,
+ res,
+ stream,
+ contentType,
+ contentLength,
+ tags,
+ });
+ }
+ }
+ }
+};
+
+const getTxIdFromPath = (path: string): string | undefined => {
+ const matches = path.match(/^\/?([a-z0-9-_]{43})/i) || [];
+ return matches[1];
+};
+
+const setDataHeaders = ({
+ res,
+ etag,
+ contentType,
+ contentLength,
+}: {
+ res: Response;
+ etag: string;
+ contentType?: string;
+ contentLength?: number;
+}) => {
+ res.header("Etag", etag);
+ if (contentType) {
+ res.type(contentType || DEFAULT_TYPE);
+ }
+ if (contentLength) {
+ res.header("Content-Length", contentLength.toString());
+ }
+};
+
+const sendAndCache = async ({
+ txid,
+ contentType,
+ contentLength,
+ tags,
+ stream,
+ res,
+ req,
+}: {
+ txid: string;
+ contentType?: string;
+ contentLength: number;
+ tags?: Tag[];
+ stream: Readable;
+ req: Request;
+ res: Response;
+}) => {
+ await new Promise(async (resolve, reject) => {
+ req.log.info("[get-data] streaming chunks from s3 cache", {
+ txid,
+ });
+
+ const { upload, stream: cacheStream } = await putStream(
+ "tx-data",
+ `tx/${txid}`,
+ {
+ contentType,
+ contentLength,
+ tags,
+ }
+ );
+
+ const copyToResponse = new StreamTap(res);
+
+ cacheStream.on("end", (error: any) => {
+ req.log.info("[get-data] cach stream ended", { txid, error });
+
+ if (copyToResponse.getBytesProcessed() != contentLength) {
+ req.log.warn(
+ `[get-data] cached content doesn't match expected data_size`,
+ { contentLength, processedBytes: copyToResponse.getBytesProcessed }
+ );
+ }
+
+ upload.send((err, data) => {
+ req.log.info("[get-data] s3 upload done", { data });
+ if (err) {
+ upload.abort();
+ reject(err);
+ }
+ resolve(data);
+ });
+ });
+
+ res.flushHeaders();
+
+ pump(stream, copyToResponse, cacheStream, async (err) => {
+ if (err) {
+ req.log.error("pump error", { err });
+ upload.abort();
+ res.end();
+ cacheStream.end();
+ stream.destroy();
+ console.log("rejecting...");
+ reject(err);
+ }
+ res.end();
+ });
+ });
+ req.log.info("[get-data] streaming handler complete");
+};
+
/**
 * Serves a path-manifest transaction: resolves the request subpath against
 * the manifest's path map, then streams (and caches on miss) the resolved
 * transaction's data. Bare "/<txid>" requests are 301-redirected to the
 * trailing-slash form. Throws NotFound when no manifest entry matches or
 * the resolved tx yields no usable stream/length.
 */
const handleManifest = async (
  req: Request,
  res: Response,
  manifest: PathManifest,
  txid: string
) => {
  // Strip any "/?..." query remnants from the path.
  let safePath = req.path.replace(/\/\?.*/i, "");

  // not risking "/" (index) paths
  if (safePath.length > 1) {
    safePath = safePath.replace(/\/$/i, "");
  }

  // NOTE(review): unescape() is deprecated — presumably decodeURIComponent
  // is the intended behaviour; confirm before changing.
  let subpath = unescape(getManifestSubpath(safePath) || "");

  // not risking "/" (index) paths
  if (subpath.length > 1) {
    subpath = subpath.replace(/\/$/i, "");
  }

  // Redirect so relative links inside the manifest resolve correctly.
  if (req.path == `/${txid}`) {
    return res.redirect(301, `${req.path}/`);
  }

  const resolvedTx = resolveManifestPath(manifest, subpath);

  req.log.info("[get-data] resolved manifest path content", {
    subpath,
    resolvedTx,
  });

  if (resolvedTx) {
    const { stream, contentType, contentLength, cached } = await getData(
      resolvedTx,
      req
    );

    setDataHeaders({ contentType, contentLength, etag: txid, res });

    if (stream && contentLength && contentLength > 0) {
      if (cached) {
        return stream.pipe(res);
      } else {
        // Cache miss: stream to the client and into the cache at once.
        return sendAndCache({
          txid: resolvedTx,
          req,
          res,
          stream,
          contentType,
          contentLength,
        });
      }
    } else {
      req.log.info(
        "[get-data] NotFound contentType, contentLength or stream went missing",
        {
          resolvedTx,
          stream: typeof stream,
          contentType,
          contentLength,
        }
      );
    }
  }

  throw new NotFound();
};
+
// @deprecated — thin alias over getTransactionSubpath, kept for naming
// compatibility; prefer calling getTransactionSubpath directly.
const getManifestSubpath = (requestPath: string): string | undefined => {
  return getTransactionSubpath(requestPath);
};
+
+const getTransactionSubpath = (requestPath: string): string | undefined => {
+ const subpath = requestPath.match(/^\/?[a-zA-Z0-9-_]{43}\/(.*)$/i);
+ return (subpath && subpath[1]) || undefined;
+};
diff --git a/src/gateway/routes/graphql-v2/index.ts b/src/gateway/routes/graphql-v2/index.ts
new file mode 100644
index 0000000..1c24e12
--- /dev/null
+++ b/src/gateway/routes/graphql-v2/index.ts
@@ -0,0 +1,53 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import {
+ ApolloServer,
+ ApolloServerExpressConfig,
+ gql,
+} from "apollo-server-express";
+import {
+ ApolloServerPluginLandingPageDisabled,
+ ApolloServerPluginLandingPageGraphQLPlayground,
+} from "apollo-server-core";
+import { getConnectionPool } from "../../../database/postgres";
+import { resolvers } from "./resolvers";
+import { readFileSync } from "fs";
+
// Load the GraphQL schema from the adjacent .graphql file at startup.
const typeDefs = gql(readFileSync(__dirname + "/schema/types.graphql", "utf8"));

/**
 * Builds the Apollo server for the /graphql endpoint, backed by the read
 * Postgres pool. Callers may override any config via `opts` (app.ts
 * enables introspection this way).
 *
 * NOTE(review): both the landing-page-disabled and GraphQL Playground
 * plugins are registered — these look mutually contradictory; confirm
 * which landing-page behaviour is intended.
 */
const apolloServer = (opts: ApolloServerExpressConfig = {}) => {
  return new ApolloServer({
    typeDefs,
    resolvers,
    debug: false,
    plugins: [
      ApolloServerPluginLandingPageDisabled(),
      ApolloServerPluginLandingPageGraphQLPlayground(),
    ],
    // Each request's resolver context gets the Express req and a
    // read-only DB connection.
    context: ({ req }) => {
      return {
        req,
        connection: getConnectionPool("read"),
      };
    },
    ...opts,
  });
};

export { apolloServer };
diff --git a/src/gateway/routes/graphql-v2/resolvers.ts b/src/gateway/routes/graphql-v2/resolvers.ts
new file mode 100644
index 0000000..e1cf2c6
--- /dev/null
+++ b/src/gateway/routes/graphql-v2/resolvers.ts
@@ -0,0 +1,302 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { TransactionHeader, utf8DecodeTag } from "../../../lib/arweave";
+import { IResolvers } from "@graphql-tools/utils";
+import { query } from "../../../database/transaction-db";
+import moment from "moment";
+import { ISO8601DateTimeString, winstonToAr } from "../../../lib/encoding";
+import { BadRequest } from "http-errors";
+import graphqlFields from "graphql-fields";
+import { QueryTransactionsArgs } from "./schema/types";
+import { DatabaseBlock, queryBlocks } from "../../../database/block-db";
+
+type Resolvers = IResolvers;
+
+const DEFAULT_PAGE_SIZE = 10;
+const MAX_PAGE_SIZE = 100;
+
+const txFieldMap = {
+ id: "transactions.id",
+ anchor: "transactions.last_tx",
+ recipient: "transactions.target",
+ tags: "transactions.tags",
+ fee: "transactions.reward",
+ quantity: "transactions.quantity",
+ data_size: "transactions.data_size",
+ data_type: "transactions.content_type",
+ parent: "transactions.parent",
+ owner: "transactions.owner",
+ owner_address: "transactions.owner_address",
+ signature: "transactions.signature",
+ block_id: "blocks.id",
+ block_timestamp: "blocks.mined_at",
+ block_height: "blocks.height",
+ block_previous: "blocks.previous_block",
+ block_extended: "blocks.extended",
+};
+
+const blockFieldMap = {
+ id: "blocks.id",
+ timestamp: "blocks.mined_at",
+ height: "blocks.height",
+ previous: "blocks.previous_block",
+ extended: "blocks.extended",
+};
+
+export const resolvers: Resolvers = {
+ Query: {
+ transaction: async (parent, queryParams, { req, connection }) => {
+ req.log.info("[grqphql/v2] transaction/request", queryParams);
+ const sqlQuery = query(connection, {
+ id: queryParams.id,
+ blocks: true,
+ select: txFieldMap,
+ limit: 2,
+ }).first();
+
+ return (await sqlQuery) as TransactionHeader;
+ },
+ transactions: async (
+ parent,
+ queryParams: QueryTransactionsArgs,
+ { req, connection },
+ info
+ ) => {
+ req.log.info("[grqphql/v2] transactions/request", {
+ queryParams,
+ fields: graphqlFields(info as any),
+ });
+
+ const { timestamp, offset } = parseCursor(
+ queryParams.after || newCursor()
+ );
+
+ const pageSize = Math.min(
+ queryParams.first || DEFAULT_PAGE_SIZE,
+ MAX_PAGE_SIZE
+ );
+
+ const results = await query(connection, {
+ // Add one to the limit, we'll remove this result but it tells
+ // us if there's another page of data to fetch.
+ limit: pageSize + 1,
+ offset: offset,
+ ids: queryParams.ids || undefined,
+ to: queryParams.recipients || undefined,
+ from: queryParams.owners || undefined,
+ tags: queryParams.tags || undefined,
+ parents: queryParams.bundledIn || undefined,
+ blocks: true,
+ before: timestamp,
+ select: txFieldMap,
+ minHeight: queryParams.block?.min || undefined,
+ maxHeight: queryParams.block?.max || undefined,
+ sortOrder: queryParams.sort || undefined,
+ });
+
+ req.log.info("[grqphql/v2] transactions/response", {
+ queryParams,
+ results: results.length,
+ pageSize,
+ offset,
+ });
+
+ const hasNextPage = results.length > pageSize;
+
+ return {
+ pageInfo: {
+ hasNextPage,
+ },
+ edges: Array.isArray(results)
+ ? results.slice(0, pageSize).map((node: any, index) => {
+ return {
+ cursor: encodeCursor({ timestamp, offset: offset + index + 1 }),
+ node,
+ };
+ })
+ : [],
+ };
+ },
+ block: async (parent, queryParams, { req, connection }) => {
+ req.log.info("[grqphql/v2] transaction/request", queryParams);
+ const sqlQuery = queryBlocks(connection, {
+ select: blockFieldMap,
+ id: queryParams.id,
+ }).first();
+
+ return (await sqlQuery) as any;
+ },
+ blocks: async (parent, queryParams, { req, connection }) => {
+ req.log.info("[grqphql/v2] blocks/request", queryParams);
+
+ const { timestamp, offset } = parseCursor(
+ queryParams.after || newCursor()
+ );
+
+ const pageSize = Math.min(
+ queryParams.first || DEFAULT_PAGE_SIZE,
+ MAX_PAGE_SIZE
+ );
+
+ const results: any = await queryBlocks(connection, {
+ ids: queryParams.ids,
+ select: blockFieldMap,
+ minHeight: queryParams.height?.min,
+ maxHeight: queryParams.height?.max,
+ sortOrder: queryParams.sort,
+ // +1 so we know if there is another page of results,
+ // this last result can be array.sliced off the response.
+ limit: pageSize + 1,
+ offset: offset,
+ before: timestamp,
+ });
+
+ req.log.info("[grqphql/v2] blocks/response", {
+ queryParams,
+ results: results.length,
+ pageSize,
+ offset,
+ });
+
+ const hasNextPage = results.length > pageSize;
+
+ return {
+ pageInfo: {
+ hasNextPage,
+ },
+ edges: async () => {
+ return results
+ .slice(0, pageSize)
+ .map((result: any, index: number) => {
+ return {
+ cursor: encodeCursor({ timestamp, offset: offset + index + 1 }),
+ node: result,
+ };
+ });
+ },
+ };
+ },
+ },
+ Transaction: {
+ block: (parent) => {
+ return parent?.block_id
+ ? {
+ id: parent?.block_id,
+ timestamp: parent?.block_timestamp,
+ height: parent?.block_height,
+ previous: parent?.block_previous,
+ extended: parent?.block_extended,
+ }
+ : null;
+ },
+ tags: (parent) => {
+ return Array.isArray(parent.tags) ? parent.tags.map(utf8DecodeTag) : [];
+ },
+ recipient: (parent) => {
+ if (parent && parent.recipient && typeof parent.recipient === "string") {
+ return parent.recipient.trim();
+ } else {
+ return "";
+ }
+ },
+ data: (parent) => {
+ return {
+ size: parent.data_size || 0,
+ type: parent.data_type,
+ };
+ },
+ quantity: (parent) => {
+ return {
+ ar: winstonToAr(parent.quantity || 0),
+ winston: parent.quantity || 0,
+ };
+ },
+ fee: (parent) => {
+ return {
+ ar: winstonToAr(parent.fee || 0),
+ winston: parent.fee || 0,
+ };
+ },
+ owner: (parent) => {
+ return {
+ address: parent && parent.owner_address ? parent.owner_address : "",
+ key: parent && parent.owner ? parent.owner : "",
+ };
+ },
+ parent: (parent) => {
+ if (parent.parent) {
+ return {
+ id: parent.parent,
+ };
+ }
+ },
+ bundledIn: (parent) => {
+ if (parent.parent) {
+ return {
+ id: parent.parent,
+ };
+ }
+ },
+ },
+ Block: {
+ // Not fully supported for old blocks yet
+ // reward: (parent) => {
+ // return {
+ // address: parent.extended.reward_addr,
+ // pool: parent.extended.reward_pool,
+ // };
+ // },
+ // size: (parent) => {
+ // return parent.extended?.block_size;
+ // },
+ timestamp: (parent) => {
+ return moment(parent?.timestamp).unix();
+ },
+ },
+};
+
+const newCursor = (): string => {
+ return encodeCursor({ timestamp: moment().toISOString(), offset: 0 });
+};
+
+const encodeCursor = ({
+ timestamp,
+ offset,
+}: {
+ timestamp: ISO8601DateTimeString;
+ offset: number;
+}): string => {
+ const string = JSON.stringify([timestamp, offset]);
+ return Buffer.from(string).toString("base64");
+};
+
+const parseCursor = (
+ cursor: string
+): { timestamp: ISO8601DateTimeString; offset: number } => {
+ try {
+ const [timestamp, offset] = JSON.parse(
+ Buffer.from(cursor, "base64").toString()
+ ) as [ISO8601DateTimeString, number];
+
+ return { timestamp, offset };
+ } catch (error) {
+ console.error(error);
+ throw new BadRequest("invalid cursor");
+ }
+};
diff --git a/src/gateway/routes/graphql-v2/schema/types.graphql b/src/gateway/routes/graphql-v2/schema/types.graphql
new file mode 100644
index 0000000..2fff8cc
--- /dev/null
+++ b/src/gateway/routes/graphql-v2/schema/types.graphql
@@ -0,0 +1,373 @@
+"""
+Arweave Gateway
+Copyright (C) 2022 Permanent Data Solutions, Inc
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation, either version 3 of the License, or
+(at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program. If not, see <https://www.gnu.org/licenses/>.
+"""
+type Query {
+ """
+ Get a transaction by its id
+ """
+ transaction(id: ID!): Transaction
+ """
+ Get a paginated set of matching transactions using filters.
+ """
+ transactions(
+ """
+ Find transactions from a list of ids.
+ """
+ ids: [ID!]
+ """
+ Find transactions from a list of owner wallet addresses, or wallet owner public keys.
+ """
+ owners: [String!]
+ """
+ Find transactions from a list of recipient wallet addresses.
+ """
+ recipients: [String!]
+ """
+ Find transactions using tags.
+ """
+ tags: [TagFilter!]
+ """
+ Find data items from the given data bundles.
+ See: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-104.md
+ """
+ bundledIn: [ID!]
+ """
+ Find transactions within a given block height range.
+ """
+ block: BlockFilter
+ """
+ Result page size (max: 100)
+ """
+ first: Int = 10
+ """
+ A pagination cursor value, for fetching subsequent pages from a result set.
+ """
+ after: String
+ """
+ Optionally specify the result sort order.
+ """
+ sort: SortOrder = HEIGHT_DESC
+ """
+ @deprecated Don't use, kept for backwards compatibility only!
+ """
+ parent: [ID!] @deprecated(reason: "Use `bundledIn`")
+ ): TransactionConnection!
+
+ block(id: String): Block
+
+ blocks(
+ """
+ Find blocks from a list of ids.
+ """
+ ids: [ID!]
+
+ """
+ Find blocks within a given block height range.
+ """
+ height: BlockFilter
+
+ """
+ Result page size (max: 100)
+ """
+ first: Int = 10
+ """
+ A pagination cursor value, for fetching subsequent pages from a result set.
+ """
+ after: String
+ """
+ Optionally specify the result sort order.
+ """
+ sort: SortOrder = HEIGHT_DESC
+ ): BlockConnection!
+}
+"""
+Optionally reverse the result sort order from `HEIGHT_DESC` (default) to `HEIGHT_ASC`.
+"""
+enum SortOrder {
+ """
+ Results are sorted by the transaction block height in ascending order, with the oldest transactions appearing first, and the most recent and pending/unconfirmed appearing last.
+ """
+ HEIGHT_ASC
+ """
+ Results are sorted by the transaction block height in descending order, with the most recent and unconfirmed/pending transactions appearing first.
+ """
+ HEIGHT_DESC
+}
+
+"""
+Find transactions with the following tag name and value
+"""
+input TagFilter {
+ """
+ The tag name
+ """
+ name: String!
+ """
+ An array of values to match against. If multiple values are passed then transactions with _any_ matching tag value from the set will be returned.
+
+ e.g.
+
+ \`{name: "app-name", values: ["app-1"]}\`
+
+ Returns all transactions where the \`app-name\` tag has a value of \`app-1\`.
+
+ \`{name: "app-name", values: ["app-1", "app-2", "app-3"]}\`
+
+ Returns all transactions where the \`app-name\` tag has a value of either \`app-1\` _or_ \`app-2\` _or_ \`app-3\`.
+ """
+ values: [String!]!
+
+ """
+ The operator to apply to the tag filter. Defaults to EQ (equal).
+ """
+ op: TagOperator = EQ
+}
+
+"""
+Find blocks within a given range
+"""
+input BlockFilter {
+ """
+ Minimum block height to filter from
+ """
+ min: Int
+ """
+ Maximum block height to filter to
+ """
+ max: Int
+}
+
+"""
+Paginated result set using the GraphQL cursor spec,
+see: https://relay.dev/graphql/connections.htm.
+"""
+type BlockConnection {
+ pageInfo: PageInfo!
+ edges: [BlockEdge!]!
+}
+
+"""
+Paginated result set using the GraphQL cursor spec.
+"""
+type BlockEdge {
+ """
+ The cursor value for fetching the next page.
+
+ Pass this to the \`after\` parameter in \`blocks(after: $cursor)\`, the next page will start from the next item after this.
+ """
+ cursor: String!
+ """
+ A block object.
+ """
+ node: Block!
+}
+
+"""
+Paginated result set using the GraphQL cursor spec,
+see: https://relay.dev/graphql/connections.htm.
+"""
+type TransactionConnection {
+ pageInfo: PageInfo!
+ edges: [TransactionEdge!]!
+}
+
+"""
+Paginated result set using the GraphQL cursor spec.
+"""
+type TransactionEdge {
+ """
+ The cursor value for fetching the next page.
+
+ Pass this to the \`after\` parameter in \`transactions(after: $cursor)\`, the next page will start from the next item after this.
+ """
+ cursor: String!
+ """
+ A transaction object.
+ """
+ node: Transaction!
+}
+
+"""
+Paginated page info using the GraphQL cursor spec.
+"""
+type PageInfo {
+ hasNextPage: Boolean!
+}
+
+type Transaction {
+ id: ID!
+
+ anchor: String!
+ signature: String!
+ recipient: String!
+
+ owner: Owner!
+ fee: Amount!
+ quantity: Amount!
+ data: MetaData!
+ tags: [Tag!]!
+ """
+ Transactions with a null block are recent and unconfirmed, if they aren't mined into a block within 60 minutes they will be removed from results.
+ """
+ block: Block
+ """
+ @deprecated Don't use, kept for backwards compatibility only!
+ """
+ parent: Parent @deprecated(reason: "Use `bundledIn`")
+ """
+ For bundled data items this references the containing bundle ID.
+ See: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-104.md
+ """
+ bundledIn: Bundle
+}
+
+"""
+The parent transaction for bundled transactions,
+see: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md.
+"""
+type Parent {
+ id: ID!
+}
+
+"""
+The data bundle containing the current data item.
+See: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-104.md.
+"""
+type Bundle {
+ """
+ ID of the containing data bundle.
+ """
+ id: ID!
+}
+
+type Block {
+ """
+ The block ID.
+ """
+ id: ID!
+ """
+ The block timestamp (UTC).
+ """
+ timestamp: Int!
+ """
+ The block height.
+ """
+ height: Int!
+ """
+ The previous block ID.
+ """
+ previous: ID!
+ # """
+ # The block size (sum of all transaction data contained in this block).
+ # """
+ # size: String!
+ # """
+ # The reward address and current reward pool size.
+ # """
+ # reward: BlockReward!
+}
+
+# type BlockReward {
+# """
+# Miner address.
+# """
+# address: String!
+# """
+# Size of the reward pool.
+# """
+# pool: String!
+# }
+
+"""
+Basic metadata about the transaction data payload.
+"""
+type MetaData {
+ """
+ Size of the associated data in bytes.
+ """
+ size: String!
+ """
+ Type is derived from the \`content-type\` tag on a transaction.
+ """
+ type: String
+}
+"""
+Representation of a value transfer between wallets, in both winston and ar.
+"""
+type Amount {
+ """
+ Amount as a winston string e.g. \`"1000000000000"\`.
+ """
+ winston: String!
+ """
+ Amount as an AR string e.g. \`"0.000000000001"\`.
+ """
+ ar: String!
+}
+
+"""
+Representation of a transaction owner.
+"""
+type Owner {
+ """
+ The owner's wallet address.
+ """
+ address: String!
+ """
+ The owner's public key as a base64url encoded string.
+ """
+ key: String!
+}
+
+type Tag {
+ """
+ UTF-8 tag name
+ """
+ name: String!
+ """
+ UTF-8 tag value
+ """
+ value: String!
+}
+
+"""
+The operator to apply to a tag value.
+"""
+enum TagOperator {
+ """
+ Equal
+ """
+ EQ
+ """
+ Not equal
+ """
+ NEQ
+}
+
+# """
+# Transaction statuses
+# """
+# enum Status {
+# """
+# Transaction is included in a block
+# """
+# CONFIRMED
+# """
+# Transaction is not yet included in a block
+# """
+# PENDING
+# }
diff --git a/src/gateway/routes/graphql-v2/schema/types.ts b/src/gateway/routes/graphql-v2/schema/types.ts
new file mode 100644
index 0000000..7ce1695
--- /dev/null
+++ b/src/gateway/routes/graphql-v2/schema/types.ts
@@ -0,0 +1,625 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { GraphQLResolveInfo } from "graphql";
+export type Maybe<T> = T | null;
+export type Exact<T extends { [key: string]: unknown }> = {
+ [K in keyof T]: T[K];
+};
+export type RequireFields<T, K extends keyof T> = {
+ [X in Exclude<keyof T, K>]?: T[X];
+} & { [P in K]-?: NonNullable<T[P]> };
+/** All built-in and custom scalars, mapped to their actual values */
+export type Scalars = {
+ ID: string;
+ String: string;
+ Boolean: boolean;
+ Int: number;
+ Float: number;
+};
+
+/** Representation of a value transfer between wallets, in both winston and ar. */
+export type Amount = {
+ __typename?: "Amount";
+ /** Amount as a winston string e.g. \`"1000000000000"\`. */
+ winston: Scalars["String"];
+ /** Amount as an AR string e.g. \`"0.000000000001"\`. */
+ ar: Scalars["String"];
+};
+
+export type Block = {
+ __typename?: "Block";
+ /** The block ID. */
+ id: Scalars["ID"];
+ /** The block timestamp (UTC). */
+ timestamp: Scalars["Int"];
+ /** The block height. */
+ height: Scalars["Int"];
+ /** The previous block ID. */
+ previous: Scalars["ID"];
+};
+
+/**
+ * Paginated result set using the GraphQL cursor spec,
+ * see: https://relay.dev/graphql/connections.htm.
+ */
+export type BlockConnection = {
+ __typename?: "BlockConnection";
+ pageInfo: PageInfo;
+ edges: Array<BlockEdge>;
+};
+
+/** Paginated result set using the GraphQL cursor spec. */
+export type BlockEdge = {
+ __typename?: "BlockEdge";
+ /**
+ * The cursor value for fetching the next page.
+ *
+ * Pass this to the \`after\` parameter in \`blocks(after: $cursor)\`, the next
+ * page will start from the next item after this.
+ */
+ cursor: Scalars["String"];
+ /** A block object. */
+ node: Block;
+};
+
+/** Find blocks within a given range */
+export type BlockHeightFilter = {
+ /** Minimum block height to filter from */
+ min?: Maybe<Scalars["Int"]>;
+ /** Maximum block height to filter to */
+ max?: Maybe<Scalars["Int"]>;
+};
+
+/**
+ * The data bundle containing the current data item.
+ * See: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md.
+ */
+export type Bundle = {
+ __typename?: "Bundle";
+ /** ID of the containing data bundle. */
+ id: Scalars["ID"];
+};
+
+/** Basic metadata about the transaction data payload. */
+export type MetaData = {
+ __typename?: "MetaData";
+ /** Size of the associated data in bytes. */
+ size: Scalars["String"];
+ /** Type is derived from the \`content-type\` tag on a transaction. */
+ type?: Maybe<Scalars["String"]>;
+};
+
+/** Representation of a transaction owner. */
+export type Owner = {
+ __typename?: "Owner";
+ /** The owner's wallet address. */
+ address: Scalars["String"];
+ /** The owner's public key as a base64url encoded string. */
+ key: Scalars["String"];
+};
+
+/** Paginated page info using the GraphQL cursor spec. */
+export type PageInfo = {
+ __typename?: "PageInfo";
+ hasNextPage: Scalars["Boolean"];
+};
+
+/**
+ * The parent transaction for bundled transactions,
+ * see: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md.
+ */
+export type Parent = {
+ __typename?: "Parent";
+ id: Scalars["ID"];
+};
+
+export type Query = {
+ __typename?: "Query";
+ /** Get a transaction by its id */
+ transaction?: Maybe<Transaction>;
+ /** Get a paginated set of matching transactions using filters. */
+ transactions: TransactionConnection;
+ block?: Maybe<Block>;
+ blocks: BlockConnection;
+};
+
+export type QueryTransactionArgs = {
+ id: Scalars["ID"];
+};
+
+export type QueryTransactionsArgs = {
+ ids?: Maybe<Array<Scalars["ID"]>>;
+ owners?: Maybe<Array<Scalars["String"]>>;
+ recipients?: Maybe<Array<Scalars["String"]>>;
+ tags?: Maybe<Array<TagFilter>>;
+ bundledIn?: Maybe<Array<Scalars["ID"]>>;
+ block?: Maybe<BlockHeightFilter>;
+ first?: Maybe<Scalars["Int"]>;
+ after?: Maybe<Scalars["String"]>;
+ sort?: Maybe<SortOrder>;
+};
+
+export type QueryBlockArgs = {
+ id?: Maybe<Scalars["String"]>;
+};
+
+export type QueryBlocksArgs = {
+ ids?: Maybe<Array<Scalars["ID"]>>;
+ height?: Maybe<BlockHeightFilter>;
+ first?: Maybe<Scalars["Int"]>;
+ after?: Maybe<Scalars["String"]>;
+ sort?: Maybe<SortOrder>;
+};
+
+/** Optionally reverse the result sort order from `HEIGHT_DESC` (default) to `HEIGHT_ASC`. */
+export enum SortOrder {
+ /**
+ * Results are sorted by the transaction block height in ascending order, with
+ * the oldest transactions appearing first, and the most recent and
+ * pending/unconfirmed appearing last.
+ */
+ HeightAsc = "HEIGHT_ASC",
+ /**
+ * Results are sorted by the transaction block height in descending order, with
+ * the most recent and unconfirmed/pending transactions appearing first.
+ */
+ HeightDesc = "HEIGHT_DESC",
+}
+
+export type Tag = {
+ __typename?: "Tag";
+ /** UTF-8 tag name */
+ name: Scalars["String"];
+ /** UTF-8 tag value */
+ value: Scalars["String"];
+};
+
+/** Find transactions with the following tag name and value */
+export type TagFilter = {
+ /** The tag name */
+ name: Scalars["String"];
+ /**
+ * An array of values to match against. If multiple values are passed then
+ * transactions with _any_ matching tag value from the set will be returned.
+ *
+ * e.g.
+ *
+ * \`{name: "app-name", values: ["app-1"]}\`
+ *
+ * Returns all transactions where the \`app-name\` tag has a value of \`app-1\`.
+ *
+ * \`{name: "app-name", values: ["app-1", "app-2", "app-3"]}\`
+ *
+ * Returns all transactions where the \`app-name\` tag has a value of either \`app-1\` _or_ \`app-2\` _or_ \`app-3\`.
+ */
+ values: Array<Scalars["String"]>;
+ /** The operator to apply to the tag filter. Defaults to EQ (equal). */
+ op?: Maybe<TagOperator>;
+};
+
+/** The operator to apply to a tag value. */
+export enum TagOperator {
+ /** Equal */
+ Eq = "EQ",
+ /** Not equal */
+ Neq = "NEQ",
+}
+
+export type Transaction = {
+ __typename?: "Transaction";
+ id: Scalars["ID"];
+ anchor: Scalars["String"];
+ signature: Scalars["String"];
+ recipient: Scalars["String"];
+ owner: Owner;
+ fee: Amount;
+ quantity: Amount;
+ data: MetaData;
+ tags: Array<Tag>;
+ /**
+ * Transactions with a null block are recent and unconfirmed, if they aren't
+ * mined into a block within 60 minutes they will be removed from results.
+ */
+ block?: Maybe<Block>;
+ /**
+ * Transactions with parent are Bundled Data Items as defined in the ANS-102 data spec.
+ * https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md
+ * @deprecated Use `bundledIn`
+ */
+ parent?: Maybe<Parent>;
+ /**
+ * For bundled data items this references the containing bundle ID.
+ * See: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md
+ */
+ bundledIn?: Maybe<Bundle>;
+};
+
+/**
+ * Paginated result set using the GraphQL cursor spec,
+ * see: https://relay.dev/graphql/connections.htm.
+ */
+export type TransactionConnection = {
+ __typename?: "TransactionConnection";
+ pageInfo: PageInfo;
+ edges: Array<TransactionEdge>;
+};
+
+/** Paginated result set using the GraphQL cursor spec. */
+export type TransactionEdge = {
+ __typename?: "TransactionEdge";
+ /**
+ * The cursor value for fetching the next page.
+ *
+ * Pass this to the \`after\` parameter in \`transactions(after: $cursor)\`, the
+ * next page will start from the next item after this.
+ */
+ cursor: Scalars["String"];
+ /** A transaction object. */
+ node: Transaction;
+};
+
+export type ResolverTypeWrapper<T> = Promise<T> | T;
+
+export type LegacyStitchingResolver<TResult, TParent, TContext, TArgs> = {
+ fragment: string;
+ resolve: ResolverFn<TResult, TParent, TContext, TArgs>;
+};
+
+export type NewStitchingResolver<TResult, TParent, TContext, TArgs> = {
+ selectionSet: string;
+ resolve: ResolverFn<TResult, TParent, TContext, TArgs>;
+};
+export type StitchingResolver<TResult, TParent, TContext, TArgs> =
+ | LegacyStitchingResolver<TResult, TParent, TContext, TArgs>
+ | NewStitchingResolver<TResult, TParent, TContext, TArgs>;
+export type Resolver<TResult, TParent = {}, TContext = {}, TArgs = {}> =
+ | ResolverFn<TResult, TParent, TContext, TArgs>
+ | StitchingResolver<TResult, TParent, TContext, TArgs>;
+
+export type ResolverFn<TResult, TParent, TContext, TArgs> = (
+ parent: TParent,
+ args: TArgs,
+ context: TContext,
+ info: GraphQLResolveInfo
+) => Promise<TResult> | TResult;
+
+export type SubscriptionSubscribeFn<TResult, TParent, TContext, TArgs> = (
+ parent: TParent,
+ args: TArgs,
+ context: TContext,
+ info: GraphQLResolveInfo
+) => AsyncIterator<TResult> | Promise<AsyncIterator<TResult>>;
+
+export type SubscriptionResolveFn<TResult, TParent, TContext, TArgs> = (
+ parent: TParent,
+ args: TArgs,
+ context: TContext,
+ info: GraphQLResolveInfo
+) => TResult | Promise<TResult>;
+
+export interface SubscriptionSubscriberObject<
+ TResult,
+ TKey extends string,
+ TParent,
+ TContext,
+ TArgs
+> {
+ subscribe: SubscriptionSubscribeFn<
+ { [key in TKey]: TResult },
+ TParent,
+ TContext,
+ TArgs
+ >;
+ resolve?: SubscriptionResolveFn<
+ TResult,
+ { [key in TKey]: TResult },
+ TContext,
+ TArgs
+ >;
+}
+
+export interface SubscriptionResolverObject {
+ subscribe: SubscriptionSubscribeFn;
+ resolve: SubscriptionResolveFn;
+}
+
+export type SubscriptionObject<
+ TResult,
+ TKey extends string,
+ TParent,
+ TContext,
+ TArgs
+> =
+ | SubscriptionSubscriberObject
+ | SubscriptionResolverObject;
+
+export type SubscriptionResolver<
+ TResult,
+ TKey extends string,
+ TParent = {},
+ TContext = {},
+ TArgs = {}
+> =
+ | ((
+ ...args: any[]
+ ) => SubscriptionObject)
+ | SubscriptionObject;
+
+export type TypeResolveFn = (
+ parent: TParent,
+ context: TContext,
+ info: GraphQLResolveInfo
+) => Maybe | Promise>;
+
+export type IsTypeOfResolverFn = (
+ obj: T,
+ info: GraphQLResolveInfo
+) => boolean | Promise;
+
+export type NextResolverFn<T> = () => Promise<T>;
+
+export type DirectiveResolverFn<
+ TResult = {},
+ TParent = {},
+ TContext = {},
+ TArgs = {}
+> = (
+ next: NextResolverFn,
+ parent: TParent,
+ args: TArgs,
+ context: TContext,
+ info: GraphQLResolveInfo
+) => TResult | Promise;
+
+/** Mapping between all available schema types and the resolvers types */
+export type ResolversTypes = {
+ Query: ResolverTypeWrapper<{}>;
+ ID: ResolverTypeWrapper<Scalars["ID"]>;
+ Transaction: ResolverTypeWrapper<Transaction>;
+ String: ResolverTypeWrapper<Scalars["String"]>;
+ Owner: ResolverTypeWrapper<Owner>;
+ Amount: ResolverTypeWrapper<Amount>;
+ MetaData: ResolverTypeWrapper<MetaData>;
+ Tag: ResolverTypeWrapper<Tag>;
+ Block: ResolverTypeWrapper<Block>;
+ Int: ResolverTypeWrapper<Scalars["Int"]>;
+ Parent: ResolverTypeWrapper<Parent>;
+ Bundle: ResolverTypeWrapper<Bundle>;
+ TagFilter: TagFilter;
+ TagOperator: TagOperator;
+ BlockHeightFilter: BlockHeightFilter;
+ SortOrder: SortOrder;
+ TransactionConnection: ResolverTypeWrapper<TransactionConnection>;
+ PageInfo: ResolverTypeWrapper<PageInfo>;
+ Boolean: ResolverTypeWrapper<Scalars["Boolean"]>;
+ TransactionEdge: ResolverTypeWrapper<TransactionEdge>;
+ BlockConnection: ResolverTypeWrapper<BlockConnection>;
+ BlockEdge: ResolverTypeWrapper<BlockEdge>;
+};
+
+/** Mapping between all available schema types and the resolvers parents */
+export type ResolversParentTypes = {
+ Query: {};
+ ID: Scalars["ID"];
+ Transaction: Transaction;
+ String: Scalars["String"];
+ Owner: Owner;
+ Amount: Amount;
+ MetaData: MetaData;
+ Tag: Tag;
+ Block: Block;
+ Int: Scalars["Int"];
+ Parent: Parent;
+ Bundle: Bundle;
+ TagFilter: TagFilter;
+ BlockHeightFilter: BlockHeightFilter;
+ TransactionConnection: TransactionConnection;
+ PageInfo: PageInfo;
+ Boolean: Scalars["Boolean"];
+ TransactionEdge: TransactionEdge;
+ BlockConnection: BlockConnection;
+ BlockEdge: BlockEdge;
+};
+
+export type AmountResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Amount"] = ResolversParentTypes["Amount"]
+> = {
+ winston?: Resolver;
+ ar?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type BlockResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Block"] = ResolversParentTypes["Block"]
+> = {
+ id?: Resolver;
+ timestamp?: Resolver;
+ height?: Resolver;
+ previous?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type BlockConnectionResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["BlockConnection"] = ResolversParentTypes["BlockConnection"]
+> = {
+ pageInfo?: Resolver;
+ edges?: Resolver, ParentType, ContextType>;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type BlockEdgeResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["BlockEdge"] = ResolversParentTypes["BlockEdge"]
+> = {
+ cursor?: Resolver;
+ node?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type BundleResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Bundle"] = ResolversParentTypes["Bundle"]
+> = {
+ id?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type MetaDataResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["MetaData"] = ResolversParentTypes["MetaData"]
+> = {
+ size?: Resolver;
+ type?: Resolver, ParentType, ContextType>;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type OwnerResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Owner"] = ResolversParentTypes["Owner"]
+> = {
+ address?: Resolver;
+ key?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type PageInfoResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["PageInfo"] = ResolversParentTypes["PageInfo"]
+> = {
+ hasNextPage?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type ParentResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Parent"] = ResolversParentTypes["Parent"]
+> = {
+ id?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type QueryResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Query"] = ResolversParentTypes["Query"]
+> = {
+ transaction?: Resolver<
+ Maybe<ResolversTypes["Transaction"]>,
+ ParentType,
+ ContextType,
+ RequireFields<QueryTransactionArgs, "id">
+ >;
+ transactions?: Resolver<
+ ResolversTypes["TransactionConnection"],
+ ParentType,
+ ContextType,
+ RequireFields<QueryTransactionsArgs, never>
+ >;
+ block?: Resolver<
+ Maybe<ResolversTypes["Block"]>,
+ ParentType,
+ ContextType,
+ RequireFields<QueryBlockArgs, never>
+ >;
+ blocks?: Resolver<
+ ResolversTypes["BlockConnection"],
+ ParentType,
+ ContextType,
+ RequireFields<QueryBlocksArgs, never>
+ >;
+};
+
+export type TagResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Tag"] = ResolversParentTypes["Tag"]
+> = {
+ name?: Resolver;
+ value?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type TransactionResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["Transaction"] = ResolversParentTypes["Transaction"]
+> = {
+ id?: Resolver;
+ anchor?: Resolver;
+ signature?: Resolver;
+ recipient?: Resolver;
+ owner?: Resolver;
+ fee?: Resolver;
+ quantity?: Resolver;
+ data?: Resolver;
+ tags?: Resolver, ParentType, ContextType>;
+ block?: Resolver, ParentType, ContextType>;
+ parent?: Resolver, ParentType, ContextType>;
+ bundledIn?: Resolver<
+ Maybe,
+ ParentType,
+ ContextType
+ >;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type TransactionConnectionResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["TransactionConnection"] = ResolversParentTypes["TransactionConnection"]
+> = {
+ pageInfo?: Resolver;
+ edges?: Resolver<
+ Array,
+ ParentType,
+ ContextType
+ >;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type TransactionEdgeResolvers<
+ ContextType = any,
+ ParentType extends ResolversParentTypes["TransactionEdge"] = ResolversParentTypes["TransactionEdge"]
+> = {
+ cursor?: Resolver;
+ node?: Resolver;
+ __isTypeOf?: IsTypeOfResolverFn;
+};
+
+export type Resolvers<ContextType = any> = {
+ Amount?: AmountResolvers<ContextType>;
+ Block?: BlockResolvers<ContextType>;
+ BlockConnection?: BlockConnectionResolvers<ContextType>;
+ BlockEdge?: BlockEdgeResolvers<ContextType>;
+ Bundle?: BundleResolvers<ContextType>;
+ MetaData?: MetaDataResolvers<ContextType>;
+ Owner?: OwnerResolvers<ContextType>;
+ PageInfo?: PageInfoResolvers<ContextType>;
+ Parent?: ParentResolvers<ContextType>;
+ Query?: QueryResolvers<ContextType>;
+ Tag?: TagResolvers<ContextType>;
+ Transaction?: TransactionResolvers<ContextType>;
+ TransactionConnection?: TransactionConnectionResolvers<ContextType>;
+ TransactionEdge?: TransactionEdgeResolvers<ContextType>;
+};
+
+/**
+ * @deprecated
+ * Use "Resolvers" root object instead. If you wish to get "IResolvers", add "typesPrefix: I" to your config.
+ */
+export type IResolvers<ContextType = any> = Resolvers<ContextType>;
diff --git a/src/gateway/routes/graphql/index.ts b/src/gateway/routes/graphql/index.ts
new file mode 100644
index 0000000..24b92a0
--- /dev/null
+++ b/src/gateway/routes/graphql/index.ts
@@ -0,0 +1,41 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { ApolloServer, ApolloServerExpressConfig } from "apollo-server-express";
+import { ApolloServerPluginLandingPageDisabled } from "apollo-server-core";
+import { getConnectionPool } from "../../../database/postgres";
+import { resolvers } from "./resolvers";
+import { typeDefs } from "./schema";
+
+const apolloServer = (opts: ApolloServerExpressConfig = {}) => {
+ return new ApolloServer({
+ typeDefs,
+ resolvers,
+ debug: false,
+ plugins: [ApolloServerPluginLandingPageDisabled()],
+ context: () => {
+ console.log("context...");
+ return {
+ connection: getConnectionPool("read"),
+ };
+ },
+ ...opts,
+ });
+};
+
+export { apolloServer };
diff --git a/src/gateway/routes/graphql/resolvers.ts b/src/gateway/routes/graphql/resolvers.ts
new file mode 100644
index 0000000..04fae57
--- /dev/null
+++ b/src/gateway/routes/graphql/resolvers.ts
@@ -0,0 +1,119 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { TransactionHeader, utf8DecodeTag, Tag } from "../../../lib/arweave";
+import { query } from "../../../database/transaction-db";
+
+type Resolvers = any;
+
+type ResolverFn = (parent: any, args: any, ctx: any) => Promise<any>;
+interface ResolverMap {
+ [field: string]: ResolverFn;
+}
+
+export const defaultMaxResults = 5000;
+
+export const resolvers: Resolvers = {
+ Query: {
+ transaction: async (
+ parent: any,
+      { id }: Record<string, string>,
+ context: any
+ ) => {
+ return query(context.connection, {
+ id,
+ });
+ },
+ transactions: async (
+ parent: any,
+      { to, from, tags }: Record<string, any>,
+ context: any
+ ) => {
+ const sqlQuery = query(context.connection, {
+ limit: defaultMaxResults,
+ to,
+ from,
+ tags: (tags || []).map((tag: Tag) => {
+ return {
+ name: tag.name,
+ values: [tag.value],
+ };
+ }),
+ });
+
+ // console.log(sqlQuery.toSQL());
+
+ const results = (await sqlQuery) as TransactionHeader[];
+
+    return results.map(({ id, tags = [] }: Partial<TransactionHeader>) => {
+ return {
+ id,
+ tags: tags.map(utf8DecodeTag),
+ };
+ });
+ },
+ },
+ Transaction: {
+ linkedFromTransactions: async (
+ parent: any,
+      { byForeignTag, to, from, tags }: Record<string, any>,
+ context: any
+ ) => {
+ const sqlQuery = query(context.connection, {
+ limit: defaultMaxResults,
+ to,
+ from,
+ tags: ((tags as any[]) || []).concat({
+ name: byForeignTag,
+ values: [parent.id],
+ }),
+ });
+
+ // console.log(sqlQuery.toSQL());
+
+ const results = (await sqlQuery) as TransactionHeader[];
+
+    return results.map(({ id, tags = [] }: Partial<TransactionHeader>) => {
+ return {
+ id,
+ tags: tags.map(utf8DecodeTag),
+ };
+ });
+ },
+ countLinkedFromTransactions: async (
+ parent: any,
+      { byForeignTag, to, from, tags }: Record<string, any>,
+ context: any
+ ) => {
+ const sqlQuery = query(context.connection, {
+ limit: defaultMaxResults,
+ to,
+ from,
+ tags: ((tags as any[]) || []).concat({
+ name: byForeignTag,
+ values: [parent.id],
+ }),
+ select: [],
+ }).count();
+
+ // console.log(sqlQuery.toSQL());
+
+ return (await sqlQuery.first()).count;
+ },
+ },
+};
diff --git a/src/gateway/routes/graphql/schema.ts b/src/gateway/routes/graphql/schema.ts
new file mode 100644
index 0000000..16b3365
--- /dev/null
+++ b/src/gateway/routes/graphql/schema.ts
@@ -0,0 +1,60 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { gql } from "apollo-server-express";
+
+export const typeDefs = gql`
+ type Query {
+ transaction(id: ID!): Transaction
+ transactions(
+ from: [String!]
+ to: [String!]
+ tags: [TagInput!]
+ ): [Transaction!]!
+ countTransactions(from: [String!], to: [String!], tags: [TagInput!]): Int!
+ }
+
+ type Transaction {
+ id: ID!
+ tags: [Tag!]!
+ tagValue(tagName: String!): String
+ linkedToTransaction(byOwnTag: String!): Transaction
+ linkedFromTransactions(
+ byForeignTag: String!
+ from: [String!]
+ to: [String!]
+ tags: [TagInput!]
+ ): [Transaction!]!
+ countLinkedFromTransactions(
+ byForeignTag: String!
+ from: [String!]
+ to: [String!]
+ tags: [TagInput!]
+ ): Int!
+ }
+
+ type Tag {
+ name: String!
+ value: String!
+ }
+
+ input TagInput {
+ name: String!
+ value: String!
+ }
+`;
diff --git a/src/gateway/routes/health/index.ts b/src/gateway/routes/health/index.ts
new file mode 100644
index 0000000..1f3547a
--- /dev/null
+++ b/src/gateway/routes/health/index.ts
@@ -0,0 +1,73 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import fetch from "node-fetch";
+import { RequestHandler } from "express";
+import { getLatestBlock } from "../../../database/block-db";
+import { getConnectionPool } from "../../../database/postgres";
+import log from "../../../lib/log";
+
+const origins = JSON.parse(process.env.ARWEAVE_NODES_GET || "") as string[];
+
+if (!Array.isArray(origins)) {
+ throw new Error(
+ `error.config: Invalid env var, process.env.ARWEAVE_NODES_GET: ${process.env.ARWEAVE_NODES_GET}`
+ );
+}
+
+export const handler: RequestHandler = async (req, res) => {
+ const healthStatus = {
+ region: process.env.AWS_REGION,
+ origins: await originHealth(),
+ database: await databaseHealth(),
+ };
+ res.send(healthStatus).end();
+};
+
+const originHealth = async () => {
+ try {
+ return await Promise.all(
+ origins.map(async (originUrl) => {
+ try {
+ const response = await fetch(`${originUrl}/info`);
+ return {
+ endpoint: originUrl,
+ status: response.status,
+ info: await response.json(),
+ };
+ } catch (error) {
+ console.error(error);
+ return error;
+ }
+ })
+ );
+ } catch (error) {
+ log.error(`[health-check] database error`, { error });
+ return false;
+ }
+};
+
+const databaseHealth = async () => {
+ try {
+ const pool = getConnectionPool("read");
+ return { block: await getLatestBlock(pool) };
+ } catch (error) {
+ log.error(`[health-check] database error`, { error });
+ return false;
+ }
+};
diff --git a/src/gateway/routes/new-chunk/index.ts b/src/gateway/routes/new-chunk/index.ts
new file mode 100644
index 0000000..2bb4174
--- /dev/null
+++ b/src/gateway/routes/new-chunk/index.ts
@@ -0,0 +1,124 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { fromB64Url } from "../../../lib/encoding";
+import { Chunk } from "../../../lib/arweave";
+import { enqueue, getQueueUrl } from "../../../lib/queues";
+import { pick } from "lodash";
+import { ImportChunk, ExportChunk } from "../../../interfaces/messages";
+import { RequestHandler } from "express";
+import { put } from "../../../lib/buckets";
+import NodeCryptoDriver from "arweave/node/lib/crypto/node-driver";
+import Arweave from "arweave/node";
+Arweave.crypto = new NodeCryptoDriver();
+
+import { validatePath } from "arweave/node/lib/merkle";
+import { BadRequest } from "http-errors";
+import Joi, { Schema, ValidationError } from "@hapi/joi";
+import { parseInput } from "../../middleware/validate-body";
+
+// the API definition uses numeric string instead of numbers,
+// Joi.number will accept either number or string and coerce it.
+export const chunkSchema: Schema = Joi.object({
+ chunk: Joi.string().required(),
+ data_root: Joi.string().required(),
+ data_size: Joi.string()
+ .required()
+ .regex(/[0-9]*/), // After validation this must be transformed to a numeric type
+ offset: Joi.string()
+ .required()
+ .regex(/[0-9]*/), // After validation this must be transformed to a numeric type
+ data_path: Joi.string().required(),
+});
+
+export const handler: RequestHandler = async (req, res) => {
+ const chunk = parseInput(chunkSchema, req.body, {
+ transform: (validatedPayload) => {
+ return {
+ ...validatedPayload,
+ data_size: parseInt(validatedPayload.data_size),
+ offset: parseInt(validatedPayload.offset),
+ };
+ },
+ });
+
+ req.log.info(`[new-chunk] received new chunk`, {
+ ...chunk,
+ chunk: chunk.chunk && chunk.chunk.substr(0, 100) + "...",
+ });
+
+ const chunkData = parseB64UrlOrThrow(chunk.chunk, "chunk");
+
+ const dataPath = parseB64UrlOrThrow(chunk.data_path, "data_path");
+
+ const root = parseB64UrlOrThrow(chunk.data_root, "data_root");
+
+ const isValid = await validateChunk(
+ root,
+ chunk.offset,
+ chunk.data_size,
+ dataPath
+ );
+
+ req.log.warn("[new-chunk] validate chunk", {
+ isValid,
+ });
+
+ if (!isValid) {
+ throw new BadRequest("Chunk validation failed");
+ }
+
+ req.log.warn("[new-chunk] cached successfully");
+
+ const queueItem = {
+ size: chunkData.byteLength,
+ header: pick(chunk, ["data_root", "data_size", "data_path", "offset"]),
+ };
+
+ await Promise.all([
+ put("tx-data", `chunks/${chunk.data_root}/${chunk.offset}`, chunkData, {
+ contentType: "application/octet-stream",
+ }),
+ enqueue(getQueueUrl("import-chunks"), queueItem),
+ enqueue(getQueueUrl("export-chunks"), queueItem),
+ ]);
+
+ res.sendStatus(200).end();
+};
+
+const parseB64UrlOrThrow = (b64urlString: string, fieldName: string) => {
+ try {
+ return fromB64Url(b64urlString);
+ } catch (error) {
+ throw new BadRequest(`missing field: ${fieldName}`);
+ }
+};
+
+const validateChunk = async (
+ root: Buffer,
+ offset: number,
+ size: number,
+ proof: Buffer
+) => {
+ try {
+ return (await validatePath(root, offset, 0, size, proof)) !== false;
+ } catch (error) {
+ console.warn(error);
+ return false;
+ }
+};
diff --git a/src/gateway/routes/new-tx/index.ts b/src/gateway/routes/new-tx/index.ts
new file mode 100644
index 0000000..8ce4b9e
--- /dev/null
+++ b/src/gateway/routes/new-tx/index.ts
@@ -0,0 +1,204 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { put } from "../../../lib/buckets";
+import { fromB64Url } from "../../../lib/encoding";
+import { Transaction, getTagValue } from "../../../lib/arweave";
+import { enqueue, getQueueUrl } from "../../../lib/queues";
+import { pick } from "lodash";
+import {
+ ImportTx,
+ DispatchTx,
+ DataFormatVersion,
+} from "../../../interfaces/messages";
+import { RequestHandler } from "express";
+import { BadRequest } from "http-errors";
+import { attemptFallbackNodes } from "../../../lib/broadcast";
+
+import Joi, { Schema } from "@hapi/joi";
+import { parseInput } from "../../middleware/validate-body";
+
+export const txSchema: Schema = Joi.object({
+ id: Joi.string()
+ .required()
+ .regex(/^[a-zA-Z0-9_-]{43}$/),
+ owner: Joi.string().required(),
+ signature: Joi.string().required(),
+ reward: Joi.string()
+ .regex(/[0-9]*/)
+ .required(),
+ last_tx: Joi.string().optional().allow("").default(""),
+ target: Joi.string().optional().allow("").default(""),
+ quantity: Joi.string()
+ .regex(/[0-9]*/)
+ .optional()
+ .allow("")
+ .default(""),
+ data: Joi.string().optional().allow("").default(""),
+ tags: Joi.array()
+ .optional()
+ .items(
+ Joi.object({
+ name: Joi.string().required().allow("").default(""),
+ value: Joi.string().required().allow("").default(""),
+ })
+ )
+ .default([]),
+ format: Joi.number().optional().default(1),
+ data_root: Joi.string().optional().allow("").default(""),
+ data_size: Joi.string()
+ .regex(/[0-9]*/)
+ .optional()
+ .default(""),
+ data_tree: Joi.array().items(Joi.string()).optional().default([]),
+});
+
+const dispatchQueueUrl = getQueueUrl("dispatch-txs");
+const importQueueUrl = getQueueUrl("import-txs");
+
+export const handler: RequestHandler<{}, {}, Transaction> = async (
+ req,
+ res
+) => {
+ const tx = parseInput(txSchema, req.body);
+ const { data, ...senzaData } = tx;
+
+ // some clients are sending fractional values in reward, the
+ // nodes ALWAYS reject these, so let's make less suffering for the user
+ if (
+ typeof senzaData === "object" &&
+ typeof senzaData["reward"] === "string" &&
+ senzaData["reward"].length > 0 &&
+ senzaData["reward"].includes(".")
+ ) {
+ res
+ .status(400)
+ .send(
+ `Bad reward field, expected string-integer but got ${senzaData["reward"]}`
+ );
+ return;
+ }
+
+ req.log.info(`[new-tx] Submit right away`, senzaData);
+
+ try {
+ await attemptFallbackNodes(tx);
+ } catch (error) {
+ req.log.info(
+ "[new-tx] something went wrong sending new tx to fallback nodes",
+ error
+ );
+ }
+
+ req.log.info(`[new-tx]`, {
+ ...tx,
+ data: tx.data && tx.data.substr(0, 100) + "...",
+ });
+
+ const dataSize = getDataSize(tx);
+
+ req.log.info(`[new-tx] data_size: ${dataSize}`);
+
+ if (dataSize > 0) {
+ const dataBuffer = fromB64Url(tx.data);
+
+ if (dataBuffer.byteLength > 0) {
+ await put("tx-data", `tx/${tx.id}`, dataBuffer, {
+ contentType: getTagValue(tx.tags, "content-type"),
+ });
+ }
+ }
+
+ req.log.info(`[new-tx] queuing for dispatch to network`, {
+ id: tx.id,
+ queue: dispatchQueueUrl,
+ });
+
+ await enqueue(dispatchQueueUrl, {
+ data_format: getPayloadFormat(tx),
+ data_size: dataSize,
+ tx: pick(tx, [
+ "format",
+ "id",
+ "signature",
+ "owner",
+ "target",
+ "reward",
+ "last_tx",
+ "tags",
+ "quantity",
+ "data_size",
+ "data_tree",
+ "data_root",
+ ]),
+ });
+
+ req.log.info(`[new-tx] queuing for import`, {
+ id: tx.id,
+ queue: importQueueUrl,
+ });
+
+ await enqueue(importQueueUrl, {
+ tx: pick(tx, [
+ "format",
+ "id",
+ "signature",
+ "owner",
+ "target",
+ "reward",
+ "last_tx",
+ "tags",
+ "quantity",
+ "data_size",
+ "data_tree",
+ "data_root",
+ ]),
+ });
+
+ res.sendStatus(200).end();
+};
+
+const getDataSize = (tx: Transaction): number => {
+ if (tx.data_size) {
+ return parseInt(tx.data_size);
+ }
+ if (tx.data == "") {
+ return 0;
+ }
+
+ try {
+ return fromB64Url(tx.data).byteLength;
+ } catch (error) {
+ console.error(error);
+ throw new BadRequest();
+ }
+};
+
+const getPayloadFormat = (tx: Transaction): DataFormatVersion => {
+ if (tx.format == 1) {
+ return 1;
+ }
+
+ if (tx.format == 2) {
+ return tx.data && typeof tx.data == "string" && tx.data.length > 0
+ ? 2.0
+ : 2.1;
+ }
+
+ return 1;
+};
diff --git a/src/gateway/routes/proxy/index.ts b/src/gateway/routes/proxy/index.ts
new file mode 100644
index 0000000..3d0d8b5
--- /dev/null
+++ b/src/gateway/routes/proxy/index.ts
@@ -0,0 +1,85 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { fetchRequest } from "../../../lib/arweave";
+import { RequestHandler } from "express";
+import { BadGateway, NotFound, HttpError } from "http-errors";
+import { streamToString } from "../../../lib/encoding";
+import { Logger } from "winston";
+
+interface CachedResponse {
+ status: number;
+ contentType?: string;
+ contentLength?: number;
+ body?: string;
+}
+
+export const handler: RequestHandler = async (req, res) => {
+ const { log, method, path } = req;
+
+ req.log.info(`[proxy] request`, { method, path });
+
+ const { status, contentType, contentLength, body } = await proxyAndCache(
+ method,
+ // Remove slash prefix for node.net/info rather than node.net//info
+ path.replace(/^\//, ""),
+ log
+ );
+
+ if (contentType) {
+ res.type(contentType);
+ }
+
+ res.status(status);
+
+ return res.send(body).end();
+};
+
+const proxyAndCache = async (
+ method: string,
+ path: string,
+ log: Logger
+): Promise<CachedResponse> => {
+ let nodeStatuses: number[] = [];
+
+ const response = await fetchRequest(path);
+
+ if (response && response.body) {
+ const { statusCode: status, headers, body } = response;
+ const streamedBody = body;
+ const contentType =
+ (headers as any)["content-type"] ||
+ (headers as any)["Content-Type"] ||
+ undefined;
+ const contentLength = Buffer.byteLength(streamedBody, "utf8");
+
+ return {
+ body: streamedBody,
+ status,
+ contentType,
+ contentLength,
+ };
+ } else {
+ throw new NotFound();
+ }
+};
+
+const exposeError = (error: HttpError): HttpError => {
+ error.expose = true;
+ return error;
+};
diff --git a/src/gateway/routes/webhooks/index.ts b/src/gateway/routes/webhooks/index.ts
new file mode 100644
index 0000000..2eb3c03
--- /dev/null
+++ b/src/gateway/routes/webhooks/index.ts
@@ -0,0 +1,125 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { TransactionHeader, Block } from "../../../lib/arweave";
+import { enqueue, getQueueUrl } from "../../../lib/queues";
+import { pick } from "lodash";
+import { ImportTx, ImportBlock } from "../../../interfaces/messages";
+import { RequestHandler } from "express";
+import { NotFound, BadRequest } from "http-errors";
+
+export const handler: RequestHandler = async (req, res, next) => {
+ if (
+ process.env.WEBHOOK_TOKEN &&
+ process.env.WEBHOOK_TOKEN != req.query.token
+ ) {
+ req.log.info(`[webhook] invalid webhook token provided ${req.query.token}`);
+ throw new NotFound();
+ }
+
+ const {
+ transaction,
+ block,
+ }: { transaction: TransactionHeader; block: Block } = req.body;
+
+ if (!transaction && !block) {
+ throw new BadRequest();
+ }
+
+ if (transaction) {
+ req.log.info(`[webhook] importing transaction header`, {
+ id: transaction.id,
+ });
+ await importTx(transaction);
+ return res.sendStatus(200).end();
+ }
+
+ if (block) {
+ req.log.info(`[webhook] importing block`, { id: block.indep_hash });
+ await importBlock({
+ block,
+ source: req.headers["x-forwarded-for"]
+ ? req.headers["x-forwarded-for"][0]
+ : "0.0.0.0",
+ });
+ return res.sendStatus(200).end();
+ }
+ req.log.info(`[webhook] no valid payload provided`);
+ throw new BadRequest();
+};
+
+const importTx = async (tx: TransactionHeader): Promise<void> => {
+ let dataSize = parseInt(tx.data_size || "0");
+ return enqueue(getQueueUrl("import-txs"), {
+ tx: pick(
+ {
+ ...(tx as any),
+ data_size: dataSize,
+ data_tree: tx.data_tree || [],
+ data_root: tx.data_root || "",
+ format: tx.format || 1,
+ },
+ [
+ "format",
+ "id",
+ "signature",
+ "owner",
+ "target",
+ "reward",
+ "last_tx",
+ "tags",
+ "quantity",
+ "data_size",
+ "data_tree",
+ "data_root",
+ ]
+ ),
+ });
+};
+
+const importBlock = async ({ source, block }: ImportBlock): Promise<void> => {
+ await enqueue(
+ getQueueUrl("import-blocks"),
+ {
+ source: source,
+ block: pick(block, [
+ "nonce",
+ "previous_block",
+ "timestamp",
+ "last_retarget",
+ "diff",
+ "height",
+ "hash",
+ "indep_hash",
+ "txs",
+ "tx_root",
+ "wallet_list",
+ "reward_addr",
+ "reward_pool",
+ "weave_size",
+ "block_size",
+ "cumulative_diff",
+ "hash_list_merkle",
+ ]),
+ },
+ {
+ messagegroup: `source:${source}`,
+ deduplicationId: `source:${source}/${Date.now()}`,
+ }
+ );
+};
diff --git a/src/interfaces/messages.ts b/src/interfaces/messages.ts
new file mode 100644
index 0000000..11502ab
--- /dev/null
+++ b/src/interfaces/messages.ts
@@ -0,0 +1,51 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { TransactionHeader, Block, ChunkHeader } from "../lib/arweave";
+
+export type DataFormatVersion = 1.0 | 2.0 | 2.1;
+export interface DispatchTx {
+ tx: TransactionHeader;
+ data_size: number;
+ data_format: DataFormatVersion;
+}
+
+export interface ImportChunk {
+ header: ChunkHeader;
+ size: number;
+}
+
+export interface ExportChunk {
+ header: ChunkHeader;
+ size: number;
+}
+
+export interface ImportTx {
+ id?: string;
+ tx?: TransactionHeader;
+}
+
+export interface ImportBlock {
+ source: string;
+ block: Block;
+}
+
+export interface ImportBundle {
+ id?: string;
+ header?: TransactionHeader;
+}
diff --git a/src/jobs/dispatch-txs.ts b/src/jobs/dispatch-txs.ts
new file mode 100644
index 0000000..9db4ee8
--- /dev/null
+++ b/src/jobs/dispatch-txs.ts
@@ -0,0 +1,62 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { getQueueUrl, createQueueHandler } from "../lib/queues";
+import { publish } from "../lib/pub-sub";
+import { get } from "../lib/buckets";
+import { broadcastTx } from "../lib/broadcast";
+import { ImportTx, DispatchTx } from "../interfaces/messages";
+import { toB64url } from "../lib/encoding";
+import { Transaction } from "../lib/arweave";
+
+export const handler = createQueueHandler<DispatchTx>(
+ getQueueUrl("dispatch-txs"),
+ async (message) => {
+ console.log(message);
+ const { tx, data_size: dataSize, data_format } = message;
+
+ console.log(`data_size: ${dataSize}, tx: ${tx.id}`);
+
+ console.log(`broadcasting: ${tx.id}`);
+
+ const fullTx: Transaction = {
+ ...tx,
+ data:
+ (!data_format || data_format < 2.1) && dataSize > 0
+ ? await getEncodedData(tx.id)
+ : "",
+ };
+
+ await broadcastTx(fullTx);
+
+ console.log(`publishing: ${tx.id}`);
+
+ await publish(message);
+ }
+);
+
+const getEncodedData = async (txid: string): Promise<string> => {
+ try {
+ const data = await get("tx-data", `tx/${txid}`);
+ return toB64url(data.Body as Buffer);
+ } catch (error) {
+ return "";
+ }
+};
+
+export default handler;
diff --git a/src/jobs/export-chunks.ts b/src/jobs/export-chunks.ts
new file mode 100644
index 0000000..f06dbac
--- /dev/null
+++ b/src/jobs/export-chunks.ts
@@ -0,0 +1,75 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { getQueueUrl, createQueueHandler } from "../lib/queues";
+import { get } from "../lib/buckets";
+import { broadcastChunk } from "../lib/broadcast";
+import { ExportChunk } from "../interfaces/messages";
+import { toB64url } from "../lib/encoding";
+import { completedExport } from "../database/chunk-db";
+import {
+ getConnectionPool,
+ releaseConnectionPool,
+ initConnectionPool,
+} from "../database/postgres";
+import { wait } from "../lib/helpers";
+import log from "../lib/log";
+
+export const handler = createQueueHandler<ExportChunk>(
+ getQueueUrl("export-chunks"),
+ async (message) => {
+ const { header } = message;
+
+ log.info(`[export-chunks] exporting chunk`, {
+ data_root: header.data_root,
+ offset: header.offset,
+ });
+
+ const fullChunk = {
+ ...header,
+ chunk: toB64url(
+ (await get("tx-data", `chunks/${header.data_root}/${header.offset}`))
+ .Body as Buffer
+ ),
+ };
+
+ await broadcastChunk(fullChunk);
+
+ const pool = getConnectionPool("write");
+
+ await completedExport(pool, {
+ data_size: header.data_size,
+ data_root: header.data_root,
+ offset: header.offset,
+ });
+ },
+ {
+ before: async () => {
+ log.info(`[export-chunks] handler:before database connection init`);
+ initConnectionPool("write");
+ await wait(100);
+ },
+ after: async () => {
+ log.info(`[export-chunks] handler:after database connection cleanup`);
+ await releaseConnectionPool("write");
+ await wait(100);
+ },
+ }
+);
+
+export default handler;
diff --git a/src/jobs/import-bundles.ts b/src/jobs/import-bundles.ts
new file mode 100644
index 0000000..3e1a10d
--- /dev/null
+++ b/src/jobs/import-bundles.ts
@@ -0,0 +1,477 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { Knex } from "knex";
+import {
+ TransactionHeader,
+ fetchTransactionData,
+ DataBundleWrapper,
+ getTagValue,
+ DataBundleItem,
+ fetchRequest,
+ fetchTransactionHeader,
+} from "../lib/arweave";
+import { getData, streamCachedChunks } from "../data/transactions";
+import log from "../lib/log";
+import {
+ saveBundleStatus,
+ getBundleImport,
+} from "../database/bundle-import-db";
+import { createQueueHandler, getQueueUrl, enqueue } from "../lib/queues";
+import { ImportBundle } from "../interfaces/messages";
+import {
+ getConnectionPool,
+ initConnectionPool,
+ releaseConnectionPool,
+} from "../database/postgres";
+import { streamToJson, fromB64Url, streamToBuffer } from "../lib/encoding";
+import { sequentialBatch } from "../lib/helpers";
+import { getTx, saveBundleDataItems } from "../database/transaction-db";
+import { buckets, put } from "../lib/buckets";
+import verifyAndIndexStream from "arbundles/stream";
+import { Bundle, DataItem } from "arbundles";
+import { base64 } from "rfc4648";
+import base64url from "base64url";
+import { head } from "lodash";
+import * as fs from "fs";
+import * as path from "path";
+import * as os from "os";
+import { s3 } from "../lib/buckets";
+import { PassThrough } from "stream";
+
+type PartialBy = Omit & Partial>;
+
+const MAX_RETRY = 9;
+const RETRY_BACKOFF_SECONDS = 60;
+const MAX_BACKOFF_SECONDS = 150;
+
+export const isTxAns104 = (tx: TransactionHeader): boolean => {
+ return (
+ // getTagValue(tx.tags, "content-type") == "application/json" &&
+ getTagValue(tx.tags, "bundle-format") == "binary" &&
+ getTagValue(tx.tags, "bundle-version") == "2.0.0"
+ );
+};
+
+async function calculateBackoffWaitTime(
+ retryNum: number,
+ numBytes: number,
+ reward: number
+): Promise {
+ log.info("[import-bundles] calculating backoff value");
+ let basePrice: number = -1;
+ try {
+ const response = await fetchRequest(`price/${numBytes}`);
+ basePrice =
+ response && response.body
+ ? parseInt(await streamToJson(response.body))
+ : 0;
+ } catch (error) {
+ throw new Error(
+ "[import-bundles] getting basePrice from nodes failed, " + error
+ );
+ }
+ if (basePrice === -1) {
+ // most likely redundant, but better safe than sorry
+ throw new Error(
+ `[import-bundles] something went wrong parsing basePrice from /price/${numBytes}`
+ );
+ }
+ const rewardMultiplier = reward / basePrice;
+
+ const waitMultiplier =
+ retryNum * RETRY_BACKOFF_SECONDS * (1 / rewardMultiplier);
+
+ const returnedBackoffTime = Math.max(MAX_BACKOFF_SECONDS, waitMultiplier);
+ log.info(
+ `[import-bundles] setting retry backoff time to ${returnedBackoffTime}`,
+ { rewardMultiplier, waitMultiplier, basePrice, retryNum, reward }
+ );
+ return returnedBackoffTime;
+}
+
+export const handler = createQueueHandler(
+ getQueueUrl("import-bundles"),
+ async ({ header, id }) => {
+ log.info("[import-bundles] importing tx bundle", {
+ bundle: {
+ id,
+ tx: header?.id,
+ },
+ });
+
+ const pool = getConnectionPool("write");
+
+ const tx = header ? header : await fetchTransactionHeader(id || "");
+
+ const txDataSize = parseInt(tx["data_size"]);
+
+ const { attempts = 0 } = await getBundleImport(pool, tx.id);
+
+ log.info("[import-bundles] importing tx bundle status", {
+ bundle: {
+ id: tx.id,
+ attempts,
+ },
+ });
+
+ const incrementedAttempts = attempts + 1;
+
+ let stream;
+
+ try {
+ if (tx && typeof tx.id === "string" && tx.id.length > 0) {
+ const maybeStream = await getData(tx.id || "", { log });
+ stream = maybeStream ? maybeStream.stream : undefined;
+ }
+ } catch (error) {
+ log.error("[import-bundles] error getting stream via getData", error);
+ }
+
+ const Bucket = buckets["tx-data"];
+
+ if (stream) {
+ const is104 = isTxAns104(tx);
+ const headTxObj = await s3
+ .headObject({
+ Key: `tx/${tx.id}`,
+ Bucket,
+ })
+ .promise()
+ .then((r) => r.ContentLength === txDataSize)
+ .catch((_) => false);
+
+ if (!headTxObj) {
+ let isSuccessful = false;
+ try {
+ await s3
+ .upload({
+ Key: `tx/${tx.id}`,
+ Bucket,
+ Body: stream,
+ })
+ .promise();
+ isSuccessful = true;
+ } catch (error) {
+ log.error(
+ "[import-bundles] error streaming from nodes direct to s3 bucket",
+ error
+ );
+ }
+ if (!isSuccessful) {
+ log.error(
+ "Data not available, neither in cache nor nodes, requeuing"
+ );
+ await retry(pool, tx, {
+ attempts: incrementedAttempts,
+ error: "Data not yet available",
+ });
+
+ throw new Error("Data not yet available, neither in cache nor nodes");
+ }
+
+ stream = s3
+ .getObject({ Key: `tx/${tx.id}`, Bucket })
+ .createReadStream();
+ }
+
+ log.info(`[import-bundles] is ANS-104: ${is104}`);
+ log.info("[import-bundles] streaming to buffer/json...");
+
+ const bundleImport = await getBundleImport(pool, tx.id);
+
+ let data: { items: DataBundleItem[] } | undefined;
+
+ if (
+ bundleImport.bundle_meta &&
+ typeof bundleImport.bundle_meta === "string" &&
+ bundleImport.bundle_meta.length > 0
+ ) {
+ data = JSON.parse(bundleImport.bundle_meta);
+ } else {
+ try {
+ if (is104) {
+ data = { items: (await verifyAndIndexStream(stream)) as any };
+ }
+ } catch (error) {
+ log.error(
+ `[import-bundles] validation call error in ${tx.id}\n\t`,
+ error
+ );
+ await invalid(pool, tx.id, {
+ attempts: incrementedAttempts,
+ error: (error as any).message,
+ });
+ return;
+ }
+
+ try {
+ if (!is104) {
+ data = (await streamToJson(stream)) as any;
+ }
+
+ data = typeof data !== "undefined" ? data : undefined;
+
+ log.info("[import-bundles] finished streaming to buffer/json");
+
+ if (!is104) validateAns102(data as any);
+
+ if (data) {
+ await updateBundle(pool, tx.id, data.items);
+ } else {
+ throw new Error("Data is null");
+ }
+ } catch (error: any) {
+ log.error("error", { id: tx.id, error });
+ await invalid(pool, tx.id, {
+ attempts: incrementedAttempts,
+ error: error.message,
+ });
+ }
+ }
+
+ log.info(
+ `[import-bundles] bundle: ${tx.id} is valid, moving on to indexing...`
+ );
+
+ // @ts-ignore
+ if (!data) throw new Error("Data is null");
+
+ // If data is ANS-104
+ if (is104) {
+ data.items.forEach(
+ (i) =>
+ (i.tags = i.tags.map((tag) => ({
+ name: base64url(tag.name),
+ value: base64url(tag.value),
+ })))
+ );
+ }
+
+ try {
+ await Promise.all([
+ sequentialBatch(
+ data.items,
+ 200,
+ async (items: PartialBy[]) => {
+ await Promise.all(
+ items.map(async (item) => {
+ const contentType = getTagValue(item.tags, "content-type");
+ console.log(
+ `bytes=${item.dataOffset}-${
+ item.dataOffset + item.dataSize - 1
+ }`
+ );
+ const bundleData = !is104
+ ? item && fromB64Url(item.data || "")
+ : // TODO: Get data by offset (item.offset)
+ s3
+ .getObject({
+ Key: `tx/${tx.id}`,
+ Bucket,
+ Range: `bytes=${item.dataOffset}-${
+ item.dataOffset + item.dataSize - 1
+ }`,
+ })
+ .createReadStream();
+
+ log.info(`[import-bundles] putting data item: ${item.id}`);
+
+ await put("tx-data", `tx/${item.id}`, bundleData, {
+ contentType: contentType || "application/octet-stream",
+ });
+ })
+ );
+ }
+ ),
+ sequentialBatch(data.items, 100, async (items: DataBundleItem[]) => {
+ await saveBundleDataItems(pool, tx.id, items);
+ }),
+ ]);
+ await complete(pool, tx.id, { attempts: incrementedAttempts });
+ } catch (error: any) {
+ log.error("error", error);
+ await retry(pool, tx, {
+ attempts: incrementedAttempts,
+ error: error.message + error.stack || "",
+ });
+ }
+ } else {
+ log.error("Data not available, requeuing");
+ await retry(pool, tx, {
+ attempts: incrementedAttempts,
+ error: "Data not yet available",
+ });
+ }
+ },
+ {
+ before: async () => {
+ log.info(`[import-bundles] handler:before database connection init`);
+ initConnectionPool("read");
+ initConnectionPool("write");
+ },
+ after: async () => {
+ log.info(`[import-bundles] handler:after database connection cleanup`);
+ await releaseConnectionPool("read");
+ await releaseConnectionPool("write");
+ },
+ }
+);
+
+const retry = async (
+ connection: Knex,
+ header: TransactionHeader,
+ { attempts, error }: { attempts: number; error?: any }
+) => {
+ if (attempts && attempts >= MAX_RETRY + 1) {
+ return saveBundleStatus(connection, [
+ {
+ id: header.id,
+ status: "error",
+ attempts,
+ error,
+ },
+ ]);
+ }
+
+ let numBytes: number = -1;
+ let reward: number = -1;
+
+ try {
+ numBytes =
+ typeof header.data_size === "number"
+ ? header.data_size
+ : parseInt(header.data_size);
+ } catch (error) {
+ log.error(
+ `[import-bundles] unable to get data_size out of ${header.data_size}`,
+ error
+ );
+ }
+
+ try {
+ reward =
+ typeof header.reward === "number"
+ ? header.reward
+ : parseInt(header.reward);
+ } catch (error) {
+ log.error(
+ `[import-bundles] unable to get reward out of ${header.reward}`,
+ error
+ );
+ }
+
+ if (numBytes === -1 || reward === -1) {
+ throw new Error(
+ `[import-bundles] something went wrong parsing the tx-headers (there should be an error message above)`
+ );
+ }
+
+ return calculateBackoffWaitTime(attempts, numBytes, reward).then((delay) => {
+ return Promise.all([
+ saveBundleStatus(connection, [
+ {
+ id: header.id,
+ status: "pending",
+ attempts,
+ error: error || null,
+ },
+ ]),
+ enqueue(
+ getQueueUrl("import-bundles"),
+ { header },
+ { delaySeconds: delay }
+ ),
+ ]);
+ });
+};
+
+const complete = async (
+ connection: Knex,
+ id: string,
+ { attempts }: { attempts: number }
+) => {
+ // TODO: Add column
+ await saveBundleStatus(connection, [
+ {
+ id,
+ status: "complete",
+ attempts,
+ error: null,
+ },
+ ]);
+};
+
+const invalid = async (
+ connection: Knex,
+ id: string,
+ { attempts, error }: { attempts: number; error?: string }
+) => {
+ await saveBundleStatus(connection, [
+ {
+ id,
+ status: "invalid",
+ attempts,
+ error: error || null,
+ },
+ ]);
+};
+
+const updateBundle = async (
+ connection: Knex,
+ id: string,
+ items: any[]
+) => {
+ await saveBundleStatus(connection, [
+ {
+ id,
+ status: "invalid",
+ bundle_meta: JSON.stringify(items),
+ },
+ ]);
+};
+
+const validateAns102 = (bundle: { items: DataBundleItem[] }) => {
+ bundle.items.forEach((item) => {
+ const fields = Object.keys(item);
+ const requiredFields = ["id", "owner", "signature", "data"];
+ requiredFields.forEach((requiredField) => {
+ if (!fields.includes(requiredField)) {
+ throw new Error(
+ `Invalid bundle detected, missing required field: ${requiredField}`
+ );
+ }
+ });
+ });
+};
+
+const validateAns104 = async (bundle: Bundle) => {
+ if (!(await bundle.verify())) {
+ throw new Error("Invalid ANS-104 bundle detected");
+ }
+};
+
+async function collectAsyncGenerator(g: AsyncGenerator): Promise {
+ const arr: any[] = [];
+ for await (const item of g) {
+ arr.push(item);
+ }
+ return arr;
+}
+
+export default handler;
diff --git a/src/jobs/import-chunks.ts b/src/jobs/import-chunks.ts
new file mode 100644
index 0000000..26c7eab
--- /dev/null
+++ b/src/jobs/import-chunks.ts
@@ -0,0 +1,57 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { getQueueUrl, createQueueHandler } from "../lib/queues";
+import { ImportChunk } from "../interfaces/messages";
+import { saveChunk } from "../database/chunk-db";
+import {
+ getConnectionPool,
+ initConnectionPool,
+ releaseConnectionPool,
+} from "../database/postgres";
+import log from "../lib/log";
+import { wait } from "../lib/helpers";
+
+export const handler = createQueueHandler(
+ getQueueUrl("import-chunks"),
+ async ({ header, size }) => {
+ const pool = getConnectionPool("write");
+ log.info(`[import-chunks] importing chunk`, {
+ root: header.data_root,
+ size: size,
+ });
+ await saveChunk(pool, {
+ ...header,
+ chunk_size: size,
+ });
+ },
+ {
+ before: async () => {
+ log.info(`[import-chunks] handler:before database connection init`);
+ initConnectionPool("write");
+ await wait(500);
+ },
+ after: async () => {
+ log.info(`[import-chunks] handler:after database connection cleanup`);
+ await releaseConnectionPool("write");
+ await wait(500);
+ },
+ }
+);
+
+export default handler;
diff --git a/src/jobs/import-txs.ts b/src/jobs/import-txs.ts
new file mode 100644
index 0000000..9391783
--- /dev/null
+++ b/src/jobs/import-txs.ts
@@ -0,0 +1,102 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { Knex } from "knex";
+import {
+ getConnectionPool,
+ initConnectionPool,
+ releaseConnectionPool,
+} from "../database/postgres";
+import { getTx, saveTx } from "../database/transaction-db";
+import { ImportTx, ImportBundle } from "../interfaces/messages";
+import {
+ fetchTransactionHeader,
+ getTagValue,
+ TransactionHeader,
+} from "../lib/arweave";
+import { isTxAns102, isTxAns104, wait } from "../lib/helpers";
+import log from "../lib/log";
+import { createQueueHandler, getQueueUrl, enqueue } from "../lib/queues";
+import { MIN_BINARY_SIZE } from "arbundles";
+
+export const handler = createQueueHandler(
+ getQueueUrl("import-txs"),
+ async ({ id, tx }) => {
+ const pool = getConnectionPool("write");
+
+ const header = tx || (await fetchTransactionHeader(id || ""));
+
+ if (tx) {
+ log.info(`[import-txs] importing tx header`, { id });
+ await save(pool, tx);
+ }
+
+ if (id) {
+ await save(pool, await fetchTransactionHeader(id));
+ }
+
+ await handleBundle(pool, header);
+ },
+ {
+ before: async () => {
+ log.info(`[import-txs] handler:before database connection init`);
+ initConnectionPool("write");
+ },
+ after: async () => {
+ log.info(`[import-txs] handler:after database connection cleanup`);
+ await releaseConnectionPool("write");
+ await wait(500);
+ },
+ }
+);
+
+const save = async (connection: Knex, tx: TransactionHeader) => {
+ log.info(`[import-txs] saving tx header`, { id: tx.id });
+
+ await saveTx(connection, tx);
+
+ log.info(`[import-txs] successfully saved tx header`, { id: tx.id });
+};
+
+const handleBundle = async (connection: Knex, tx: TransactionHeader) => {
+ const dataSize = parseInt(tx?.data_size || "0");
+ if (
+ (dataSize > 0 && isTxAns102(tx)) ||
+ (dataSize > MIN_BINARY_SIZE && isTxAns104(tx))
+ ) {
+ log.info(`[import-txs] detected data bundle tx`, { id: tx.id });
+
+ // A single bundle import will trigger the importing of all the contained txs,
+ // This process will queue all the txs and a consumer will keep polling until the
+ // bundle data is available and mined.
+ //
+ // Ideally we don't want to overdo this as it's quite spammy.
+ //
+ // For now, we'll only import bundled txs if it's the first time we've seen it,
+ // or if it's been seen before but failed to import for whatever reason.
+ //
+ // When we get tx sync download webhoooks this can be improved.
+ log.info(`[import-txs] queuing bundle for import`, { id: tx.id });
+
+ await enqueue(getQueueUrl("import-bundles"), { id: tx.id });
+
+ log.info(`[import-txs] successfully queued bundle for import`, {
+ id: tx.id,
+ });
+ }
+};
diff --git a/src/lib/arweave-path-manifest.ts b/src/lib/arweave-path-manifest.ts
new file mode 100644
index 0000000..7554677
--- /dev/null
+++ b/src/lib/arweave-path-manifest.ts
@@ -0,0 +1,49 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+export const resolveManifestPath = (
+ { index, paths }: PathManifest,
+ subpath: string | undefined
+): string | undefined => {
+ if (subpath && paths[subpath]) {
+ return paths[subpath] ? paths[subpath].id : undefined;
+ }
+
+ if (
+ !subpath &&
+ index &&
+ index.path &&
+ paths[index.path] &&
+ paths[index.path].id
+ ) {
+ return paths[index.path].id;
+ }
+};
+
+export interface PathManifest {
+ manifest: "arweave/paths";
+ version: string;
+ paths: {
+ [key: string]: {
+ id: string;
+ };
+ };
+ index?: {
+ path: string;
+ };
+}
diff --git a/src/lib/arweave.ts b/src/lib/arweave.ts
new file mode 100644
index 0000000..2c37776
--- /dev/null
+++ b/src/lib/arweave.ts
@@ -0,0 +1,382 @@
+import AbortController from "abort-controller";
+import { NotFound } from "http-errors";
+import { shuffle } from "lodash";
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import got, { GotReturn, Response } from "got/dist/source/index.js";
+import { Readable } from "stream";
+import log from "../lib/log";
+import {
+ Base64UrlEncodedString,
+ bufferToStream,
+ fromB64Url,
+ isValidUTF8,
+ streamToBuffer,
+ streamToJson,
+ WinstonString,
+} from "./encoding";
+import { arweaveNodesGet as origins } from "./hosts";
+import { DataItem } from "arbundles";
+
+interface ArFetchOptions {
+ stream?: boolean;
+ json?: boolean;
+ noStatusFilter?: boolean;
+}
+
+export type TransactionHeader = Omit;
+
+export type TransactionData = {
+ data: Buffer;
+ contentType: string | undefined;
+};
+
+export interface Transaction {
+ format: number;
+ id: string;
+ signature: string;
+ owner: string;
+ target: string;
+ data: Base64UrlEncodedString;
+ reward: WinstonString;
+ last_tx: string;
+ tags: Tag[];
+ quantity: WinstonString;
+ data_size: string;
+ data_root: string;
+ data_tree: string[];
+}
+
+export interface DataBundleWrapper {
+ items: (DataBundleItem | DataItem)[];
+}
+
+export interface DataBundleItem {
+ owner: string;
+ target: string;
+ nonce: string;
+ tags: Tag[];
+ data: Base64UrlEncodedString;
+ signature: string;
+ id: string;
+ dataOffset: number;
+ dataSize: number;
+}
+
+export interface Chunk {
+ data_root: string;
+ data_size: number;
+ data_path: string;
+ chunk: string;
+ offset: number;
+}
+
+export type ChunkHeader = Omit;
+
+export interface Tag {
+ name: Base64UrlEncodedString;
+ value: Base64UrlEncodedString;
+}
+
+export interface Block {
+ nonce: string;
+ previous_block: string;
+ timestamp: number;
+ last_retarget: number;
+ diff: string;
+ height: number;
+ hash: string;
+ indep_hash: string;
+ txs: string[];
+ tx_root: string;
+ wallet_list: string;
+ reward_addr: string;
+ reward_pool: number;
+ weave_size: number;
+ block_size: number;
+ cumulative_diff: string;
+ hash_list_merkle: string;
+}
+
+export interface DataResponse {
+ stream?: Readable;
+ contentLength: number;
+ contentType?: string;
+ tags?: Tag[];
+}
+
+export const fetchBlock = async (id: string): Promise => {
+ const endpoints = origins.map((host) => `${host}/block/hash/${id}`);
+
+ const response = await getFirstResponse(endpoints);
+
+ if (response && response.body) {
+ const block = await streamToJson(response.body as any);
+
+ //For now we don't care about the poa and it's takes up too much
+ // space when logged, so just remove it for now.
+ //@ts-ignore
+ delete block.poa;
+
+ return block as Block;
+ }
+
+ throw new Error(`Failed to fetch block: ${id}`);
+};
+
+export const fetchBlockByHeight = async (height: string): Promise => {
+ log.info(`[arweave] fetching block by height`, { height });
+
+ const endpoints = origins.map((host) => `${host}/block/height/${height}`);
+
+ const response = await getFirstResponse(endpoints);
+
+ if (response && response.body) {
+ const block = await streamToJson(response.body as any);
+
+ //For now we don't care about the poa and it's takes up too much
+ // space when logged, so just remove it for now.
+ //@ts-ignore
+ delete block.poa;
+
+ return block as Block;
+ }
+
+ throw new Error(`Failed to fetch block: ${height}`);
+};
+
+export const fetchTransactionHeader = async (
+ txid: string
+): Promise => {
+ log.info(`[arweave] fetching transaction header`, { txid });
+ const endpoints = origins.map((host) => `${host}/tx/${txid}`);
+
+ const response = await getFirstResponse(endpoints);
+
+ if (response && response.body) {
+ return (await streamToJson(response.body as any)) as TransactionHeader;
+ }
+
+ throw new NotFound();
+};
+
+const getContentLength = (headers: any): number => {
+ return parseInt(headers.get("content-length"));
+};
+
+export const fetchTransactionData = async (
+ txid: string
+): Promise => {
+ log.info(`[arweave] fetching data and tags`, { txid });
+
+ try {
+ const [tagsResponse, dataResponse] = await Promise.all([
+ fetchRequest(`tx/${txid}/tags`),
+ fetchRequest(`tx/${txid}/data`),
+ ]);
+
+ const tags =
+ tagsResponse && tagsResponse.body && tagsResponse.statusCode == 200
+ ? ((await streamToJson(tagsResponse.body)) as Tag[])
+ : [];
+
+ const contentType = getTagValue(tags, "content-type");
+
+ if (dataResponse && dataResponse.body) {
+ if (dataResponse.statusCode == 200) {
+ const content = fromB64Url(dataResponse.body.toString());
+
+ return {
+ tags,
+ contentType,
+ contentLength: content.byteLength,
+ stream: bufferToStream(content),
+ };
+ }
+
+ if (dataResponse && dataResponse.statusCode == 400) {
+ const { error } = await streamToJson<{ error: string }>(
+ dataResponse.body
+ );
+
+ if (error == "tx_data_too_big") {
+ const offsetResponse = await fetchRequest(`tx/${txid}/offset`);
+
+ if (offsetResponse && offsetResponse.body) {
+ const { size, offset } = await streamToJson(offsetResponse.body);
+ return {
+ tags,
+ contentType,
+ contentLength: parseInt(size),
+ stream: await streamChunks({
+ size: parseInt(size),
+ offset: parseInt(offset),
+ }),
+ };
+ }
+ }
+ }
+ }
+
+ log.info(`[arweave] failed to find tx`, { txid });
+ } catch (error: any) {
+ log.error(`[arweave] error finding tx`, { txid, error: error.message });
+ }
+
+ return { contentLength: 0 };
+};
+
+export const streamChunks = function ({
+ offset,
+ size,
+}: {
+ offset: number;
+ size: number;
+}): Readable {
+ let bytesReceived = 0;
+ let initialOffset = offset - size + 1;
+
+ const stream = new Readable({
+ autoDestroy: true,
+ read: async function () {
+ let next = initialOffset + bytesReceived;
+
+ try {
+ if (bytesReceived >= size) {
+ this.push(null);
+ return;
+ }
+
+ const response = await fetchRequest(`chunk/${next}`);
+
+ if (response && response.body) {
+ const data = fromB64Url((await streamToJson(response.body)).chunk);
+
+ if (stream.destroyed) {
+ return;
+ }
+
+ this.push(data);
+
+ bytesReceived += data.byteLength;
+ }
+ } catch (error: any) {
+ console.error("stream error", error);
+ stream.emit("error", error);
+ }
+ },
+ });
+
+ return stream;
+};
+
+export const fetchRequest = async (endpoint: string): Promise => {
+ const endpoints = origins.map((host) => `${host}/${endpoint}`);
+
+ return await getFirstResponse(endpoints);
+};
+
+export const streamRequest = async (
+ endpoint: string,
+ filter?: FilterFunction
+): Promise => {
+ const endpoints = origins.map(
+ // Replace any starting slashes
+ (host) => `${host}/${endpoint.replace(/^\//, "")}`
+ );
+
+ for (const url of shuffle(endpoints)) {
+ let response;
+ try {
+ response = await got.stream(url);
+ } catch (error: any) {
+ log.warn(`[arweave] request error`, {
+ message: error.message,
+ url,
+ });
+ }
+ return response;
+ }
+};
+
+export const getTagValue = (tags: Tag[], name: string): string | undefined => {
+ const contentTypeTag = tags.find((tag) => {
+ try {
+ return (
+ fromB64Url(tag.name).toString().toLowerCase() == name.toLowerCase()
+ );
+ } catch (error: any) {
+ return undefined;
+ }
+ });
+ try {
+ return contentTypeTag
+ ? fromB64Url(contentTypeTag.value).toString()
+ : undefined;
+ } catch (error) {
+ return undefined;
+ }
+};
+
+export const utf8DecodeTag = (tag: Tag): { name: string; value: string } => {
+ let name = "";
+ let value = "";
+ try {
+ const nameBuffer = fromB64Url(tag.name) || "";
+ if (isValidUTF8(nameBuffer)) {
+ name = nameBuffer.toString("utf8");
+ }
+ const valueBuffer = fromB64Url(tag.value) || "";
+ if (isValidUTF8(valueBuffer)) {
+ value = valueBuffer.toString("utf8");
+ }
+ } catch (error) {}
+ return {
+ name,
+ value,
+ };
+};
+
+type FilterFunction = (status: number) => boolean;
+
+const defaultFilter: FilterFunction = (status) =>
+ [200, 201, 202, 208].includes(status);
+
+const getFirstResponse = async (
+ urls: string[]
+): Promise => {
+ for (const url of shuffle(urls)) {
+ let response;
+ try {
+ response = await got.get(url, {
+ timeout: {
+ request: 5000,
+ },
+ });
+ } catch (error: any) {
+ log.warn(`[arweave] request error`, {
+ message: error.message,
+ url,
+ });
+ }
+ if (response && defaultFilter(response.statusCode)) {
+ return response;
+ }
+ }
+};
diff --git a/src/lib/base64url-stream.ts b/src/lib/base64url-stream.ts
new file mode 100644
index 0000000..771fe20
--- /dev/null
+++ b/src/lib/base64url-stream.ts
@@ -0,0 +1,61 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { Transform } from "stream";
+
+export class Base64DUrlecode extends Transform {
+ protected extra: string;
+ protected bytesProcessed: number;
+
+ constructor() {
+ super({ decodeStrings: false, objectMode: false });
+ this.extra = "";
+ this.bytesProcessed = 0;
+ }
+
+ _transform(chunk: Buffer, encoding: any, cb: Function) {
+ let conbinedChunk =
+ this.extra +
+ chunk
+ .toString("base64")
+ .replace(/\-/g, "+")
+ .replace(/\_/g, "/")
+ .replace(/(\r\n|\n|\r)/gm, "");
+
+ this.bytesProcessed += chunk.byteLength;
+
+ const remaining = chunk.length % 4;
+
+ this.extra = conbinedChunk.slice(chunk.length - remaining);
+
+ const buf = Buffer.from(
+ conbinedChunk.slice(0, chunk.length - remaining),
+ "base64"
+ );
+ this.push(buf);
+ cb();
+ }
+
+ _flush(cb: Function) {
+ if (this.extra.length) {
+ this.push(Buffer.from(this.extra, "base64"));
+ }
+
+ cb();
+ }
+}
diff --git a/src/lib/broadcast.ts b/src/lib/broadcast.ts
new file mode 100644
index 0000000..1894490
--- /dev/null
+++ b/src/lib/broadcast.ts
@@ -0,0 +1,202 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import fetch from "node-fetch";
+import { shuffle } from "lodash";
+import log from "./log";
+import { Chunk, Transaction } from "./arweave";
+import { arweaveNodesPut as arweaveNodes, arweaveFallbackNodes } from "./hosts";
+
+let ARWEAVE_DISPATCH_TX_CONFIRMATION_REQUIREMENT: number;
+
+try {
+ ARWEAVE_DISPATCH_TX_CONFIRMATION_REQUIREMENT = parseInt(
+ process.env.ARWEAVE_DISPATCH_TX_CONFIRMATION_REQUIREMENT as string
+ );
+} catch (error) {
+ log.info(
+ "ERROR: ARWEAVE_DISPATCH_TX_CONFIRMATION_REQUIREMENT was not defined or was not a number!"
+ );
+ ARWEAVE_DISPATCH_TX_CONFIRMATION_REQUIREMENT = 2;
+}
+
+export async function attemptFallbackNodes(tx: Transaction) {
+ log.info(`[broadcast-tx] broadcasting new tx to fallback nodes`, {
+ id: tx.id,
+ arweaveNodes,
+ });
+ for (const fallbackNode of arweaveFallbackNodes) {
+ try {
+ await fetch(`${fallbackNode}/tx`, {
+ method: "POST",
+ body: JSON.stringify(tx),
+ headers: { "Content-Type": "application/json" },
+ });
+ } catch (error: any) {
+ log.error(
+ `[broadcast-tx] attempting a fallback node "${fallbackNode}" failed`,
+ error
+ );
+ }
+ }
+}
+
+export async function broadcastTx(tx: Transaction) {
+ log.info(`[broadcast-tx] broadcasting new tx`, { id: tx.id, arweaveNodes });
+
+ let submitted = 0;
+
+ let retry = -1;
+ const retries = arweaveNodes.length * 2;
+ const shuffledArweaveNodes = shuffle(arweaveNodes);
+
+ while (
+ submitted < ARWEAVE_DISPATCH_TX_CONFIRMATION_REQUIREMENT &&
+ retries > retry
+ ) {
+ retry += 1;
+ await wait(100);
+
+ const index = retry % arweaveNodes.length;
+ let host: string = shuffledArweaveNodes[index];
+
+ log.info(`[broadcast-tx] sending`, { host, id: tx.id });
+ try {
+ const { status: existingStatus, ok: isReceived } = await fetch(
+ `${host}/tx/${tx.id}/id`
+ );
+
+ if (isReceived) {
+ log.info(`[broadcast-tx] already received`, {
+ host,
+ id: tx.id,
+ existingStatus,
+ });
+ submitted++;
+ break;
+ }
+
+ const {
+ status: postStatus,
+ ok: postOk,
+ text: bodyText,
+ } = await fetch(`${host}/tx`, {
+ method: "POST",
+ body: JSON.stringify(tx),
+ headers: { "Content-Type": "application/json" },
+ });
+
+ log.info(`[broadcast-tx] sent`, {
+ host,
+ id: tx.id,
+ postStatus,
+ });
+
+ if ([400, 410].includes(postStatus)) {
+ log.error(`[broadcast-tx] failed`, {
+ id: tx.id,
+ host,
+ error: postStatus,
+ body: await bodyText(),
+ });
+ } else {
+ submitted++;
+ }
+ } catch (e: any) {
+ log.error(`[broadcast-tx] failed`, {
+ id: tx.id,
+ host,
+ error: e.message,
+ });
+ return false;
+ }
+ }
+
+ return submitted >= ARWEAVE_DISPATCH_TX_CONFIRMATION_REQUIREMENT;
+}
+
+export async function broadcastChunk(chunk: Chunk) {
+ log.info(`[broadcast-chunk] broadcasting new chunk`, {
+ chunk: chunk.data_root,
+ });
+
+ let submitted = 0;
+
+ for (const host of arweaveNodes) {
+ await wait(50);
+
+ log.info(`[broadcast-chunk] sending`, { host, chunk: chunk.data_root });
+ try {
+ const response = await fetch(`${host}/chunk`, {
+ method: "POST",
+ body: JSON.stringify({
+ ...chunk,
+ data_size: chunk.data_size.toString(),
+ offset: chunk.offset.toString(),
+ }),
+ headers: {
+ "Content-Type": "application/json",
+ "arweave-data-root": chunk.data_root,
+ "arweave-data-size": chunk.data_size.toString(),
+ },
+ });
+
+ log.info(`[broadcast-chunk] sent`, {
+ host,
+ status: response.status,
+ });
+
+ if (!response.ok) {
+ log.warn(`[broadcast-chunk] response`, {
+ host,
+ chunk: chunk.data_root,
+ status: response.status,
+ body: await response.text(),
+ });
+ }
+
+ if ([400, 410].includes(response.status)) {
+ log.error(`[broadcast-chunk] failed or waiting for tx`, {
+ host,
+ error: response.status,
+ chunk: chunk.data_root,
+ });
+ } else {
+ submitted++;
+ }
+ } catch (error: any) {
+ log.warn(`[broadcast-chunk] failed to broadcast: ${host}`, {
+ error: error.message,
+ chunk: chunk.data_root,
+ });
+ }
+ }
+
+ if (submitted < 2) {
+ throw new Error(`Failed to successfully broadcast to 2 nodes`);
+ return false;
+ } else {
+ log.log(`[broadcast-chunk] complete`, {
+ submitted,
+ });
+ return true;
+ }
+}
+
+const wait = async (timeout: number) =>
+ new Promise((resolve) => setTimeout(resolve, timeout));
diff --git a/src/lib/buckets.ts b/src/lib/buckets.ts
new file mode 100644
index 0000000..09c2ef9
--- /dev/null
+++ b/src/lib/buckets.ts
@@ -0,0 +1,188 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { S3 } from "aws-sdk";
+import log from "../lib/log";
+import { Readable, PassThrough } from "stream";
+import { ManagedUpload, Metadata } from "aws-sdk/clients/s3";
+import { Tag } from "./arweave";
+import fetch from "node-fetch";
+
// Maps each logical bucket type to its concrete S3 bucket name, read from the
// environment at module load. NOTE(review): the non-null assertion assumes the
// variable is set — an unset variable becomes `undefined` at runtime.
export const buckets: { [key in BucketType]: string } = {
  "tx-data": process.env.ARWEAVE_S3_TX_DATA_BUCKET!,
};

// Only transaction data is stored in S3 at the moment.
type BucketType = "tx-data";

export type BucketObject = S3.GetObjectOutput;

// Shared S3 client: 30s response timeout, 5s connect timeout.
// `logger: console` makes the SDK log request details to stdout.
export const s3 = new S3({
  httpOptions: { timeout: 30000, connectTimeout: 5000 },
  logger: console,
});
+
+export const put = async (
+ bucketType: BucketType,
+ key: string,
+ body: Buffer | Readable,
+ { contentType, tags }: { contentType?: string; tags?: Tag[] }
+) => {
+ const bucket = buckets[bucketType];
+
+ log.info(`[s3] uploading to bucket`, {
+ bucket,
+ key,
+ type: contentType,
+ });
+
+ await s3
+ .upload({
+ Key: key,
+ Bucket: bucket,
+ Body: body,
+ ContentType: contentType,
+ })
+ .promise();
+};
+
+export const putStream = async (
+ bucketType: BucketType,
+ key: string,
+ {
+ contentType,
+ contentLength,
+ tags,
+ }: { contentType?: string; contentLength?: number; tags?: Tag[] }
+): Promise<{ upload: ManagedUpload; stream: PassThrough }> => {
+ const bucket = buckets[bucketType];
+
+ log.info(`[s3] uploading to bucket`, {
+ bucket,
+ key,
+ type: contentType,
+ });
+
+ const cacheStream = new PassThrough({
+ objectMode: false,
+ autoDestroy: true,
+ highWaterMark: 512 * 1024,
+ writableHighWaterMark: 512 * 1024,
+ });
+
+ const upload = await s3.upload(
+ {
+ Key: key,
+ Bucket: bucket,
+ Body: cacheStream,
+ ContentType: contentType,
+ ContentLength: contentLength,
+ },
+ { partSize: 10 * 1024 * 1024, queueSize: 2 },
+ () => undefined
+ );
+
+ return { stream: cacheStream, upload };
+};
+
+export const get = async (
+ bucketType: BucketType,
+ key: string
+): Promise => {
+ const bucket = buckets[bucketType];
+ log.info(`[s3] getting data from bucket`, { bucket, key });
+ return s3
+ .getObject({
+ Key: key,
+ Bucket: bucket,
+ })
+ .promise();
+};
+
+export const getStream = async (
+ bucketType: BucketType,
+ key: string
+): Promise<
+ | {
+ contentType?: string;
+ contentLength: number;
+ stream: Readable;
+ tags?: Tag[];
+ }
+ | undefined
+> => {
+ log.info(`[s3] getting stream from bucket`, { key });
+
+ const s3Response: any = await s3
+ .headObject({
+ Key: key,
+ Bucket: buckets[bucketType],
+ })
+ .promise();
+
+ const { ContentType, ContentLength } = s3Response;
+
+ return {
+ contentLength: ContentLength || 0,
+ contentType: ContentType,
+ tags: [],
+ stream: s3
+ .getObject({
+ Key: key,
+ Bucket: buckets[bucketType],
+ })
+ .createReadStream(),
+ };
+};
+
+export const objectHeader = async (
+ bucketType: BucketType,
+ key: string
+): Promise<{
+ contentType?: string;
+ contentLength: number;
+ tags?: Tag[];
+}> => {
+ const bucket = buckets[bucketType];
+
+ const { ContentType, ContentLength, Metadata } = await s3
+ .headObject({
+ Key: key,
+ Bucket: bucket,
+ })
+ .promise();
+
+ return {
+ contentLength: ContentLength || 0,
+ contentType: ContentType,
+ tags: parseMetadataTags(Metadata || {}),
+ };
+};
+
+const parseMetadataTags = (metadata: Metadata): Tag[] => {
+ const rawTags = metadata["x-arweave-tags"];
+
+ if (rawTags) {
+ try {
+ return JSON.parse(rawTags) as Tag[];
+ } catch (error) {
+ log.info(`[s3] error parsing tags`, { metadata, rawTags });
+ }
+ }
+
+ return [];
+};
diff --git a/src/lib/encoding.ts b/src/lib/encoding.ts
new file mode 100644
index 0000000..53b77fe
--- /dev/null
+++ b/src/lib/encoding.ts
@@ -0,0 +1,134 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { base32 } from "rfc4648";
+import { createHash } from "crypto";
+import { Readable, PassThrough } from "stream";
+import { Base64DUrlecode } from "./base64url-stream";
+import Ar from "arweave/node/ar";
+
// Shared Ar helper instance used for winston <-> AR conversions below.
const ar = new Ar();

// Semantic aliases over `string` to document intent at call sites;
// the compiler does not enforce them.
export type Base64EncodedString = string;
export type Base64UrlEncodedString = string;
export type WinstonString = string;
export type ArString = string;
export type ISO8601DateTimeString = string;
+
+export const sha256 = (buffer: Buffer): Buffer => {
+ return createHash("sha256").update(buffer).digest();
+};
+
+export function toB64url(buffer: Buffer): Base64UrlEncodedString {
+ return buffer
+ .toString("base64")
+ .replace(/\+/g, "-")
+ .replace(/\//g, "_")
+ .replace(/\=/g, "");
+}
+
+export function fromB64Url(input: Base64UrlEncodedString): Buffer {
+ const paddingLength = input.length % 4 == 0 ? 0 : 4 - (input.length % 4);
+
+ const base64 = input
+ .replace(/\-/g, "+")
+ .replace(/\_/g, "/")
+ .concat("=".repeat(paddingLength));
+
+ return Buffer.from(base64, "base64");
+}
+
+export function fromB32(input: string): Buffer {
+ return Buffer.from(
+ base32.parse(input, {
+ loose: true,
+ })
+ );
+}
+
+export function toB32(input: Buffer): string {
+ return base32.stringify(input, { pad: false }).toLowerCase();
+}
+
+export function sha256B64Url(input: Buffer): string {
+ return toB64url(createHash("sha256").update(input).digest());
+}
+
+export const streamToBuffer = async (stream: Readable): Promise => {
+ let buffer = Buffer.alloc(0);
+ return new Promise((resolve, reject) => {
+ stream.on("data", (chunk: Buffer) => {
+ buffer = Buffer.concat([buffer, chunk]);
+ });
+
+ stream.on("end", () => {
+ resolve(buffer);
+ });
+ });
+};
+
+export const streamToString = async (stream: Readable): Promise => {
+ return (await streamToBuffer(stream)).toString("utf-8");
+};
+
+export const bufferToJson = (input: Buffer): T => {
+ return JSON.parse(input.toString("utf8"));
+};
+
+export const jsonToBuffer = (input: object): Buffer => {
+ return Buffer.from(JSON.stringify(input));
+};
+
+export const streamToJson = async (
+ input: Readable | string
+): Promise => {
+ return typeof input === "string"
+ ? JSON.parse(input)
+ : bufferToJson(await streamToBuffer(input));
+};
+
+export const isValidUTF8 = function (buffer: Buffer) {
+ return Buffer.compare(Buffer.from(buffer.toString(), "utf8"), buffer) === 0;
+};
+
+export const streamDecoderb64url = (readable: Readable): Readable => {
+ const outputStream = new PassThrough({ objectMode: false });
+
+ const decoder = new Base64DUrlecode();
+
+ readable.pipe(decoder).pipe(outputStream);
+
+ return outputStream;
+};
+export const bufferToStream = (buffer: Buffer) => {
+ return new Readable({
+ objectMode: false,
+ read() {
+ this.push(buffer);
+ this.push(null);
+ },
+ });
+};
+
// Convert a winston amount string (smallest AR unit) to an AR amount string.
// Thin wrapper over the arweave SDK's Ar helper.
export const winstonToAr = (amount: string) => {
  return ar.winstonToAr(amount);
};
+
// Convert an AR amount string to a winston amount string (smallest AR unit).
// Thin wrapper over the arweave SDK's Ar helper.
export const arToWinston = (amount: string) => {
  return ar.arToWinston(amount);
};
diff --git a/src/lib/helpers.ts b/src/lib/helpers.ts
new file mode 100644
index 0000000..d9129cc
--- /dev/null
+++ b/src/lib/helpers.ts
@@ -0,0 +1,62 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { chunk } from "lodash";
+import { getTagValue, TransactionHeader } from "../lib/arweave";
+
+/**
+ * Split a large array into batches and process each batch sequentially,
+ * using an awaited async function.
+ * @param items
+ * @param batchSize
+ * @param func
+ */
+export const sequentialBatch = async (
+ items: any[],
+ batchSize = 10,
+ func: Function
+) => {
+ const batches = chunk(items, batchSize);
+
+ for (let batchIndex = 0; batchIndex < batches.length; batchIndex++) {
+ const batch = batches[batchIndex];
+ await func(batch);
+ }
+};
+
+export const wait = async (ms: number) => {
+ return new Promise((resolve) => {
+ setTimeout(resolve, ms);
+ });
+};
+
+export const isTxAns102 = (tx: TransactionHeader): boolean => {
+ return (
+ // getTagValue(tx.tags, "content-type") == "application/json" &&
+ getTagValue(tx.tags, "bundle-format") == "json" &&
+ getTagValue(tx.tags, "bundle-version") == "1.0.0"
+ );
+};
+
+export const isTxAns104 = (tx: TransactionHeader): boolean => {
+ return (
+ // getTagValue(tx.tags, "content-type") == "application/json" &&
+ getTagValue(tx.tags, "bundle-format") == "binary" &&
+ getTagValue(tx.tags, "bundle-version") == "2.0.0"
+ );
+};
diff --git a/src/lib/hosts.ts b/src/lib/hosts.ts
new file mode 100644
index 0000000..d8255e1
--- /dev/null
+++ b/src/lib/hosts.ts
@@ -0,0 +1,29 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+export const arweaveNodesGet: string[] = JSON.parse(
+ process.env.ARWEAVE_NODES_GET || process.env.ARWEAVE_NODES || "[]"
+) as string[];
+
+export const arweaveNodesPut: string[] = JSON.parse(
+ process.env.ARWEAVE_NODES_PUT || process.env.ARWEAVE_NODES || "[]"
+) as string[];
+
+export const arweaveFallbackNodes: string[] = JSON.parse(
+ process.env.ARWEAVE_NODES_FALLBACK || "[]"
+) as string[];
diff --git a/src/lib/log.ts b/src/lib/log.ts
new file mode 100644
index 0000000..ecec3ed
--- /dev/null
+++ b/src/lib/log.ts
@@ -0,0 +1,26 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { createLogger, transports, format } from "winston";
+
// Application-wide winston logger: "info" level and above, simple text
// format, written to the console transport.
export default createLogger({
  level: "info",
  transports: new transports.Console({
    format: format.simple(),
  }),
});
diff --git a/src/lib/pub-sub.ts b/src/lib/pub-sub.ts
new file mode 100644
index 0000000..1653bac
--- /dev/null
+++ b/src/lib/pub-sub.ts
@@ -0,0 +1,31 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+*/
+
+import { SNS } from "aws-sdk";
+
+const topicArn = process.env.ARWEAVE_SNS_EVENTS_ARN!;
+const sns = new SNS();
+
+export const publish = async (message: T) => {
+ await sns
+ .publish({
+ TopicArn: topicArn,
+ Message: JSON.stringify(message),
+ })
+ .promise();
+};
diff --git a/src/lib/queues.ts b/src/lib/queues.ts
new file mode 100644
index 0000000..d76ff64
--- /dev/null
+++ b/src/lib/queues.ts
@@ -0,0 +1,207 @@
+/**
+ * Arweave Gateway
+ * Copyright (C) 2022 Permanent Data Solutions, Inc
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see .
+*/
+
+import { SQS } from "aws-sdk";
+import { SQSEvent, SQSHandler, SQSRecord } from "aws-lambda";
+import log from "../lib/log";
+
// Logical queue names used throughout the gateway.
type QueueType =
  | "dispatch-txs"
  | "import-txs"
  | "import-blocks"
  | "import-bundles"
  | "import-chunks"
  | "export-chunks";
// Semantic aliases over primitives to document intent at call sites.
type SQSQueueUrl = string;
type MessageGroup = string;
type MessageDeduplicationId = string;
type DelaySeconds = number;
// Extra context handed to queue handlers (the raw SQS record, when present).
interface HandlerContext {
  sqsMessage?: SQSRecord;
}

// Maps each logical queue to its SQS URL, read from the environment at
// module load. NOTE(review): the non-null assertions assume every variable
// is set; an unset variable becomes `undefined` at runtime.
const queues: { [key in QueueType]: SQSQueueUrl } = {
  "dispatch-txs": process.env.ARWEAVE_SQS_DISPATCH_TXS_URL!,
  "import-chunks": process.env.ARWEAVE_SQS_IMPORT_CHUNKS_URL!,
  "export-chunks": process.env.ARWEAVE_SQS_EXPORT_CHUNKS_URL!,
  "import-txs": process.env.ARWEAVE_SQS_IMPORT_TXS_URL!,
  "import-blocks": process.env.ARWEAVE_SQS_IMPORT_BLOCKS_URL!,
  "import-bundles": process.env.ARWEAVE_SQS_IMPORT_BUNDLES_URL!,
};

// Shared SQS client: up to 3 retries, 5s response/connect timeouts.
const sqs = new SQS({
  maxRetries: 3,
  httpOptions: { timeout: 5000, connectTimeout: 5000 },
});

// Resolve the SQS URL for a logical queue type.
export const getQueueUrl = (type: QueueType): SQSQueueUrl => {
  return queues[type];
};
+
+function* chunks(arr: any[], n: number) {
+ for (let i = 0; i < arr.length; i += n) {
+ yield arr.slice(i, i + n);
+ }
+}
+
+export const enqueue = async (
+ queueUrl: SQSQueueUrl,
+ message: MessageType,
+ options?:
+ | {
+ messagegroup?: MessageGroup;
+ deduplicationId?: MessageDeduplicationId;
+ delaySeconds?: DelaySeconds;
+ }
+ | undefined
+) => {
+ if (!queueUrl) {
+ throw new Error(`Queue URL undefined`);
+ }
+
+ await sqs
+ .sendMessage({
+ QueueUrl: queueUrl,
+ MessageBody: JSON.stringify(message),
+ MessageGroupId: options && options.messagegroup,
+ MessageDeduplicationId: options && options.deduplicationId,
+ DelaySeconds: options && options.delaySeconds,
+ })
+ .promise();
+};
+
+export const enqueueBatch = async (
+ queueUrl: SQSQueueUrl,
+ messages: {
+ id: string;
+ message: MessageType;
+ messagegroup?: MessageGroup;
+ deduplicationId?: MessageDeduplicationId;
+ }[]
+) => {
+ for (const messageChnk of chunks(messages, 10)) {
+ await sqs
+ .sendMessageBatch({
+ QueueUrl: queueUrl,
+ Entries: messageChnk.map((message) => {
+ return {
+ Id: message.id,
+ MessageBody: JSON.stringify(message),
+ MessageGroupId: message.messagegroup,
+ MessageDeduplicationId: message.deduplicationId,
+ };
+ }),
+ })
+ .promise();
+ }
+};
+
+const deleteMessages = async (
+ queueUrl: SQSQueueUrl,
+ receipts: { Id: string; ReceiptHandle: string }[]
+) => {
+ if (!receipts.length) {
+ return;
+ }
+ for (const receiptChnk of chunks(receipts, 10)) {
+ await sqs
+ .deleteMessageBatch({
+ QueueUrl: queueUrl,
+ Entries: receiptChnk,
+ })
+ .promise();
+ }
+};
+
+export const createQueueHandler = (
+ queueUrl: SQSQueueUrl,
+ handler: (message: MessageType, sqsMessage: SQSRecord) => Promise,
+ hooks?: {
+ before?: () => Promise