diff --git a/.circleci/config.yml b/.circleci/config.yml index 40c7baceb..01a31a151 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,11 +3,12 @@ version: 2.1 orbs: docker-buildx: sensu/docker-buildx@1.1.1 aws-ecr: circleci/aws-ecr@8.1.2 + win: circleci/windows@5.0 executors: docker-rust: docker: - - image: cimg/rust:1.63.0 + - image: cimg/rust:1.65.0 image-ubuntu: machine: image: ubuntu-2204:2022.04.1 @@ -89,6 +90,7 @@ commands: shuttle-persist = { path = "$PWD/resources/persist" } shuttle-shared-db = { path = "$PWD/resources/shared-db" } shuttle-secrets = { path = "$PWD/resources/secrets" } + shuttle-static-folder = { path = "$PWD/resources/static-folder" } EOF install-rust: steps: @@ -106,6 +108,29 @@ commands: sudo unzip -o protoc-21.9-linux-x86_64.zip -d /usr bin/protoc &&\ sudo unzip -o protoc-21.9-linux-x86_64.zip -d /usr/ 'include/*' &&\ rm -f protoc-21.9-linux-x86_64.zip + make-artifact: + parameters: + target: + description: "Rust target to put in artifact" + type: string + suffix: + description: "Suffix that is on the binary" + type: string + default: "" + steps: + - run: + name: Make artifact + command: | + mkdir shuttle + mv target/<< parameters.target >>/release/cargo-shuttle<< parameters.suffix >> shuttle/cargo-shuttle<< parameters.suffix >> + mv LICENSE shuttle/ + mv README.md shuttle/ + mkdir artifacts + tar -cvzf artifacts/cargo-shuttle-${CIRCLE_TAG}-<< parameters.target >>.tar.gz shuttle + - persist_to_workspace: + root: artifacts + paths: + - cargo-shuttle-${CIRCLE_TAG}-<< parameters.target >>.tar.gz jobs: workspace-fmt: @@ -116,7 +141,9 @@ jobs: - install-protoc - run: cargo fmt --all --check - run: cargo install cargo-sort - - run: cargo sort --check --workspace + # TODO: this is incompatible with workspace inheritance, uncomment when + # https://github.com/DevinR528/cargo-sort/pull/29 is merged + # - run: cargo sort --check --workspace - run: cargo check --workspace --all-targets - save-cargo-cache workspace-clippy: @@ -151,7 +178,9 @@ jobs: - apply-patches - run: cargo fmt --all --check --manifest-path << parameters.path >>/Cargo.toml - run: cargo install cargo-sort - - run: cargo sort --check << parameters.path >> + # TODO: this is incompatible with workspace inheritance, uncomment when + # https://github.com/DevinR528/cargo-sort/pull/29 is merged + # - run: cargo sort --check << parameters.path >> - run: | cargo clippy --tests \ --all-targets \ @@ -186,7 +215,10 @@ jobs: executor: image-ubuntu steps: - install-rust + - install-protoc - checkout + - run: git submodule sync + - run: git submodule update --init - restore-cargo-cache - apply-patches - run: @@ -205,6 +237,8 @@ jobs: steps: - install-rust - checkout + - run: git submodule sync + - run: git submodule update --init - restore-buildx-cache - run: name: Make images for tests @@ -244,6 +278,89 @@ jobs: command: | PUSH=true PLATFORMS=linux/amd64 make images - save-buildx-cache + build-binaries-linux: + machine: + image: << parameters.image >> + resource_class: << parameters.resource_class >> + parameters: + target: + description: "Linux target to build for" + type: string + image: + description: "Machine image to use" + type: string + resource_class: + description: "The resource type to use for the machine" + type: string + steps: + - checkout + - run: sudo apt update && sudo DEBIAN_FRONTEND=noninteractive apt install -y libssl-dev musl-tools clang + - run: + name: Install Rust + command: curl --proto '=https' --tlsv1.3 https://sh.rustup.rs -sSf | bash -s -- -y 
--default-toolchain 1.65.0 --target << parameters.target >> + - run: + name: Build + command: | + # From https://github.com/briansmith/ring/issues/1414#issuecomment-1055177218 + export CC_aarch64_unknown_linux_musl=clang + # "vendored-openssl" is from https://github.com/cross-rs/cross/issues/229#issuecomment-597898074 + cargo build --release --package cargo-shuttle --features vendored-openssl --target << parameters.target >> + - make-artifact: + target: << parameters.target >> + build-binaries-windows: + executor: + name: win/server-2022 + size: medium + environment: + CARGO_NET_GIT_FETCH_WITH_CLI: "true" + steps: + - checkout + - run: choco install -y strawberryperl + - run: + name: Install Rust + command: | + wget -OutFile "C:\rustup-init.exe" https://static.rust-lang.org/rustup/dist/x86_64-pc-windows-msvc/rustup-init.exe + C:\rustup-init.exe -y --default-toolchain 1.65.0 --target x86_64-pc-windows-msvc + - run: + name: Build + command: | + # From https://github.com/alexcrichton/openssl-src-rs/issues/45 + # Because of https://github.com/openssl/openssl/issues/9048 + $env:OPENSSL_SRC_PERL="C:\Strawberry\perl\bin\perl.exe" + # "vendored-openssl" is from https://github.com/cross-rs/cross/issues/229#issuecomment-597898074 + ..\.cargo\bin\cargo.exe build --release --package cargo-shuttle --features vendored-openssl --target x86_64-pc-windows-msvc + - make-artifact: + target: x86_64-pc-windows-msvc + suffix: ".exe" + build-binaries-mac: + macos: + xcode: 12.5.1 + resource_class: medium + steps: + - checkout + - run: + name: Install Rust + command: curl --proto '=https' https://sh.rustup.rs -sSf | bash -s -- -y --default-toolchain 1.65.0 --target x86_64-apple-darwin + - run: + name: Build + command: | + # "vendored-openssl" is from https://github.com/cross-rs/cross/issues/229#issuecomment-597898074 + cargo build --release --package cargo-shuttle --features vendored-openssl --target x86_64-apple-darwin + - make-artifact: + target: x86_64-apple-darwin + publish-github-release: + docker: + - image: cimg/go:1.19.3 + steps: + - attach_workspace: + at: artifacts + - run: + name: "Publish Release on GitHub" + environment: + GITHUB_TOKEN: $GITHUB_TOKEN + command: | + go install github.com/tcnksm/ghr@v0.16.0 + ghr -u ${CIRCLE_PROJECT_USERNAME} -r ${CIRCLE_PROJECT_REPONAME} -c ${CIRCLE_SHA1} -delete -draft ${CIRCLE_TAG} artifacts/ workflows: version: 2 @@ -256,33 +373,16 @@ workflows: - workspace-fmt matrix: parameters: - framework: ["web-axum", "web-rocket", "web-poem", "web-thruster", "web-tide", "web-tower","web-warp", "web-salvo", "bot-serenity"] + framework: ["web-actix-web", "web-axum", "web-rocket", "web-poem", "web-thruster", "web-tide", "web-tower","web-warp", "web-salvo", "bot-serenity"] - check-standalone: matrix: parameters: path: - - examples/axum/hello-world - - examples/axum/websocket - - examples/poem/hello-world - - examples/poem/mongodb - - examples/poem/postgres - - examples/rocket/authentication - - examples/rocket/hello-world - - examples/rocket/postgres - - examples/rocket/url-shortener - - examples/thruster/hello-world - - examples/thruster/postgres - - examples/salvo/hello-world - - examples/serenity/hello-world - - examples/serenity/postgres - - examples/tide/hello-world - - examples/tide/postgres - - examples/tower/hello-world - - resources/aws-rds - resources/persist - resources/secrets - resources/shared-db + - resources/static-folder - service-test: requires: - workspace-clippy @@ -297,9 +397,55 @@ workflows: - service-test - 
platform-test - check-standalone + filters: + branches: + only: production - build-and-push: requires: - e2e-test filters: branches: - only: main + only: production + - build-binaries-linux: + name: build-binaries-x86_64 + image: ubuntu-2204:2022.04.1 + target: x86_64-unknown-linux-musl + resource_class: medium + filters: + tags: + only: /^v.*/ + branches: + only: production + - build-binaries-linux: + name: build-binaries-aarch64 + image: ubuntu-2004:202101-01 + target: aarch64-unknown-linux-musl + resource_class: arm.medium + filters: + tags: + only: /^v.*/ + branches: + only: production + - build-binaries-windows: + filters: + tags: + only: /^v.*/ + branches: + only: production + - build-binaries-mac: + filters: + tags: + only: /^v.*/ + branches: + only: production + - publish-github-release: + requires: + - build-binaries-x86_64 + - build-binaries-aarch64 + - build-binaries-windows + - build-binaries-mac + filters: + tags: + only: /^v.*/ + branches: + only: production diff --git a/.github/workflows/www.yml b/.github/workflows/www.yml deleted file mode 100644 index e91145498..000000000 --- a/.github/workflows/www.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: website - -on: - push: - branches: - - main - paths: - - '.github/workflows/www.yml' - - 'www/**' - -jobs: - deploy-vercel: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: SpicyPizza/create-envfile@v1.3 - with: - directory: www - envkey_AUTH0_SECRET: ${{ secrets.AUTH0_SECRET }} - envkey_AUTH0_BASE_URL: https://www.shuttle.rs - envkey_AUTH0_ISSUER_BASE_URL: https://shuttle-prod.eu.auth0.com - envkey_AUTH0_CLIENT_ID: X77iwzR3Qm60kSIxxDEUVKOMFIQcDodp - envkey_AUTH0_CLIENT_SECRET: ${{ secrets.AUTH0_CLIENT_SECRET }} - envkey_SHUTTLE_API_BASE_URL: https://api.shuttle.rs - envkey_SHUTTLE_ADMIN_SECRET: ${{ secrets.SHUTTLE_ADMIN_SECRET }} - - uses: amondnet/vercel-action@v20 - with: - vercel-token: ${{ secrets.VERCEL_TOKEN }} - vercel-args: --prod - vercel-org-id: ${{ secrets.VERCEL_ORG_ID }} - vercel-project-id: ${{ secrets.VERCEL_WWW_PROJECT_ID }} - working-directory: ./www - scope: ${{ secrets.VERCEL_ORG_ID }} diff --git a/.gitignore b/.gitignore index 7a493cd6c..bb3141ba0 100644 --- a/.gitignore +++ b/.gitignore @@ -29,3 +29,10 @@ e2e/users.toml .shuttle-* docker-compose.rendered.yml + +.env +node_modules/ +package.json +yarn.lock + +*.wasm diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..77e658142 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "examples"] + path = examples + url = git@github.com:shuttle-hq/examples.git diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 276d489e2..145a72150 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,29 +4,41 @@ Raising [issues](https://github.com/shuttle-hq/shuttle/issues) is encouraged. We have some templates to help you get started. +## Docs + +If you found an error in our docs, or you simply want to make them better, contributions to our [docs](https://github.com/shuttle-hq/shuttle-docs) +are always appreciated! + ## Running Locally You can use Docker and docker-compose to test shuttle locally during development. See the [Docker install](https://docs.docker.com/get-docker/) and [docker-compose install](https://docs.docker.com/compose/install/) instructions if you do not have them installed already. 
+```bash +git clone git@github.com:shuttle-hq/shuttle.git +cd shuttle +``` + You should now be ready to setup a local environment to test code changes to core `shuttle` packages as follows: Build the required images with: ```bash -$ make images +make images ``` +> Note: The current [Makefile](https://github.com/shuttle-hq/shuttle/blob/main/Makefile) does not work on Windows systems; if you want to build the local environment on Windows, you could use [Windows Subsystem for Linux](https://learn.microsoft.com/en-us/windows/wsl/install). + The images get built with [cargo-chef](https://github.com/LukeMathWalker/cargo-chef) and therefore support incremental builds (most of the time). So they will be much faster to re-build after an incremental change in your code - should you wish to deploy it locally straight away. You can now start a local deployment of shuttle and the required containers with: ```bash -$ make up +make up ``` -*Note*: Other useful commands can be found within the [Makefile](https://github.com/shuttle-hq/shuttle/blob/main/Makefile). +> Note: Other useful commands can be found within the [Makefile](https://github.com/shuttle-hq/shuttle/blob/main/Makefile). -The API is now accessible on `localhost:8000` (for app proxies) and `localhost:8001` (for the control plane). When running `cargo run --bin cargo-shuttle` (in a debug build), the CLI will point itself to `localhost` for its API calls. The deployment parameters can be tweaked by changing values in the [.env](./.env) file. +The API is now accessible on `localhost:8000` (for app proxies) and `localhost:8001` (for the control plane). When running `cargo run --bin cargo-shuttle` (in a debug build), the CLI will point itself to `localhost` for its API calls. In order to test local changes to the `shuttle-service` crate, you may want to add the below to a `.cargo/config.toml` file.
(See [Overriding Dependencies](https://doc.rust-lang.org/cargo/reference/overriding-dependencies.html) for more) @@ -37,6 +49,13 @@ shuttle-aws-rds = { path = "[base]/shuttle/resources/aws-rds" } shuttle-persist = { path = "[base]/shuttle/resources/persist" } shuttle-shared-db = { path = "[base]/shuttle/resources/shared-db" } shuttle-secrets = { path = "[base]/shuttle/resources/secrets" } +shuttle-static-folder = { path = "[base]/shuttle/resources/static-folder" } +``` + +Prime gateway database with an admin user: + +```bash +docker compose --file docker-compose.rendered.yml --project-name shuttle-dev exec gateway /usr/local/bin/service --state=/var/lib/shuttle init --name admin --key test-key ``` Login to shuttle service in a new terminal window from the main shuttle directory: @@ -48,21 +67,42 @@ cargo run --bin cargo-shuttle -- login --api-key "test-key" cd into one of the examples: ```bash +git submodule init +git submodule update cd examples/rocket/hello-world/ ``` -Deploy the example: +Create a new project; this will start a deployer container: ```bash # the --manifest-path is used to locate the root of the shuttle workspace +cargo run --manifest-path ../../../Cargo.toml --bin cargo-shuttle -- project new +``` + +Verify that the deployer is healthy and in the ready state: + +```bash +cargo run --manifest-path ../../../Cargo.toml --bin cargo-shuttle -- project status +``` + +Deploy the example: + +```bash cargo run --manifest-path ../../../Cargo.toml --bin cargo-shuttle -- deploy ``` Test if the deploy is working: ```bash -# (the Host header should match the Host from the deploy output) -curl --header "Host: {app}.localhost.local" localhost:8000/hello +# the Host header should match the Host from the deploy output +curl --header "Host: {app}.unstable.shuttleapp.rs" localhost:8000/hello +``` + +View logs from the current deployment: + +```bash +# append `--follow` to this command for a live feed of logs +cargo run --manifest-path ../../../Cargo.toml --bin cargo-shuttle -- logs ``` ### Testing deployer only @@ -76,10 +116,10 @@ This prevents `gateway` from starting up. Now you can start deployer only using: ```bash provisioner_address=$(docker inspect --format '{{(index .NetworkSettings.Networks "shuttle_default").IPAddress}}' shuttle_prod_hello-world-rocket-app_run) -cargo run -p shuttle-deployer -- --provisioner-address $provisioner_address --provisioner-port 8000 --proxy-fqdn local.rs --admin-secret test-key +cargo run -p shuttle-deployer -- --provisioner-address $provisioner_address --provisioner-port 8000 --proxy-fqdn local.rs --admin-secret test-key --project <project_name> ``` -The `--admin-secret` can safely be changed to your api-key to make testing easier. +The `--admin-secret` can safely be changed to your api-key to make testing easier, while `<project_name>` needs to match the name of the project that will be deployed to this deployer. This is the `Cargo.toml` or `Shuttle.toml` name for the project. ### Using Podman instead of Docker If you are using Podman over Docker, then expose a rootless socket of Podman using the following command: @@ -96,7 +136,7 @@ export DOCKER_HOST=unix:///tmp/podman.sock shuttle can now be run locally using the steps shown earlier. -*NOTE*: Testing the `gateway` with a rootless Podman does not work since Podman does not allow access to the `deployer` containers via IP address! +> Note: Testing the `gateway` with a rootless Podman does not work since Podman does not allow access to the `deployer` containers via IP address!
## Running Tests @@ -104,15 +144,38 @@ shuttle has reasonable test coverage - and we are working on improving this every day. We encourage PRs to come with tests. If you're not sure about what a test should look like, feel free to [get in touch](https://discord.gg/H33rRDTm3p). -To run the test suite - just run `make test` at the root of the repository. +To run the unit tests for a specific crate, from the root of the repository run: + +```bash +# replace <crate-name> with the name of the crate to test, e.g. `shuttle-common` +cargo test --package <crate-name> --all-features --lib -- --nocapture +``` + +To run the integration tests for a specific crate (if it has any), from the root of the repository run: + +```bash +# replace <crate-name> with the name of the crate to test, e.g. `cargo-shuttle` +cargo test --package <crate-name> --all-features --test '*' -- --nocapture +``` + +To run the end-to-end tests, from the root of the repository run: + +```bash +make test +``` +> Note: Running all the end-to-end tests may take a long time, so it is recommended to run individual tests shipped as part of each crate in the workspace first. ## Committing We use the [Angular Commit Guidelines](https://github.com/angular/angular/blob/master/CONTRIBUTING.md#commit). We expect all commits to conform to these guidelines. Furthermore, commits should be squashed before being merged to master. -Also, make sure your commits don't trigger any warnings from Clippy by running: `cargo clippy --tests --all-targets`. If you have a good reason to contradict Clippy, insert an #allow[] macro, so that it won't complain. +Before committing: +- Make sure your commits don't trigger any warnings from Clippy by running: `cargo clippy --tests --all-targets`. If you have a good reason to contradict Clippy, insert an `#[allow(clippy::<lint>)]` macro, so that it won't complain. +- Make sure your code is correctly formatted: `cargo fmt --all --check`. +- Make sure your `Cargo.toml`s are sorted: `cargo sort --workspace`. This command uses the [cargo-sort crate](https://crates.io/crates/cargo-sort) to sort the `Cargo.toml` dependencies alphabetically. +- If you've made changes to examples, make sure the above commands are run there as well.
## Project Layout The folders in this repository relate to each other as follow: diff --git a/Cargo.lock b/Cargo.lock index 014ded2a6..4ab269fbc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3,13 +3,200 @@ version = 3 [[package]] -name = "Inflector" -version = "0.11.4" +name = "acme2" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +checksum = "453e534d4f46dcdddd7aa8619e9a664e153f34383d14710db0b0d76c2964db89" dependencies = [ - "lazy_static", + "base64 0.13.1", + "hyper", + "openssl", + "reqwest", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", + "tracing-futures", +] + +[[package]] +name = "actix-codec" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a7559404a7f3573127aab53c08ce37a6c6a315c374a31070f3c91cd1b4a7fe" +dependencies = [ + "bitflags", + "bytes 1.3.0", + "futures-core", + "futures-sink", + "log", + "memchr", + "pin-project-lite 0.2.9", + "tokio", + "tokio-util 0.7.3", +] + +[[package]] +name = "actix-http" +version = "3.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c83abf9903e1f0ad9973cc4f7b9767fd5a03a583f51a5b7a339e07987cd2724" +dependencies = [ + "actix-codec", + "actix-rt", + "actix-service", + "actix-utils", + "ahash", + "base64 0.13.1", + "bitflags", + "brotli", + "bytes 1.3.0", + "bytestring", + "derive_more", + "encoding_rs", + "flate2", + "futures-core", + "h2", + "http 0.2.8", + "httparse", + "httpdate", + "itoa 1.0.2", + "language-tags", + "local-channel", + "mime", + "percent-encoding", + "pin-project-lite 0.2.9", + "rand 0.8.5", + "sha1 0.10.4", + "smallvec", + "tracing", + "zstd", +] + +[[package]] +name = "actix-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" +dependencies = [ + "quote 1.0.21", + "syn 1.0.104", +] + +[[package]] +name = "actix-router" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66ff4d247d2b160861fa2866457e85706833527840e4133f8f49aa423a38799" +dependencies = [ + "bytestring", + "http 0.2.8", + "regex", + "serde", + "tracing", +] + +[[package]] +name = "actix-rt" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ea16c295198e958ef31930a6ef37d0fb64e9ca3b6116e6b93a8bdae96ee1000" +dependencies = [ + "futures-core", + "tokio", +] + +[[package]] +name = "actix-server" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0da34f8e659ea1b077bb4637948b815cd3768ad5a188fdcd74ff4d84240cd824" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "futures-util", + "mio", + "num_cpus", + "socket2", + "tokio", + "tracing", +] + +[[package]] +name = "actix-service" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b894941f818cfdc7ccc4b9e60fa7e53b5042a2e8567270f9147d5591893373a" +dependencies = [ + "futures-core", + "paste", + "pin-project-lite 0.2.9", +] + +[[package]] +name = "actix-utils" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" +dependencies = [ + "local-waker", + "pin-project-lite 0.2.9", +] + +[[package]] +name = 
"actix-web" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d48f7b6534e06c7bfc72ee91db7917d4af6afe23e7d223b51e68fffbb21e96b9" +dependencies = [ + "actix-codec", + "actix-http", + "actix-macros", + "actix-router", + "actix-rt", + "actix-server", + "actix-service", + "actix-utils", + "actix-web-codegen", + "ahash", + "bytes 1.3.0", + "bytestring", + "cfg-if 1.0.0", + "cookie 0.16.0", + "derive_more", + "encoding_rs", + "futures-core", + "futures-util", + "http 0.2.8", + "itoa 1.0.2", + "language-tags", + "log", + "mime", + "once_cell", + "pin-project-lite 0.2.9", "regex", + "serde", + "serde_json", + "serde_urlencoded", + "smallvec", + "socket2", + "time 0.3.11", + "url", +] + +[[package]] +name = "actix-web-codegen" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa9362663c8643d67b2d5eafba49e4cb2c8a053a29ed00a0bea121f17c76b13" +dependencies = [ + "actix-router", + "proc-macro2 1.0.47", + "quote 1.0.21", + "syn 1.0.104", ] [[package]] @@ -167,19 +354,16 @@ dependencies = [ ] [[package]] -name = "ansi_term" -version = "0.12.1" +name = "anyhow" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] +checksum = "216261ddc8289130e551ddcd5ce8a064710c0d064a4d2895c67151c92b5443f6" [[package]] -name = "anyhow" -version = "1.0.62" +name = "arc-swap" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1485d4d2cc45e7b201ee3767015c96faa5904387c9d87c6efdd0fb511f12d305" +checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164" [[package]] name = "arrayref" @@ -199,6 +383,20 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +[[package]] +name = "assert_cmd" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba45b8163c49ab5f972e59a8a5a03b6d2972619d486e19ec9fe744f7c2753d3c" +dependencies = [ + "bstr 1.0.1", + "doc-comment", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + [[package]] name = "async-channel" version = "1.6.1" @@ -404,9 +602,9 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -421,9 +619,9 @@ version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e805d94e6b5001b651426cf4cd446b1ab5f319d27bab5c644f61de0a804360c" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -437,18 +635,9 @@ dependencies = [ "log", "pin-project-lite 0.2.9", "tokio", - "tokio-rustls 0.23.4", - "tungstenite 0.17.3", - "webpki-roots 0.22.3", -] - -[[package]] -name = "atoi" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616896e05fc0e2649463a93a15183c6a16bf03413a7af88ef1285ddedfa9cda5" -dependencies = [ - "num-traits", + "tokio-rustls", + "tungstenite", + "webpki-roots", ] [[package]] @@ -503,9 +692,9 @@ checksum = 
"d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aws-config" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a3ad9e793335d75b2d2faad583487efcc0df9154aff06f299a5c1fc8795698" +checksum = "56a636c44c77fa18bdba56126a34d30cfe5538fe88f7d34988fa731fee143ddd" dependencies = [ "aws-http", "aws-sdk-sso", @@ -517,7 +706,7 @@ dependencies = [ "aws-smithy-json", "aws-smithy-types", "aws-types", - "bytes 1.2.1", + "bytes 1.3.0", "hex 0.4.3", "http 0.2.8", "hyper", @@ -531,11 +720,12 @@ dependencies = [ [[package]] name = "aws-endpoint" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bd4e9dad553017821ee529f186e033700e8d61dd5c4b60066b4d8fe805b8cfc" +checksum = "6ca8f374874f6459aaa88dc861d7f5d834ca1ff97668eae190e97266b5f6c3fb" dependencies = [ "aws-smithy-http", + "aws-smithy-types", "aws-types", "http 0.2.8", "regex", @@ -544,14 +734,14 @@ dependencies = [ [[package]] name = "aws-http" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ef5a579a51d352b628b76f4855ba716be686305e5e59970c476d1ae2214e90d" +checksum = "78d41e19e779b73463f5f0c21b3aacc995f4ba783ab13a7ae9f5dfb159a551b4" dependencies = [ "aws-smithy-http", "aws-smithy-types", "aws-types", - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", "http-body", "lazy_static", @@ -562,9 +752,9 @@ dependencies = [ [[package]] name = "aws-sdk-rds" -version = "0.17.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59834848e38d12a46013354ea27bb44ef4686121f8cda2611daea0fea3e8eac1" +checksum = "0420bd9b4043fa26b87ee18af3b98c5cc960ff439456a091cd4dd044cbbcc046" dependencies = [ "aws-endpoint", "aws-http", @@ -577,7 +767,7 @@ dependencies = [ "aws-smithy-types", "aws-smithy-xml", "aws-types", - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", "tokio-stream", "tower", @@ -585,9 +775,9 @@ dependencies = [ [[package]] name = "aws-sdk-sso" -version = "0.17.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f014b8ad3178b414bf732b36741325ef659fc40752f8c292400fb7c4ecb7fdd0" +checksum = "86dcb1cb71aa8763b327542ead410424515cff0cde5b753eedd2917e09c63734" dependencies = [ "aws-endpoint", "aws-http", @@ -599,7 +789,7 @@ dependencies = [ "aws-smithy-json", "aws-smithy-types", "aws-types", - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", "tokio-stream", "tower", @@ -607,9 +797,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "0.17.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d37e45fdce84327c69fb924b9188fd889056c6afafbd494e8dd0daa400f9c082" +checksum = "fdfcf584297c666f6b472d5368a78de3bc714b6e0a53d7fbf76c3e347c292ab1" dependencies = [ "aws-endpoint", "aws-http", @@ -622,16 +812,16 @@ dependencies = [ "aws-smithy-types", "aws-smithy-xml", "aws-types", - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", "tower", ] [[package]] name = "aws-sig-auth" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6530e72945c11439e9b3c423c95a656a233d73c3a7d4acaf9789048e1bdf7da7" +checksum = "12cbe7b2be9e185c1fbce27fc9c41c66b195b32d89aa099f98768d9544221308" dependencies = [ "aws-sigv4", "aws-smithy-http", @@ -642,9 +832,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = 
"0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6351c3ba468b04bd819f64ea53538f5f53e3d6b366b27deabee41e73c9edb3af" +checksum = "03ff4cff8c4a101962d593ba94e72cd83891aecd423f0c6e3146bff6fb92c9e3" dependencies = [ "aws-smithy-http", "form_urlencoded", @@ -660,9 +850,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86fc23ad8d050c241bdbfa74ae360be94a844ace8e218f64a2b2de77bfa9a707" +checksum = "7b3442b4c5d3fc39891a2e5e625735fba6b24694887d49c6518460fde98247a9" dependencies = [ "futures-util", "pin-project-lite 0.2.9", @@ -672,20 +862,20 @@ dependencies = [ [[package]] name = "aws-smithy-client" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e147b157f49ce77f2a86ec693a14c84b2441fa28be58ffb2febb77d5726c934" +checksum = "ff28d553714f8f54cd921227934fc13a536a1c03f106e56b362fd57e16d450ad" dependencies = [ "aws-smithy-async", "aws-smithy-http", "aws-smithy-http-tower", "aws-smithy-types", - "bytes 1.2.1", + "bytes 1.3.0", "fastrand", "http 0.2.8", "http-body", "hyper", - "hyper-rustls 0.22.1", + "hyper-rustls", "lazy_static", "pin-project-lite 0.2.9", "tokio", @@ -695,12 +885,12 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc1af50eac644ab6f58e5bae29328ba3092851fc2ce648ad139134699b2b66f" +checksum = "bf58ed4fefa61dbf038e5421a521cbc2c448ef69deff0ab1d915d8a10eda5664" dependencies = [ "aws-smithy-types", - "bytes 1.2.1", + "bytes 1.3.0", "bytes-utils", "futures-core", "http 0.2.8", @@ -709,6 +899,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite 0.2.9", + "pin-utils", "tokio", "tokio-util 0.7.3", "tracing", @@ -716,12 +907,12 @@ dependencies = [ [[package]] name = "aws-smithy-http-tower" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1bf4c4664dff2febf91f8796505c5bc8f38a0bff0d1397d1d3fdda17bd5c5d1" +checksum = "20c96d7bd35e7cf96aca1134b2f81b1b59ffe493f7c6539c051791cbbf7a42d3" dependencies = [ "aws-smithy-http", - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", "http-body", "pin-project-lite 0.2.9", @@ -731,18 +922,18 @@ dependencies = [ [[package]] name = "aws-smithy-json" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e6ebc76c3c108dd2a96506bf47dc31f75420811a19f1a09907524d1451789d2" +checksum = "d8324ba98c8a94187723cc16c37aefa09504646ee65c3d2c3af495bab5ea701b" dependencies = [ "aws-smithy-types", ] [[package]] name = "aws-smithy-query" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2956f1385c4daa883907a2c81d32256af8f95834c9de1bc0613fa68db63b88c4" +checksum = "83834ed2ff69ea6f6657baf205267dc2c0abe940703503a3e5d60ce23be3d306" dependencies = [ "aws-smithy-types", "urlencoding", @@ -750,9 +941,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "352fb335ec1d57160a17a13e87aaa0a172ab780ddf58bfc85caedd3b7e47caed" +checksum = "8b02e06ea63498c43bc0217ea4d16605d4e58d85c12fc23f6572ff6d0a840c61" dependencies = [ "itoa 1.0.2", 
"num-integer", @@ -762,18 +953,18 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cf2807fa715a5a3296feffb06ce45252bd0dfd48f52838128c48fb339ddbf5c" +checksum = "246e9f83dd1fdf5d347fa30ae4ad30a9d1d42ce4cd74a93d94afa874646f94cd" dependencies = [ "xmlparser", ] [[package]] name = "aws-types" -version = "0.47.0" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8140b89d76f67be2c136d7393e7e6d8edd65424eb58214839efbf4a2e4f7e8a3" +checksum = "05701d32da168b44f7ee63147781aed8723e792cc131cb9b18363b5393f17f70" dependencies = [ "aws-smithy-async", "aws-smithy-client", @@ -787,15 +978,15 @@ dependencies = [ [[package]] name = "axum" -version = "0.5.15" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9de18bc5f2e9df8f52da03856bf40e29b747de5a84e43aefff90e3dc4a21529b" +checksum = "744864363a200a5e724a7e61bc8c11b6628cf2e3ec519c8a1a48e609a8156b40" dependencies = [ "async-trait", "axum-core", - "base64 0.13.0", + "base64 0.13.1", "bitflags", - "bytes 1.2.1", + "bytes 1.3.0", "futures-util", "headers", "http 0.2.8", @@ -807,13 +998,15 @@ dependencies = [ "mime", "percent-encoding", "pin-project-lite 0.2.9", + "rustversion", "serde", "serde_json", + "serde_path_to_error", "serde_urlencoded", - "sha-1 0.10.0", + "sha-1", "sync_wrapper", "tokio", - "tokio-tungstenite 0.17.2", + "tokio-tungstenite", "tower", "tower-http 0.3.4", "tower-layer", @@ -822,29 +1015,46 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.2.7" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4f44a0e6200e9d11a1cdc989e4b358f6e3d354fbf48478f345a17f4e43f8635" +checksum = "79b8558f5a0581152dc94dcd289132a1d377494bdeafcd41869b3258e3e2ad92" dependencies = [ "async-trait", - "bytes 1.2.1", + "bytes 1.3.0", "futures-util", "http 0.2.8", "http-body", "mime", + "rustversion", + "tower-layer", + "tower-service", ] [[package]] -name = "base-x" -version = "0.2.11" +name = "axum-server" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" +checksum = "8456dab8f11484979a86651da8e619b355ede5d61a160755155f6c344bd18c47" +dependencies = [ + "arc-swap", + "bytes 1.3.0", + "futures-util", + "http 0.2.8", + "http-body", + "hyper", + "pin-project-lite 0.2.9", + "rustls", + "rustls-pemfile 1.0.1", + "tokio", + "tokio-rustls", + "tower-service", +] [[package]] -name = "base64" -version = "0.11.0" +name = "base-x" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" +checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" [[package]] name = "base64" @@ -854,9 +1064,9 @@ checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" [[package]] name = "base64" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "binascii" @@ -941,9 +1151,9 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d82e7850583ead5f8bbef247e2a3c37a19bd576e8420cd262a6711921827e1e5" dependencies = [ - "base64 0.13.0", + "base64 0.13.1", "bollard-stubs", - "bytes 1.2.1", + "bytes 1.3.0", "futures-core", "futures-util", "hex 0.4.3", @@ -1001,7 +1211,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d76085681585d39016f4d3841eb019201fc54d2dd0d92ad1e4fab3bfb32754" dependencies = [ "ahash", - "base64 0.13.0", + "base64 0.13.1", "hex 0.4.3", "indexmap", "lazy_static", @@ -1010,7 +1220,7 @@ dependencies = [ "serde_bytes", "serde_json", "time 0.3.11", - "uuid 1.1.2", + "uuid 1.2.2", ] [[package]] @@ -1024,6 +1234,18 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "bstr" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fca0852af221f458706eb0725c03e4ed6c46af9ac98e6a689d5e634215d594dd" +dependencies = [ + "memchr", + "once_cell", + "regex-automata", + "serde", +] + [[package]] name = "buf_redux" version = "0.8.4" @@ -1064,9 +1286,9 @@ checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = "bytes" -version = "1.2.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec8a7b6a70fde80372154c65702f00a0f56f3e1c36abbc6c440484be248856db" +checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" [[package]] name = "bytes-utils" @@ -1074,7 +1296,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1934a3ef9cac8efde4966a92781e77713e1ba329f1d42e446c7d7eba340d8ef1" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "either", ] @@ -1084,6 +1306,15 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c58ec36aac5066d5ca17df51b3e70279f5670a72102f5752cb7e7c856adfc70" +[[package]] +name = "bytestring" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7f83e57d9154148e355404702e2694463241880b939570d7c97c014da7a69a1" +dependencies = [ + "bytes 1.3.0", +] + [[package]] name = "cache-padded" version = "1.2.0" @@ -1107,8 +1338,8 @@ checksum = "0b0e103ce36d217d568903ad27b14ec2238ecb5d65bad2e756a8f3c0d651506e" dependencies = [ "cap-primitives", "cap-std", - "io-lifetimes", - "windows-sys", + "io-lifetimes 0.7.4", + "windows-sys 0.36.1", ] [[package]] @@ -1120,12 +1351,12 @@ dependencies = [ "ambient-authority", "fs-set-times", "io-extras", - "io-lifetimes", + "io-lifetimes 0.7.4", "ipnet", "maybe-owned", - "rustix", + "rustix 0.35.12", "winapi-util", - "windows-sys", + "windows-sys 0.36.1", "winx", ] @@ -1147,9 +1378,9 @@ checksum = "c9d6e70b626eceac9d6fc790fe2d72cc3f2f7bc3c35f467690c54a526b0f56db" dependencies = [ "cap-primitives", "io-extras", - "io-lifetimes", + "io-lifetimes 0.7.4", "ipnet", - "rustix", + "rustix 0.35.12", ] [[package]] @@ -1160,22 +1391,22 @@ checksum = "c3a0524f7c4cff2ea547ae2b652bf7a348fd3e48f76556dc928d8b45ab2f1d50" dependencies = [ "cap-primitives", "once_cell", - "rustix", + "rustix 0.35.12", "winx", ] [[package]] name = "cargo" -version = "0.64.0" +version = "0.65.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7019448b7d0ffe19d4ab26a340d2efe6da8cf86c8cc01a352b90853e31cd8f7c" +checksum = "988ba7aa82c0944fd91d119ee24a5c1f865eb2797e0edd90f6c08c7252857ca5" dependencies = [ "anyhow", "atty", "bytesize", "cargo-platform", "cargo-util", - "clap 3.2.17", + "clap 3.2.23", "crates-io", 
"crossbeam-utils", "curl", @@ -1217,7 +1448,7 @@ dependencies = [ "tar", "tempfile", "termcolor", - "toml_edit", + "toml_edit 0.14.4", "unicode-width", "unicode-xid 0.2.3", "url", @@ -1227,13 +1458,13 @@ dependencies = [ [[package]] name = "cargo-edit" -version = "0.10.4" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a055abd7d5cdb5a4e0aad89962b2f172eaad613374056cab74124ff2cf978ea7" +checksum = "e3a5eba325b274fc14e17df48888c3c45146e03be7331cdbd585c377a2bc8058" dependencies = [ "anyhow", "cargo_metadata", - "clap 3.2.17", + "clap 4.0.27", "concolor-control", "crates-index", "dirs-next", @@ -1251,7 +1482,7 @@ dependencies = [ "serde_json", "subprocess", "termcolor", - "toml_edit", + "toml_edit 0.14.4", "ureq", "url", ] @@ -1267,44 +1498,55 @@ dependencies = [ [[package]] name = "cargo-shuttle" -version = "0.7.0" +version = "0.8.1" dependencies = [ "anyhow", + "assert_cmd", "async-trait", "bollard", "cargo", "cargo-edit", "cargo_metadata", "chrono", - "clap 3.2.17", + "clap 3.2.23", + "clap_complete", "crossbeam-channel", "crossterm", + "dialoguer", "dirs", + "flate2", "futures", + "git2", "headers", + "ignore", + "indicatif", "indoc", "log", + "openssl", "portpicker", "reqwest", "reqwest-middleware", "reqwest-retry", + "rexpect", "serde", "serde_json", "shuttle-common", "shuttle-secrets", "shuttle-service", - "sqlx 0.6.1", + "sqlx", + "strum", + "tar", "tempfile", "test-context", "tokio", - "tokio-tungstenite 0.17.2", + "tokio-tungstenite", "tokiotest-httpserver", "toml", - "toml_edit", + "toml_edit 0.15.0", "tracing", "tracing-subscriber", "url", - "uuid 1.1.2", + "uuid 1.2.2", "webbrowser", ] @@ -1332,15 +1574,16 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.15.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36" +checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a" dependencies = [ "camino", "cargo-platform", "semver 1.0.14", "serde", "serde_json", + "thiserror", ] [[package]] @@ -1372,9 +1615,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1" +checksum = "16b0a3d9ed01224b22057780a37bb8c5dbfe1be8ba48678e7bf57ec4b385411f" dependencies = [ "iana-time-zone", "js-sys", @@ -1412,61 +1655,70 @@ dependencies = [ [[package]] name = "clap" -version = "3.2.17" +version = "3.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29e724a68d9319343bb3328c9cc2dfde263f4b3142ee1059a9980580171c954b" +checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" dependencies = [ "atty", "bitflags", - "clap_derive 3.2.17", + "clap_derive 3.2.18", "clap_lex 0.2.4", "indexmap", "once_cell", "strsim", "termcolor", - "terminal_size", "textwrap", ] [[package]] name = "clap" -version = "4.0.18" +version = "4.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335867764ed2de42325fafe6d18b8af74ba97ee0c590fa016f157535b42ab04b" +checksum = "0acbd8d28a0a60d7108d7ae850af6ba34cf2d1257fc646980e5f97ce14275966" dependencies = [ - "atty", "bitflags", - "clap_derive 4.0.18", + "clap_derive 4.0.21", "clap_lex 0.3.0", + "is-terminal 0.4.0", "once_cell", "strsim", 
"termcolor", + "terminal_size 0.2.3", +] + +[[package]] +name = "clap_complete" +version = "3.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f7a2e0a962c45ce25afce14220bc24f9dade0a1787f185cecf96bfba7847cd8" +dependencies = [ + "clap 3.2.23", ] [[package]] name = "clap_derive" -version = "3.2.17" +version = "3.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13547f7012c01ab4a0e8f8967730ada8f9fdf419e8b6c792788f39cf4e46eefa" +checksum = "ea0c8bce528c4be4da13ea6fead8965e95b6073585a2f05204bd8f4119f82a65" dependencies = [ "heck", "proc-macro-error", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] name = "clap_derive" -version = "4.0.18" +version = "4.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16a1b0f6422af32d5da0c58e2703320f379216ee70198241c84173a8c5ac28f3" +checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014" dependencies = [ "heck", "proc-macro-error", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -1509,19 +1761,19 @@ dependencies = [ [[package]] name = "combine" -version = "4.6.4" +version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a604e93b79d1808327a6fca85a6f2d69de66461e7620f5a4cbf5fb4d1d7c948" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "memchr", ] [[package]] name = "comfy-table" -version = "6.1.0" +version = "6.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85914173c2f558d61613bfbbf1911f14e630895087a7ed2fafc0f5319e1536e7" +checksum = "e621e7e86c46fd8a14c32c6ae3cb95656621b4743a27d0cffedb831d46e7ad21" dependencies = [ "crossterm", "strum", @@ -1529,6 +1781,12 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "comma" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" + [[package]] name = "commoncrypto" version = "0.2.0" @@ -1573,6 +1831,20 @@ dependencies = [ "cache-padded", ] +[[package]] +name = "console" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c050367d967ced717c04b65d8c619d863ef9292ce0c5760028655a2fb298718c" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "terminal_size 0.1.17", + "unicode-width", + "winapi", +] + [[package]] name = "const_fn" version = "0.4.9" @@ -1585,6 +1857,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "cookie" version = "0.14.4" @@ -1592,7 +1870,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "03a5d7b21829bc7b4bf4754a978a241ae54ea55a40f92bb20216e54096f4b951" dependencies = [ "aes-gcm 0.8.0", - "base64 0.13.0", + "base64 0.13.1", "hkdf 0.10.0", "hmac 0.10.1", "percent-encoding", @@ -1609,7 +1887,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"94d4706de1b0fa5b132270cddffa8585166037822e260a944fe161acd137ca05" dependencies = [ "aes-gcm 0.9.4", - "base64 0.13.0", + "base64 0.13.1", "hkdf 0.12.3", "hmac 0.12.1", "percent-encoding", @@ -1763,9 +2041,9 @@ dependencies = [ [[package]] name = "crates-index" -version = "0.18.8" +version = "0.18.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2519c91ad7a6e3250a64fb71162d2db1afe7bcf826a465f84d2052fd69639b7a" +checksum = "599f67b56f40863598cb30450427049935d05de2e36c61d33c050f04d7ec8cf2" dependencies = [ "git2", "hex 0.4.3", @@ -1779,6 +2057,7 @@ dependencies = [ "serde_derive", "serde_json", "smartstring", + "toml", ] [[package]] @@ -1795,30 +2074,15 @@ dependencies = [ "url", ] -[[package]] -name = "crc" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" -dependencies = [ - "crc-catalog 1.1.1", -] - [[package]] name = "crc" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53757d12b596c16c78b83458d732a5d1a17ab3f53f2f7412f6fb57cc8a140ab3" dependencies = [ - "crc-catalog 2.1.0", + "crc-catalog", ] -[[package]] -name = "crc-catalog" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" - [[package]] name = "crc-catalog" version = "2.1.0" @@ -1846,9 +2110,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", @@ -1857,15 +2121,14 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.9" +version = "0.9.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d" +checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" dependencies = [ "autocfg 1.1.0", "cfg-if 1.0.0", "crossbeam-utils", - "memoffset", - "once_cell", + "memoffset 0.7.1", "scopeguard", ] @@ -1914,6 +2177,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "cruet" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e0d174765c7d11eb16f70a4213583aac2ca5ae1ebd1e233c6d5104bfb70fce3" +dependencies = [ + "once_cell", + "regex", +] + [[package]] name = "crypto-common" version = "0.1.3" @@ -1956,23 +2229,14 @@ dependencies = [ "subtle", ] -[[package]] -name = "ct-logs" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1a816186fa68d9e426e3cb4ae4dff1fcd8e4a2c34b781bf7a822574a0d0aac8" -dependencies = [ - "sct 0.6.1", -] - [[package]] name = "ctor" -version = "0.1.23" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdffe87e1d521a10f9696f833fe502293ea446d7f256c06128293a4119bdf4cb" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" dependencies = [ "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -1993,6 +2257,12 @@ dependencies = [ "cipher 0.3.0", ] +[[package]] +name = "cty" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b365fabc795046672053e29c954733ec3b05e4be654ab130fe8f1f94d7051f35" + [[package]] name = "curl" version = "0.4.43" @@ -2052,10 +2322,10 @@ checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "strsim", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -2066,10 +2336,10 @@ checksum = "649c91bc01e8b1eac09fb91e8dbc7d517684ca6be8ebc75bb9cafc894f9fdb6f" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "strsim", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -2080,7 +2350,7 @@ checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core 0.13.4", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -2091,7 +2361,19 @@ checksum = "ddfc69c5bfcbd2fc09a0f38451d2daf0e372e367986a83906d1b0dbc88134fb5" dependencies = [ "darling_core 0.14.1", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", +] + +[[package]] +name = "dashmap" +version = "5.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3495912c9c1ccf2e18976439f4443f3fee0fd61f424ff99fde6a66b15ecb448f" +dependencies = [ + "cfg-if 1.0.0", + "hashbrown", + "lock_api", + "parking_lot_core 0.9.3", ] [[package]] @@ -2106,9 +2388,22 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2 1.0.47", + "quote 1.0.21", + "rustc_version 0.4.0", + "syn 1.0.104", ] [[package]] @@ -2138,10 +2433,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841ef46f4787d9097405cac4e70fb8644fc037b526e8c14054247c0263c400d0" dependencies = [ "bitflags", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "proc-macro2-diagnostics", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", +] + +[[package]] +name = "dialoguer" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92e7e37ecef6857fdc0c0c5d42fd5b0938e46590c2183cc92dd310a6d078eb1" +dependencies = [ + "console", + "fuzzy-matcher", + "tempfile", + "zeroize", ] [[package]] @@ -2150,6 +2457,12 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + [[package]] name = "digest" version = "0.9.0" @@ -2233,12 +2546,6 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" -[[package]] -name = "dotenv" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" - [[package]] name = 
"dotenvy" version = "0.15.2" @@ -2250,9 +2557,9 @@ dependencies = [ [[package]] name = "dunce" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "453440c271cf5577fd2a40e4942540cb7d0d2f85e27c8d07dd0023c925a67541" +checksum = "0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c" [[package]] name = "either" @@ -2260,6 +2567,12 @@ version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + [[package]] name = "encoding_rs" version = "0.8.31" @@ -2276,9 +2589,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" dependencies = [ "heck", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -2296,9 +2609,9 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f58dc3c5e468259f19f2d46304a6b28f1c3d034442e14b322d2b850e36f6d5ae" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -2435,9 +2748,9 @@ checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" [[package]] name = "flate2" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" +checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" dependencies = [ "crc32fast", "libz-sys", @@ -2479,19 +2792,18 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" dependencies = [ - "matches", "percent-encoding", ] [[package]] name = "fqdn" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1281c1bcf107f1cc25ca345ce4688badf37c6c13c3f193cb4b9efdc057678ae2" +checksum = "3b5dd19b048b2dfde153588594b4f3da47b18afd18d171bb8d1d27741256bbaa" [[package]] name = "fs-set-times" @@ -2499,9 +2811,9 @@ version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a267b6a9304912e018610d53fe07115d8b530b160e85db4d2d3a59f3ddde1aec" dependencies = [ - "io-lifetimes", - "rustix", - "windows-sys", + "io-lifetimes 0.7.4", + "rustix 0.35.12", + "windows-sys 0.36.1", ] [[package]] @@ -2512,9 +2824,9 @@ checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" [[package]] name = "futures" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab30e97ab6aacfe635fad58f22c2bb06c8b685f7421eb1e064a729e2a5f481fa" +checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" dependencies = [ "futures-channel", "futures-core", @@ -2527,9 +2839,9 @@ dependencies = [ [[package]] name = 
"futures-channel" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bfc52cbddcfd745bf1740338492bb0bd83d76c67b445f91c5fb29fae29ecaa1" +checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed" dependencies = [ "futures-core", "futures-sink", @@ -2537,15 +2849,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2acedae88d38235936c3922476b10fced7b2b68136f5e3c03c2d5be348a1115" +checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" [[package]] name = "futures-executor" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d11aa21b5b587a64682c0094c2bdd4df0076c5324961a40cc3abd7f37930528" +checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" dependencies = [ "futures-core", "futures-task", @@ -2565,9 +2877,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93a66fc6d035a26a3ae255a6d2bca35eda63ae4c5512bef54449113f7a1228e5" +checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb" [[package]] name = "futures-lite" @@ -2586,32 +2898,32 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0db9cce532b0eae2ccf2766ab246f114b56b9cf6d445e00c2549fbc100ca045d" +checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] name = "futures-sink" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca0bae1fe9752cf7fd9b0064c674ae63f97b37bc714d745cbde0afb7ec4e6765" +checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9" [[package]] name = "futures-task" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "842fc63b931f4056a24d59de13fb1272134ce261816e063e634ad0c15cdc5306" +checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" [[package]] name = "futures-util" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0828a5471e340229c11c77ca80017937ce3c58cb788a17e5f1c2d5c485a9577" +checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" dependencies = [ "futures-channel", "futures-core", @@ -2625,6 +2937,15 @@ dependencies = [ "slab", ] +[[package]] +name = "fuzzy-matcher" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94" +dependencies = [ + "thread_local", +] + [[package]] name = "fwdansi" version = "1.1.0" @@ -2722,9 +3043,9 @@ dependencies = [ [[package]] name = "git2" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3826a6e0e2215d7a41c2bfc7c9244123969273f3476b939a226aac0ab56e9e3c" +checksum = "d0155506aab710a86160ddb504a480d2964d7ab5b9e62419be69e0032bc5931c" dependencies = [ "bitflags", 
"libc", @@ -2760,7 +3081,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a1e17342619edbc21a964c2afbeb6c820c6a2560032872f397bb97ea127bd0a" dependencies = [ "aho-corasick", - "bstr", + "bstr 0.2.17", "fnv", "log", "regex", @@ -2784,7 +3105,7 @@ version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "fnv", "futures-core", "futures-sink", @@ -2797,15 +3118,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "hashbrown" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" -dependencies = [ - "ahash", -] - [[package]] name = "hashbrown" version = "0.12.1" @@ -2815,22 +3127,13 @@ dependencies = [ "ahash", ] -[[package]] -name = "hashlink" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" -dependencies = [ - "hashbrown 0.11.2", -] - [[package]] name = "hashlink" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d452c155cb93fecdfb02a73dd57b5d8e442c2063bd7aac72f1bc5e4263a43086" dependencies = [ - "hashbrown 0.12.1", + "hashbrown", ] [[package]] @@ -2839,9 +3142,9 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" dependencies = [ - "base64 0.13.0", + "base64 0.13.1", "bitflags", - "bytes 1.2.1", + "bytes 1.3.0", "headers-core", "http 0.2.8", "httpdate", @@ -2950,9 +3253,9 @@ dependencies = [ [[package]] name = "home" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" +checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408" dependencies = [ "winapi", ] @@ -2985,7 +3288,7 @@ version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "fnv", "itoa 1.0.2", ] @@ -2996,7 +3299,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", "pin-project-lite 0.2.9", ] @@ -3038,7 +3341,7 @@ dependencies = [ "anyhow", "async-channel", "async-std", - "base64 0.13.0", + "base64 0.13.1", "cookie 0.14.4", "futures-lite", "infer", @@ -3075,7 +3378,7 @@ version = "0.14.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "034711faac9d2166cb1baf1a2fb0b60b1f277f8492fd72176c17f3515e1abd3c" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "futures-channel", "futures-core", "futures-util", @@ -3115,23 +3418,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "hyper-rustls" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9f7a97316d44c0af9b0301e65010573a853a9fc97046d7331d7f6bc0fd5a64" -dependencies = [ - "ct-logs", - "futures-util", - "hyper", - "log", - "rustls 0.19.1", - "rustls-native-certs", - "tokio", - "tokio-rustls 
0.22.0", - "webpki 0.21.4", -] - [[package]] name = "hyper-rustls" version = "0.23.0" @@ -3140,9 +3426,11 @@ checksum = "d87c48c02e0dc5e3b849a2041db3029fd066650f8f717c07bf8ed78ccb895cac" dependencies = [ "http 0.2.8", "hyper", - "rustls 0.20.6", + "log", + "rustls", + "rustls-native-certs", "tokio", - "tokio-rustls 0.23.4", + "tokio-rustls", ] [[package]] @@ -3163,7 +3451,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "hyper", "native-tls", "tokio", @@ -3213,6 +3501,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "ignore" version = "0.4.18" @@ -3252,10 +3550,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg 1.1.0", - "hashbrown 0.12.1", + "hashbrown", "serde", ] +[[package]] +name = "indicatif" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4295cbb7573c16d310e99e713cf9e75101eb190ab31fccd35f2d2691b4352b19" +dependencies = [ + "console", + "number_prefix", + "portable-atomic", + "unicode-width", +] + [[package]] name = "indoc" version = "1.0.7" @@ -3283,14 +3593,29 @@ dependencies = [ "cfg-if 1.0.0", ] +[[package]] +name = "instant-acme" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4c6a5dc426fcc25b99d91e4a283a8f5518339a0f63bf28588a6c5f31e089f8a" +dependencies = [ + "base64 0.13.1", + "hyper", + "hyper-rustls", + "ring", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "io-extras" version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5d8c2ab5becd8720e30fd25f8fa5500d8dc3fceadd8378f05859bd7b46fc49" dependencies = [ - "io-lifetimes", - "windows-sys", + "io-lifetimes 0.7.4", + "windows-sys 0.36.1", ] [[package]] @@ -3300,7 +3625,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6e481ccbe3dea62107216d0d1138bb8ad8e5e5c43009a098bd1990272c497b0" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.36.1", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c" +dependencies = [ + "libc", + "windows-sys 0.42.0", ] [[package]] @@ -3337,16 +3672,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d508111813f9af3afd2f92758f77e4ed2cc9371b642112c6a48d22eb73105c5" dependencies = [ "hermit-abi 0.2.6", - "io-lifetimes", - "rustix", - "windows-sys", + "io-lifetimes 0.7.4", + "rustix 0.35.12", + "windows-sys 0.36.1", +] + +[[package]] +name = "is-terminal" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae5bc6e2eb41c9def29a3e0f1306382807764b9b53112030eff57435667352d" +dependencies = [ + "hermit-abi 0.2.6", + "io-lifetimes 1.0.3", + "rustix 0.36.3", + "windows-sys 0.42.0", ] [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] @@ -3385,9 +3732,9 @@ dependencies = [ [[package]] name = "jni" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" +checksum = "039022cdf4d7b1cf548d31f60ae783138e5fd42013f6271049d7df7afadef96c" dependencies = [ "cesu8", "combine", @@ -3439,6 +3786,12 @@ dependencies = [ "log", ] +[[package]] +name = "language-tags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" + [[package]] name = "lazy_static" version = "1.4.0" @@ -3465,9 +3818,9 @@ checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89" [[package]] name = "libgit2-sys" -version = "0.13.2+1.4.2" +version = "0.13.4+1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a42de9a51a5c12e00fc0e4ca6bc2ea43582fc6418488e8f615e905d886f258b" +checksum = "d0fa6563431ede25f5cc7f6d803c6afbc1c5d3ad3d4925d12c882bf2b526f5d1" dependencies = [ "cc", "libc", @@ -3479,9 +3832,9 @@ dependencies = [ [[package]] name = "libloading" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" dependencies = [ "cfg-if 1.0.0", "winapi", @@ -3546,6 +3899,30 @@ version = "0.0.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d" +[[package]] +name = "linux-raw-sys" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f9f08d8963a6c613f4b1a78f4f4a4dbfadf8e6545b2d72861731e4858b8b47f" + +[[package]] +name = "local-channel" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f303ec0e94c6c54447f84f3b0ef7af769858a9c4ef56ef2a986d3dcd4c3fc9c" +dependencies = [ + "futures-core", + "futures-sink", + "futures-util", + "local-waker", +] + +[[package]] +name = "local-waker" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1" + [[package]] name = "lock_api" version = "0.4.7" @@ -3600,6 +3977,15 @@ dependencies = [ "libc", ] +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + [[package]] name = "match_cfg" version = "0.1.0" @@ -3623,9 +4009,9 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" [[package]] name = "matchit" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" +checksum = "3dfc802da7b1cf80aefffa0c7b2f77247c8b32206cc83c270b61264f5b360a80" [[package]] name = "maybe-owned" @@ -3654,7 +4040,7 @@ version = "0.6.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "480b5a5de855d11ff13195950bdc8b98b5e942ef47afc447f6615cdcc4e15d80" dependencies = [ - "rustix", + "rustix 0.35.12", ] [[package]] @@ -3666,6 +4052,15 @@ dependencies = [ "autocfg 1.1.0", ] +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg 1.1.0", +] + [[package]] name = "mime" version = "0.3.16" @@ -3690,9 +4085,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.3" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" dependencies = [ "adler", ] @@ -3706,7 +4101,7 @@ dependencies = [ "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -3720,12 +4115,12 @@ dependencies = [ [[package]] name = "mongodb" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b95afe97b0c799fdf69cd960272a2cb9662d077bd6efd84eb722bb9805d47554" +checksum = "b5a1df476ac9541b0e4fdc8e2cc48884e66c92c933cd17a1fd75e68caf75752e" dependencies = [ "async-trait", - "base64 0.13.0", + "base64 0.13.1", "bitflags", "bson", "chrono", @@ -3742,12 +4137,12 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "rustc_version_runtime", - "rustls 0.20.6", + "rustls", "rustls-pemfile 0.3.0", "serde", "serde_bytes", "serde_with", - "sha-1 0.10.0", + "sha-1", "sha2 0.10.2", "socket2", "stringprep", @@ -3755,13 +4150,13 @@ dependencies = [ "take_mut", "thiserror", "tokio", - "tokio-rustls 0.23.4", + "tokio-rustls", "tokio-util 0.7.3", "trust-dns-proto", "trust-dns-resolver", "typed-builder", "uuid 0.8.2", - "webpki-roots 0.22.3", + "webpki-roots", ] [[package]] @@ -3770,7 +4165,7 @@ version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f8f35e687561d5c1667590911e6698a8cb714a134a7505718a182e7bc9d3836" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "encoding_rs", "futures-util", "http 0.2.8", @@ -3829,19 +4224,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "ndk" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2032c77e030ddee34a6787a64166008da93f6a352b629261d0fee232b8742dd4" -dependencies = [ - "bitflags", - "jni-sys", - "ndk-sys", - "num_enum", - "thiserror", -] - [[package]] name = "ndk-context" version = "0.1.1" @@ -3849,51 +4231,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" [[package]] -name = "ndk-glue" -version = "0.6.2" +name = "net2" +version = "0.2.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d0c4a7b83860226e6b4183edac21851f05d5a51756e97a1144b7f5a6b63e65f" +checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae" dependencies = [ - "lazy_static", + "cfg-if 0.1.10", "libc", - "log", - "ndk", - "ndk-context", - "ndk-macro", - "ndk-sys", -] - -[[package]] -name = "ndk-macro" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0df7ac00c4672f9d5aece54ee3347520b7e20f158656c7db2e6de01902eb7a6c" -dependencies = [ - "darling 0.13.4", - "proc-macro-crate", - "proc-macro2 1.0.43", - "quote 1.0.21", - "syn 1.0.99", -] - -[[package]] -name = "ndk-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e5a6ae77c8ee183dcbbba6150e2e6b9f3f4196a7666c02a715a95692ec1fa97" -dependencies = [ - "jni-sys", + "winapi", ] [[package]] -name = "net2" -version = "0.2.37" +name = "nix" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae" +checksum = "e322c04a9e3440c327fca7b6c8a63e6890a32fa2ad689db972425f07e0d22abb" dependencies = [ - "cfg-if 0.1.10", + "autocfg 1.1.0", + "bitflags", + "cfg-if 1.0.0", "libc", - "winapi", + "memoffset 0.6.5", + "pin-utils", ] [[package]] @@ -3937,42 +4296,36 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.13.1" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" +checksum = "f6058e64324c71e02bc2b150e4f3bc8286db6c83092132ffa3f6b1eab0f9def5" dependencies = [ "hermit-abi 0.1.19", "libc", ] [[package]] -name = "num_enum" -version = "0.5.7" +name = "num_threads" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf5395665662ef45796a4ff5486c5d41d29e0c09640af4c5f17fd94ee2c119c9" +checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" dependencies = [ - "num_enum_derive", + "libc", ] [[package]] -name = "num_enum_derive" -version = "0.5.7" +name = "number_prefix" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0498641e53dd6ac1a4f22547548caa6864cc4933784319cd1775271c5a46ce" -dependencies = [ - "proc-macro-crate", - "proc-macro2 1.0.43", - "quote 1.0.21", - "syn 1.0.99", -] +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] -name = "num_threads" -version = "0.1.6" +name = "objc" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" dependencies = [ - "libc", + "malloc_buf", ] [[package]] @@ -3982,16 +4335,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ "crc32fast", - "hashbrown 0.12.1", + "hashbrown", "indexmap", "memchr", ] [[package]] name = "once_cell" -version = "1.14.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0" +checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860" [[package]] name = "opaque-debug" @@ -4005,7 +4358,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea3ebcd72a54701f56345f16785a6d3ac2df7e986d273eb4395c0b01db17952" dependencies = [ - "bstr", + "bstr 0.2.17", "winapi", ] @@ -4030,9 +4383,9 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" dependencies = [ - "proc-macro2 1.0.43", + 
"proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -4041,6 +4394,15 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" +[[package]] +name = "openssl-src" +version = "111.24.0+1.1.1s" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3498f259dab01178c6228c6b00dcef0ed2a2d5e20d648c017861227773ea4abd" +dependencies = [ + "cc", +] + [[package]] name = "openssl-sys" version = "0.9.74" @@ -4050,72 +4412,102 @@ dependencies = [ "autocfg 1.1.0", "cc", "libc", + "openssl-src", "pkg-config", "vcpkg", ] [[package]] name = "opentelemetry" -version = "0.17.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6105e89802af13fdf48c49d7646d3b533a70e536d818aae7e78ba0433d01acb8" +checksum = "69d6c3d7288a106c0a363e4b0e8d308058d56902adefb16f4936f417ffef086e" dependencies = [ - "async-trait", - "crossbeam-channel", - "futures-channel", - "futures-executor", - "futures-util", - "js-sys", - "lazy_static", - "percent-encoding", - "pin-project", - "rand 0.8.5", - "thiserror", - "tokio", - "tokio-stream", + "opentelemetry_api", + "opentelemetry_sdk", ] [[package]] name = "opentelemetry-datadog" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "457462dc4cd365992c574c79181ff11ee6f66c5cbfb15a352217b4e0b35eac34" +checksum = "171770efa142d2a19455b7e985037f560b2e75461f822dd1688bfd83c14856f6" dependencies = [ "async-trait", + "futures-core", "http 0.2.8", "indexmap", "itertools", - "lazy_static", + "once_cell", "opentelemetry", "opentelemetry-http", "opentelemetry-semantic-conventions", "reqwest", "rmp", "thiserror", + "url", ] [[package]] name = "opentelemetry-http" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "449048140ee61e28f57abe6e9975eedc1f3a29855c7407bd6c12b18578863379" +checksum = "1edc79add46364183ece1a4542592ca593e6421c60807232f5b8f7a31703825d" dependencies = [ "async-trait", - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", - "opentelemetry", + "opentelemetry_api", "reqwest", ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "985cc35d832d412224b2cffe2f9194b1b89b6aa5d0bef76d080dce09d90e62bd" +checksum = "9b02e0230abb0ab6636d18e2ba8fa02903ea63772281340ccac18e0af3ec9eeb" dependencies = [ "opentelemetry", ] +[[package]] +name = "opentelemetry_api" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c24f96e21e7acc813c7a8394ee94978929db2bcc46cf6b5014fc612bf7760c22" +dependencies = [ + "fnv", + "futures-channel", + "futures-util", + "indexmap", + "js-sys", + "once_cell", + "pin-project-lite 0.2.9", + "thiserror", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ca41c4933371b61c2a2f214bf16931499af4ec90543604ec828f7a625c09113" +dependencies = [ + "async-trait", + "crossbeam-channel", + "dashmap", + "fnv", + "futures-channel", + "futures-executor", + "futures-util", + "once_cell", + "opentelemetry_api", + "percent-encoding", + "rand 0.8.5", + "thiserror", + "tokio", + "tokio-stream", +] + [[package]] name = "ordered-float" version = "2.10.0" @@ 
-4127,9 +4519,9 @@ dependencies = [ [[package]] name = "os_info" -version = "3.4.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eca3ecae1481e12c3d9379ec541b238a16f0b75c9a409942daa8ec20dbfdb62" +checksum = "c4750134fb6a5d49afc80777394ad5d95b04bc12068c6abb92fae8f43817270f" dependencies = [ "log", "serde", @@ -4208,7 +4600,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -4249,17 +4641,26 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82a5ca643c2303ecb740d506539deba189e16f2754040a42901cd8105d0282d0" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "proc-macro2-diagnostics", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", +] + +[[package]] +name = "pem" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c64931a1a212348ec4f3b4362585eca7159d0d09cbdf4a7f74f02173596fd4" +dependencies = [ + "base64 0.13.1", ] [[package]] name = "percent-encoding" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "petgraph" @@ -4273,22 +4674,22 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -4326,12 +4727,12 @@ checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" [[package]] name = "poem" -version = "1.3.40" +version = "1.3.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d0fec4acc8779b696e3ff25527884fb17cda6cf59a249c57aa1af1e2f65b36" +checksum = "dc88a96f338947991534ac756e28bd05665a7dd40ad9c0c143cc5503ef5635e8" dependencies = [ "async-trait", - "bytes 1.2.1", + "bytes 1.3.0", "futures-util", "headers", "http 0.2.8", @@ -4352,19 +4753,18 @@ dependencies = [ "tokio-stream", "tokio-util 0.7.3", "tracing", - "typed-headers", ] [[package]] name = "poem-derive" -version = "1.3.40" +version = "1.3.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee7e20b5c7c573862cbc21e8f85682cc1f04766a318691837e8aa27df66857e6" +checksum = "d9bfb3ddf3eb162c2a2dc4dbdc610eaf56417cd4000fcda2686ccb354e2a1b2b" dependencies = [ "proc-macro-crate", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -4403,6 +4803,12 @@ dependencies = [ "universal-hash", ] +[[package]] +name = "portable-atomic" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"15eb2c6e362923af47e13c23ca5afb859e83d54452c55b0b9ac763b8f7c1ac16" + [[package]] name = "portpicker" version = "0.1.1" @@ -4418,16 +4824,43 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" +[[package]] +name = "predicates" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed6bd09a7f7e68f3f0bf710fb7ab9c4615a488b58b5f653382a687701e458c92" +dependencies = [ + "difflib", + "itertools", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72f883590242d3c6fc5bf50299011695fa6590c2c70eac95ee1bdb9a733ad1a2" + +[[package]] +name = "predicates-tree" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54ff541861505aabf6ea722d2131ee980b8276e10a1297b94e896dd8b621850d" +dependencies = [ + "predicates-core", + "termtree", +] + [[package]] name = "pretty_assertions" -version = "1.2.1" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c89f989ac94207d048d92db058e4f6ec7342b0971fc58d1271ca148b799b3563" +checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755" dependencies = [ - "ansi_term", "ctor", "diff", "output_vt100", + "yansi", ] [[package]] @@ -4436,8 +4869,8 @@ version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e1516508b396cefe095485fdce673007422f5e48e82934b7b423dc26aa5e6a4" dependencies = [ - "proc-macro2 1.0.43", - "syn 1.0.99", + "proc-macro2 1.0.47", + "syn 1.0.104", ] [[package]] @@ -4457,9 +4890,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", "version_check", ] @@ -4469,7 +4902,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "version_check", ] @@ -4491,9 +4924,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.43" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab" +checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" dependencies = [ "unicode-ident", ] @@ -4504,39 +4937,41 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4bf29726d67464d49fa6224a1d07936a8c08bb3fba727c7493f6cf1616fdaada" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", "version_check", "yansi", ] [[package]] name = "prost" -version = "0.11.0" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "399c3c31cdec40583bb68f0b18403400d01ec4289c383aa047560439952c4dd7" +checksum = "a0841812012b2d4a6145fae9a6af1534873c32aa67fff26bd09f8fa42c83f95a" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "prost-derive", ] [[package]] name = "prost-build" -version = "0.11.1" +version = "0.11.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f835c582e6bd972ba8347313300219fed5bfa52caf175298d860b61ff6069bb" +checksum = "1d8b442418ea0822409d9e7d047cbf1e7e9e1760b172bf9982cf29d517c93511" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "heck", "itertools", "lazy_static", "log", "multimap", "petgraph", + "prettyplease", "prost", "prost-types", "regex", + "syn 1.0.104", "tempfile", "which", ] @@ -4549,9 +4984,9 @@ checksum = "7345d5f0e08c0536d7ac7229952590239e77abf0a0100a1b1d890add6ea96364" dependencies = [ "anyhow", "itertools", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -4560,7 +4995,7 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dfaa718ad76a44b3415e6c4d53b17c8f99160dcb3a99b10470fce8ad43f6e3e" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "prost", ] @@ -4600,7 +5035,7 @@ version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", ] [[package]] @@ -4802,13 +5237,21 @@ dependencies = [ "rand_core 0.6.3", ] +[[package]] +name = "raw-window-handle" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed7e3d950b66e19e0c372f3fa3fbbcf85b1746b571f74e0c2af6042a5c93420a" +dependencies = [ + "cty", +] + [[package]] name = "rayon" -version = "1.5.3" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" +checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b" dependencies = [ - "autocfg 1.1.0", "crossbeam-deque", "either", "rayon-core", @@ -4816,9 +5259,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.3" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" +checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -4826,6 +5269,18 @@ dependencies = [ "num_cpus", ] +[[package]] +name = "rcgen" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbe84efe2f38dea12e9bfc1f65377fdf03e53a18cb3b995faedf7934c7e785b" +dependencies = [ + "pem", + "ring", + "time 0.3.11", + "yasna", +] + [[package]] name = "rdrand" version = "0.4.0" @@ -4870,9 +5325,9 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a043824e29c94169374ac5183ac0ed43f5724dc4556b19568007486bd840fa1f" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -4924,12 +5379,12 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.11" +version = "0.11.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75aa69a3f06bbcc66ede33af2af253c6f7a86b1ca0033f60c580a27074fbf92" +checksum = "68cc60575865c7831548863cc02356512e3f1dc2f3f82cb837d7fc4cc8f3c97c" dependencies = [ - "base64 0.13.0", - "bytes 1.2.1", + "base64 0.13.1", + "bytes 1.3.0", "encoding_rs", "futures-core", "futures-util", @@ -4937,44 +5392,43 @@ dependencies = [ "http 0.2.8", "http-body", "hyper", - "hyper-rustls 0.23.0", + 
"hyper-rustls", "hyper-tls", "ipnet", "js-sys", - "lazy_static", "log", "mime", "mime_guess", "native-tls", + "once_cell", "percent-encoding", "pin-project-lite 0.2.9", - "rustls 0.20.6", + "rustls", "rustls-pemfile 1.0.1", "serde", "serde_json", "serde_urlencoded", "tokio", "tokio-native-tls", - "tokio-rustls 0.23.4", + "tokio-rustls", "tokio-util 0.7.3", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots 0.22.3", + "webpki-roots", "winreg 0.10.1", ] [[package]] name = "reqwest-middleware" -version = "0.1.6" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69539cea4148dce683bec9dc95be3f0397a9bb2c248a49c8296a9d21659a8cdd" +checksum = "4a1c03e9011a8c59716ad13115550469e081e2e9892656b0ba6a47c907921894" dependencies = [ "anyhow", "async-trait", - "futures", "http 0.2.8", "reqwest", "serde", @@ -4984,9 +5438,9 @@ dependencies = [ [[package]] name = "reqwest-retry" -version = "0.1.5" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce246a729eaa6aff5e215aee42845bf5fed9893cc6cd51aeeb712f34e04dd9f3" +checksum = "e29d842a94e8ab9b581fd3b906053872aef2fb3e474cbd88712047895d2deee4" dependencies = [ "anyhow", "async-trait", @@ -5023,6 +5477,19 @@ dependencies = [ "rand 0.8.5", ] +[[package]] +name = "rexpect" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01ff60778f96fb5a48adbe421d21bf6578ed58c0872d712e7e08593c195adff8" +dependencies = [ + "comma", + "nix", + "regex", + "tempfile", + "thiserror", +] + [[package]] name = "rfc7239" version = "0.1.0" @@ -5080,7 +5547,7 @@ dependencies = [ "atomic", "atty", "binascii", - "bytes 1.2.1", + "bytes 1.3.0", "either", "figment", "futures", @@ -5116,10 +5583,10 @@ dependencies = [ "devise", "glob", "indexmap", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "rocket_http", - "syn 1.0.99", + "syn 1.0.104", "unicode-xid 0.2.3", ] @@ -5222,58 +5689,68 @@ checksum = "985947f9b6423159c4726323f373be0a21bdb514c5af06a849cb3d2dce2d01e8" dependencies = [ "bitflags", "errno", - "io-lifetimes", + "io-lifetimes 0.7.4", "itoa 1.0.2", "libc", - "linux-raw-sys", + "linux-raw-sys 0.0.46", "once_cell", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] -name = "rustls" -version = "0.19.1" +name = "rustix" +version = "0.36.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" +checksum = "0b1fbb4dfc4eb1d390c02df47760bb19a84bb80b301ecc947ab5406394d8223e" dependencies = [ - "base64 0.13.0", - "log", - "ring", - "sct 0.6.1", - "webpki 0.21.4", + "bitflags", + "errno", + "io-lifetimes 1.0.3", + "libc", + "linux-raw-sys 0.1.3", + "windows-sys 0.42.0", ] [[package]] name = "rustls" -version = "0.20.6" +version = "0.20.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" +checksum = "539a2bfe908f471bfa933876bd1eb6a19cf2176d375f82ef7f99530a40e48c2c" dependencies = [ "log", "ring", - "sct 0.7.0", - "webpki 0.22.0", + "sct", + "webpki", ] [[package]] name = "rustls-native-certs" -version = "0.5.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a07b7c1885bd8ed3831c289b7870b13ef46fe0e856d288c30d9cc17d75a2092" +checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" dependencies = [ 
"openssl-probe", - "rustls 0.19.1", + "rustls-pemfile 1.0.1", "schannel", "security-framework", ] +[[package]] +name = "rustls-pemfile" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9" +dependencies = [ + "base64 0.13.1", +] + [[package]] name = "rustls-pemfile" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ee86d63972a7c661d1536fefe8c3c8407321c3df668891286de28abcd087360" dependencies = [ - "base64 0.13.0", + "base64 0.13.1", ] [[package]] @@ -5282,14 +5759,14 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0864aeff53f8c05aa08d86e5ef839d3dfcf07aeba2db32f12db0ef716e87bd55" dependencies = [ - "base64 0.13.0", + "base64 0.13.1", ] [[package]] name = "rustrict" -version = "0.5.0" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ab6d404f2f6170969bf27865e33d708cbfed94a3b5def842c77394178bd7881" +checksum = "294846357ffbadaaa82996006626376f97b6327a3990da95458bbcb7c9f2e116" dependencies = [ "bitflags", "doc-comment", @@ -5303,9 +5780,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.7" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0a5f7c728f5d284929a1cccb5bc19884422bfe6ef4d6c409da2c41838983fcf" +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] name = "ryu" @@ -5321,25 +5798,25 @@ checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" [[package]] name = "salvo" -version = "0.34.3" +version = "0.37.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0d5860968c3504a1d13618078d2c833b23b2c7b194ce23e999891953d04b20c" +checksum = "b290f01b3b881afd34408b5823cb44f6717ed6b93a6e16a0113e9a49645ea8a7" dependencies = [ "salvo_core", ] [[package]] name = "salvo_core" -version = "0.34.3" +version = "0.37.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e104409bf6168999cae0d11d4340fdcb333592ddce2a5bd2c45e300e6a8e6f68" +checksum = "2fea63014bacaaaef1eaa1f28d90921cfbbee73a379974fca30fc698f64a8853" dependencies = [ - "Inflector", "async-compression", "async-trait", - "base64 0.13.0", - "bytes 1.2.1", + "base64 0.13.1", + "bytes 1.3.0", "cookie 0.16.0", + "cruet", "encoding_rs", "enumflags2", "fastrand", @@ -5371,17 +5848,17 @@ dependencies = [ [[package]] name = "salvo_macros" -version = "0.34.3" +version = "0.37.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1fe2ec840671e1427625d4dfb1c00177c64052fe0bfacf26964ab6d75446f45" +checksum = "b305a54f28b92483eabbfc91dd39bba62c840095b5513e83d31582c7e6bd8d44" dependencies = [ - "Inflector", + "cruet", "darling 0.14.1", "proc-macro-crate", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "regex", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -5400,7 +5877,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d6731146462ea25d9244b2ed5fd1d716d25c52e4d54aa4fb0f3c4e9854dbe2" dependencies = [ "lazy_static", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -5415,16 +5892,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" -[[package]] -name = "sct" -version = 
"0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "sct" version = "0.7.0" @@ -5484,9 +5951,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.147" +version = "1.0.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d193d69bae983fc11a79df82342761dfbf28a99fc8d203dca4c3c1b590948965" +checksum = "e53f64bb4ba0191d6d0676e1b141ca55047d83b74f5607e6d8eb88126c52c2dc" dependencies = [ "serde_derive", ] @@ -5512,13 +5979,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.147" +version = "1.0.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f1d362ca8fc9c3e3a7484440752472d68a6caa98f1ab81d99b5dfe517cec852" +checksum = "a55492425aa53521babf6137309e7d34c20bbfbbfcfe2c7f3a047fd1f6b92c0c" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -5541,9 +6008,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.86" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" +checksum = "020ff22c755c2ed3f8cf162dbb41a7268d934702f3ed3631656ea597e08fc3db" dependencies = [ "indexmap", "itoa 1.0.2", @@ -5551,6 +6018,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "184c643044780f7ceb59104cef98a5a6f12cb2288a7bc701ab93a362b49fd47d" +dependencies = [ + "serde", +] + [[package]] name = "serde_qs" version = "0.8.5" @@ -5591,9 +6067,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ "darling 0.13.4", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -5604,9 +6080,9 @@ checksum = "82fd5e7b5858ad96e99d440138f34f5b98e1b959ebcd3a1036203b30e78eb788" dependencies = [ "async-trait", "async-tungstenite", - "base64 0.13.0", + "base64 0.13.1", "bitflags", - "bytes 1.2.1", + "bytes 1.3.0", "cfg-if 1.0.0", "flate2", "futures", @@ -5624,19 +6100,6 @@ dependencies = [ "url", ] -[[package]] -name = "sha-1" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if 1.0.0", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", -] - [[package]] name = "sha-1" version = "0.10.0" @@ -5722,22 +6185,42 @@ dependencies = [ "dirs", ] +[[package]] +name = "shuttle-admin" +version = "0.8.0" +dependencies = [ + "anyhow", + "clap 4.0.27", + "dirs", + "reqwest", + "serde", + "serde_json", + "shuttle-common", + "tokio", + "toml", + "tracing", + "tracing-subscriber", +] + [[package]] name = "shuttle-codegen" -version = "0.7.0" +version = "0.8.0" dependencies = [ "pretty_assertions", "proc-macro-error", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", "trybuild", ] [[package]] name = "shuttle-common" -version = "0.7.0" +version = "0.8.0" dependencies = [ + "anyhow", + "async-trait", + 
"axum", "chrono", "comfy-table", "crossterm", @@ -5745,27 +6228,28 @@ dependencies = [ "http-serde", "hyper", "once_cell", + "reqwest", "rmp-serde", "rustrict", "serde", "serde_json", "strum", "tracing", - "uuid 1.1.2", + "uuid 1.2.2", ] [[package]] name = "shuttle-deployer" -version = "0.7.0" +version = "0.8.0" dependencies = [ "anyhow", "async-trait", "axum", - "bytes 1.2.1", + "bytes 1.3.0", "cargo", "cargo_metadata", "chrono", - "clap 3.2.17", + "clap 3.2.23", "crossbeam-channel", "ctor", "flate2", @@ -5777,6 +6261,7 @@ dependencies = [ "once_cell", "opentelemetry", "opentelemetry-datadog", + "opentelemetry-http", "pipe", "rand 0.8.5", "serde", @@ -5784,7 +6269,7 @@ dependencies = [ "shuttle-common", "shuttle-proto", "shuttle-service", - "sqlx 0.6.1", + "sqlx", "strum", "tar", "tempdir", @@ -5797,37 +6282,47 @@ dependencies = [ "tracing", "tracing-opentelemetry", "tracing-subscriber", - "uuid 1.1.2", + "uuid 1.2.2", ] [[package]] name = "shuttle-gateway" -version = "0.7.0" +version = "0.8.0" dependencies = [ + "acme2", "anyhow", "async-trait", "axum", - "base64 0.13.0", + "axum-server", + "base64 0.13.1", "bollard", "chrono", - "clap 4.0.18", + "clap 4.0.27", "colored", "fqdn", "futures", "http 0.2.8", "hyper", "hyper-reverse-proxy 0.5.2-dev (git+https://github.com/chesedo/hyper-reverse-proxy?branch=bug/host_header)", + "instant-acme", + "lazy_static", + "num_cpus", "once_cell", "opentelemetry", "opentelemetry-datadog", + "opentelemetry-http", + "pem", "portpicker", "rand 0.8.5", - "regex", + "rcgen", + "rustls", + "rustls-pemfile 1.0.1", "serde", "serde_json", "shuttle-common", "snailquote", - "sqlx 0.5.13", + "sqlx", + "strum", "tempfile", "tokio", "tower", @@ -5835,11 +6330,13 @@ dependencies = [ "tracing", "tracing-opentelemetry", "tracing-subscriber", + "ttl_cache", + "uuid 1.2.2", ] [[package]] name = "shuttle-proto" -version = "0.7.0" +version = "0.8.0" dependencies = [ "prost", "prost-types", @@ -5850,12 +6347,11 @@ dependencies = [ [[package]] name = "shuttle-provisioner" -version = "0.7.0" +version = "0.8.0" dependencies = [ "aws-config", "aws-sdk-rds", - "aws-smithy-types", - "clap 3.2.17", + "clap 3.2.23", "ctor", "fqdn", "mongodb", @@ -5865,7 +6361,7 @@ dependencies = [ "rand 0.8.5", "serde_json", "shuttle-proto", - "sqlx 0.6.1", + "sqlx", "thiserror", "tokio", "tonic", @@ -5881,7 +6377,7 @@ dependencies = [ "anyhow", "async-trait", "cap-std", - "clap 4.0.18", + "clap 4.0.27", "hyper", "rmp-serde", "serenity", @@ -5894,7 +6390,7 @@ dependencies = [ "tonic", "tracing", "tracing-subscriber", - "uuid 1.1.2", + "uuid 1.2.2", "wasi-common", "wasmtime", "wasmtime-wasi", @@ -5902,7 +6398,7 @@ dependencies = [ [[package]] name = "shuttle-secrets" -version = "0.7.0" +version = "0.8.0" dependencies = [ "async-trait", "shuttle-service", @@ -5911,8 +6407,9 @@ dependencies = [ [[package]] name = "shuttle-service" -version = "0.7.0" +version = "0.8.0" dependencies = [ + "actix-web", "anyhow", "async-std", "async-trait", @@ -5925,6 +6422,7 @@ dependencies = [ "futures", "hyper", "libloading", + "num_cpus", "pipe", "poem", "portpicker", @@ -5934,7 +6432,7 @@ dependencies = [ "serenity", "shuttle-codegen", "shuttle-common", - "sqlx 0.6.1", + "sqlx", "sync_wrapper", "thiserror", "thruster", @@ -5943,7 +6441,7 @@ dependencies = [ "tower", "tracing", "tracing-subscriber", - "uuid 1.1.2", + "uuid 1.2.2", "warp", ] @@ -6074,9 +6572,9 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.1.8" +version = "0.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4b7922be017ee70900be125523f38bdd644f4f06a1b16e8fa5a8ee8c34bffd4" +checksum = "f87e292b4291f154971a43c3774364e2cbcaec599d3f5bf6fa9d122885dbc38a" dependencies = [ "itertools", "nom", @@ -6085,85 +6583,28 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.5.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "551873805652ba0d912fec5bbb0f8b4cdd96baf8e2ebf5970e5671092966019b" -dependencies = [ - "sqlx-core 0.5.13", - "sqlx-macros 0.5.13", -] - -[[package]] -name = "sqlx" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "788841def501aabde58d3666fcea11351ec3962e6ea75dbcd05c84a71d68bcd1" -dependencies = [ - "sqlx-core 0.6.1", - "sqlx-macros 0.6.1", -] - -[[package]] -name = "sqlx-core" -version = "0.5.13" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e48c61941ccf5ddcada342cd59e3e5173b007c509e1e8e990dafc830294d9dc5" +checksum = "9249290c05928352f71c077cc44a464d880c63f26f7534728cca008e135c0428" dependencies = [ - "ahash", - "atoi 0.4.0", - "bitflags", - "byteorder", - "bytes 1.2.1", - "crc 2.1.0", - "crossbeam-queue", - "either", - "event-listener", - "flume", - "futures-channel", - "futures-core", - "futures-executor", - "futures-intrusive", - "futures-util", - "hashlink 0.7.0", - "hex 0.4.3", - "indexmap", - "itoa 1.0.2", - "libc", - "libsqlite3-sys", - "log", - "memchr", - "once_cell", - "paste", - "percent-encoding", - "rustls 0.19.1", - "serde", - "serde_json", - "sha2 0.10.2", - "smallvec", - "sqlformat", - "sqlx-rt 0.5.13", - "stringprep", - "thiserror", - "tokio-stream", - "url", - "webpki 0.21.4", - "webpki-roots 0.21.1", + "sqlx-core", + "sqlx-macros", ] [[package]] name = "sqlx-core" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c21d3b5e7cadfe9ba7cdc1295f72cc556c750b4419c27c219c0693198901f8e" +checksum = "dcbc16ddba161afc99e14d1713a453747a2b07fc097d2009f4c300ec99286105" dependencies = [ "ahash", - "atoi 1.0.0", - "base64 0.13.0", + "atoi", + "base64 0.13.1", "bitflags", "byteorder", - "bytes 1.2.1", + "bytes 1.3.0", "chrono", - "crc 3.0.0", + "crc", "crossbeam-queue", "dirs", "dotenvy", @@ -6175,7 +6616,7 @@ dependencies = [ "futures-executor", "futures-intrusive", "futures-util", - "hashlink 0.8.0", + "hashlink", "hex 0.4.3", "hkdf 0.12.3", "hmac 0.12.1", @@ -6192,75 +6633,44 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "sha-1 0.10.0", + "sha1 0.10.4", "sha2 0.10.2", "smallvec", "sqlformat", - "sqlx-rt 0.6.1", + "sqlx-rt", "stringprep", "thiserror", "tokio-stream", "url", - "uuid 1.1.2", + "uuid 1.2.2", "whoami", ] [[package]] name = "sqlx-macros" -version = "0.5.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc0fba2b0cae21fc00fe6046f8baa4c7fcb49e379f0f592b04696607f69ed2e1" -dependencies = [ - "dotenv", - "either", - "heck", - "once_cell", - "proc-macro2 1.0.43", - "quote 1.0.21", - "serde_json", - "sha2 0.10.2", - "sqlx-core 0.5.13", - "sqlx-rt 0.5.13", - "syn 1.0.99", - "url", -] - -[[package]] -name = "sqlx-macros" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4adfd2df3557bddd3b91377fc7893e8fa899e9b4061737cbade4e1bb85f1b45c" +checksum = "b850fa514dc11f2ee85be9d055c512aa866746adfacd1cb42d867d68e6a5b0d9" dependencies = [ "dotenvy", "either", 
"heck", "once_cell", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "serde_json", "sha2 0.10.2", - "sqlx-core 0.6.1", - "sqlx-rt 0.6.1", - "syn 1.0.99", + "sqlx-core", + "sqlx-rt", + "syn 1.0.104", "url", ] [[package]] name = "sqlx-rt" -version = "0.5.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4db708cd3e459078f85f39f96a00960bd841f66ee2a669e90bf36907f5a79aae" -dependencies = [ - "once_cell", - "tokio", - "tokio-rustls 0.22.0", -] - -[[package]] -name = "sqlx-rt" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7be52fc7c96c136cedea840ed54f7d446ff31ad670c9dea95ebcb998530971a3" +checksum = "24c5b2d25fa654cc5f841750b8e1cdedbe21189bf9a9382ee90bfa9dd3562396" dependencies = [ "native-tls", "once_cell", @@ -6327,11 +6737,11 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "serde", "serde_derive", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -6341,13 +6751,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11" dependencies = [ "base-x", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "serde", "serde_derive", "serde_json", "sha1 0.6.1", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -6397,10 +6807,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4faebde00e8ff94316c01800f9054fd2ba77d30d9e922541913051d1d978918b" dependencies = [ "heck", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "rustversion", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -6441,11 +6851,11 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.99" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" +checksum = "4ae548ec36cf198c0ef7710d3c230987c2d6d7bd98ad6edc0274462724c585ce" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "unicode-ident", ] @@ -6466,9 +6876,9 @@ dependencies = [ "bitflags", "cap-fs-ext", "cap-std", - "io-lifetimes", - "rustix", - "windows-sys", + "io-lifetimes 0.7.4", + "rustix 0.35.12", + "windows-sys 0.36.1", "winx", ] @@ -6556,6 +6966,22 @@ dependencies = [ "winapi", ] +[[package]] +name = "terminal_size" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb20089a8ba2b69debd491f8d2d023761cbf196e999218c591fa1e7e15a21907" +dependencies = [ + "rustix 0.36.3", + "windows-sys 0.42.0", +] + +[[package]] +name = "termtree" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95059e91184749cb66be6dc994f67f182b6d897cb3df74a5bf66b5e709295fd8" + [[package]] name = "test-context" version = "0.1.4" @@ -6574,7 +7000,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8901a55b0a7a06ebc4a674dcca925170da8e613fa3b163a1df804ed10afb154d" dependencies = [ "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -6589,12 +7015,9 @@ dependencies = [ [[package]] name = "textwrap" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb" -dependencies = [ - "terminal_size", -] +checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" @@ -6611,9 +7034,9 @@ version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -6627,13 +7050,13 @@ dependencies = [ [[package]] name = "thruster" -version = "1.2.6" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bc22b1c2267be6d1769c6d787936201341f03c915456ed8a8db8d40d665215f" +checksum = "910effe6fa8063f44f9f2f4d15d758270a679562414235c6781bf3b606b72682" dependencies = [ "async-trait", "bytes 0.5.6", - "bytes 1.2.1", + "bytes 1.3.0", "fnv", "futures", "http 0.1.21", @@ -6755,10 +7178,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fd3c141a1b43194f3f56a1411225df8646c55781d5f26db825b3d98507eb482f" dependencies = [ "proc-macro-hack", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "standback", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -6778,17 +7201,16 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" [[package]] name = "tokio" -version = "1.20.1" +version = "1.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a8325f63a7d4774dd041e363b2409ed1c5cbbd0f867795e661df066b2b0a581" +checksum = "d76ce4a75fb488c605c54bf610f221cea8b0dafb53333c1a67e8ee199dcd2ae3" dependencies = [ "autocfg 1.1.0", - "bytes 1.2.1", + "bytes 1.3.0", "libc", "memchr", "mio", "num_cpus", - "once_cell", "parking_lot 0.12.1", "pin-project-lite 0.2.9", "signal-hook-registry", @@ -6813,9 +7235,9 @@ version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -6828,26 +7250,15 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" -dependencies = [ - "rustls 0.19.1", - "tokio", - "webpki 0.21.4", -] - [[package]] name = "tokio-rustls" version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ - "rustls 0.20.6", + "rustls", "tokio", - "webpki 0.22.0", + "webpki", ] [[package]] @@ -6868,25 +7279,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53474327ae5e166530d17f2d956afcb4f8a004de581b3cae10f12006bc8163e3" dependencies = [ "async-stream", - "bytes 1.2.1", + "bytes 1.3.0", "futures-core", "tokio", "tokio-stream", ] -[[package]] -name = "tokio-tungstenite" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "511de3f85caf1c98983545490c3d09685fa8eb634e57eec22bb4db271f46cbd8" -dependencies = [ - "futures-util", - "log", - "pin-project", - "tokio", - "tungstenite 0.14.0", -] - [[package]] name = "tokio-tungstenite" version = "0.17.2" @@ -6898,7 +7296,7 @@ 
dependencies = [ "native-tls", "tokio", "tokio-native-tls", - "tungstenite 0.17.3", + "tungstenite", ] [[package]] @@ -6907,7 +7305,7 @@ version = "0.6.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "futures-core", "futures-io", "futures-sink", @@ -6923,7 +7321,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "futures-core", "futures-sink", "pin-project-lite 0.2.9", @@ -6957,6 +7355,12 @@ dependencies = [ "serde", ] +[[package]] +name = "toml_datetime" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808b51e57d0ef8f71115d8f3a01e7d3750d01c79cac4b3eda910f4389fdf92fd" + [[package]] name = "toml_edit" version = "0.14.4" @@ -6970,17 +7374,29 @@ dependencies = [ "serde", ] +[[package]] +name = "toml_edit" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1541ba70885967e662f69d31ab3aeca7b1aaecfcd58679590b893e9239c3646" +dependencies = [ + "combine", + "indexmap", + "itertools", + "toml_datetime", +] + [[package]] name = "tonic" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55b9af819e54b8f33d453655bef9b9acc171568fb49523078d0cc4e7484200ec" +checksum = "8f219fad3b929bef19b1f86fbc0358d35daed8f2cac972037ac0dc10bbb8d5fb" dependencies = [ "async-stream", "async-trait", "axum", - "base64 0.13.0", - "bytes 1.2.1", + "base64 0.13.1", + "bytes 1.3.0", "futures-core", "futures-util", "h2", @@ -7004,15 +7420,15 @@ dependencies = [ [[package]] name = "tonic-build" -version = "0.8.0" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fbcd2800e34e743b9ae795867d5f77b535d3a3be69fd731e39145719752df8c" +checksum = "31fa2c5e870bdce133847d15e075333e6e1ca3fff913001fede6754f3060e367" dependencies = [ "prettyplease", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "prost-build", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -7042,7 +7458,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aba3f3efabf7fb41fae8534fc20a817013dd1c12cb45441efb6c82e6556b4cd8" dependencies = [ "bitflags", - "bytes 1.2.1", + "bytes 1.3.0", "futures-core", "futures-util", "http 0.2.8", @@ -7059,9 +7475,9 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c530c8675c1dbf98facee631536fa116b5fb6382d7dd6dc1b118d970eafe3ba" dependencies = [ - "base64 0.13.0", + "base64 0.13.1", "bitflags", - "bytes 1.2.1", + "bytes 1.3.0", "futures-core", "futures-util", "http 0.2.8", @@ -7076,9 +7492,9 @@ dependencies = [ [[package]] name = "tower-layer" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" [[package]] name = "tower-service" @@ -7105,9 +7521,9 @@ version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ - "proc-macro2 1.0.43", + 
"proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", ] [[package]] @@ -7143,9 +7559,9 @@ dependencies = [ [[package]] name = "tracing-opentelemetry" -version = "0.17.4" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbbe89715c1dbbb790059e2565353978564924ee85017b5fff365c872ff6721f" +checksum = "21ebb87a95ea13271332df069020513ab70bdb5637ca42d6e492dc3bbbad48de" dependencies = [ "once_cell", "opentelemetry", @@ -7186,7 +7602,7 @@ dependencies = [ "futures-channel", "futures-io", "futures-util", - "idna", + "idna 0.2.3", "ipnet", "lazy_static", "log", @@ -7226,9 +7642,9 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] name = "trybuild" -version = "1.0.64" +version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f408301c7480f9e6294eb779cfc907f54bd901a9660ef24d7f233ed5376485" +checksum = "db29f438342820400f2d9acfec0d363e987a38b2950bdb50a7069ed17b2148ee" dependencies = [ "glob", "once_cell", @@ -7240,22 +7656,12 @@ dependencies = [ ] [[package]] -name = "tungstenite" -version = "0.14.0" +name = "ttl_cache" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0b2d8558abd2e276b0a8df5c05a2ec762609344191e5fd23e292c910e9165b5" +checksum = "4189890526f0168710b6ee65ceaedf1460c48a14318ceec933cb26baa492096a" dependencies = [ - "base64 0.13.0", - "byteorder", - "bytes 1.2.1", - "http 0.2.8", - "httparse", - "log", - "rand 0.8.5", - "sha-1 0.9.8", - "thiserror", - "url", - "utf-8", + "linked-hash-map", ] [[package]] @@ -7264,20 +7670,20 @@ version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" dependencies = [ - "base64 0.13.0", + "base64 0.13.1", "byteorder", - "bytes 1.2.1", + "bytes 1.3.0", "http 0.2.8", "httparse", "log", "native-tls", "rand 0.8.5", - "rustls 0.20.6", - "sha-1 0.10.0", + "rustls", + "sha-1", "thiserror", "url", "utf-8", - "webpki 0.22.0", + "webpki", ] [[package]] @@ -7295,22 +7701,9 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", -] - -[[package]] -name = "typed-headers" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3179a61e9eccceead5f1574fd173cf2e162ac42638b9bf214c6ad0baf7efa24a" -dependencies = [ - "base64 0.11.0", - "bytes 0.5.6", - "chrono", - "http 0.2.8", - "mime", + "syn 1.0.104", ] [[package]] @@ -7422,33 +7815,32 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" [[package]] name = "ureq" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9399fa2f927a3d327187cbd201480cee55bee6ac5d3c77dd27f0c6814cff16d5" +checksum = "b97acb4c28a254fd7a4aeec976c46a7fa404eac4d7c134b30c75144846d7cb8f" dependencies = [ - "base64 0.13.0", + "base64 0.13.1", "chunked_transfer", "log", "native-tls", "once_cell", - "rustls 0.20.6", + "rustls", "serde", "serde_json", "socks", "url", - "webpki 0.22.0", - "webpki-roots 0.22.3", + "webpki", + "webpki-roots", ] [[package]] name = "url" -version = "2.2.2" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" dependencies = [ "form_urlencoded", - "idna", - "matches", + "idna 0.3.0", "percent-encoding", "serde", ] @@ -7491,9 +7883,9 @@ dependencies = [ [[package]] name = "uuid" -version = "1.1.2" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f" +checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c" dependencies = [ "getrandom 0.2.7", "serde", @@ -7548,10 +7940,19 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", ] +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "waker-fn" version = "1.1.0" @@ -7581,11 +7982,11 @@ dependencies = [ [[package]] name = "warp" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cef4e1e9114a4b7f1ac799f16ce71c14de5778500c5450ec6b7b920c55b587e" +checksum = "ed7b8be92646fc3d18b06147664ebc5f48d222686cb11a8755e561a735aacc6d" dependencies = [ - "bytes 1.2.1", + "bytes 1.3.0", "futures-channel", "futures-util", "headers", @@ -7597,14 +7998,15 @@ dependencies = [ "multipart", "percent-encoding", "pin-project", + "rustls-pemfile 0.2.1", "scoped-tls", "serde", "serde_json", "serde_urlencoded", "tokio", "tokio-stream", - "tokio-tungstenite 0.15.0", - "tokio-util 0.6.10", + "tokio-tungstenite", + "tokio-util 0.7.3", "tower-service", "tracing", ] @@ -7641,14 +8043,14 @@ dependencies = [ "cap-time-ext", "fs-set-times", "io-extras", - "io-lifetimes", - "is-terminal", + "io-lifetimes 0.7.4", + "is-terminal 0.3.0", "once_cell", - "rustix", + "rustix 0.35.12", "system-interface", "tracing", "wasi-common", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -7662,11 +8064,11 @@ dependencies = [ "cap-rand", "cap-std", "io-extras", - "rustix", + "rustix 0.35.12", "thiserror", "tracing", "wiggle", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -7690,9 +8092,9 @@ dependencies = [ "bumpalo", "lazy_static", "log", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", "wasm-bindgen-shared", ] @@ -7724,9 +8126,9 @@ version = "0.2.81" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d94ac45fcf608c1f45ef53e748d35660f168490c10b23704c7779ab8f5c3048" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -7783,7 +8185,7 @@ dependencies = [ "wasmtime-jit", "wasmtime-runtime", "wat", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -7802,16 +8204,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c2101b211d9db7db8bcfb2ffa69e119fa99a20266d0e5f19bb989cb6c3280d7" dependencies = [ "anyhow", - "base64 0.13.0", + "base64 0.13.1", "bincode", "directories-next", "file-per-thread-logger", "log", - "rustix", + "rustix 0.35.12", "serde", "sha2 0.9.9", "toml", - "windows-sys", + "windows-sys 
0.36.1", "zstd", ] @@ -7863,9 +8265,9 @@ checksum = "abb9b7b94f7b40d98665feca2338808cf449fa671d01be7176861f8d9aa4a012" dependencies = [ "cc", "cfg-if 1.0.0", - "rustix", + "rustix 0.35.12", "wasmtime-asm-macros", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -7884,14 +8286,14 @@ dependencies = [ "log", "object", "rustc-demangle", - "rustix", + "rustix 0.35.12", "serde", "target-lexicon", "thiserror", "wasmtime-environ", "wasmtime-jit-debug", "wasmtime-runtime", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -7902,7 +8304,7 @@ checksum = "bee06d206bcf7a875eacd1e1e957c2a63f64a92934d2535dd8e15cde6d3a9ffe" dependencies = [ "object", "once_cell", - "rustix", + "rustix 0.35.12", ] [[package]] @@ -7919,16 +8321,16 @@ dependencies = [ "log", "mach", "memfd", - "memoffset", + "memoffset 0.6.5", "paste", "rand 0.8.5", - "rustix", + "rustix 0.35.12", "thiserror", "wasmtime-asm-macros", "wasmtime-environ", "wasmtime-fiber", "wasmtime-jit-debug", - "windows-sys", + "windows-sys 0.36.1", ] [[package]] @@ -7998,28 +8400,20 @@ dependencies = [ [[package]] name = "webbrowser" -version = "0.7.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc6a3cffdb686fbb24d9fb8f03a213803277ed2300f11026a3afe1f108dc021b" +checksum = "2a0cc7962b5aaa0dfcebaeef0161eec6edf5f4606c12e6777fd7d392f52033a5" dependencies = [ "jni", - "ndk-glue", + "ndk-context", + "objc", + "raw-window-handle", "url", "web-sys", "widestring", "winapi", ] -[[package]] -name = "webpki" -version = "0.21.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" -dependencies = [ - "ring", - "untrusted", -] - [[package]] name = "webpki" version = "0.22.0" @@ -8030,22 +8424,13 @@ dependencies = [ "untrusted", ] -[[package]] -name = "webpki-roots" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aabe153544e473b775453675851ecc86863d2a81d786d741f6b76778f2a48940" -dependencies = [ - "webpki 0.21.4", -] - [[package]] name = "webpki-roots" version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf" dependencies = [ - "webpki 0.22.0", + "webpki", ] [[package]] @@ -8107,10 +8492,10 @@ checksum = "0321263a6b1ba1e0a97174524891a14907cee68cfa183fd5389088dffbeab668" dependencies = [ "anyhow", "heck", - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", "shellexpand", - "syn 1.0.99", + "syn 1.0.104", "witx", ] @@ -8120,9 +8505,9 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa3d3794e5d68ef69f30e65f267c6bf18c920750d3ccd2a3ac04e77d95f66b96" dependencies = [ - "proc-macro2 1.0.43", + "proc-macro2 1.0.47", "quote 1.0.21", - "syn 1.0.99", + "syn 1.0.104", "wiggle-generate", ] @@ -8163,43 +8548,100 @@ version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" dependencies = [ - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_msvc", + "windows_aarch64_msvc 0.36.1", + "windows_i686_gnu 0.36.1", + "windows_i686_msvc 0.36.1", + "windows_x86_64_gnu 0.36.1", + "windows_x86_64_msvc 0.36.1", +] + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc 0.42.0", + "windows_i686_gnu 0.42.0", + "windows_i686_msvc 0.42.0", + "windows_x86_64_gnu 0.42.0", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc 0.42.0", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" + [[package]] name = "windows_aarch64_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" + [[package]] name = "windows_i686_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" +[[package]] +name = "windows_i686_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" + [[package]] name = "windows_i686_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +[[package]] +name = "windows_i686_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" + [[package]] name = "windows_x86_64_gnu" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" + [[package]] name = "windows_x86_64_msvc" version = "0.36.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" + [[package]] name = "winreg" version = "0.7.0" @@ -8225,8 +8667,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7b01e010390eb263a4518c8cebf86cb67469d1511c00b749a47b64c39e8054d" dependencies = [ "bitflags", - "io-lifetimes", - "windows-sys", + "io-lifetimes 0.7.4", + "windows-sys 0.36.1", ] [[package]] @@ -8252,9 +8694,9 @@ dependencies = [ [[package]] name = "xmlparser" -version = "0.13.3" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "114ba2b24d2167ef6d67d7d04c8cc86522b87f490025f39f0303b7db5bf5e3d8" +checksum 
= "4d25c75bf9ea12c4040a97f829154768bbbce366287e2dc044af160cd79a13fd" [[package]] name = "yansi" @@ -8262,6 +8704,15 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +[[package]] +name = "yasna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346d34a236c9d3e5f3b9b74563f238f955bbd05fa0b8b4efa53c130c43982f4c" +dependencies = [ + "time 0.3.11", +] + [[package]] name = "zeroize" version = "1.5.5" @@ -8299,12 +8750,12 @@ dependencies = [ [[patch.unused]] name = "shuttle-aws-rds" -version = "0.7.0" +version = "0.8.0" [[patch.unused]] name = "shuttle-persist" -version = "0.7.0" +version = "0.8.0" [[patch.unused]] name = "shuttle-shared-db" -version = "0.7.0" +version = "0.8.0" diff --git a/Cargo.toml b/Cargo.toml index 57fa71713..5b2ed622d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,5 +1,6 @@ [workspace] members = [ + "admin", "cargo-shuttle", "codegen", "common", @@ -10,6 +11,7 @@ members = [ "runtime", "service" ] + exclude = [ "e2e", "examples", @@ -17,5 +19,30 @@ exclude = [ "resources/persist", "resources/secrets", "resources/shared-db", - "tmp" + "tmp", + "resources/static-folder" ] + +[workspace.package] +version = "0.8.0" +edition = "2021" +license = "Apache-2.0" + +# https://doc.rust-lang.org/cargo/reference/workspaces.html#the-workspacedependencies-table +[workspace.dependencies] +shuttle-codegen = { path = "codegen", version = "0.8.0" } +shuttle-common = { path = "common", version = "0.8.0" } +shuttle-proto = { path = "proto", version = "0.8.0" } +shuttle-service = { path = "service", version = "0.8.0" } + +anyhow = "1.0.66" +async-trait = "0.1.58" +axum = "0.6.0" +chrono = "0.4.23" +once_cell = "1.16.0" +uuid = "1.2.2" +thiserror = "1.0.37" +serde = "1.0.148" +serde_json = "1.0.89" +tracing = "0.1.37" +tracing-subscriber = "0.3.16" diff --git a/Containerfile b/Containerfile index 0d7f05959..b866f55b4 100644 --- a/Containerfile +++ b/Containerfile @@ -1,5 +1,5 @@ #syntax=docker/dockerfile-upstream:1.4.0-rc1 -FROM rust:1.63.0-buster as shuttle-build +FROM rust:1.65.0-buster as shuttle-build RUN apt-get update &&\ apt-get install -y curl # download protoc binary and unzip it in usr/bin @@ -23,24 +23,18 @@ FROM shuttle-build AS builder COPY --from=planner /build/recipe.json recipe.json RUN cargo chef cook --recipe-path recipe.json COPY --from=cache /build . 
-ARG crate -RUN cargo build --bin ${crate} +ARG folder +RUN cargo build --bin shuttle-${folder} -FROM rust:1.63.0-buster as shuttle-common +FROM rust:1.65.0-buster as shuttle-common RUN apt-get update &&\ apt-get install -y curl RUN rustup component add rust-src COPY --from=cache /build/ /usr/src/shuttle/ FROM shuttle-common -ARG crate -SHELL ["/bin/bash", "-c"] -RUN mkdir -p $CARGO_HOME; \ -echo $'[patch.crates-io] \n\ -shuttle-service = { path = "/usr/src/shuttle/service" } \n\ -shuttle-aws-rds = { path = "/usr/src/shuttle/resources/aws-rds" } \n\ -shuttle-persist = { path = "/usr/src/shuttle/resources/persist" } \n\ -shuttle-shared-db = { path = "/usr/src/shuttle/resources/shared-db" } \n\ -shuttle-secrets = { path = "/usr/src/shuttle/resources/secrets" }' > $CARGO_HOME/config.toml -COPY --from=builder /build/target/debug/${crate} /usr/local/bin/service +ARG folder +COPY ${folder}/prepare.sh /prepare.sh +RUN /prepare.sh +COPY --from=builder /build/target/debug/shuttle-${folder} /usr/local/bin/service ENTRYPOINT ["/usr/local/bin/service"] diff --git a/Makefile b/Makefile index aa4cb75cc..ee94af8f2 100644 --- a/Makefile +++ b/Makefile @@ -21,6 +21,9 @@ endif BUILDX_FLAGS=$(BUILDX_OP) $(PLATFORM_FLAGS) $(CACHE_FLAGS) TAG?=$(shell git describe --tags) +BACKEND_TAG?=$(TAG) +DEPLOYER_TAG?=$(TAG) +PROVISIONER_TAG?=$(TAG) DOCKER?=docker @@ -37,30 +40,38 @@ ifeq ($(PROD),true) DOCKER_COMPOSE_FILES=-f docker-compose.yml STACK=shuttle-prod APPS_FQDN=shuttleapp.rs -DB_FQDN=pg.shuttle.rs +DB_FQDN=db.shuttle.rs CONTAINER_REGISTRY=public.ecr.aws/shuttle +DD_ENV=production +# make sure we only ever go to production with `--tls=enable` +USE_TLS=enable else DOCKER_COMPOSE_FILES=-f docker-compose.yml -f docker-compose.dev.yml -STACK=shuttle-dev +STACK?=shuttle-dev APPS_FQDN=unstable.shuttleapp.rs -DB_FQDN=pg.unstable.shuttle.rs +DB_FQDN=db.unstable.shuttle.rs CONTAINER_REGISTRY=public.ecr.aws/shuttle-dev +DD_ENV=unstable +USE_TLS?=disable endif POSTGRES_EXTRA_PATH?=./extras/postgres -POSTGRES_TAG?=latest +POSTGRES_TAG?=14 + +PANAMAX_EXTRA_PATH?=./extras/panamax +PANAMAX_TAG?=1.0.6 RUST_LOG?=debug -DOCKER_COMPOSE_ENV=STACK=$(STACK) BACKEND_TAG=$(TAG) PROVISIONER_TAG=$(TAG) POSTGRES_TAG=14 APPS_FQDN=$(APPS_FQDN) DB_FQDN=$(DB_FQDN) POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) RUST_LOG=$(RUST_LOG) CONTAINER_REGISTRY=$(CONTAINER_REGISTRY) MONGO_INITDB_ROOT_USERNAME=$(MONGO_INITDB_ROOT_USERNAME) MONGO_INITDB_ROOT_PASSWORD=$(MONGO_INITDB_ROOT_PASSWORD) +DOCKER_COMPOSE_ENV=STACK=$(STACK) BACKEND_TAG=$(BACKEND_TAG) DEPLOYER_TAG=$(DEPLOYER_TAG) PROVISIONER_TAG=$(PROVISIONER_TAG) POSTGRES_TAG=${POSTGRES_TAG} PANAMAX_TAG=${PANAMAX_TAG} APPS_FQDN=$(APPS_FQDN) DB_FQDN=$(DB_FQDN) POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) RUST_LOG=$(RUST_LOG) CONTAINER_REGISTRY=$(CONTAINER_REGISTRY) MONGO_INITDB_ROOT_USERNAME=$(MONGO_INITDB_ROOT_USERNAME) MONGO_INITDB_ROOT_PASSWORD=$(MONGO_INITDB_ROOT_PASSWORD) DD_ENV=$(DD_ENV) USE_TLS=$(USE_TLS) -.PHONY: images clean src up down deploy shuttle-% postgres docker-compose.rendered.yml test +.PHONY: images clean src up down deploy shuttle-% postgres docker-compose.rendered.yml test bump-% deploy-examples publish publish-% --validate-version clean: rm .shuttle-* rm docker-compose.rendered.yml -images: shuttle-provisioner shuttle-deployer shuttle-gateway postgres +images: shuttle-provisioner shuttle-deployer shuttle-gateway postgres panamax postgres: docker buildx build \ @@ -70,8 +81,16 @@ postgres: -f $(POSTGRES_EXTRA_PATH)/Containerfile \ $(POSTGRES_EXTRA_PATH) +panamax: + docker buildx build \ + 
--build-arg PANAMAX_TAG=$(PANAMAX_TAG) \ + --tag $(CONTAINER_REGISTRY)/panamax:$(PANAMAX_TAG) \ + $(BUILDX_FLAGS) \ + -f $(PANAMAX_EXTRA_PATH)/Containerfile \ + $(PANAMAX_EXTRA_PATH) + docker-compose.rendered.yml: docker-compose.yml docker-compose.dev.yml - $(DOCKER_COMPOSE_ENV) $(DOCKER_COMPOSE) $(DOCKER_COMPOSE_FILES) -p $(STACK) config > $@ + $(DOCKER_COMPOSE_ENV) $(DOCKER_COMPOSE) $(DOCKER_COMPOSE_FILES) $(DOCKER_COMPOSE_CONFIG_FLAGS) -p $(STACK) config > $@ deploy: docker-compose.yml $(DOCKER_COMPOSE_ENV) docker stack deploy -c $< $(STACK) @@ -79,18 +98,129 @@ deploy: docker-compose.yml test: cd e2e; POSTGRES_PASSWORD=$(POSTGRES_PASSWORD) APPS_FQDN=$(APPS_FQDN) cargo test $(CARGO_TEST_FLAGS) -- --nocapture -up: docker-compose.rendered.yml images - CONTAINER_REGISTRY=$(CONTAINER_REGISTRY) $(DOCKER_COMPOSE) -f $< -p $(STACK) up -d +up: docker-compose.rendered.yml + CONTAINER_REGISTRY=$(CONTAINER_REGISTRY) $(DOCKER_COMPOSE) -f $< -p $(STACK) up -d $(DOCKER_COMPOSE_FLAGS) down: docker-compose.rendered.yml - CONTAINER_REGISTRY=$(CONTAINER_REGISTRY) $(DOCKER_COMPOSE) -f $< -p $(STACK) down + CONTAINER_REGISTRY=$(CONTAINER_REGISTRY) $(DOCKER_COMPOSE) -f $< -p $(STACK) down $(DOCKER_COMPOSE_FLAGS) shuttle-%: ${SRC} Cargo.lock docker buildx build \ - --build-arg crate=shuttle-$(*) \ + --build-arg folder=$(*) \ --tag $(CONTAINER_REGISTRY)/$(*):$(COMMIT_SHA) \ --tag $(CONTAINER_REGISTRY)/$(*):$(TAG) \ --tag $(CONTAINER_REGISTRY)/$(*):latest \ $(BUILDX_FLAGS) \ -f Containerfile \ . + +# Bunch of targets to make bumping the shuttle version easier +# +# Dependencies: git, cargo-edit, fastmod, ripgrep +# Usage: make bump-version current=0.6.3 version=0.7.0 +bump-version: --validate-version + git checkout development + git fetch --all + git pull upstream + git checkout -b "chore/v$(version)" + cargo set-version --workspace "$(version)" + + $(call next, bump-resources) + +bump-resources: + git commit -m "chore: v$(version)" + fastmod --fixed-strings $(current) $(version) resources + + $(call next, bump-examples) + +bump-examples: + git commit -m "chore: resources v$(version)" + fastmod --fixed-strings $(current) $(version) examples + + $(call next, bump-misc) + +bump-misc: + git commit -m "docs: v$(version)" + fastmod --fixed-strings $(current) $(version) + + $(call next, bump-final) + +bump-final: + git commit -m "misc: v$(version)" + git push --set-upstream origin $$(git rev-parse --abbrev-ref HEAD) + + echo "Make pull request and confirm everything is okay. 
Then run:" + echo "make publish" + +# Deploy all our example using the command set in shuttle-command +# Usage: make deploy-example shuttle-command="cargo shuttle" -j 2 +deploy-examples: deploy-examples/rocket/hello-world \ + deploy-examples/rocket/persist \ + deploy-examples/rocket/postgres \ + deploy-examples/rocket/secrets \ + deploy-examples/rocket/authentication \ + deploy-examples/axum/hello-world \ + deploy-examples/axum/websocket \ + deploy-examples/poem/hello-world \ + deploy-examples/poem/mongodb \ + deploy-examples/poem/postgres \ + deploy-examples/salvo/hello-world \ + deploy-examples/tide/hello-world \ + deploy-examples/tide/postgres \ + deploy-examples/tower/hello-world \ + deploy-examples/warp/hello-world \ + + echo "All example have been redeployed" + +deploy-examples/%: + cd examples/$(*); $(shuttle-command) project rm || echo -e "\x1B[33m>> Nothing to remove for $*\x1B[39m" + sleep 5 + cd examples/$(*); $(shuttle-command) project new + sleep 5 + cd examples/$(*); $(shuttle-command) deploy + +define next + cargo check # To update Cargo.lock + git add --all + git --no-pager diff --staged + + echo -e "\x1B[36m>> Is this correct?\x1B[39m" + read yn; if [ $$yn != "y" ]; then echo "Fix the issues then continue with:"; echo "make version=$(version) current=$(current) $1"; exit 2; fi + + make $1 +endef + +# Publish all our crates to crates.io +# See CONTRIBUTING.md for the dependency graph +# Usage: make publish -j 4 +publish: publish-resources publish-cargo-shuttle + echo "The branch can now be safely merged" + +publish-resources: publish-resources/aws-rds \ + publish-resources/persist \ + publish-resources/shared-db + publish-resources/static-folder + +publish-cargo-shuttle: publish-resources/secrets + cd cargo-shuttle; cargo publish + sleep 10 # Wait for crates.io to update + +publish-service: publish-codegen publish-common + cd service; cargo publish + sleep 10 # Wait for crates.io to update + +publish-codegen: + cd codegen; cargo publish + sleep 10 # Wait for crates.io to update + +publish-common: + cd common; cargo publish + sleep 10 # Wait for crates.io to update + +publish-resources/%: publish-service + cd resources/$(*); cargo publish + sleep 10 # Wait for crates.io to update + +--validate-version: + echo "$(version)" | rg -q "\d+\.\d+\.\d+" || { echo "version argument must be in the form x.y.z"; exit 1; } + echo "$(current)" | rg -q "\d+\.\d+\.\d+" || { echo "current argument must be in the form x.y.z"; exit 1; } diff --git a/README.md b/README.md index 16af5b211..5349720f0 100644 --- a/README.md +++ b/README.md @@ -26,9 +26,9 @@ Shuttle is built for productivity, reliability and performance: - Zero-Configuration support for Rust using annotations - Automatic resource provisioning (databases, caches, subdomains, etc.) 
via [Infrastructure-From-Code](https://www.shuttle.rs/blog/2022/05/09/ifc) -- First-class support for popular Rust frameworks ([Rocket](https://docs.shuttle.rs/guide/rocket-examples.html), [Axum](https://docs.shuttle.rs/guide/axum-examples.html), - [Tide](https://docs.shuttle.rs/guide/tide-examples.html), [Poem](https://docs.shuttle.rs/guide/poem-examples.html) and [Tower](https://docs.shuttle.rs/guide/tower-examples.html)) -- Support for deploying Discord bots using [Serenity](https://docs.shuttle.rs/guide/serenity-examples.html) +- First-class support for popular Rust frameworks ([Rocket](https://docs.shuttle.rs/examples/rocket), [Axum](https://docs.shuttle.rs/examples/axum), + [Tide](https://docs.shuttle.rs/examples/tide), [Poem](https://docs.shuttle.rs/examples/poem) and [Tower](https://docs.shuttle.rs/examples/tower)) +- Support for deploying Discord bots using [Serenity](https://docs.shuttle.rs/examples/serenity) - Scalable hosting (with optional self-hosting) 📖 Check out our documentation to get started quickly: [docs.shuttle.rs](https://docs.shuttle.rs) @@ -82,7 +82,7 @@ For the full documentation, visit [our docs](https://docs.shuttle.rs). Contributing to shuttle is highly encouraged! -If you want to setup a local environment to test code changes to core `shuttle` packages, or want to contribute to the project check out [our docs](https://docs.shuttle.rs/guide/contribute.html). +If you want to setup a local environment to test code changes to core `shuttle` packages, or want to contribute to the project check out [our docs](https://docs.shuttle.rs/community/contribute). Even if you are not planning to submit any code; joining our [Discord server](https://discord.gg/shuttle) and providing feedback helps us a lot! diff --git a/admin/Cargo.toml b/admin/Cargo.toml new file mode 100644 index 000000000..00bcefa0e --- /dev/null +++ b/admin/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "shuttle-admin" +version = "0.8.0" +edition = "2021" + +[dependencies] +anyhow = { workspace = true } +clap = { version = "4.0.27", features = [ "derive", "env" ] } +dirs = "4.0.0" +reqwest = { version = "0.11.13", features = ["json"] } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +tokio = { version = "1.22.0", features = ["macros", "rt-multi-thread"] } +toml = "0.5.9" +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter"] } + +[dependencies.shuttle-common] +workspace = true +features = ["models"] diff --git a/admin/README.md b/admin/README.md new file mode 100644 index 000000000..6d56d6266 --- /dev/null +++ b/admin/README.md @@ -0,0 +1,34 @@ +_Small utility used by the shuttle admin for common tasks_ + +## How to test custom domain certificates locally +For local testing it is easiest to use the [Pebble](https://github.com/letsencrypt/pebble) server. So install it using +whatever method works for your system. It is included in the nix environment if you use it though. + +To start the `Pebble` server you'll need some config, a root CA and a certificate signed with the CA. The easiest way +to get all these is to get them from the [pebble/test](https://github.com/letsencrypt/pebble/tree/main/test) folder. + +You should now be able to start `Pebble` locally. 
If you used the `pebble/test` folder, then your important +variables are as follows: + +- *Server url*: `https://localhost:14000/dir` +- *CA location*: `$PWD/test/certs/pebble.minica.pem` + +Next you'll need `gateway` to use this CA when checking the TLS connection with localhost. This can be done by +setting the `SSL_CERT_FILE` environment variable. + +``` shell +export SSL_CERT_FILE="$PWD/test/certs/pebble.minica.pem" +``` + +When `gateway` now runs, it will use this root certificate to check the certificate presented by `Pebble`. + +Now you'll want this admin client to use the local `Pebble` server when making new accounts. Therefore, use the +following command when you create new accounts + +``` shell +cargo run -p shuttle-admin -- --api-url http://localhost:8001 acme create-account --acme-server https://localhost:14000/dir --email +``` + +Save the account JSON in a local file and use it to test creating a new certificate. However, you'll need the FQDN you're +using for testing to resolve to your local machine. So create an `A` record for it on your DNS with the value +`127.0.0.1`. And Bob's your uncle 🎉 diff --git a/admin/src/args.rs b/admin/src/args.rs new file mode 100644 index 000000000..e7ae68122 --- /dev/null +++ b/admin/src/args.rs @@ -0,0 +1,71 @@ +use std::path::PathBuf; + +use clap::{Parser, Subcommand}; +use shuttle_common::project::ProjectName; + +#[derive(Parser, Debug)] +pub struct Args { + /// run this command against the api at the supplied url + #[arg(long, default_value = "https://api.shuttle.rs", env = "SHUTTLE_API")] + pub api_url: String, + + #[command(subcommand)] + pub command: Command, +} + +#[derive(Subcommand, Debug)] +pub enum Command { + /// Try to revive projects in the crashed state + Revive, + + /// Manage custom domains + #[command(subcommand)] + Acme(AcmeCommand), + + /// Manage project names + ProjectNames, + + /// Viewing and managing stats + #[command(subcommand)] + Stats(StatsCommand), +} + +#[derive(Subcommand, Debug)] +pub enum AcmeCommand { + /// Create a new ACME account. Should only be needed once + CreateAccount { + /// Email for managing all certificates + #[arg(long)] + email: String, + + /// Acme server to create account on. 
Gateway will default to LetsEncrypt + #[arg(long)] + acme_server: Option<String>, + }, + + /// Request a certificate for a FQDN + RequestCertificate { + /// Fqdn to request certificate for + #[arg(long)] + fqdn: String, + + /// Project to request certificate for + #[arg(long)] + project: ProjectName, + + /// Path to acme credentials file + /// This should have been created with `acme create-account` + #[arg(long)] + credentials: PathBuf, + }, +} + +#[derive(Subcommand, Debug)] +pub enum StatsCommand { + /// View load stats + Load { + /// Clear the loads counter + #[arg(long)] + clear: bool, + }, +} diff --git a/admin/src/client.rs b/admin/src/client.rs new file mode 100644 index 000000000..43e756a78 --- /dev/null +++ b/admin/src/client.rs @@ -0,0 +1,114 @@ +use anyhow::{Context, Result}; +use serde::{de::DeserializeOwned, Serialize}; +use shuttle_common::{ + models::{project, stats, ToJson}, + project::ProjectName, +}; +use tracing::trace; + +pub struct Client { + api_url: String, + api_key: String, +} + +impl Client { + pub fn new(api_url: String, api_key: String) -> Self { + Self { api_url, api_key } + } + + pub async fn revive(&self) -> Result<String> { + self.post("/admin/revive", Option::<String>::None).await + } + + pub async fn acme_account_create( + &self, + email: &str, + acme_server: Option<String>, + ) -> Result<serde_json::Value> { + let path = format!("/admin/acme/{email}"); + self.post(&path, Some(acme_server)).await + } + + pub async fn acme_request_certificate( + &self, + fqdn: &str, + project_name: &ProjectName, + credentials: &serde_json::Value, + ) -> Result<String> { + let path = format!("/admin/acme/request/{project_name}/{fqdn}"); + self.post(&path, Some(credentials)).await + } + + pub async fn get_projects(&self) -> Result<Vec<project::AdminResponse>> { + self.get("/admin/projects").await + } + + pub async fn get_load(&self) -> Result<stats::LoadResponse> { + self.get("/admin/stats/load").await + } + + pub async fn clear_load(&self) -> Result<stats::LoadResponse> { + self.delete("/admin/stats/load", Option::<String>::None) + .await + } + + async fn post<T: Serialize, R: DeserializeOwned>( + &self, + path: &str, + body: Option<T>, + ) -> Result<R> { + trace!(self.api_key, "using api key"); + + let mut builder = reqwest::Client::new() + .post(format!("{}{}", self.api_url, path)) + .bearer_auth(&self.api_key); + + if let Some(body) = body { + builder = builder.json(&body); + } + + builder + .send() + .await + .context("failed to make post request")? + .to_json() + .await + .context("failed to extract json body from post response") + } + + async fn delete<T: Serialize, R: DeserializeOwned>( + &self, + path: &str, + body: Option<T>, + ) -> Result<R> { + trace!(self.api_key, "using api key"); + + let mut builder = reqwest::Client::new() + .delete(format!("{}{}", self.api_url, path)) + .bearer_auth(&self.api_key); + + if let Some(body) = body { + builder = builder.json(&body); + } + + builder + .send() + .await + .context("failed to make delete request")? + .to_json() + .await + .context("failed to extract json body from delete response") + } + + async fn get<R: DeserializeOwned>(&self, path: &str) -> Result<R> { + reqwest::Client::new() + .get(format!("{}{}", self.api_url, path)) + .bearer_auth(&self.api_key) + .send() + .await + .context("failed to make post request")? 
+ .to_json() + .await + .context("failed to post text body from response") + } +} diff --git a/admin/src/config.rs b/admin/src/config.rs new file mode 100644 index 000000000..5b63f955a --- /dev/null +++ b/admin/src/config.rs @@ -0,0 +1,18 @@ +use std::{fs, path::PathBuf}; + +pub fn get_api_key() -> String { + let data = fs::read_to_string(config_path()).expect("shuttle config file to exist"); + let toml: toml::Value = toml::from_str(&data).expect("to parse shuttle config file"); + + toml["api_key"] + .as_str() + .expect("api key to be a string") + .to_string() +} + +fn config_path() -> PathBuf { + dirs::config_dir() + .expect("system to have a config path") + .join("shuttle") + .join("config.toml") +} diff --git a/admin/src/lib.rs b/admin/src/lib.rs new file mode 100644 index 000000000..f1c9fb465 --- /dev/null +++ b/admin/src/lib.rs @@ -0,0 +1,3 @@ +pub mod args; +pub mod client; +pub mod config; diff --git a/admin/src/main.rs b/admin/src/main.rs new file mode 100644 index 000000000..55cb40ee5 --- /dev/null +++ b/admin/src/main.rs @@ -0,0 +1,161 @@ +use clap::Parser; +use shuttle_admin::{ + args::{AcmeCommand, Args, Command, StatsCommand}, + client::Client, + config::get_api_key, +}; +use std::{ + collections::{hash_map::RandomState, HashMap}, + fmt::Write, + fs, +}; +use tracing::trace; + +#[tokio::main] +async fn main() { + tracing_subscriber::fmt::init(); + + let args = Args::parse(); + + trace!(?args, "starting with args"); + + let api_key = get_api_key(); + let client = Client::new(args.api_url.clone(), api_key); + + let res = match args.command { + Command::Revive => client.revive().await.expect("revive to succeed"), + Command::Acme(AcmeCommand::CreateAccount { email, acme_server }) => { + let account = client + .acme_account_create(&email, acme_server) + .await + .expect("to create ACME account"); + + let mut res = String::new(); + writeln!(res, "Details of ACME account are as follow. 
Keep this safe as it will be needed to create certificates in the future").unwrap(); + writeln!(res, "{}", serde_json::to_string_pretty(&account).unwrap()).unwrap(); + + res + } + Command::Acme(AcmeCommand::RequestCertificate { + fqdn, + project, + credentials, + }) => { + let credentials = fs::read_to_string(credentials).expect("to read credentials file"); + let credentials = + serde_json::from_str(&credentials).expect("to parse content of credentials file"); + + client + .acme_request_certificate(&fqdn, &project, &credentials) + .await + .expect("to get a certificate challenge response") + } + Command::ProjectNames => { + let projects = client + .get_projects() + .await + .expect("to get list of projects"); + + let projects: HashMap = HashMap::from_iter( + projects + .into_iter() + .map(|project| (project.project_name, project.account_name)), + ); + + let mut res = String::new(); + + for (project_name, account_name) in &projects { + let mut issues = Vec::new(); + let cleaned_name = project_name.to_lowercase(); + + // Were there any uppercase characters + if &cleaned_name != project_name { + // Since there were uppercase characters, will the new name clash with any existing projects + if let Some(other_account) = projects.get(&cleaned_name) { + if other_account == account_name { + issues.push( + "changing to lower case will clash with same owner".to_string(), + ); + } else { + issues.push(format!( + "changing to lower case will clash with another owner: {other_account}" + )); + } + } + } + + let cleaned_underscore = cleaned_name.replace('_', "-"); + // Were there any underscore cleanups + if cleaned_underscore != cleaned_name { + // Since there were underscore cleanups, will the new name clash with any existing projects + if let Some(other_account) = projects.get(&cleaned_underscore) { + if other_account == account_name { + issues + .push("cleaning underscore will clash with same owner".to_string()); + } else { + issues.push(format!( + "cleaning underscore will clash with another owner: {other_account}" + )); + } + } + } + + let cleaned_separator_name = cleaned_underscore.trim_matches('-'); + // Were there any dash cleanups + if cleaned_separator_name != cleaned_underscore { + // Since there were dash cleanups, will the new name clash with any existing projects + if let Some(other_account) = projects.get(cleaned_separator_name) { + if other_account == account_name { + issues.push("cleaning dashes will clash with same owner".to_string()); + } else { + issues.push(format!( + "cleaning dashes will clash with another owner: {other_account}" + )); + } + } + } + + // Are reserved words used + match cleaned_separator_name { + "shuttleapp" | "shuttle" => issues.push("is a reserved name".to_string()), + _ => {} + } + + // Is it longer than 63 chars + if cleaned_separator_name.len() > 63 { + issues.push("final name is too long".to_string()); + } + + // Only report of problem projects + if !issues.is_empty() { + writeln!(res, "{project_name}") + .expect("to write name of project name having issues"); + + for issue in issues { + writeln!(res, "\t- {issue}").expect("to write issue with project name"); + } + + writeln!(res).expect("to write a new line"); + } + } + + res + } + Command::Stats(StatsCommand::Load { clear }) => { + let resp = if clear { + client.clear_load().await.expect("to delete load stats") + } else { + client.get_load().await.expect("to get load stats") + }; + + let has_capacity = if resp.has_capacity { "a" } else { "no" }; + + format!( + "Currently {} builds are running and there is 
{} capacity for new builds", + resp.builds_count, has_capacity + ) + } + }; + + println!("{res}"); +} diff --git a/assets/v0.8.0-interactive-init.gif b/assets/v0.8.0-interactive-init.gif new file mode 100644 index 000000000..74a9036aa Binary files /dev/null and b/assets/v0.8.0-interactive-init.gif differ diff --git a/cargo-shuttle/Cargo.toml b/cargo-shuttle/Cargo.toml index 00ed551af..3b2a6916f 100644 --- a/cargo-shuttle/Cargo.toml +++ b/cargo-shuttle/Cargo.toml @@ -1,58 +1,72 @@ [package] name = "cargo-shuttle" -version = "0.7.0" -edition = "2021" -license = "Apache-2.0" +version = "0.8.1" +edition.workspace = true +license.workspace = true description = "A cargo command for the shuttle platform (https://www.shuttle.rs/)" homepage = "https://www.shuttle.rs" [dependencies] -anyhow = "1.0.62" -async-trait = "0.1.57" +anyhow = { workspace = true } +async-trait = { workspace = true } bollard = "0.13.0" -cargo = "0.64.0" -cargo-edit = { version = "0.10.4", features = ["cli"] } -cargo_metadata = "0.15.0" -chrono = "0.4.22" +# TODO: debug the libgit2-sys conflict with cargo-edit when upgrading cargo to 0.66 +cargo = "0.65.0" +cargo-edit = { version = "0.11.6", features = ["cli"] } +cargo_metadata = "0.15.2" +chrono = { workspace = true } clap = { version = "3.2.17", features = ["derive", "env"] } +clap_complete = "3.2.5" crossbeam-channel = "0.5.6" crossterm = "0.25.0" +dialoguer = { version = "0.10.2", features = ["fuzzy-select"] } dirs = "4.0.0" -futures = "0.3.23" +flate2 = "1.0.25" +futures = "0.3.25" +git2 = "0.14.2" headers = "0.3.8" +indicatif = "0.17.2" +ignore = "0.4.18" indoc = "1.0.7" log = "0.4.17" +openssl = { version = '0.10', optional = true } portpicker = "0.1.1" -reqwest = { version = "0.11.11", features = ["json"] } -reqwest-middleware = "0.1.6" -reqwest-retry = "0.1.5" -serde = { version = "1.0.143", features = ["derive"] } -serde_json = "1.0.83" -sqlx = { version = "0.6.1", features = ["runtime-tokio-native-tls", "postgres"] } -tokio = { version = "1.20.1", features = ["macros"] } +reqwest = { version = "0.11.13", features = ["json"] } +reqwest-middleware = "0.2.0" +reqwest-retry = "0.2.0" +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +sqlx = { version = "0.6.2", features = ["runtime-tokio-native-tls", "postgres"] } +strum = { version = "0.24.1", features = ["derive"] } +tar = "0.4.38" +tokio = { version = "1.22.0", features = ["macros"] } tokio-tungstenite = { version = "0.17.2", features = ["native-tls"] } toml = "0.5.9" -toml_edit = "0.14.4" -tracing = "0.1.35" -tracing-subscriber = { version = "0.3.11", features = ["env-filter"] } -url = "2.2.2" -uuid = { version = "1.1.2", features = ["v4"] } -webbrowser = "0.7.1" +toml_edit = "0.15.0" +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter"] } +url = "2.3.1" +uuid = { workspace = true, features = ["v4"] } +webbrowser = "0.8.2" [dependencies.shuttle-common] -version = "0.7.0" -path = "../common" +workspace = true +features= ["models"] [dependencies.shuttle-secrets] -version = "0.7.0" +version = "0.8.0" path = "../resources/secrets" [dependencies.shuttle-service] -version = "0.7.0" -path = "../service" -features = ["loader"] +workspace = true +features = ["loader", "codegen"] + +[features] +vendored-openssl = ["openssl/vendored"] [dev-dependencies] +assert_cmd = "2.0.6" +rexpect = "0.5.0" tempfile = "3.3.0" test-context = "0.1.4" # Tmp until this branch is merged and released diff --git a/cargo-shuttle/README.md b/cargo-shuttle/README.md 
index 691b861d9..964a2a478 100644 --- a/cargo-shuttle/README.md +++ b/cargo-shuttle/README.md @@ -90,7 +90,7 @@ $ cargo shuttle init --rocket my-rocket-app This should generate the following dependency in `Cargo.toml`: ```toml -shuttle-service = { version = "0.7.0", features = ["web-rocket"] } +shuttle-service = { version = "0.8.0", features = ["web-rocket"] } ``` The following boilerplate code should be generated into `src/lib.rs`: diff --git a/cargo-shuttle/src/args.rs b/cargo-shuttle/src/args.rs index dd6c93973..9976d3f6d 100644 --- a/cargo-shuttle/src/args.rs +++ b/cargo-shuttle/src/args.rs @@ -6,9 +6,12 @@ use std::{ }; use clap::Parser; +use clap_complete::Shell; use shuttle_common::project::ProjectName; use uuid::Uuid; +use crate::init::Framework; + #[derive(Parser)] #[clap( version, @@ -56,6 +59,15 @@ pub enum Command { Deployment(DeploymentCommand), /// create a new shuttle service Init(InitArgs), + /// generate shell completions + Generate { + /// which shell + #[clap(short, long, env, default_value_t = Shell::Bash)] + shell: Shell, + /// output to file or stdout by default + #[clap(short, long, env)] + output: Option, + }, /// view the status of a shuttle service Status, /// view the logs of a deployment in this shuttle service @@ -67,6 +79,8 @@ pub enum Command { /// Follow log output follow: bool, }, + /// remove artifacts that were generated by cargo + Clean, /// delete this shuttle service Delete, /// manage secrets for this shuttle service @@ -100,10 +114,14 @@ pub enum ProjectCommand { /// remove this project environment from shuttle Rm, /// show the status of this project's environment on shuttle - Status, + Status { + #[clap(short, long)] + /// Follow status of project command + follow: bool, + }, } -#[derive(Parser)] +#[derive(Parser, Clone, Debug)] pub struct LoginArgs { /// api key for the shuttle platform #[clap(long)] @@ -136,33 +154,44 @@ pub struct RunArgs { #[derive(Parser, Debug)] pub struct InitArgs { + /// Initialize with actix-web framework + #[clap(long="actix-web", conflicts_with_all = &["axum", "rocket", "tide", "tower", "poem", "serenity", "warp", "salvo", "thruster", "no-framework"])] + pub actix_web: bool, /// Initialize with axum framework - #[clap(long, conflicts_with_all = &["rocket", "tide", "tower", "poem", "serenity", "warp", "salvo", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","rocket", "tide", "tower", "poem", "serenity", "warp", "salvo", "thruster", "no-framework"])] pub axum: bool, /// Initialize with rocket framework - #[clap(long, conflicts_with_all = &["axum", "tide", "tower", "poem", "serenity", "warp", "salvo", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "tide", "tower", "poem", "serenity", "warp", "salvo", "thruster", "no-framework"])] pub rocket: bool, /// Initialize with tide framework - #[clap(long, conflicts_with_all = &["axum", "rocket", "tower", "poem", "serenity", "warp", "salvo", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tower", "poem", "serenity", "warp", "salvo", "thruster", "no-framework"])] pub tide: bool, /// Initialize with tower framework - #[clap(long, conflicts_with_all = &["axum", "rocket", "tide", "poem", "serenity", "warp", "salvo", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tide", "poem", "serenity", "warp", "salvo", "thruster", "no-framework"])] pub tower: bool, /// Initialize with poem framework - #[clap(long, conflicts_with_all = &["axum", "rocket", "tide", "tower", "serenity", "warp", 
"salvo", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tide", "tower", "serenity", "warp", "salvo", "thruster", "no-framework"])] pub poem: bool, /// Initialize with salvo framework - #[clap(long, conflicts_with_all = &["axum", "rocket", "tide", "tower", "poem", "warp", "serenity", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tide", "tower", "poem", "warp", "serenity", "thruster", "no-framework"])] pub salvo: bool, /// Initialize with serenity framework - #[clap(long, conflicts_with_all = &["axum", "rocket", "tide", "tower", "poem", "warp", "salvo", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tide", "tower", "poem", "warp", "salvo", "thruster", "no-framework"])] pub serenity: bool, /// Initialize with warp framework - #[clap(long, conflicts_with_all = &["axum", "rocket", "tide", "tower", "poem", "serenity", "salvo", "thruster"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tide", "tower", "poem", "serenity", "salvo", "thruster", "no-framework"])] pub warp: bool, /// Initialize with thruster framework - #[clap(long, conflicts_with_all = &["axum", "rocket", "tide", "tower", "poem", "warp", "salvo", "serenity"])] + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tide", "tower", "poem", "warp", "salvo", "serenity", "no-framework"])] pub thruster: bool, + /// Initialize without a framework + #[clap(long, conflicts_with_all = &["actix-web","axum", "rocket", "tide", "tower", "poem", "warp", "salvo", "serenity", "thruster"])] + pub no_framework: bool, + /// Whether to create the environment for this project on Shuttle + #[clap(long)] + pub new: bool, + #[clap(flatten)] + pub login_args: LoginArgs, /// Path to initialize a new shuttle project #[clap( parse(try_from_os_str = parse_init_path), @@ -171,6 +200,36 @@ pub struct InitArgs { pub path: PathBuf, } +impl InitArgs { + pub fn framework(&self) -> Option { + if self.actix_web { + Some(Framework::ActixWeb) + } else if self.axum { + Some(Framework::Axum) + } else if self.rocket { + Some(Framework::Rocket) + } else if self.tide { + Some(Framework::Tide) + } else if self.tower { + Some(Framework::Tower) + } else if self.poem { + Some(Framework::Poem) + } else if self.salvo { + Some(Framework::Salvo) + } else if self.serenity { + Some(Framework::Serenity) + } else if self.warp { + Some(Framework::Warp) + } else if self.thruster { + Some(Framework::Thruster) + } else if self.no_framework { + Some(Framework::None) + } else { + None + } + } +} + // Helper function to parse and return the absolute path fn parse_path(path: &OsStr) -> Result { canonicalize(path).map_err(|e| { @@ -182,9 +241,60 @@ fn parse_path(path: &OsStr) -> Result { } // Helper function to parse, create if not exists, and return the absolute path -fn parse_init_path(path: &OsStr) -> Result { +pub(crate) fn parse_init_path(path: &OsStr) -> Result { // Create the directory if does not exist create_dir_all(path)?; parse_path(path) } + +#[cfg(test)] +mod tests { + use strum::IntoEnumIterator; + + use super::*; + + fn init_args_factory(framework: &str) -> InitArgs { + let mut init_args = InitArgs { + actix_web: false, + axum: false, + rocket: false, + tide: false, + tower: false, + poem: false, + salvo: false, + serenity: false, + warp: false, + thruster: false, + no_framework: false, + new: false, + login_args: LoginArgs { api_key: None }, + path: PathBuf::new(), + }; + + match framework { + "actix-web" => init_args.actix_web = true, + 
"axum" => init_args.axum = true, + "rocket" => init_args.rocket = true, + "tide" => init_args.tide = true, + "tower" => init_args.tower = true, + "poem" => init_args.poem = true, + "salvo" => init_args.salvo = true, + "serenity" => init_args.serenity = true, + "warp" => init_args.warp = true, + "thruster" => init_args.thruster = true, + "none" => init_args.no_framework = true, + _ => unreachable!(), + } + + init_args + } + + #[test] + fn test_init_args_framework() { + for framework in Framework::iter() { + let args = init_args_factory(&framework.to_string()); + assert_eq!(args.framework(), Some(framework)); + } + } +} diff --git a/cargo-shuttle/src/client.rs b/cargo-shuttle/src/client.rs index aea1f2d38..4f3c4c778 100644 --- a/cargo-shuttle/src/client.rs +++ b/cargo-shuttle/src/client.rs @@ -1,23 +1,19 @@ use std::fmt::Write; -use std::fs::File; -use std::io::Read; use anyhow::{Context, Result}; -use async_trait::async_trait; use headers::{Authorization, HeaderMapExt}; use reqwest::{Body, Response}; use reqwest_middleware::{ClientBuilder, ClientWithMiddleware, RequestBuilder}; use reqwest_retry::policies::ExponentialBackoff; use reqwest_retry::RetryTransientMiddleware; -use serde::de::DeserializeOwned; use serde::Deserialize; -use shuttle_common::models::{deployment, error, project, secret, service, user}; +use shuttle_common::models::{deployment, project, secret, service, user, ToJson}; use shuttle_common::project::ProjectName; use shuttle_common::{ApiKey, ApiUrl, LogItem}; use tokio::net::TcpStream; use tokio_tungstenite::tungstenite::client::IntoClientRequest; use tokio_tungstenite::{connect_async, MaybeTlsStream, WebSocketStream}; -use tracing::{error, trace}; +use tracing::error; use uuid::Uuid; pub struct Client { @@ -25,35 +21,6 @@ pub struct Client { api_key: Option, } -#[async_trait] -trait ToJson { - async fn to_json(self) -> Result; -} - -#[async_trait] -impl ToJson for Response { - async fn to_json(self) -> Result { - let full = self.bytes().await?; - - trace!( - response = std::str::from_utf8(&full).unwrap_or_default(), - "parsing response to json" - ); - // try to deserialize into calling function response model - match serde_json::from_slice(&full) { - Ok(res) => Ok(res), - Err(_) => { - trace!("parsing response to common error"); - // if that doesn't work, try to deserialize into common error type - let res: error::ApiError = - serde_json::from_slice(&full).context("failed to parse response to JSON")?; - - Err(res.into()) - } - } - } -} - impl Client { pub fn new(api_url: ApiUrl) -> Self { Self { @@ -78,7 +45,7 @@ impl Client { pub async fn deploy( &self, - package_file: File, + data: Vec, project: &ProjectName, no_test: bool, ) -> Result { @@ -92,13 +59,7 @@ impl Client { let _ = write!(path, "?no-test"); } - let mut package_file = package_file; - let mut package_content = Vec::new(); - package_file - .read_to_end(&mut package_content) - .context("failed to convert package content to buf")?; - - self.post(path, Some(package_content)) + self.post(path, Some(data)) .await .context("failed to send deployment to the Shuttle server")? .to_json() @@ -145,6 +106,16 @@ impl Client { .await } + pub async fn clean_project(&self, project: &ProjectName) -> Result> { + let path = format!("/projects/{}/clean", project.as_str(),); + + self.post(path, Option::::None) + .await + .context("failed to get clean output")? 
+ .to_json() + .await + } + pub async fn get_project(&self, project: &ProjectName) -> Result { let path = format!("/projects/{}", project.as_str()); @@ -258,6 +229,7 @@ impl Client { if let Some(body) = body { builder = builder.body(body); + builder = builder.header("Transfer-Encoding", "chunked"); } builder.send().await diff --git a/cargo-shuttle/src/config.rs b/cargo-shuttle/src/config.rs index a6bc12104..a364455b3 100644 --- a/cargo-shuttle/src/config.rs +++ b/cargo-shuttle/src/config.rs @@ -41,11 +41,13 @@ pub trait ConfigManager: Sized { C: for<'de> Deserialize<'de>, { let path = self.path(); - let config_bytes = File::open(&path).and_then(|mut f| { - let mut buf = Vec::new(); - f.read_to_end(&mut buf)?; - Ok(buf) - })?; + let config_bytes = File::open(&path) + .and_then(|mut f| { + let mut buf = Vec::new(); + f.read_to_end(&mut buf)?; + Ok(buf) + }) + .with_context(|| anyhow!("Unable to read configuration file: {}", path.display()))?; toml::from_slice(config_bytes.as_slice()) .with_context(|| anyhow!("Invalid global configuration file: {}", path.display())) } @@ -256,7 +258,9 @@ impl RequestContext { if !global.exists() { global.create()?; } - global.open()?; + global + .open() + .context("Unable to load global configuration")?; Ok(Self { global, project: None, diff --git a/cargo-shuttle/src/factory.rs b/cargo-shuttle/src/factory.rs index a4cf1c74f..fa9b5ff52 100644 --- a/cargo-shuttle/src/factory.rs +++ b/cargo-shuttle/src/factory.rs @@ -23,6 +23,7 @@ use shuttle_service::{database::Type, error::CustomError, Factory, ServiceName}; use std::{ collections::{BTreeMap, HashMap}, io::stdout, + path::PathBuf, time::Duration, }; use tokio::time::sleep; @@ -32,14 +33,20 @@ pub struct LocalFactory { docker: Docker, service_name: ServiceName, secrets: BTreeMap, + working_directory: PathBuf, } impl LocalFactory { - pub fn new(service_name: ServiceName, secrets: BTreeMap) -> Result { + pub fn new( + service_name: ServiceName, + secrets: BTreeMap, + working_directory: PathBuf, + ) -> Result { Ok(Self { docker: Docker::connect_with_local_defaults()?, service_name, secrets, + working_directory, }) } } @@ -176,6 +183,14 @@ impl Factory for LocalFactory { fn get_service_name(&self) -> ServiceName { self.service_name.clone() } + + fn get_build_path(&self) -> Result { + Ok(self.working_directory.clone()) + } + + fn get_storage_path(&self) -> Result { + Ok(self.working_directory.clone()) + } } impl LocalFactory { diff --git a/cargo-shuttle/src/init.rs b/cargo-shuttle/src/init.rs index d4309aa7e..ea09f305a 100644 --- a/cargo-shuttle/src/init.rs +++ b/cargo-shuttle/src/init.rs @@ -2,7 +2,6 @@ use std::fs::{read_to_string, File}; use std::io::Write; use std::path::{Path, PathBuf}; -use crate::args::InitArgs; use anyhow::Result; use cargo::ops::NewOptions; use cargo_edit::{find, get_latest_dependency, registry_url}; @@ -10,6 +9,43 @@ use indoc::indoc; use toml_edit::{value, Array, Document, Item, Table}; use url::Url; +#[derive(Clone, Copy, Debug, PartialEq, Eq, strum::Display, strum::EnumIter)] +#[strum(serialize_all = "kebab-case")] +pub enum Framework { + ActixWeb, + Axum, + Rocket, + Tide, + Tower, + Poem, + Salvo, + Serenity, + Warp, + Thruster, + None, +} + +impl Framework { + /// Returns a framework-specific struct that implements the trait `ShuttleInit` + /// for writing framework-specific dependencies to `Cargo.toml` and generating + /// boilerplate code in `src/lib.rs`. 
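For orientation, a small illustrative sketch (the helper below is hypothetical, not part of the patch) of what the `strum` derives on `Framework` provide: `EnumIter` is what lets the interactive prompt enumerate every variant, and `Display` with `serialize_all = "kebab-case"` yields the flag-style names that the CLI flags and the tests compare against.

use strum::IntoEnumIterator;

/// Collect the user-facing names of all supported frameworks.
fn framework_names() -> Vec<String> {
    Framework::iter()
        .map(|framework| framework.to_string())
        .collect()
}

// framework_names() == ["actix-web", "axum", "rocket", "tide", "tower", "poem",
//                       "salvo", "serenity", "warp", "thruster", "none"]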
+ pub fn init_config(&self) -> Box { + match self { + Framework::ActixWeb => Box::new(ShuttleInitActixWeb), + Framework::Axum => Box::new(ShuttleInitAxum), + Framework::Rocket => Box::new(ShuttleInitRocket), + Framework::Tide => Box::new(ShuttleInitTide), + Framework::Tower => Box::new(ShuttleInitTower), + Framework::Poem => Box::new(ShuttleInitPoem), + Framework::Salvo => Box::new(ShuttleInitSalvo), + Framework::Serenity => Box::new(ShuttleInitSerenity), + Framework::Warp => Box::new(ShuttleInitWarp), + Framework::Thruster => Box::new(ShuttleInitThruster), + Framework::None => Box::new(ShuttleInitNoOp), + } + } +} + pub trait ShuttleInit { fn set_cargo_dependencies( &self, @@ -21,6 +57,52 @@ pub trait ShuttleInit { fn get_boilerplate_code_for_framework(&self) -> &'static str; } +pub struct ShuttleInitActixWeb; + +impl ShuttleInit for ShuttleInitActixWeb { + fn set_cargo_dependencies( + &self, + dependencies: &mut Table, + manifest_path: &Path, + url: &Url, + get_dependency_version_fn: GetDependencyVersionFn, + ) { + set_key_value_dependency_version( + "actix-web", + dependencies, + manifest_path, + url, + true, + get_dependency_version_fn, + ); + + set_inline_table_dependency_features( + "shuttle-service", + dependencies, + vec!["web-actix-web".to_string()], + ); + } + + fn get_boilerplate_code_for_framework(&self) -> &'static str { + indoc! {r#" + use actix_web::{get, web::ServiceConfig}; + use shuttle_service::ShuttleActixWeb; + + #[get("/hello")] + async fn hello_world() -> &'static str { + "Hello World!" + } + + #[shuttle_service::main] + async fn actix_web( + ) -> ShuttleActixWeb { + Ok(move |cfg: &mut ServiceConfig| { + cfg.service(hello_world); + }) + }"#} + } +} + pub struct ShuttleInitAxum; impl ShuttleInit for ShuttleInitAxum { @@ -552,49 +634,6 @@ impl ShuttleInit for ShuttleInitNoOp { } } -/// Returns a framework-specific struct that implements the trait `ShuttleInit` -/// for writing framework-specific dependencies to `Cargo.toml` and generating -/// boilerplate code in `src/lib.rs`. -pub fn get_framework(init_args: &InitArgs) -> Box { - if init_args.axum { - return Box::new(ShuttleInitAxum); - } - - if init_args.rocket { - return Box::new(ShuttleInitRocket); - } - - if init_args.tide { - return Box::new(ShuttleInitTide); - } - - if init_args.tower { - return Box::new(ShuttleInitTower); - } - - if init_args.poem { - return Box::new(ShuttleInitPoem); - } - - if init_args.salvo { - return Box::new(ShuttleInitSalvo); - } - - if init_args.serenity { - return Box::new(ShuttleInitSerenity); - } - - if init_args.warp { - return Box::new(ShuttleInitWarp); - } - - if init_args.thruster { - return Box::new(ShuttleInitThruster); - } - - Box::new(ShuttleInitNoOp) -} - /// Interoprates with `cargo` crate and calls `cargo init --libs [path]`. pub fn cargo_init(path: PathBuf) -> Result<()> { let opts = NewOptions::new(None, false, true, path, None, None, None)?; @@ -610,7 +649,7 @@ pub fn cargo_init(path: PathBuf) -> Result<()> { } /// Performs shuttle init on the existing files generated by `cargo init --libs [path]`. 
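As a minimal sketch of how the pieces compose (the `bootstrap` helper is hypothetical; it mirrors what `Shuttle::init` in `cargo-shuttle/src/lib.rs` does), `cargo_shuttle_init` now takes a `Framework` value directly, per the signature change just below, and resolves the matching `ShuttleInit` implementation internally through `init_config()`:

use std::path::PathBuf;

fn bootstrap(path: PathBuf, framework: Framework) -> anyhow::Result<()> {
    // `cargo init --libs <path>` through the cargo crate
    cargo_init(path.clone())?;
    // patch Cargo.toml and write any framework boilerplate into src/lib.rs
    cargo_shuttle_init(path, framework)?;
    Ok(())
}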
-pub fn cargo_shuttle_init(path: PathBuf, framework: Box) -> Result<()> { +pub fn cargo_shuttle_init(path: PathBuf, framework: Framework) -> Result<()> { let cargo_toml_path = path.join("Cargo.toml"); let mut cargo_doc = read_to_string(cargo_toml_path.clone()) .unwrap() @@ -642,8 +681,10 @@ pub fn cargo_shuttle_init(path: PathBuf, framework: Box) -> Res get_latest_dependency_version, ); + let init_config = framework.init_config(); + // Set framework-specific dependencies to the `dependencies` table - framework.set_cargo_dependencies( + init_config.set_cargo_dependencies( &mut dependencies, &manifest_path, &url, @@ -658,7 +699,7 @@ pub fn cargo_shuttle_init(path: PathBuf, framework: Box) -> Res // Write boilerplate to `src/lib.rs` file let lib_path = path.join("src").join("lib.rs"); - let boilerplate = framework.get_boilerplate_code_for_framework(); + let boilerplate = init_config.get_boilerplate_code_for_framework(); if !boilerplate.is_empty() { write_lib_file(boilerplate, &lib_path)?; } @@ -740,36 +781,6 @@ pub fn write_lib_file(boilerplate: &'static str, lib_path: &Path) -> Result<()> mod shuttle_init_tests { use super::*; - fn init_args_factory(framework: &str) -> InitArgs { - let mut init_args = InitArgs { - axum: false, - rocket: false, - tide: false, - tower: false, - poem: false, - salvo: false, - serenity: false, - warp: false, - thruster: false, - path: PathBuf::new(), - }; - - match framework { - "axum" => init_args.axum = true, - "rocket" => init_args.rocket = true, - "tide" => init_args.tide = true, - "tower" => init_args.tower = true, - "poem" => init_args.poem = true, - "salvo" => init_args.salvo = true, - "serenity" => init_args.serenity = true, - "warp" => init_args.warp = true, - "thruster" => init_args.thruster = true, - _ => unreachable!(), - } - - init_args - } - fn cargo_toml_factory() -> Document { indoc! {r#" [dependencies] @@ -787,32 +798,6 @@ mod shuttle_init_tests { "1.0".to_string() } - #[test] - fn test_get_framework_via_get_boilerplate_code() { - let frameworks = vec![ - "axum", "rocket", "tide", "tower", "poem", "salvo", "serenity", "warp", "thruster", - ]; - let framework_inits: Vec> = vec![ - Box::new(ShuttleInitAxum), - Box::new(ShuttleInitRocket), - Box::new(ShuttleInitTide), - Box::new(ShuttleInitTower), - Box::new(ShuttleInitPoem), - Box::new(ShuttleInitSalvo), - Box::new(ShuttleInitSerenity), - Box::new(ShuttleInitWarp), - Box::new(ShuttleInitThruster), - ]; - - for (framework, expected_framework_init) in frameworks.into_iter().zip(framework_inits) { - let framework_init = get_framework(&init_args_factory(framework)); - assert_eq!( - framework_init.get_boilerplate_code_for_framework(), - expected_framework_init.get_boilerplate_code_for_framework(), - ); - } - } - #[test] fn test_set_inline_table_dependency_features() { let mut cargo_toml = cargo_toml_factory(); @@ -879,6 +864,37 @@ mod shuttle_init_tests { assert_eq!(cargo_toml.to_string(), expected); } + #[test] + fn test_set_cargo_dependencies_actix_web() { + let mut cargo_toml = cargo_toml_factory(); + let dependencies = cargo_toml["dependencies"].as_table_mut().unwrap(); + let manifest_path = PathBuf::new(); + let url = Url::parse("https://shuttle.rs").unwrap(); + + set_inline_table_dependency_version( + "shuttle-service", + dependencies, + &manifest_path, + &url, + false, + mock_get_latest_dependency_version, + ); + + ShuttleInitActixWeb.set_cargo_dependencies( + dependencies, + &manifest_path, + &url, + mock_get_latest_dependency_version, + ); + + let expected = indoc! 
{r#" + [dependencies] + shuttle-service = { version = "1.0", features = ["web-actix-web"] } + actix-web = "1.0" + "#}; + + assert_eq!(cargo_toml.to_string(), expected); + } #[test] fn test_set_cargo_dependencies_axum() { diff --git a/cargo-shuttle/src/lib.rs b/cargo-shuttle/src/lib.rs index 415cf0407..13236628a 100644 --- a/cargo-shuttle/src/lib.rs +++ b/cargo-shuttle/src/lib.rs @@ -4,28 +4,36 @@ pub mod config; mod factory; mod init; +use shuttle_common::project::ProjectName; use std::collections::BTreeMap; +use std::ffi::OsString; use std::fs::{read_to_string, File}; -use std::io::Write; -use std::io::{self, stdout}; +use std::io::stdout; use std::net::{Ipv4Addr, SocketAddr}; use std::path::{Path, PathBuf}; -use std::rc::Rc; - -use anyhow::{anyhow, Context, Result}; -pub use args::{Args, Command, DeployArgs, InitArgs, ProjectArgs, RunArgs}; -use args::{AuthArgs, LoginArgs}; -use cargo::core::resolver::CliFeatures; -use cargo::core::Workspace; -use cargo::ops::{PackageOpts, Packages}; + +use anyhow::{anyhow, bail, Context, Result}; +use args::AuthArgs; +pub use args::{Args, Command, DeployArgs, InitArgs, LoginArgs, ProjectArgs, RunArgs}; use cargo_metadata::Message; +use clap::CommandFactory; +use clap_complete::{generate, Shell}; use config::RequestContext; use crossterm::style::Stylize; +use dialoguer::{theme::ColorfulTheme, Confirm, FuzzySelect, Input, Password}; use factory::LocalFactory; +use flate2::write::GzEncoder; +use flate2::Compression; use futures::StreamExt; -use shuttle_common::models::secret; +use git2::{Repository, StatusOptions}; +use ignore::overrides::OverrideBuilder; +use ignore::WalkBuilder; +use shuttle_common::models::{project, secret}; use shuttle_service::loader::{build_crate, Loader, Runtime}; use shuttle_service::Logger; +use std::fmt::Write; +use strum::IntoEnumIterator; +use tar::Builder; use tokio::sync::mpsc; use tracing::trace; use uuid::Uuid; @@ -37,16 +45,10 @@ pub struct Shuttle { ctx: RequestContext, } -impl Default for Shuttle { - fn default() -> Self { - Self::new() - } -} - impl Shuttle { - pub fn new() -> Self { - let ctx = RequestContext::load_global().unwrap(); - Self { ctx } + pub fn new() -> Result { + let ctx = RequestContext::load_global()?; + Ok(Self { ctx }) } pub async fn run(mut self, mut args: Args) -> Result { @@ -57,6 +59,7 @@ impl Shuttle { | Command::Deployment(..) | Command::Project(..) | Command::Delete + | Command::Clean | Command::Secrets | Command::Status | Command::Logs { .. 
} @@ -65,13 +68,14 @@ impl Shuttle { self.load_project(&mut args.project_args)?; } + self.ctx.set_api_url(args.api_url); + match args.cmd { - Command::Init(init_args) => self.init(init_args).await, + Command::Init(init_args) => self.init(init_args, args.project_args).await, + Command::Generate { shell, output } => self.complete(shell, output).await, Command::Login(login_args) => self.login(login_args).await, Command::Run(run_args) => self.local_run(run_args).await, need_client => { - self.ctx.set_api_url(args.api_url); - let mut client = Client::new(self.ctx.api_url()); client.set_api_key(self.ctx.api_key()?); @@ -88,10 +92,13 @@ impl Shuttle { self.deployment_get(&client, id).await } Command::Delete => self.delete(&client).await, + Command::Clean => self.clean(&client).await, Command::Secrets => self.secrets(&client).await, Command::Auth(auth_args) => self.auth(auth_args, &client).await, Command::Project(ProjectCommand::New) => self.project_create(&client).await, - Command::Project(ProjectCommand::Status) => self.project_status(&client).await, + Command::Project(ProjectCommand::Status { follow }) => { + self.project_status(&client, follow).await + } Command::Project(ProjectCommand::Rm) => self.project_delete(&client).await, _ => { unreachable!("commands that don't need a client have already been matched") @@ -102,13 +109,100 @@ impl Shuttle { .map(|_| CommandOutcome::Ok) } - async fn init(&self, args: InitArgs) -> Result<()> { - // Interface with cargo to initialize new lib package for shuttle - let path = args.path.clone(); + /// Log in, initialize a project and potentially create the Shuttle environment for it. + /// + /// If both a project name and framework are passed as arguments, it will run without any extra + /// interaction. + async fn init(&mut self, args: InitArgs, mut project_args: ProjectArgs) -> Result<()> { + let interactive = project_args.name.is_none() || args.framework().is_none(); + + let theme = ColorfulTheme::default(); + + // 1. Log in (if not logged in yet) + if self.ctx.api_key().is_err() { + if interactive { + println!("First, let's log in to your Shuttle account."); + self.login(args.login_args.clone()).await?; + println!(); + } else if args.new && args.login_args.api_key.is_some() { + self.login(args.login_args.clone()).await?; + } else { + bail!("Tried to login to create a Shuttle environment, but no API key was set.") + } + } + + // 2. Ask for project name + if project_args.name.is_none() { + println!("How do you want to name your project? It will be hosted at ${{project_name}}.shuttleapp.rs."); + // TODO: Check whether the project name is still available + project_args.name = Some( + Input::with_theme(&theme) + .with_prompt("Project name") + .interact()?, + ); + println!(); + } + + // 3. Confirm the project directory + let path = if interactive { + println!("Where should we create this project?"); + let directory_str: String = Input::with_theme(&theme) + .with_prompt("Directory") + .default(".".to_owned()) + .interact()?; + println!(); + args::parse_init_path(&OsString::from(directory_str))? + } else { + args.path.clone() + }; + + // 4. Ask for the framework + let framework = match args.framework() { + Some(framework) => framework, + None => { + println!( + "Shuttle works with a range of web frameworks. Which one do you want to use?" + ); + let frameworks = init::Framework::iter().collect::>(); + let index = FuzzySelect::with_theme(&theme) + .items(&frameworks) + .default(0) + .interact()?; + println!(); + frameworks[index] + } + }; + + // 5. 
Initialize locally init::cargo_init(path.clone())?; + init::cargo_shuttle_init(path.clone(), framework)?; + println!(); + + // 6. Confirm that the user wants to create the project environment on Shuttle + let should_create_environment = if !interactive { + args.new + } else if args.new { + true + } else { + let should_create = Confirm::with_theme(&theme) + .with_prompt("Do you want to create the project environment on Shuttle?") + .default(true) + .interact()?; + + println!(); + should_create + }; + + if should_create_environment { + // Set the project working directory path to the init path, + // so `load_project` is ran with the correct project path + project_args.working_directory = path; - let framework = init::get_framework(&args); - init::cargo_shuttle_init(path, framework)?; + self.load_project(&mut project_args)?; + let mut client = Client::new(self.ctx.api_url()); + client.set_api_key(self.ctx.api_key()?); + self.project_create(&client).await?; + } Ok(()) } @@ -132,23 +226,21 @@ impl Shuttle { self.ctx.load_local(project_args) } + /// Log in with the given API key or after prompting the user for one. async fn login(&mut self, login_args: LoginArgs) -> Result<()> { - let api_key_str = login_args.api_key.unwrap_or_else(|| { - let url = "https://shuttle.rs/login"; - - let _ = webbrowser::open(url); - - println!("If your browser did not automatically open, go to {url}"); - print!("Enter Api Key: "); - - stdout().flush().unwrap(); - - let mut input = String::new(); + let api_key_str = match login_args.api_key { + Some(api_key) => api_key, + None => { + let url = "https://shuttle.rs/login"; + let _ = webbrowser::open(url); - io::stdin().read_line(&mut input).unwrap(); + println!("If your browser did not automatically open, go to {url}"); - input - }); + Password::with_theme(&ColorfulTheme::default()) + .with_prompt("API key") + .interact()? 
+ } + }; let api_key = api_key_str.trim().parse()?; @@ -181,6 +273,17 @@ impl Shuttle { Ok(()) } + async fn complete(&self, shell: Shell, output: Option) -> Result<()> { + let name = env!("CARGO_PKG_NAME"); + let mut app = Command::command(); + match output { + Some(v) => generate(shell, &mut app, name, &mut File::create(v)?), + None => generate(shell, &mut app, name, &mut stdout()), + }; + + Ok(()) + } + async fn status(&self, client: &Client) -> Result<()> { let summary = client.get_service_summary(self.ctx.project_name()).await?; @@ -198,6 +301,18 @@ impl Shuttle { Ok(()) } + async fn clean(&self, client: &Client) -> Result<()> { + let lines = client.clean_project(self.ctx.project_name()).await?; + + for line in lines { + println!("{line}"); + } + + println!("Cleaning done!"); + + Ok(()) + } + async fn logs(&self, client: &Client, id: Option, follow: bool) -> Result<()> { let id = if let Some(id) = id { id @@ -254,7 +369,7 @@ impl Shuttle { trace!("starting a local run for a service: {run_args:?}"); let (tx, rx): (crossbeam_channel::Sender, _) = crossbeam_channel::bounded(0); - tokio::spawn(async move { + tokio::task::spawn_blocking(move || { while let Ok(message) = rx.recv() { match message { Message::TextLine(line) => println!("{line}"), @@ -300,7 +415,11 @@ impl Shuttle { let loader = Loader::from_so_file(so_path)?; - let mut factory = LocalFactory::new(self.ctx.project_name().clone(), secrets)?; + let mut factory = LocalFactory::new( + self.ctx.project_name().clone(), + secrets, + working_directory.to_path_buf(), + )?; let addr = SocketAddr::new(Ipv4Addr::LOCALHOST.into(), run_args.port); trace!("loading project"); @@ -323,7 +442,7 @@ impl Shuttle { handle.await??; - tokio::spawn(async move { + tokio::task::spawn_blocking(move || { trace!("closing so file"); so.close().unwrap(); }); @@ -332,12 +451,14 @@ impl Shuttle { } async fn deploy(&self, args: DeployArgs, client: &Client) -> Result { - let package_file = self - .run_cargo_package(args.allow_dirty) - .context("failed to package cargo project")?; + if !args.allow_dirty { + self.is_dirty()?; + } + + let data = self.make_archive()?; let deployment = client - .deploy(package_file, self.ctx.project_name(), args.no_test) + .deploy(data, self.ctx.project_name(), args.no_test) .await?; let mut stream = client @@ -392,57 +513,203 @@ impl Shuttle { } async fn project_create(&self, client: &Client) -> Result<()> { - let project = client.create_project(self.ctx.project_name()).await?; + self.wait_with_spinner( + &[project::State::Ready, project::State::Errored], + Client::create_project, + self.ctx.project_name(), + client, + ) + .await?; - println!("{project}"); + Ok(()) + } + + async fn project_status(&self, client: &Client, follow: bool) -> Result<()> { + match follow { + true => { + self.wait_with_spinner( + &[ + project::State::Ready, + project::State::Destroyed, + project::State::Errored, + ], + Client::get_project, + self.ctx.project_name(), + client, + ) + .await?; + } + false => { + let project = client.get_project(self.ctx.project_name()).await?; + println!("{project}"); + } + } Ok(()) } - async fn project_status(&self, client: &Client) -> Result<()> { - let project = client.get_project(self.ctx.project_name()).await?; + async fn wait_with_spinner<'a, F, Fut>( + &self, + states_to_check: &[project::State], + f: F, + project_name: &'a ProjectName, + client: &'a Client, + ) -> Result<(), anyhow::Error> + where + F: Fn(&'a Client, &'a ProjectName) -> Fut, + Fut: std::future::Future> + 'a, + { + let mut project = f(client, 
project_name).await?; + let pb = indicatif::ProgressBar::new_spinner(); + pb.enable_steady_tick(std::time::Duration::from_millis(350)); + pb.set_style( + indicatif::ProgressStyle::with_template("{spinner:.orange} {msg}") + .unwrap() + .tick_strings(&[ + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "( ●)", + "( ● )", + "( ● )", + "( ● )", + "( ● )", + "(● )", + "(●●●●●●)", + ]), + ); + loop { + if states_to_check.contains(&project.state) { + break; + } + pb.set_message(format!("{project}")); + project = client.get_project(project_name).await?; + } + pb.finish_and_clear(); println!("{project}"); - Ok(()) } async fn project_delete(&self, client: &Client) -> Result<()> { - let project = client.delete_project(self.ctx.project_name()).await?; - - println!("{project}"); + self.wait_with_spinner( + &[project::State::Destroyed, project::State::Errored], + Client::delete_project, + self.ctx.project_name(), + client, + ) + .await?; Ok(()) } - // Packages the cargo project and returns a File to that file - fn run_cargo_package(&self, allow_dirty: bool) -> Result { - let config = cargo::util::config::Config::default()?; + fn make_archive(&self) -> Result> { + let encoder = GzEncoder::new(Vec::new(), Compression::fast()); + let mut tar = Builder::new(encoder); let working_directory = self.ctx.working_directory(); - let path = working_directory.join("Cargo.toml"); - - let ws = Workspace::new(&path, &config)?; - let opts = PackageOpts { - config: &config, - list: false, - check_metadata: true, - allow_dirty, - keep_going: false, - verify: false, - jobs: None, - to_package: Packages::Default, - targets: vec![], - cli_features: CliFeatures { - features: Rc::new(Default::default()), - all_features: false, - uses_default_features: true, - }, - }; + let base_directory = working_directory + .parent() + .context("get parent directory of crate")?; + + // Make sure the target folder is excluded at all times + let overrides = OverrideBuilder::new(working_directory) + .add("!target/") + .context("add `!target/` override")? + .build() + .context("build an override")?; + + for dir_entry in WalkBuilder::new(working_directory) + .hidden(false) + .overrides(overrides) + .build() + { + let dir_entry = dir_entry.context("get directory entry")?; + + // It's not possible to add a directory to an archive + if dir_entry.file_type().context("get file type")?.is_dir() { + continue; + } + + let path = dir_entry + .path() + .strip_prefix(base_directory) + .context("strip the base of the archive entry")?; + + tar.append_path_with_name(dir_entry.path(), path) + .context("archive entry")?; + } + + // Make sure to add any `Secrets.toml` files + let secrets_path = self.ctx.working_directory().join("Secrets.toml"); + if secrets_path.exists() { + tar.append_path_with_name(secrets_path, Path::new("shuttle").join("Secrets.toml"))?; + } - let locks = cargo::ops::package(&ws, &opts)?.expect("unwrap ok here"); - let owned = locks.get(0).unwrap().file().try_clone()?; - Ok(owned) + let encoder = tar.into_inner().context("get encoder from tar archive")?; + let bytes = encoder.finish().context("finish up encoder")?; + + Ok(bytes) + } + + fn is_dirty(&self) -> Result<()> { + let working_directory = self.ctx.working_directory(); + if let Ok(repo) = Repository::discover(working_directory) { + let repo_path = repo + .workdir() + .context("getting working directory of repository")? 
+ .canonicalize()?; + + trace!(?repo_path, "found git repository"); + + let repo_rel_path = working_directory + .strip_prefix(repo_path.as_path()) + .context("stripping repository path from working directory")?; + + trace!( + ?repo_rel_path, + "got working directory path relative to git repository" + ); + + let mut status_options = StatusOptions::new(); + status_options + .pathspec(repo_rel_path) + .include_untracked(true); + + let statuses = repo + .statuses(Some(&mut status_options)) + .context("getting status of repository files")?; + + if !statuses.is_empty() { + let mut error: String = format!("{} files in the working directory contain changes that were not yet committed into git:", statuses.len()); + writeln!(error).expect("to append error"); + + for status in statuses.iter() { + trace!( + path = status.path(), + status = ?status.status(), + "found file with updates" + ); + + let path = + repo_path.join(status.path().context("getting path of changed file")?); + let rel_path = path + .strip_prefix(working_directory) + .expect("getting relative path of changed file") + .display(); + + writeln!(error, "{rel_path}").expect("to append error"); + } + + writeln!(error).expect("to append error"); + writeln!(error, "to proceed despite this and include the uncommitted changes, pass the `--allow-dirty` flag").expect("to append error"); + + return Err(anyhow::Error::msg(error)); + } + } + + Ok(()) } } @@ -453,9 +720,16 @@ pub enum CommandOutcome { #[cfg(test)] mod tests { + use flate2::read::GzDecoder; + use shuttle_common::project::ProjectName; + use tar::Archive; + use tempfile::TempDir; + use crate::args::ProjectArgs; use crate::Shuttle; + use std::fs::{self, canonicalize}; use std::path::PathBuf; + use std::str::FromStr; fn path_from_workspace_root(path: &str) -> PathBuf { PathBuf::from(std::env::var("CARGO_MANIFEST_DIR").unwrap()) @@ -463,6 +737,33 @@ mod tests { .join(path) } + fn get_archive_entries(mut project_args: ProjectArgs) -> Vec { + let mut shuttle = Shuttle::new().unwrap(); + shuttle.load_project(&mut project_args).unwrap(); + + let archive = shuttle.make_archive().unwrap(); + + // Make sure the Secrets.toml file is not initially present + let tar = GzDecoder::new(&archive[..]); + let mut archive = Archive::new(tar); + + archive + .entries() + .unwrap() + .map(|entry| { + entry + .unwrap() + .path() + .unwrap() + .components() + .skip(1) + .collect::() + .display() + .to_string() + }) + .collect() + } + #[test] fn find_root_directory_returns_proper_directory() { let working_directory = path_from_workspace_root("examples/axum/hello-world/src"); @@ -482,7 +783,7 @@ mod tests { name: None, }; - let mut shuttle = Shuttle::new(); + let mut shuttle = Shuttle::new().unwrap(); Shuttle::load_project(&mut shuttle, &mut project_args).unwrap(); assert_eq!( @@ -490,4 +791,77 @@ mod tests { path_from_workspace_root("examples/axum/hello-world/") ); } + + #[test] + fn make_archive_include_secrets() { + let working_directory = + canonicalize(path_from_workspace_root("examples/rocket/secrets")).unwrap(); + + fs::write( + working_directory.join("Secrets.toml"), + "MY_API_KEY = 'the contents of my API key'", + ) + .unwrap(); + + let project_args = ProjectArgs { + working_directory, + name: None, + }; + + let mut entries = get_archive_entries(project_args); + entries.sort(); + + assert_eq!( + entries, + vec![ + ".gitignore", + "Cargo.toml", + "README.md", + "Secrets.toml", + "Secrets.toml.example", + "Shuttle.toml", + "src/lib.rs", + ] + ); + } + + #[test] + fn make_archive_respect_ignore() { + let 
tmp_dir = TempDir::new().unwrap(); + let working_directory = tmp_dir.path(); + + fs::write(working_directory.join(".env"), "API_KEY = 'blabla'").unwrap(); + fs::write(working_directory.join(".ignore"), ".env").unwrap(); + fs::write(working_directory.join("Cargo.toml"), "[package]").unwrap(); + + let project_args = ProjectArgs { + working_directory: working_directory.to_path_buf(), + name: Some(ProjectName::from_str("secret").unwrap()), + }; + + let mut entries = get_archive_entries(project_args); + entries.sort(); + + assert_eq!(entries, vec![".ignore", "Cargo.toml"]); + } + + #[test] + fn make_archive_ignore_target_folder() { + let tmp_dir = TempDir::new().unwrap(); + let working_directory = tmp_dir.path(); + + fs::create_dir_all(working_directory.join("target")).unwrap(); + fs::write(working_directory.join("target").join("binary"), "12345").unwrap(); + fs::write(working_directory.join("Cargo.toml"), "[package]").unwrap(); + + let project_args = ProjectArgs { + working_directory: working_directory.to_path_buf(), + name: Some(ProjectName::from_str("exclude_target").unwrap()), + }; + + let mut entries = get_archive_entries(project_args); + entries.sort(); + + assert_eq!(entries, vec!["Cargo.toml"]); + } } diff --git a/cargo-shuttle/src/main.rs b/cargo-shuttle/src/main.rs index fc4f83b5c..a80e1ca15 100644 --- a/cargo-shuttle/src/main.rs +++ b/cargo-shuttle/src/main.rs @@ -6,7 +6,7 @@ use clap::Parser; async fn main() -> Result<()> { tracing_subscriber::fmt::init(); - let result = Shuttle::new().run(Args::parse()).await; + let result = Shuttle::new()?.run(Args::parse()).await; if matches!(result, Ok(CommandOutcome::DeploymentFailure)) { // Deployment failure results in a shell error exit code being returned (this allows diff --git a/cargo-shuttle/tests/integration/init.rs b/cargo-shuttle/tests/integration/init.rs index 370001605..45551314a 100644 --- a/cargo-shuttle/tests/integration/init.rs +++ b/cargo-shuttle/tests/integration/init.rs @@ -1,97 +1,177 @@ -use std::{ - fs::read_to_string, - path::{Path, PathBuf}, -}; +use std::fs::read_to_string; +use std::path::Path; +use std::process::Command; -use cargo_shuttle::{Args, Command, CommandOutcome, InitArgs, ProjectArgs, Shuttle}; +use cargo_shuttle::{Args, Shuttle}; +use clap::Parser; use indoc::indoc; use tempfile::Builder; -/// creates a `cargo-shuttle` init instance with some reasonable defaults set. 
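The `make_archive_respect_ignore` and `make_archive_ignore_target_folder` tests above pin down the archive's exclusion rules. Below is a minimal sketch of the underlying walk (hypothetical helper name, assuming the same `ignore` crate APIs that `make_archive` uses): hidden files are kept, `.ignore`/`.gitignore` rules are honored, and `target/` is always excluded via an explicit override.

use std::path::{Path, PathBuf};

use anyhow::Result;
use ignore::{overrides::OverrideBuilder, WalkBuilder};

fn files_to_archive(root: &Path) -> Result<Vec<PathBuf>> {
    // In override position the leading `!` excludes `target/`,
    // regardless of what any ignore file says.
    let overrides = OverrideBuilder::new(root).add("!target/")?.build()?;

    let mut files = Vec::new();
    for entry in WalkBuilder::new(root)
        .hidden(false) // include dot files such as .env and .ignore
        .overrides(overrides)
        .build()
    {
        let entry = entry?;
        // Directories themselves are skipped; only files end up in the tarball.
        if entry.file_type().map_or(false, |file_type| file_type.is_file()) {
            files.push(entry.path().to_path_buf());
        }
    }

    Ok(files)
}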
-async fn cargo_shuttle_init(path: PathBuf) -> anyhow::Result { - let working_directory = Path::new(".").to_path_buf(); - - Shuttle::new() - .run(Args { - api_url: Some("http://shuttle.invalid:80".to_string()), - project_args: ProjectArgs { - working_directory, - name: None, - }, - cmd: Command::Init(InitArgs { - axum: false, - rocket: false, - tide: false, - tower: false, - poem: false, - salvo: false, - serenity: false, - warp: false, - thruster: false, - path, - }), - }) - .await -} - -/// creates a `cargo-shuttle` init instance for initializing the `rocket` framework -async fn cargo_shuttle_init_framework(path: PathBuf) -> anyhow::Result { - let working_directory = Path::new(".").to_path_buf(); - - Shuttle::new() - .run(Args { - api_url: Some("http://shuttle.invalid:80".to_string()), - project_args: ProjectArgs { - working_directory, - name: None, - }, - cmd: Command::Init(InitArgs { - axum: false, - rocket: true, - tide: false, - tower: false, - poem: false, - salvo: false, - serenity: false, - warp: false, - thruster: false, - path, - }), - }) - .await -} - #[tokio::test] -async fn basic_init() { +async fn non_interactive_basic_init() { let temp_dir = Builder::new().prefix("basic-init").tempdir().unwrap(); let temp_dir_path = temp_dir.path().to_owned(); - cargo_shuttle_init(temp_dir_path.clone()).await.unwrap(); - let cargo_toml = read_to_string(temp_dir_path.join("Cargo.toml")).unwrap(); + let args = Args::parse_from([ + "cargo-shuttle", + "--api-url", + "http://shuttle.invalid:80", + "init", + "--api-key", + "fake-api-key", + "--name", + "my-project", + "--no-framework", + temp_dir_path.to_str().unwrap(), + ]); + Shuttle::new().unwrap().run(args).await.unwrap(); + let cargo_toml = read_to_string(temp_dir_path.join("Cargo.toml")).unwrap(); // Expected: name = "basic-initRANDOM_CHARS" assert!(cargo_toml.contains("name = \"basic-init")); assert!(cargo_toml.contains("shuttle-service = { version = ")); } #[tokio::test] -async fn framework_init() { +async fn non_interactive_rocket_init() { let temp_dir = Builder::new().prefix("rocket-init").tempdir().unwrap(); let temp_dir_path = temp_dir.path().to_owned(); - cargo_shuttle_init_framework(temp_dir_path.clone()) - .await - .unwrap(); + let args = Args::parse_from([ + "cargo-shuttle", + "--api-url", + "http://shuttle.invalid:80", + "init", + "--api-key", + "fake-api-key", + "--name", + "my-project", + "--rocket", + temp_dir_path.to_str().unwrap(), + ]); + Shuttle::new().unwrap().run(args).await.unwrap(); + + assert_valid_rocket_project(temp_dir_path.as_path(), "rocket-init"); +} - let cargo_toml = read_to_string(temp_dir_path.join("Cargo.toml")).unwrap(); +#[test] +fn interactive_rocket_init() -> Result<(), Box> { + let temp_dir = Builder::new().prefix("rocket-init").tempdir().unwrap(); + let temp_dir_path = temp_dir.path().to_owned(); + + let bin_path = assert_cmd::cargo::cargo_bin("cargo-shuttle"); + let mut command = Command::new(bin_path); + command.args([ + "--api-url", + "http://shuttle.invalid:80", + "init", + "--api-key", + "fake-api-key", + ]); + let mut session = rexpect::session::spawn_command(command, Some(2000))?; + + session.exp_string( + "How do you want to name your project? 
It will be hosted at ${project_name}.shuttleapp.rs.", + )?; + session.exp_string("Project name")?; + session.send_line("my-project")?; + session.exp_string("Where should we create this project?")?; + session.exp_string("Directory")?; + session.send_line(temp_dir_path.to_str().unwrap())?; + session.exp_string( + "Shuttle works with a range of web frameworks. Which one do you want to use?", + )?; + // Partial input should be enough to match "rocket" + session.send_line("roc")?; + session.exp_string("Do you want to create the project environment on Shuttle?")?; + session.send("y")?; + session.flush()?; + session.exp_string("yes")?; + + assert_valid_rocket_project(temp_dir_path.as_path(), "rocket-init"); + + Ok(()) +} + +#[test] +fn interactive_rocket_init_dont_prompt_framework() -> Result<(), Box> { + let temp_dir = Builder::new().prefix("rocket-init").tempdir().unwrap(); + let temp_dir_path = temp_dir.path().to_owned(); + + let bin_path = assert_cmd::cargo::cargo_bin("cargo-shuttle"); + let mut command = Command::new(bin_path); + command.args([ + "--api-url", + "http://shuttle.invalid:80", + "init", + "--api-key", + "fake-api-key", + "--rocket", + ]); + let mut session = rexpect::session::spawn_command(command, Some(2000))?; + + session.exp_string( + "How do you want to name your project? It will be hosted at ${project_name}.shuttleapp.rs.", + )?; + session.exp_string("Project name")?; + session.send_line("my-project")?; + session.exp_string("Where should we create this project?")?; + session.exp_string("Directory")?; + session.send_line(temp_dir_path.to_str().unwrap())?; + session.exp_string("Do you want to create the project environment on Shuttle?")?; + session.send("y")?; + session.flush()?; + session.exp_string("yes")?; + + assert_valid_rocket_project(temp_dir_path.as_path(), "rocket-init"); + + Ok(()) +} + +#[test] +fn interactive_rocket_init_dont_prompt_name() -> Result<(), Box> { + let temp_dir = Builder::new().prefix("rocket-init").tempdir().unwrap(); + let temp_dir_path = temp_dir.path().to_owned(); + + let bin_path = assert_cmd::cargo::cargo_bin("cargo-shuttle"); + let mut command = Command::new(bin_path); + command.args([ + "--api-url", + "http://shuttle.invalid:80", + "init", + "--api-key", + "fake-api-key", + "--name", + "my-project", + ]); + let mut session = rexpect::session::spawn_command(command, Some(2000))?; + + session.exp_string("Where should we create this project?")?; + session.exp_string("Directory")?; + session.send_line(temp_dir_path.to_str().unwrap())?; + session.exp_string( + "Shuttle works with a range of web frameworks. 
Which one do you want to use?", + )?; + // Partial input should be enough to match "rocket" + session.send_line("roc")?; + session.exp_string("Do you want to create the project environment on Shuttle?")?; + session.send("y")?; + session.flush()?; + session.exp_string("yes")?; + + assert_valid_rocket_project(temp_dir_path.as_path(), "rocket-init"); + + Ok(()) +} - // Expected: name = "rocket-initRANDOM_CHARS" - assert!(cargo_toml.contains("name = \"rocket-init")); +fn assert_valid_rocket_project(path: &Path, name_prefix: &str) { + let cargo_toml = read_to_string(path.join("Cargo.toml")).unwrap(); + assert!(cargo_toml.contains(&format!("name = \"{name_prefix}"))); assert!(cargo_toml.contains("shuttle-service = { version = ")); assert!(cargo_toml.contains("features = [\"web-rocket\"]")); assert!(cargo_toml.contains("rocket = ")); - let lib_file = read_to_string(temp_dir_path.join("src").join("lib.rs")).unwrap(); + let lib_file = read_to_string(path.join("src").join("lib.rs")).unwrap(); let expected = indoc! {r#" #[macro_use] extern crate rocket; diff --git a/cargo-shuttle/tests/integration/main.rs b/cargo-shuttle/tests/integration/main.rs index c21561713..accb9c4ae 100644 --- a/cargo-shuttle/tests/integration/main.rs +++ b/cargo-shuttle/tests/integration/main.rs @@ -12,6 +12,7 @@ async fn cargo_shuttle_command( let working_directory = Path::new(working_directory).to_path_buf(); Shuttle::new() + .unwrap() .run(Args { api_url: Some("http://shuttle.invalid:80".to_string()), project_args: ProjectArgs { diff --git a/cargo-shuttle/tests/integration/run.rs b/cargo-shuttle/tests/integration/run.rs index d928c4f55..318145ffb 100644 --- a/cargo-shuttle/tests/integration/run.rs +++ b/cargo-shuttle/tests/integration/run.rs @@ -10,7 +10,7 @@ async fn cargo_shuttle_run(working_directory: &str) -> u16 { let port = pick_unused_port().unwrap(); let run_args = RunArgs { port }; - let runner = Shuttle::new().run(Args { + let runner = Shuttle::new().unwrap().run(Args { api_url: Some("http://shuttle.invalid:80".to_string()), project_args: ProjectArgs { working_directory: working_directory.clone(), @@ -168,6 +168,22 @@ async fn rocket_authentication() { ); } +#[tokio::test(flavor = "multi_thread")] +async fn actix_web_hello_world() { + let port = cargo_shuttle_run("../examples/actix-web/hello-world").await; + + let request_text = reqwest::Client::new() + .get(format!("http://localhost:{port}/hello")) + .send() + .await + .unwrap() + .text() + .await + .unwrap(); + + assert_eq!(request_text, "Hello World!"); +} + #[tokio::test(flavor = "multi_thread")] async fn axum_hello_world() { let port = cargo_shuttle_run("../examples/axum/hello-world").await; diff --git a/codegen/Cargo.toml b/codegen/Cargo.toml index df1bb4d26..00b26b436 100644 --- a/codegen/Cargo.toml +++ b/codegen/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "shuttle-codegen" -version = "0.7.0" -edition = "2021" -license = "Apache-2.0" +version = "0.8.0" +edition.workspace = true +license.workspace = true description = "Proc-macro code generator for the shuttle.rs service" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -11,10 +11,10 @@ proc-macro = true [dependencies] proc-macro-error = "1.0.4" -proc-macro2 = "1.0.43" +proc-macro2 = "1.0.47" quote = "1.0.21" -syn = { version = "1.0.99", features = ["full", "extra-traits"] } +syn = { version = "1.0.104", features = ["full", "extra-traits"] } [dev-dependencies] -pretty_assertions = "1.2.1" -trybuild = "1.0.64" +pretty_assertions = "1.3.0" +trybuild = 
"1.0.72" diff --git a/codegen/src/lib.rs b/codegen/src/lib.rs index 384e19373..2adc85247 100644 --- a/codegen/src/lib.rs +++ b/codegen/src/lib.rs @@ -1,5 +1,5 @@ -mod main; mod next; +mod shuttle_main; use next::App; use proc_macro::TokenStream; @@ -9,7 +9,7 @@ use syn::{parse_macro_input, File}; #[proc_macro_error] #[proc_macro_attribute] pub fn main(attr: TokenStream, item: TokenStream) -> TokenStream { - main::r#impl(attr, item) + shuttle_main::r#impl(attr, item) } #[proc_macro_error] diff --git a/codegen/src/next/mod.rs b/codegen/src/next/mod.rs index 27dc3a71f..f0983bc18 100644 --- a/codegen/src/next/mod.rs +++ b/codegen/src/next/mod.rs @@ -87,10 +87,7 @@ impl Endpoint { Ok(params) => params, Err(err) => { // This will error on invalid parameter syntax - emit_error!( - err.span(), - err - ); + emit_error!(err.span(), err); return None; } }; diff --git a/codegen/src/main/mod.rs b/codegen/src/shuttle_main/mod.rs similarity index 91% rename from codegen/src/main/mod.rs rename to codegen/src/shuttle_main/mod.rs index be3939920..c8a03d1da 100644 --- a/codegen/src/main/mod.rs +++ b/codegen/src/shuttle_main/mod.rs @@ -20,7 +20,8 @@ pub(crate) fn r#impl(_attr: TokenStream, item: TokenStream) -> TokenStream { addr: std::net::SocketAddr, runtime: &shuttle_service::Runtime, ) -> shuttle_service::ServeHandle { - runtime.spawn(async move { service.bind(addr).await }) + use shuttle_service::Context; + runtime.spawn(async move { service.bind(addr).await.context("failed to bind service").map_err(Into::into) }) } #fn_decl @@ -228,6 +229,7 @@ impl ToTokens for Wrapper { runtime: &shuttle_service::Runtime, logger: shuttle_service::Logger, ) -> Result, shuttle_service::Error> { + use shuttle_service::Context; use shuttle_service::tracing_subscriber::prelude::*; #extra_imports @@ -249,15 +251,15 @@ impl ToTokens for Wrapper { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked setting logger".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to set logger")) } })?; - #(let #fn_inputs = #fn_inputs_builder::new()#fn_inputs_builder_options.build(#factory_ident, runtime).await?;)* + #(let #fn_inputs = #fn_inputs_builder::new()#fn_inputs_builder_options.build(#factory_ident, runtime).await.context(format!("failed to provision {}", stringify!(#fn_inputs_builder)))?;)* runtime.spawn(async { #fn_ident(#(#fn_inputs),*) @@ -271,11 +273,11 @@ impl ToTokens for Wrapper { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked calling main".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to call main")) } })? 
} @@ -320,6 +322,7 @@ mod tests { runtime: &shuttle_service::Runtime, logger: shuttle_service::Logger, ) -> Result, shuttle_service::Error> { + use shuttle_service::Context; use shuttle_service::tracing_subscriber::prelude::*; runtime.spawn_blocking(move || { let filter_layer = @@ -339,11 +342,11 @@ mod tests { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked setting logger".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to set logger")) } })?; @@ -359,11 +362,11 @@ mod tests { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked calling main".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to call main")) } })? } @@ -432,6 +435,7 @@ mod tests { runtime: &shuttle_service::Runtime, logger: shuttle_service::Logger, ) -> Result, shuttle_service::Error> { + use shuttle_service::Context; use shuttle_service::tracing_subscriber::prelude::*; use shuttle_service::ResourceBuilder; @@ -453,16 +457,16 @@ mod tests { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked setting logger".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to set logger")) } })?; - let pool = shuttle_shared_db::Postgres::new().build(factory, runtime).await?; - let redis = shuttle_shared_db::Redis::new().build(factory, runtime).await?; + let pool = shuttle_shared_db::Postgres::new().build(factory, runtime).await.context(format!("failed to provision {}", stringify!(shuttle_shared_db::Postgres)))?; + let redis = shuttle_shared_db::Redis::new().build(factory, runtime).await.context(format!("failed to provision {}", stringify!(shuttle_shared_db::Redis)))?; runtime.spawn(async { complex(pool, redis) @@ -476,11 +480,11 @@ mod tests { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked calling main".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to call main")) } })? 
} @@ -593,6 +597,7 @@ mod tests { runtime: &shuttle_service::Runtime, logger: shuttle_service::Logger, ) -> Result, shuttle_service::Error> { + use shuttle_service::Context; use shuttle_service::tracing_subscriber::prelude::*; use shuttle_service::ResourceBuilder; @@ -614,15 +619,15 @@ mod tests { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked setting logger".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to set logger")) } })?; - let pool = shuttle_shared_db::Postgres::new().size("10Gb").public(false).build(factory, runtime).await?; + let pool = shuttle_shared_db::Postgres::new().size("10Gb").public(false).build(factory, runtime).await.context(format!("failed to provision {}", stringify!(shuttle_shared_db::Postgres)))?; runtime.spawn(async { complex(pool) @@ -636,11 +641,11 @@ mod tests { .into_panic() .downcast_ref::<&str>() .map(|x| x.to_string()) - .unwrap_or_else(|| "".to_string()); + .unwrap_or_else(|| "panicked calling main".to_string()); shuttle_service::Error::BuildPanic(mes) } else { - shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e)) + shuttle_service::Error::Custom(shuttle_service::error::CustomError::new(e).context("failed to call main")) } })? } diff --git a/codegen/tests/ui/next/invalid-endpoint-syntax.stderr b/codegen/tests/ui/next/invalid-endpoint-syntax.stderr index 478bd56aa..b1fad595a 100644 --- a/codegen/tests/ui/next/invalid-endpoint-syntax.stderr +++ b/codegen/tests/ui/next/invalid-endpoint-syntax.stderr @@ -1,16 +1,10 @@ error: expected `,` - - = help: - --> tests/ui/next/invalid-endpoint-syntax.rs:2:64 | 2 | #[shuttle_codegen::endpoint(method = get, route = "/hello" extra = abundant)] | ^^^^^ error: expected `=` - - = help: - --> tests/ui/next/invalid-endpoint-syntax.rs:7:74 | 7 | #[shuttle_codegen::endpoint(method = get, route = "/goodbye", invalid)] diff --git a/common/Cargo.toml b/common/Cargo.toml index c54827c57..cdd16765b 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -1,30 +1,33 @@ [package] name = "shuttle-common" -version = "0.7.0" -edition = "2021" -license = "Apache-2.0" +version.workspace = true +edition.workspace = true +license.workspace = true description = "Common library for the shuttle platform (https://www.shuttle.rs/)" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -chrono = { version = "0.4.22", features = ["serde"] } -comfy-table = { version = "6.1.0", optional = true } +anyhow = { workspace = true, optional = true } +async-trait = { workspace = true , optional = true } +axum = { workspace = true, optional = true } +chrono = { workspace = true, features = ["serde"] } +comfy-table = { version = "6.1.3", optional = true } crossterm = { version = "0.25.0", optional = true } http = { version = "0.2.8", optional = true } http-serde = { version = "1.1.2", optional = true } hyper = { version = "0.14.23", optional = true } -once_cell = "1.13.1" +once_cell = { workspace = true } +reqwest = { version = "0.11.13", optional = true } rmp-serde = { version = "1.1.1", optional = true } -rustrict = "0.5.0" -serde = { version = "1.0.143", features = ["derive"] } -serde_json = { version = "1.0.85", optional = true } +rustrict = "0.5.5" +serde = { workspace = true, features = ["derive"] } 
+serde_json = { workspace = true, optional = true } strum = { version = "0.24.1", features = ["derive"] } -tracing = "0.1.36" -uuid = { version = "1.1.1", features = ["v4", "serde"] } +tracing = { workspace = true } +uuid = { workspace = true, features = ["v4", "serde"] } [features] -default = ["models"] - -models = ["display", "serde_json", "http"] +backend = ["async-trait", "axum"] display = ["comfy-table", "crossterm"] axum-wasm = ["http-serde", "hyper", "rmp-serde"] +models = ["anyhow", "async-trait", "display", "http", "reqwest", "serde_json"] diff --git a/common/src/backends/metrics.rs b/common/src/backends/metrics.rs new file mode 100644 index 000000000..9dabc5a1c --- /dev/null +++ b/common/src/backends/metrics.rs @@ -0,0 +1,35 @@ +use std::{collections::HashMap, convert::Infallible}; + +use async_trait::async_trait; +use axum::extract::{FromRequestParts, Path}; +use axum::http::request::Parts; +use tracing::Span; + +/// Used to record a bunch of metrics info +/// The tracing layer on the server should record a `request.params.` field for each parameter +/// that should be recorded +pub struct Metrics; + +#[async_trait] +impl FromRequestParts for Metrics +where + S: Send + Sync, +{ + type Rejection = Infallible; + + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + // Get path parameters if they exist + let Path(path): Path> = + match Path::from_request_parts(parts, state).await { + Ok(path) => path, + Err(_) => return Ok(Metrics), + }; + + let span = Span::current(); + + for (param, value) in path { + span.record(format!("request.params.{param}").as_str(), value); + } + Ok(Metrics) + } +} diff --git a/common/src/backends/mod.rs b/common/src/backends/mod.rs new file mode 100644 index 000000000..e14488328 --- /dev/null +++ b/common/src/backends/mod.rs @@ -0,0 +1 @@ +pub mod metrics; diff --git a/common/src/lib.rs b/common/src/lib.rs index 6d44adf67..6443b90a3 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -1,9 +1,12 @@ +#[cfg(feature = "backend")] +pub mod backends; pub mod database; pub mod deployment; pub mod log; #[cfg(feature = "models")] pub mod models; pub mod project; +pub mod storage_manager; #[cfg(feature = "axum-wasm")] pub mod wasm; diff --git a/common/src/models/error.rs b/common/src/models/error.rs index 5e5677925..7b20d5571 100644 --- a/common/src/models/error.rs +++ b/common/src/models/error.rs @@ -4,6 +4,7 @@ use comfy_table::Color; use crossterm::style::Stylize; use http::StatusCode; use serde::{Deserialize, Serialize}; +use tracing::{error, warn}; #[derive(Serialize, Deserialize, Debug)] pub struct ApiError { @@ -44,9 +45,13 @@ pub enum ErrorKind { ProjectAlreadyExists, ProjectNotReady, ProjectUnavailable, + CustomDomainNotFound, + InvalidCustomDomain, + CustomDomainAlreadyExists, InvalidOperation, Internal, NotReady, + ServiceUnavailable, } impl From for ApiError { @@ -54,6 +59,10 @@ impl From for ApiError { let (status, error_message) = match kind { ErrorKind::Internal => (StatusCode::INTERNAL_SERVER_ERROR, "internal server error"), ErrorKind::KeyMissing => (StatusCode::UNAUTHORIZED, "request is missing a key"), + ErrorKind::ServiceUnavailable => ( + StatusCode::SERVICE_UNAVAILABLE, + "we're experiencing a high workload right now, please try again in a little bit", + ), ErrorKind::KeyMalformed => (StatusCode::BAD_REQUEST, "request has an invalid key"), ErrorKind::BadHost => (StatusCode::BAD_REQUEST, "the 'Host' header is invalid"), ErrorKind::UserNotFound => (StatusCode::NOT_FOUND, "user not found"), @@ -66,7 +75,18 @@ impl From 
for ApiError { ErrorKind::ProjectUnavailable => { (StatusCode::BAD_GATEWAY, "project returned invalid response") } - ErrorKind::InvalidProjectName => (StatusCode::BAD_REQUEST, "invalid project name"), + ErrorKind::InvalidProjectName => ( + StatusCode::BAD_REQUEST, + r#" + Invalid project name. Project name must: + 1. start and end with alphanumeric characters. + 2. only contain lowercase characters. + 3. only contain characters inside of the alphanumeric range, except for `-`. + 4. not be empty. + 5. be shorter than 63 characters. + 6. not contain profanity. + 7. not be a reserved word."#, + ), ErrorKind::InvalidOperation => ( StatusCode::BAD_REQUEST, "the requested operation is invalid", @@ -75,6 +95,11 @@ impl From for ApiError { StatusCode::BAD_REQUEST, "a project with the same name already exists", ), + ErrorKind::InvalidCustomDomain => (StatusCode::BAD_REQUEST, "invalid custom domain"), + ErrorKind::CustomDomainNotFound => (StatusCode::NOT_FOUND, "custom domain not found"), + ErrorKind::CustomDomainAlreadyExists => { + (StatusCode::BAD_REQUEST, "custom domain already in use") + } ErrorKind::Unauthorized => (StatusCode::UNAUTHORIZED, "unauthorized"), ErrorKind::Forbidden => (StatusCode::FORBIDDEN, "forbidden"), ErrorKind::NotReady => (StatusCode::INTERNAL_SERVER_ERROR, "service not ready"), @@ -85,3 +110,40 @@ impl From for ApiError { } } } + +impl From for ApiError { + fn from(code: StatusCode) -> Self { + let message = match code { + StatusCode::OK | StatusCode::ACCEPTED | StatusCode::FOUND | StatusCode::SWITCHING_PROTOCOLS => { + unreachable!("we should not have an API error with a successfull status code") + } + StatusCode::FORBIDDEN => "this request is not allowed", + StatusCode::UNAUTHORIZED => { + "we were unable to authorize your request. Is your key still valid?" + }, + StatusCode::INTERNAL_SERVER_ERROR => "our server was unable to handle your request. A ticket should be created for us to fix this.", + StatusCode::SERVICE_UNAVAILABLE => "we're experiencing a high workload right now, please try again in a little bit", + StatusCode::BAD_REQUEST => { + warn!("responding to a BAD_REQUEST request with an unhelpful message. Use ErrorKind instead"); + "this request is invalid" + }, + StatusCode::NOT_FOUND => { + warn!("responding to a NOT_FOUND request with an unhelpful message. Use ErrorKind instead"); + "we don't serve this resource" + }, + StatusCode::BAD_GATEWAY => { + warn!("got a bad response from a deployer"); + "response from deployer is invalid. Please create a ticket to report this" + }, + _ => { + error!(%code, "got an unexpected status code"); + "an unexpected error occured. 
Please create a ticket to report this" + }, + }; + + Self { + message: message.to_string(), + status_code: code.as_u16(), + } + } +} diff --git a/common/src/models/mod.rs b/common/src/models/mod.rs index cd7acedf6..0236c641d 100644 --- a/common/src/models/mod.rs +++ b/common/src/models/mod.rs @@ -4,4 +4,47 @@ pub mod project; pub mod resource; pub mod secret; pub mod service; +pub mod stats; pub mod user; + +use anyhow::{Context, Result}; +use async_trait::async_trait; +use http::StatusCode; +use serde::de::DeserializeOwned; +use tracing::trace; + +#[async_trait] +pub trait ToJson { + async fn to_json(self) -> Result; +} + +#[async_trait] +impl ToJson for reqwest::Response { + async fn to_json(self) -> Result { + let status_code = self.status(); + let full = self.bytes().await?; + + trace!( + response = std::str::from_utf8(&full).unwrap_or_default(), + "parsing response to json" + ); + + if matches!( + status_code, + StatusCode::OK | StatusCode::SWITCHING_PROTOCOLS + ) { + serde_json::from_slice(&full).context("failed to parse a successfull response") + } else { + trace!("parsing response to common error"); + let res: error::ApiError = match serde_json::from_slice(&full) { + Ok(res) => res, + _ => { + trace!("getting error from status code"); + status_code.into() + } + }; + + Err(res.into()) + } + } +} diff --git a/common/src/models/project.rs b/common/src/models/project.rs index 12cf1cfc8..171c4e46a 100644 --- a/common/src/models/project.rs +++ b/common/src/models/project.rs @@ -46,3 +46,9 @@ impl State { } } } + +#[derive(Deserialize, Serialize)] +pub struct AdminResponse { + pub project_name: String, + pub account_name: String, +} diff --git a/common/src/models/stats.rs b/common/src/models/stats.rs new file mode 100644 index 000000000..a4a5035d4 --- /dev/null +++ b/common/src/models/stats.rs @@ -0,0 +1,13 @@ +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Deserialize, Serialize)] +pub struct LoadRequest { + pub id: Uuid, +} + +#[derive(Deserialize, Serialize)] +pub struct LoadResponse { + pub builds_count: usize, + pub has_capacity: bool, +} diff --git a/common/src/storage_manager.rs b/common/src/storage_manager.rs new file mode 100644 index 000000000..5a5fa1300 --- /dev/null +++ b/common/src/storage_manager.rs @@ -0,0 +1,69 @@ +use std::{fs, io, path::PathBuf}; + +use uuid::Uuid; + +/// Manager to take care of directories for storing project, services and deployment files +#[derive(Clone)] +pub struct StorageManager { + artifacts_path: PathBuf, +} + +impl StorageManager { + pub fn new(artifacts_path: PathBuf) -> Self { + Self { artifacts_path } + } + + /// Path of the directory that contains extracted service Cargo projects. + pub fn builds_path(&self) -> Result { + let builds_path = self.artifacts_path.join("shuttle-builds"); + fs::create_dir_all(&builds_path)?; + + Ok(builds_path) + } + + /// Path for a specific service + pub fn service_build_path>(&self, service_name: S) -> Result { + let builds_path = self.builds_path()?.join(service_name.as_ref()); + fs::create_dir_all(&builds_path)?; + + Ok(builds_path) + } + + /// The directory in which compiled '.so' files are stored. 
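To make the directory layout concrete, a small illustrative sketch (hypothetical artifacts path and service name) of where `StorageManager` puts things, using the build helper above together with the library- and storage-scoped helpers defined just below:

use shuttle_common::storage_manager::StorageManager;
use uuid::Uuid;

fn show_layout() -> std::io::Result<()> {
    let manager = StorageManager::new("/tmp/shuttle-artifacts".into());
    let deployment_id = Uuid::new_v4();

    // /tmp/shuttle-artifacts/shuttle-builds/my-service
    let build_path = manager.service_build_path("my-service")?;
    // /tmp/shuttle-artifacts/shuttle-libs/<deployment id>
    let library_path = manager.deployment_library_path(&deployment_id)?;
    // /tmp/shuttle-artifacts/shuttle-storage/my-service/<deployment id>
    let storage_path = manager.deployment_storage_path("my-service", &deployment_id)?;

    println!(
        "{}\n{}\n{}",
        build_path.display(),
        library_path.display(),
        storage_path.display()
    );

    Ok(())
}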
+ pub fn libraries_path(&self) -> Result { + let libs_path = self.artifacts_path.join("shuttle-libs"); + fs::create_dir_all(&libs_path)?; + + Ok(libs_path) + } + + /// Path to `.so` for a service + pub fn deployment_library_path(&self, deployment_id: &Uuid) -> Result { + let library_path = self.libraries_path()?.join(deployment_id.to_string()); + + Ok(library_path) + } + + /// Path of the directory to store user files + pub fn storage_path(&self) -> Result { + let storage_path = self.artifacts_path.join("shuttle-storage"); + fs::create_dir_all(&storage_path)?; + + Ok(storage_path) + } + + /// Path to folder for storing deployment files + pub fn deployment_storage_path>( + &self, + service_name: S, + deployment_id: &Uuid, + ) -> Result { + let storage_path = self + .storage_path()? + .join(service_name.as_ref()) + .join(deployment_id.to_string()); + fs::create_dir_all(&storage_path)?; + + Ok(storage_path) + } +} diff --git a/deployer/Cargo.toml b/deployer/Cargo.toml index d7c575d17..db2804a32 100644 --- a/deployer/Cargo.toml +++ b/deployer/Cargo.toml @@ -1,60 +1,61 @@ [package] name = "shuttle-deployer" -version = "0.7.0" -edition = "2021" +version.workspace = true +edition.workspace = true +license.workspace = true description = "Service with instances created per project for handling the compilation, loading, and execution of Shuttle services" [dependencies] -anyhow = "1.0.58" -async-trait = "0.1.56" -axum = { version = "0.5.7", features = ["ws"] } -bytes = "1.1.0" -cargo = "0.64.0" -cargo_metadata = "0.15.0" -chrono = "0.4.22" +anyhow = { workspace = true } +async-trait = { workspace = true } +axum = { workspace = true, features = ["ws"] } +bytes = "1.3.0" +# TODO: debug the libgit2-sys conflict with cargo-edit when upgrading cargo to 0.66 +cargo = "0.65.0" +cargo_metadata = "0.15.2" +chrono = { workspace = true } clap = { version = "3.2.8", features = ["derive"] } crossbeam-channel = "0.5.6" -flate2 = "1.0.24" -fqdn = "0.2.2" -futures = "0.3.21" -hyper = { version = "0.14.20", features = ["client", "http1", "http2", "tcp" ] } +flate2 = "1.0.25" +fqdn = "0.2.3" +futures = "0.3.25" +hyper = { version = "0.14.23", features = ["client", "http1", "http2", "tcp" ] } # not great, but waiting for WebSocket changes to be merged hyper-reverse-proxy = { git = "https://github.com/chesedo/hyper-reverse-proxy", branch = "master" } -once_cell = "1.14.0" -opentelemetry = { version = "0.17.0", features = ["rt-tokio"] } -opentelemetry-datadog = { version = "0.5.0", features = ["reqwest-client"] } +once_cell = { workspace = true } +opentelemetry = { version = "0.18.0", features = ["rt-tokio"] } +opentelemetry-datadog = { version = "0.6.0", features = ["reqwest-client"] } +opentelemetry-http = "0.7.0" pipe = "0.4.0" -serde = "1.0.137" -serde_json = "1.0.81" -sqlx = { version = "0.6.0", features = ["runtime-tokio-native-tls", "sqlite", "chrono", "json", "migrate", "uuid"] } +serde = { workspace = true } +serde_json = { workspace = true } +sqlx = { version = "0.6.2", features = ["runtime-tokio-native-tls", "sqlite", "chrono", "json", "migrate", "uuid"] } strum = { version = "0.24.1", features = ["derive"] } tar = "0.4.38" -thiserror = "1.0.24" -tokio = { version = "1.19.2", features = ["fs", "process"] } +thiserror = { workspace = true } +tokio = { version = "1.22.0", features = ["fs", "process"] } toml = "0.5.9" -tonic = "0.8.2" -tower = { version = "0.4.12", features = ["make"] } +tonic = "0.8.3" +tower = { version = "0.4.13", features = ["make"] } tower-http = { version = "0.3.4", features = 
["auth", "trace"] } -tracing = "0.1.35" -tracing-opentelemetry = "0.17.4" -tracing-subscriber = { version = "0.3.11", features = ["env-filter"] } -uuid = { version = "1.1.2", features = ["v4"] } +tracing = { workspace = true } +tracing-opentelemetry = "0.18.0" +tracing-subscriber = { workspace = true, features = ["env-filter"] } +uuid = { workspace = true, features = ["v4"] } [dependencies.shuttle-common] -version = "0.7.0" -path = "../common" +workspace = true +features= ["backend", "models"] [dependencies.shuttle-proto] -version = "0.7.0" -path = "../proto" +workspace = true [dependencies.shuttle-service] -version = "0.7.0" -path = "../service" -features = ["loader"] +workspace = true +features = ["loader", "codegen"] [dev-dependencies] -ctor = "0.1.22" +ctor = "0.1.26" hex = "0.4.3" rand = "0.8.5" tempdir = "0.3.7" diff --git a/deployer/prepare.sh b/deployer/prepare.sh new file mode 100755 index 000000000..70eac8a23 --- /dev/null +++ b/deployer/prepare.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env sh + +############################################################################### +# This file is used by our common Containerfile incase the container for this # +# service might need some extra preparation steps for its final image # +############################################################################### + +# Patch crates to be on same versions +mkdir -p $CARGO_HOME; \ +echo '[patch.crates-io] +shuttle-service = { path = "/usr/src/shuttle/service" } +shuttle-aws-rds = { path = "/usr/src/shuttle/resources/aws-rds" } +shuttle-persist = { path = "/usr/src/shuttle/resources/persist" } +shuttle-shared-db = { path = "/usr/src/shuttle/resources/shared-db" } +shuttle-secrets = { path = "/usr/src/shuttle/resources/secrets" } +shuttle-static-folder = { path = "/usr/src/shuttle/resources/static-folder" }' > $CARGO_HOME/config.toml + +# Make future crates requests to our own mirror +echo ' +[source.shuttle-crates-io-mirror] +registry = "http://panamax:8080/git/crates.io-index" +[source.crates-io] +replace-with = "shuttle-crates-io-mirror"' >> $CARGO_HOME/config.toml + +# Prefetch crates.io index from our mirror +# TODO: restore when we know how to prefetch from our mirror +# cd /usr/src/shuttle/service +# cargo fetch diff --git a/deployer/src/args.rs b/deployer/src/args.rs index 2950d1f23..87b467bbc 100644 --- a/deployer/src/args.rs +++ b/deployer/src/args.rs @@ -2,7 +2,8 @@ use std::{net::SocketAddr, path::PathBuf}; use clap::Parser; use fqdn::FQDN; -use shuttle_common::Port; +use hyper::Uri; +use shuttle_common::{project::ProjectName, Port}; /// Program to handle the deploys for a single project /// Handling includes, building, testing, and running each service @@ -33,6 +34,14 @@ pub struct Args { #[clap(long, default_value = "0.0.0.0:8000")] pub proxy_address: SocketAddr, + /// Address to reach gateway's control plane at + #[clap(long, default_value = "http://gateway:8001")] + pub gateway_uri: Uri, + + /// Project being served by this deployer + #[clap(long)] + pub project: ProjectName, + /// Secret that will be used to perform admin tasks on this deployer #[clap(long)] pub admin_secret: String, diff --git a/deployer/src/deployment/deploy_layer.rs b/deployer/src/deployment/deploy_layer.rs index f74d5039d..6cbf3edbb 100644 --- a/deployer/src/deployment/deploy_layer.rs +++ b/deployer/src/deployment/deploy_layer.rs @@ -400,7 +400,6 @@ mod tests { use axum::body::Bytes; use ctor::ctor; use flate2::{write::GzEncoder, Compression}; - use futures::FutureExt; use 
shuttle_proto::runtime::runtime_client::RuntimeClient; use tokio::{select, time::sleep}; use tonic::transport::Channel; @@ -409,7 +408,8 @@ mod tests { use crate::{ deployment::{ - deploy_layer::LogType, ActiveDeploymentsGetter, Built, DeploymentManager, Queued, + deploy_layer::LogType, gateway_client::BuildQueueClient, ActiveDeploymentsGetter, + Built, DeploymentManager, Queued, }, persistence::{SecretRecorder, State}, }; @@ -516,15 +516,29 @@ mod tests { } } + #[derive(Clone)] + struct StubBuildQueueClient; + + #[async_trait::async_trait] + impl BuildQueueClient for StubBuildQueueClient { + async fn get_slot( + &self, + _id: Uuid, + ) -> Result { + Ok(true) + } + + async fn release_slot( + &self, + _id: Uuid, + ) -> Result<(), crate::deployment::gateway_client::Error> { + Ok(()) + } + } + #[tokio::test(flavor = "multi_thread")] async fn deployment_to_be_queued() { - let deployment_manager = DeploymentManager::new( - get_runtime_client().await, - RECORDER.clone(), - RECORDER.clone(), - StubActiveDeploymentGetter, - PathBuf::from("/tmp"), - ); + let deployment_manager = get_deployment_manager().await; let queued = get_queue("sleep-async"); let id = queued.id; @@ -583,7 +597,7 @@ mod tests { }; select! { - _ = sleep(Duration::from_secs(120)) => { + _ = sleep(Duration::from_secs(180)) => { panic!("states should go into 'Running' for a valid service"); } _ = test => {} @@ -636,13 +650,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn deployment_self_stop() { - let deployment_manager = DeploymentManager::new( - get_runtime_client().await, - RECORDER.clone(), - RECORDER.clone(), - StubActiveDeploymentGetter, - PathBuf::from("/tmp"), - ); + let deployment_manager = get_deployment_manager().await; let queued = get_queue("self-stop"); let id = queued.id; @@ -706,7 +714,7 @@ mod tests { }; select! { - _ = sleep(Duration::from_secs(120)) => { + _ = sleep(Duration::from_secs(180)) => { panic!("states should go into 'Completed' when a service stops by itself"); } _ = test => {} @@ -715,13 +723,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn deployment_bind_panic() { - let deployment_manager = DeploymentManager::new( - get_runtime_client().await, - RECORDER.clone(), - RECORDER.clone(), - StubActiveDeploymentGetter, - PathBuf::from("/tmp"), - ); + let deployment_manager = get_deployment_manager().await; let queued = get_queue("bind-panic"); let id = queued.id; @@ -785,7 +787,7 @@ mod tests { }; select! { - _ = sleep(Duration::from_secs(120)) => { + _ = sleep(Duration::from_secs(180)) => { panic!("states should go into 'Crashed' panicing in bind"); } _ = test => {} @@ -794,13 +796,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn deployment_main_panic() { - let deployment_manager = DeploymentManager::new( - get_runtime_client().await, - RECORDER.clone(), - RECORDER.clone(), - StubActiveDeploymentGetter, - PathBuf::from("/tmp"), - ); + let deployment_manager = get_deployment_manager().await; let queued = get_queue("main-panic"); let id = queued.id; @@ -859,7 +855,7 @@ mod tests { }; select! 
{ - _ = sleep(Duration::from_secs(120)) => { + _ = sleep(Duration::from_secs(180)) => { panic!("states should go into 'Crashed' when panicing in main"); } _ = test => {} @@ -868,13 +864,7 @@ mod tests { #[tokio::test] async fn deployment_from_run() { - let deployment_manager = DeploymentManager::new( - get_runtime_client().await, - RECORDER.clone(), - RECORDER.clone(), - StubActiveDeploymentGetter, - PathBuf::from("/tmp"), - ); + let deployment_manager = get_deployment_manager().await; let id = Uuid::new_v4(); deployment_manager @@ -882,6 +872,7 @@ mod tests { id, service_name: "run-test".to_string(), service_id: Uuid::new_v4(), + tracing_context: Default::default(), }) .await; @@ -921,13 +912,7 @@ mod tests { #[tokio::test] async fn scope_with_nil_id() { - let deployment_manager = DeploymentManager::new( - get_runtime_client().await, - RECORDER.clone(), - RECORDER.clone(), - StubActiveDeploymentGetter, - PathBuf::from("/tmp"), - ); + let deployment_manager = get_deployment_manager().await; let id = Uuid::nil(); deployment_manager @@ -935,8 +920,9 @@ mod tests { id, service_name: "nil_id".to_string(), service_id: Uuid::new_v4(), - data_stream: Box::pin(async { Ok(Bytes::from("violets are red")) }.into_stream()), + data: Bytes::from("violets are red").to_vec(), will_run_tests: false, + tracing_context: Default::default(), }) .await; @@ -952,6 +938,17 @@ mod tests { ); } + async fn get_deployment_manager() -> DeploymentManager { + DeploymentManager::builder() + .build_log_recorder(RECORDER.clone()) + .secret_recorder(RECORDER.clone()) + .active_deployment_getter(StubActiveDeploymentGetter) + .artifacts_path(PathBuf::from("/tmp")) + .runtime(get_runtime_client().await) + .queue_client(StubBuildQueueClient) + .build() + } + fn get_queue(name: &str) -> Queued { let enc = GzEncoder::new(Vec::new(), Compression::fast()); let mut tar = tar::Builder::new(enc); @@ -978,8 +975,9 @@ mod tests { id: Uuid::new_v4(), service_name: format!("deploy-layer-{name}"), service_id: Uuid::new_v4(), - data_stream: Box::pin(async { Ok(Bytes::from(bytes)) }.into_stream()), + data: bytes, will_run_tests: false, + tracing_context: Default::default(), } } } diff --git a/deployer/src/deployment/gateway_client.rs b/deployer/src/deployment/gateway_client.rs new file mode 100644 index 000000000..61846e4a6 --- /dev/null +++ b/deployer/src/deployment/gateway_client.rs @@ -0,0 +1,108 @@ +use hyper::{body, client::HttpConnector, Body, Client, Method, Request, Uri}; +use serde::{de::DeserializeOwned, Serialize}; +use shuttle_common::models::stats; +use thiserror::Error; +use tracing::trace; +use uuid::Uuid; + +#[derive(Error, Debug)] +pub enum Error { + #[error("Hyper error: {0}")] + Hyper(#[from] hyper::Error), + #[error("Serde JSON error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("Hyper error: {0}")] + Http(#[from] hyper::http::Error), +} + +/// A client that can communicate with the build queue +#[async_trait::async_trait] +pub trait BuildQueueClient: Clone + Send + Sync + 'static { + /// Try to get a build slot. 
A false returned value means that the spot could not be acquire + async fn get_slot(&self, id: Uuid) -> Result; + + /// Release a build slot that was previously acquired + async fn release_slot(&self, id: Uuid) -> Result<(), Error>; +} + +/// Handles all calls to gateway +#[derive(Clone)] +pub struct GatewayClient { + client: Client, + base: Uri, +} + +impl GatewayClient { + pub fn new(uri: Uri) -> Self { + Self { + client: Client::new(), + base: uri, + } + } + + /// Make a post request to a gateway endpoint + pub async fn post( + &self, + path: &str, + body: Option, + ) -> Result { + self.request(Method::POST, path, body).await + } + + /// Make a delete request to a gateway endpoint + pub async fn delete( + &self, + path: &str, + body: Option, + ) -> Result { + self.request(Method::DELETE, path, body).await + } + + async fn request( + &self, + method: Method, + path: &str, + body: Option, + ) -> Result { + let uri = format!("{}{path}", self.base); + trace!(uri, "calling gateway"); + + let req = Request::builder() + .method(method) + .uri(uri) + .header("Content-Type", "application/json"); + + let req = if let Some(body) = body { + req.body(Body::from(serde_json::to_vec(&body)?)) + } else { + req.body(Body::empty()) + }; + + let resp = self.client.request(req?).await?; + + trace!(response = ?resp, "Load response"); + + let body = resp.into_body(); + let bytes = body::to_bytes(body).await?; + let json = serde_json::from_slice(&bytes)?; + + Ok(json) + } +} + +#[async_trait::async_trait] +impl BuildQueueClient for GatewayClient { + async fn get_slot(&self, id: Uuid) -> Result { + let body = stats::LoadRequest { id }; + let load: stats::LoadResponse = self.post("stats/load", Some(body)).await?; + + Ok(load.has_capacity) + } + + async fn release_slot(&self, id: Uuid) -> Result<(), Error> { + let body = stats::LoadRequest { id }; + let _load: stats::LoadResponse = self.delete("stats/load", Some(body)).await?; + + Ok(()) + } +} diff --git a/deployer/src/deployment/mod.rs b/deployer/src/deployment/mod.rs index 549f05ad6..c069e1e7f 100644 --- a/deployer/src/deployment/mod.rs +++ b/deployer/src/deployment/mod.rs @@ -1,4 +1,5 @@ pub mod deploy_layer; +pub mod gateway_client; mod queue; mod run; @@ -6,103 +7,93 @@ use std::path::PathBuf; pub use queue::Queued; pub use run::{ActiveDeploymentsGetter, Built}; +use shuttle_common::storage_manager::StorageManager; use shuttle_proto::runtime::runtime_client::RuntimeClient; use tonic::transport::Channel; -use tracing::instrument; +use tracing::{instrument, Span}; +use tracing_opentelemetry::OpenTelemetrySpanExt; use crate::persistence::{SecretRecorder, State}; use tokio::sync::{broadcast, mpsc}; use uuid::Uuid; -use self::deploy_layer::LogRecorder; +use self::{deploy_layer::LogRecorder, gateway_client::BuildQueueClient}; const QUEUE_BUFFER_SIZE: usize = 100; const RUN_BUFFER_SIZE: usize = 100; const KILL_BUFFER_SIZE: usize = 10; -#[derive(Clone)] -pub struct DeploymentManager { - pipeline: Pipeline, - kill_send: KillSender, +pub struct DeploymentManagerBuilder { + build_log_recorder: Option, + secret_recorder: Option, + active_deployment_getter: Option, + artifacts_path: Option, + runtime_client: Option>, + queue_client: Option, } -impl DeploymentManager { - /// Create a new deployment manager. Manages one or more 'pipelines' for - /// processing service building, loading, and deployment. 
- pub fn new( - runtime_client: RuntimeClient, - build_log_recorder: impl LogRecorder, - secret_recorder: impl SecretRecorder, - active_deployment_getter: impl ActiveDeploymentsGetter, - artifacts_path: PathBuf, - ) -> Self { - let (kill_send, _) = broadcast::channel(KILL_BUFFER_SIZE); +impl DeploymentManagerBuilder +where + LR: LogRecorder, + SR: SecretRecorder, + ADG: ActiveDeploymentsGetter, + QC: BuildQueueClient, +{ + pub fn build_log_recorder(mut self, build_log_recorder: LR) -> Self { + self.build_log_recorder = Some(build_log_recorder); + + self + } - DeploymentManager { - pipeline: Pipeline::new( - kill_send.clone(), - runtime_client, - build_log_recorder, - secret_recorder, - active_deployment_getter, - artifacts_path, - ), - kill_send, - } + pub fn secret_recorder(mut self, secret_recorder: SR) -> Self { + self.secret_recorder = Some(secret_recorder); + + self } - #[instrument(skip(self), fields(id = %queued.id, state = %State::Queued))] - pub async fn queue_push(&self, queued: Queued) { - self.pipeline.queue_send.send(queued).await.unwrap(); + pub fn active_deployment_getter(mut self, active_deployment_getter: ADG) -> Self { + self.active_deployment_getter = Some(active_deployment_getter); + + self } - #[instrument(skip(self), fields(id = %built.id, state = %State::Built))] - pub async fn run_push(&self, built: Built) { - self.pipeline.run_send.send(built).await.unwrap(); + pub fn artifacts_path(mut self, artifacts_path: PathBuf) -> Self { + self.artifacts_path = Some(artifacts_path); + + self } - pub async fn kill(&self, id: Uuid) { - if self.kill_send.receiver_count() > 0 { - self.kill_send.send(id).unwrap(); - } + pub fn queue_client(mut self, queue_client: QC) -> Self { + self.queue_client = Some(queue_client); + + self } -} -/// ```no-test -/// queue channel all deployments here are State::Queued -/// | -/// v -/// queue task when taken from the channel by this task, deployments -/// enter the State::Building state and upon being -/// | built transition to the State::Built state -/// v -/// run channel all deployments here are State::Built -/// | -/// v -/// run task tasks enter the State::Running state and begin -/// executing -/// ``` -#[derive(Clone)] -struct Pipeline { - queue_send: QueueSender, - run_send: RunSender, -} + pub fn runtime(mut self, runtime_client: RuntimeClient) -> Self { + self.runtime_client = Some(runtime_client); -impl Pipeline { + self + } /// Creates two Tokio tasks, one for building queued services, the other for /// executing/deploying built services. Two multi-producer, single consumer /// channels are also created which are for moving on-going service /// deployments between the aforementioned tasks. 
- fn new( - kill_send: KillSender, - runtime_client: RuntimeClient, - build_log_recorder: impl LogRecorder, - secret_recorder: impl SecretRecorder, - active_deployment_getter: impl ActiveDeploymentsGetter, - artifacts_path: PathBuf, - ) -> Pipeline { + pub fn build(self) -> DeploymentManager { + let build_log_recorder = self + .build_log_recorder + .expect("a build log recorder to be set"); + let secret_recorder = self.secret_recorder.expect("a secret recorder to be set"); + let active_deployment_getter = self + .active_deployment_getter + .expect("an active deployment getter to be set"); + let artifacts_path = self.artifacts_path.expect("artifacts path to be set"); + let queue_client = self.queue_client.expect("a queue client to be set"); + let runtime_client = self.runtime_client.expect("a runtime client to be set"); + let (queue_send, queue_recv) = mpsc::channel(QUEUE_BUFFER_SIZE); let (run_send, run_recv) = mpsc::channel(RUN_BUFFER_SIZE); + let (kill_send, _) = broadcast::channel(KILL_BUFFER_SIZE); + let storage_manager = StorageManager::new(artifacts_path); let run_send_clone = run_send.clone(); @@ -111,21 +102,86 @@ impl Pipeline { run_send_clone, build_log_recorder, secret_recorder, - artifacts_path.clone(), + storage_manager.clone(), + queue_client, )); tokio::spawn(run::task( run_recv, runtime_client, - kill_send, + kill_send.clone(), active_deployment_getter, - artifacts_path, + storage_manager.clone(), )); - Pipeline { + DeploymentManager { queue_send, run_send, + kill_send, + storage_manager, + } + } +} + +#[derive(Clone)] +pub struct DeploymentManager { + queue_send: QueueSender, + run_send: RunSender, + kill_send: KillSender, + storage_manager: StorageManager, +} + +/// ```no-test +/// queue channel all deployments here are State::Queued until the get a slot from gateway +/// | +/// v +/// queue task when taken from the channel by this task, deployments +/// enter the State::Building state and upon being +/// | built transition to the State::Built state +/// v +/// run channel all deployments here are State::Built +/// | +/// v +/// run task tasks enter the State::Running state and begin +/// executing +/// ``` +impl DeploymentManager { + /// Create a new deployment manager. Manages one or more 'pipelines' for + /// processing service building, loading, and deployment. 
+ pub fn builder() -> DeploymentManagerBuilder { + DeploymentManagerBuilder { + build_log_recorder: None, + secret_recorder: None, + active_deployment_getter: None, + artifacts_path: None, + runtime_client: None, + queue_client: None, + } + } + + pub async fn queue_push(&self, mut queued: Queued) { + let cx = Span::current().context(); + + opentelemetry::global::get_text_map_propagator(|propagator| { + propagator.inject_context(&cx, &mut queued.tracing_context); + }); + + self.queue_send.send(queued).await.unwrap(); + } + + #[instrument(skip(self), fields(id = %built.id, state = %State::Built))] + pub async fn run_push(&self, built: Built) { + self.run_send.send(built).await.unwrap(); + } + + pub async fn kill(&self, id: Uuid) { + if self.kill_send.receiver_count() > 0 { + self.kill_send.send(id).unwrap(); } } + + pub fn storage_manager(&self) -> StorageManager { + self.storage_manager.clone() + } } type QueueSender = mpsc::Sender; diff --git a/deployer/src/deployment/queue.rs b/deployer/src/deployment/queue.rs index 7b1222451..c5ae79822 100644 --- a/deployer/src/deployment/queue.rs +++ b/deployer/src/deployment/queue.rs @@ -1,29 +1,33 @@ use super::deploy_layer::{Log, LogRecorder, LogType}; +use super::gateway_client::BuildQueueClient; use super::{Built, QueueReceiver, RunSender, State}; use crate::error::{Error, Result, TestError}; use crate::persistence::{LogLevel, SecretRecorder}; +use shuttle_common::storage_manager::StorageManager; +use cargo::util::interning::InternedString; use cargo_metadata::Message; use chrono::Utc; use crossbeam_channel::Sender; +use opentelemetry::global; use serde_json::json; use shuttle_service::loader::{build_crate, get_config, Runtime}; -use tracing::{debug, error, info, instrument, trace}; +use tokio::time::{sleep, timeout}; +use tracing::{debug, debug_span, error, info, instrument, trace, warn, Instrument, Span}; +use tracing_opentelemetry::OpenTelemetrySpanExt; use uuid::Uuid; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashMap}; use std::fmt; use std::fs::remove_file; use std::io::Read; use std::path::{Path, PathBuf}; -use std::pin::Pin; +use std::time::Duration; -use bytes::{BufMut, Bytes}; use cargo::core::compiler::{CompileMode, MessageFormat}; use cargo::core::Workspace; use cargo::ops::{CompileOptions, TestOptions}; use flate2::read::GzDecoder; -use futures::{Stream, StreamExt}; use tar::Archive; use tokio::fs; @@ -32,23 +36,11 @@ pub async fn task( run_send: RunSender, log_recorder: impl LogRecorder, secret_recorder: impl SecretRecorder, - artifacts_path: PathBuf, + storage_manager: StorageManager, + queue_client: impl BuildQueueClient, ) { info!("Queue task started"); - // Path of the directory that contains extracted service Cargo projects. - let builds_path = artifacts_path.join("shuttle-builds"); - - // The directory in which compiled '.so' files are stored. 
- let libs_path = artifacts_path.join("shuttle-libs"); - - fs::create_dir_all(&builds_path) - .await - .expect("could not create builds directory"); - fs::create_dir_all(&libs_path) - .await - .expect("could not create libs directory"); - while let Some(queued) = recv.recv().await { let id = queued.id; @@ -57,31 +49,86 @@ pub async fn task( let run_send_cloned = run_send.clone(); let log_recorder = log_recorder.clone(); let secret_recorder = secret_recorder.clone(); - let builds_path = builds_path.clone(); - let libs_path = libs_path.clone(); + let storage_manager = storage_manager.clone(); + let queue_client = queue_client.clone(); tokio::spawn(async move { - match queued - .handle(builds_path, libs_path, log_recorder, secret_recorder) + let parent_cx = global::get_text_map_propagator(|propagator| { + propagator.extract(&queued.tracing_context) + }); + let span = debug_span!("builder"); + span.set_parent(parent_cx); + + async move { + match timeout( + Duration::from_secs(60 * 5), // Timeout after 5 minutes if the build queue hangs or it takes too long for a slot to become available + wait_for_queue(queue_client.clone(), id), + ) .await - { - Ok(built) => promote_to_run(built, run_send_cloned).await, - Err(err) => build_failed(&id, err), + { + Ok(_) => {} + Err(err) => return build_failed(&id, err), + } + + match queued + .handle(storage_manager, log_recorder, secret_recorder) + .await + { + Ok(built) => promote_to_run(built, run_send_cloned).await, + Err(err) => build_failed(&id, err), + } + + remove_from_queue(queue_client, id).await } + .instrument(span) + .await }); } } -#[instrument(fields(id = %_id, state = %State::Crashed))] -fn build_failed(_id: &Uuid, err: impl std::error::Error + 'static) { +#[instrument(skip(_id), fields(id = %_id, state = %State::Crashed))] +fn build_failed(_id: &Uuid, error: impl std::error::Error + 'static) { error!( - error = &err as &dyn std::error::Error, + error = &error as &dyn std::error::Error, "service build encountered an error" ); } +#[instrument(skip(queue_client), fields(state = %State::Queued))] +async fn wait_for_queue(queue_client: impl BuildQueueClient, id: Uuid) -> Result<()> { + loop { + let got_slot = queue_client.get_slot(id).await?; + + if got_slot { + break; + } + + info!("The build queue is currently full..."); + + sleep(Duration::from_secs(1)).await; + } + + Ok(()) +} + +async fn remove_from_queue(queue_client: impl BuildQueueClient, id: Uuid) { + match queue_client.release_slot(id).await { + Ok(_) => {} + Err(error) => warn!( + error = &error as &dyn std::error::Error, + "could not release build slot" + ), + } +} + #[instrument(fields(id = %built.id, state = %State::Built))] -async fn promote_to_run(built: Built, run_send: RunSender) { +async fn promote_to_run(mut built: Built, run_send: RunSender) { + let cx = Span::current().context(); + + opentelemetry::global::get_text_map_propagator(|propagator| { + propagator.inject_context(&cx, &mut built.tracing_context); + }); + if let Err(err) = run_send.send(built.clone()).await { build_failed(&built.id, err); } @@ -91,29 +138,24 @@ pub struct Queued { pub id: Uuid, pub service_name: String, pub service_id: Uuid, - pub data_stream: Pin> + Send + Sync>>, + pub data: Vec, pub will_run_tests: bool, + pub tracing_context: HashMap, } impl Queued { - #[instrument(name = "queued_handle", skip(self, builds_path, libs_path, log_recorder, secret_recorder), fields(id = %self.id, state = %State::Building))] + #[instrument(skip(self, storage_manager, log_recorder, secret_recorder), fields(id = %self.id, 
state = %State::Building))] async fn handle( self, - builds_path: PathBuf, - libs_path: PathBuf, + storage_manager: StorageManager, log_recorder: impl LogRecorder, secret_recorder: impl SecretRecorder, ) -> Result { - info!("Fetching POSTed data"); - - let vec = extract_stream(self.data_stream).await?; - info!("Extracting received data"); - let project_path = builds_path.join(&self.service_name); - fs::create_dir_all(project_path.clone()).await?; + let project_path = storage_manager.service_build_path(&self.service_name)?; - extract_tar_gz_data(vec.as_slice(), &project_path)?; + extract_tar_gz_data(self.data.as_slice(), &project_path).await?; let secrets = get_secrets(&project_path).await?; set_secrets(secrets, &self.service_id, secret_recorder).await?; @@ -122,7 +164,7 @@ impl Queued { let (tx, rx): (crossbeam_channel::Sender, _) = crossbeam_channel::bounded(0); let id = self.id; - tokio::spawn(async move { + tokio::task::spawn_blocking(move || { while let Ok(message) = rx.recv() { trace!(?message, "received cargo message"); // TODO: change these to `info!(...)` as [valuable] support increases. @@ -171,12 +213,13 @@ impl Queued { info!("Moving built library"); - store_lib(libs_path, so_path, &self.id).await?; + store_lib(&storage_manager, so_path, &self.id).await?; let built = Built { id: self.id, service_name: self.service_name, service_id: self.service_id, + tracing_context: Default::default(), }; Ok(built) @@ -229,27 +272,28 @@ async fn set_secrets( Ok(()) } -#[instrument(skip(data_stream))] -async fn extract_stream( - mut data_stream: Pin> + Send + Sync>>, -) -> Result> { - let mut vec = Vec::new(); - while let Some(buf) = data_stream.next().await { - let buf = buf?; - debug!("Received {} bytes", buf.len()); - vec.put(buf); - } - - Ok(vec) -} - /// Equivalent to the command: `tar -xzf --strip-components 1` #[instrument(skip(data, dest))] -fn extract_tar_gz_data(data: impl Read, dest: impl AsRef) -> Result<()> { +async fn extract_tar_gz_data(data: impl Read, dest: impl AsRef) -> Result<()> { let tar = GzDecoder::new(data); let mut archive = Archive::new(tar); archive.set_overwrite(true); + // Clear directory first + let mut entries = fs::read_dir(&dest).await?; + while let Some(entry) = entries.next_entry().await? { + // Ignore the build cache directory + if ["target", "Cargo.lock"].contains(&entry.file_name().to_string_lossy().as_ref()) { + continue; + } + + if entry.metadata().await?.is_dir() { + fs::remove_dir_all(entry.path()).await?; + } else { + fs::remove_file(entry.path()).await?; + } + } + for entry in archive.entries()? 
{ let mut entry = entry?; let path: PathBuf = entry.path()?.components().skip(1).collect(); @@ -286,36 +330,8 @@ async fn run_pre_deploy_tests( let (read, write) = pipe::pipe(); let project_path = project_path.to_owned(); - let handle = tokio::spawn(async move { - let config = get_config(write)?; - let manifest_path = project_path.join("Cargo.toml"); - - let ws = Workspace::new(&manifest_path, &config)?; - - let mut compile_opts = CompileOptions::new(&config, CompileMode::Test)?; - - compile_opts.build_config.message_format = MessageFormat::Json { - render_diagnostics: false, - short: false, - ansi: false, - }; - - let opts = TestOptions { - compile_opts, - no_run: false, - no_fail_fast: false, - }; - - let test_failures = cargo::ops::run_tests(&ws, &opts, &[])?; - - match test_failures { - Some(failures) => Err(failures.into()), - None => Ok(()), - } - }); - // This needs to be on a separate thread, else deployer will block (reason currently unknown :D) - tokio::spawn(async move { + tokio::task::spawn_blocking(move || { for message in Message::parse_stream(read) { match message { Ok(message) => { @@ -330,17 +346,49 @@ async fn run_pre_deploy_tests( } }); - handle.await? + let config = get_config(write)?; + let manifest_path = project_path.join("Cargo.toml"); + + let ws = Workspace::new(&manifest_path, &config)?; + + let mut compile_opts = CompileOptions::new(&config, CompileMode::Test)?; + + compile_opts.build_config.message_format = MessageFormat::Json { + render_diagnostics: false, + short: false, + ansi: false, + }; + + // We set the tests to build with the release profile since deployments compile + // with the release profile by default. This means crates don't need to be + // recompiled in debug mode for the tests, reducing memory usage during deployment. + compile_opts.build_config.requested_profile = InternedString::new("release"); + + // Build tests with a maximum of 4 workers. 
+ compile_opts.build_config.jobs = 4; + + let opts = TestOptions { + compile_opts, + no_run: false, + no_fail_fast: false, + }; + + let test_failures = cargo::ops::run_tests(&ws, &opts, &[])?; + + match test_failures { + Some(failures) => Err(failures.into()), + None => Ok(()), + } } /// Store 'so' file in the libs folder -#[instrument(skip(storage_dir_path, so_path, id))] +#[instrument(skip(storage_manager, so_path, id))] async fn store_lib( - storage_dir_path: impl AsRef, + storage_manager: &StorageManager, so_path: impl AsRef, id: &Uuid, ) -> Result<()> { - let new_so_path = storage_dir_path.as_ref().join(id.to_string()); + let new_so_path = storage_manager.deployment_library_path(id)?; fs::rename(so_path, new_so_path).await?; @@ -351,6 +399,7 @@ async fn store_lib( mod tests { use std::{collections::BTreeMap, fs::File, io::Write, path::Path}; + use shuttle_common::storage_manager::StorageManager; use tempdir::TempDir; use tokio::fs; use uuid::Uuid; @@ -362,6 +411,37 @@ mod tests { let dir = TempDir::new("shuttle-extraction-test").unwrap(); let p = dir.path(); + // Files whose content should be replaced with the archive + fs::write(p.join("world.txt"), b"original text") + .await + .unwrap(); + + // Extra files that should be deleted + fs::write( + p.join("extra.txt"), + b"extra file at top level that should be deleted", + ) + .await + .unwrap(); + fs::create_dir_all(p.join("subdir")).await.unwrap(); + fs::write( + p.join("subdir/extra.txt"), + b"extra file in subdir that should be deleted", + ) + .await + .unwrap(); + + // Build cache in `/target` should not be cleared/deleted + fs::create_dir_all(p.join("target")).await.unwrap(); + fs::write(p.join("target/asset.txt"), b"some file in the build cache") + .await + .unwrap(); + + // Cargo.lock file shouldn't be deleted + fs::write(p.join("Cargo.lock"), "lock file contents shouldn't matter") + .await + .unwrap(); + // Binary data for an archive in the following form: // // - temp @@ -380,7 +460,9 @@ ff0e55bda1ff01000000000000000000e0079c01ff12a55500280000", ) .unwrap(); - super::extract_tar_gz_data(test_data.as_slice(), p).unwrap(); + super::extract_tar_gz_data(test_data.as_slice(), &p) + .await + .unwrap(); assert!(fs::read_to_string(p.join("world.txt")) .await .unwrap() @@ -390,8 +472,38 @@ ff0e55bda1ff01000000000000000000e0079c01ff12a55500280000", .unwrap() .starts_with("def")); + assert_eq!( + fs::metadata(p.join("extra.txt")).await.unwrap_err().kind(), + std::io::ErrorKind::NotFound, + "extra file should be deleted" + ); + assert_eq!( + fs::metadata(p.join("subdir/extra.txt")) + .await + .unwrap_err() + .kind(), + std::io::ErrorKind::NotFound, + "extra file in subdir should be deleted" + ); + + assert_eq!( + fs::read_to_string(p.join("target/asset.txt")) + .await + .unwrap(), + "some file in the build cache", + "build cache file should not be touched" + ); + + assert_eq!( + fs::read_to_string(p.join("Cargo.lock")).await.unwrap(), + "lock file contents shouldn't matter", + "Cargo lock file should not be touched" + ); + // Can we extract again without error? 
- super::extract_tar_gz_data(test_data.as_slice(), p).unwrap(); + super::extract_tar_gz_data(test_data.as_slice(), &p) + .await + .unwrap(); } #[tokio::test(flavor = "multi_thread")] @@ -399,7 +511,7 @@ ff0e55bda1ff01000000000000000000e0079c01ff12a55500280000", let root = Path::new(env!("CARGO_MANIFEST_DIR")); let (tx, rx) = crossbeam_channel::unbounded(); - tokio::spawn(async move { while rx.recv().is_ok() {} }); + tokio::task::spawn_blocking(move || while rx.recv().is_ok() {}); let failure_project_path = root.join("tests/resources/tests-fail"); assert!(matches!( @@ -417,22 +529,24 @@ ff0e55bda1ff01000000000000000000e0079c01ff12a55500280000", async fn store_lib() { let libs_dir = TempDir::new("lib-store").unwrap(); let libs_p = libs_dir.path(); + let storage_manager = StorageManager::new(libs_p.to_path_buf()); - let build_dir = TempDir::new("build-store").unwrap(); - let build_p = build_dir.path(); + let build_p = storage_manager.builds_path().unwrap(); let so_path = build_p.join("xyz.so"); let id = Uuid::new_v4(); fs::write(&so_path, "barfoo").await.unwrap(); - super::store_lib(&libs_p, &so_path, &id).await.unwrap(); + super::store_lib(&storage_manager, &so_path, &id) + .await + .unwrap(); // Old '.so' file gone? assert!(!so_path.exists()); assert_eq!( - fs::read_to_string(libs_p.join(id.to_string())) + fs::read_to_string(libs_p.join("shuttle-libs").join(id.to_string())) .await .unwrap(), "barfoo" diff --git a/deployer/src/deployment/run.rs b/deployer/src/deployment/run.rs index cf35d4e33..4837e882f 100644 --- a/deployer/src/deployment/run.rs +++ b/deployer/src/deployment/run.rs @@ -1,16 +1,20 @@ use std::{ + collections::HashMap, net::{Ipv4Addr, SocketAddr}, path::PathBuf, str::FromStr, }; use async_trait::async_trait; +use opentelemetry::global; use shuttle_common::project::ProjectName as ServiceName; +use shuttle_common::storage_manager::StorageManager; use shuttle_proto::runtime::{runtime_client::RuntimeClient, LoadRequest, StartRequest}; use tokio::task::JoinError; use tonic::transport::Channel; -use tracing::{error, info, instrument, trace}; +use tracing::{debug_span, error, info, instrument, trace, Instrument}; +use tracing_opentelemetry::OpenTelemetrySpanExt; use uuid::Uuid; use super::{KillReceiver, KillSender, RunReceiver, State}; @@ -24,13 +28,10 @@ pub async fn task( runtime_client: RuntimeClient, kill_send: KillSender, active_deployment_getter: impl ActiveDeploymentsGetter, - artifacts_path: PathBuf, + storage_manager: StorageManager, ) { info!("Run task started"); - // The directory in which compiled '.so' files are stored. 
- let libs_path = artifacts_path.join("shuttle-libs"); - while let Some(built) = recv.recv().await { let id = built.id; @@ -38,6 +39,7 @@ pub async fn task( let kill_send = kill_send.clone(); let kill_recv = kill_send.subscribe(); + let storage_manager = storage_manager.clone(); // todo: this is the port the legacy runtime is hardcoded to start services on let port = 7001; @@ -68,26 +70,34 @@ pub async fn task( Err(err) if err.is_cancelled() => stopped_cleanup(&id), Err(err) => start_crashed_cleanup(&id, err), }; - - let libs_path = libs_path.clone(); let runtime_client = runtime_client.clone(); tokio::spawn(async move { - if let Err(err) = built - .handle( - addr, - libs_path, - runtime_client, - kill_recv, - old_deployments_killer, - cleanup, - ) - .await - { - start_crashed_cleanup(&id, err) + let parent_cx = global::get_text_map_propagator(|propagator| { + propagator.extract(&built.tracing_context) + }); + let span = debug_span!("runner"); + span.set_parent(parent_cx); + + async move { + if let Err(err) = built + .handle( + addr, + storage_manager, + runtime_client, + kill_recv, + old_deployments_killer, + cleanup, + ) + .await + { + start_crashed_cleanup(&id, err) + } + + info!("deployment done"); } - - info!("deployment done"); + .instrument(span) + .await }); } } @@ -116,28 +126,28 @@ async fn kill_old_deployments( Ok(()) } -#[instrument(fields(id = %_id, state = %State::Completed))] +#[instrument(skip(_id), fields(id = %_id, state = %State::Completed))] fn completed_cleanup(_id: &Uuid) { info!("service finished all on its own"); } -#[instrument(fields(id = %_id, state = %State::Stopped))] +#[instrument(skip(_id), fields(id = %_id, state = %State::Stopped))] fn stopped_cleanup(_id: &Uuid) { info!("service was stopped by the user"); } -#[instrument(fields(id = %_id, state = %State::Crashed))] -fn crashed_cleanup(_id: &Uuid, err: impl std::error::Error + 'static) { +#[instrument(skip(_id), fields(id = %_id, state = %State::Crashed))] +fn crashed_cleanup(_id: &Uuid, error: impl std::error::Error + 'static) { error!( - error = &err as &dyn std::error::Error, + error = &error as &dyn std::error::Error, "service encountered an error" ); } -#[instrument(fields(id = %_id, state = %State::Crashed))] -fn start_crashed_cleanup(_id: &Uuid, err: impl std::error::Error + 'static) { +#[instrument(skip(_id), fields(id = %_id, state = %State::Crashed))] +fn start_crashed_cleanup(_id: &Uuid, error: impl std::error::Error + 'static) { error!( - error = &err as &dyn std::error::Error, + error = &error as &dyn std::error::Error, "service startup encountered an error" ); } @@ -157,15 +167,16 @@ pub struct Built { pub id: Uuid, pub service_name: String, pub service_id: Uuid, + pub tracing_context: HashMap, } impl Built { - #[instrument(name = "built_handle", skip(self, libs_path, runtime_client, kill_recv, kill_old_deployments, cleanup), fields(id = %self.id, state = %State::Loading))] + #[instrument(skip(self, storage_manager, runtime_client, kill_recv, kill_old_deployments, cleanup), fields(id = %self.id, state = %State::Loading))] #[allow(clippy::too_many_arguments)] async fn handle( self, address: SocketAddr, - libs_path: PathBuf, + storage_manager: StorageManager, runtime_client: RuntimeClient, kill_recv: KillReceiver, kill_old_deployments: impl futures::Future>, @@ -173,7 +184,8 @@ impl Built { + Send + 'static, ) -> Result<()> { - // todo: refactor this? 
+ let so_path = storage_manager.deployment_library_path(&self.id)?; + kill_old_deployments.await?; info!("got handle for deployment"); @@ -181,7 +193,7 @@ impl Built { tokio::spawn(run( self.id, self.service_name, - libs_path, + so_path, runtime_client, address, kill_recv, @@ -196,7 +208,7 @@ impl Built { async fn run( id: Uuid, service_name: String, - libs_path: PathBuf, + so_path: PathBuf, mut runtime_client: RuntimeClient, _address: SocketAddr, _kill_recv: KillReceiver, @@ -206,10 +218,9 @@ async fn run( ) { info!( "loading project from: {}", - libs_path.clone().into_os_string().into_string().unwrap() + so_path.clone().into_os_string().into_string().unwrap() ); - let so_path = libs_path.join(id.to_string()); let load_request = tonic::Request::new(LoadRequest { path: so_path.into_os_string().into_string().unwrap(), service_name: service_name.clone(), @@ -235,14 +246,15 @@ async fn run( #[cfg(test)] mod tests { use std::{ - fs, net::{Ipv4Addr, SocketAddr}, path::PathBuf, process::Command, time::Duration, }; + use shuttle_common::storage_manager::StorageManager; use shuttle_proto::runtime::runtime_client::RuntimeClient; + use tempdir::TempDir; use tokio::{ sync::{broadcast, oneshot}, task::JoinError, @@ -256,7 +268,13 @@ mod tests { use super::Built; const RESOURCES_PATH: &str = "tests/resources"; - const LIBS_PATH: &str = "/tmp/shuttle-libs-tests"; + + fn get_storage_manager() -> StorageManager { + let tmp_dir = TempDir::new("shuttle_run_test").unwrap(); + let path = tmp_dir.into_path(); + + StorageManager::new(path) + } async fn kill_old_deployments() -> crate::error::Result<()> { Ok(()) @@ -271,7 +289,7 @@ mod tests { // This test uses the kill signal to make sure a service does stop when asked to #[tokio::test] async fn can_be_killed() { - let built = make_so_and_built("sleep-async"); + let (built, storage_manager) = make_so_and_built("sleep-async"); let id = built.id; let (kill_send, kill_recv) = broadcast::channel(1); let (cleanup_send, cleanup_recv) = oneshot::channel(); @@ -292,7 +310,7 @@ mod tests { built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, get_runtime_client().await, kill_recv, kill_old_deployments(), @@ -316,7 +334,7 @@ mod tests { // This test does not use a kill signal to stop the service. 
Rather the service decided to stop on its own without errors #[tokio::test] async fn self_stop() { - let built = make_so_and_built("sleep-async"); + let (built, storage_manager) = make_so_and_built("sleep-async"); let (_kill_send, kill_recv) = broadcast::channel(1); let (cleanup_send, cleanup_recv) = oneshot::channel(); @@ -337,7 +355,7 @@ mod tests { built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, get_runtime_client().await, kill_recv, kill_old_deployments(), @@ -355,7 +373,7 @@ mod tests { // Test for panics in Service::bind #[tokio::test] async fn panic_in_bind() { - let built = make_so_and_built("bind-panic"); + let (built, storage_manager) = make_so_and_built("bind-panic"); let (_kill_send, kill_recv) = broadcast::channel(1); let (cleanup_send, cleanup_recv): (oneshot::Sender<()>, _) = oneshot::channel(); @@ -376,7 +394,7 @@ mod tests { built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, get_runtime_client().await, kill_recv, kill_old_deployments(), @@ -394,7 +412,7 @@ mod tests { // Test for panics in the main function #[tokio::test] async fn panic_in_main() { - let built = make_so_and_built("main-panic"); + let (built, storage_manager) = make_so_and_built("main-panic"); let (_kill_send, kill_recv) = broadcast::channel(1); let handle_cleanup = |_result| panic!("the service shouldn't even start"); @@ -403,7 +421,7 @@ mod tests { let result = built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, get_runtime_client().await, kill_recv, kill_old_deployments(), @@ -424,16 +442,18 @@ mod tests { id: Uuid::new_v4(), service_name: "test".to_string(), service_id: Uuid::new_v4(), + tracing_context: Default::default(), }; let (_kill_send, kill_recv) = broadcast::channel(1); let handle_cleanup = |_result| panic!("no service means no cleanup"); let addr = SocketAddr::new(Ipv4Addr::LOCALHOST.into(), 8001); + let storage_manager = get_storage_manager(); let result = built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, get_runtime_client().await, kill_recv, kill_old_deployments(), @@ -451,7 +471,7 @@ mod tests { ); } - fn make_so_and_built(crate_name: &str) -> Built { + fn make_so_and_built(crate_name: &str) -> (Built, StorageManager) { let crate_dir: PathBuf = [RESOURCES_PATH, crate_name].iter().collect(); Command::new("cargo") @@ -472,17 +492,19 @@ mod tests { let id = Uuid::new_v4(); let so_path = crate_dir.join("target/release").join(lib_name); - let libs_path = PathBuf::from(LIBS_PATH); - fs::create_dir_all(&libs_path).unwrap(); - - let new_so_path = libs_path.join(id.to_string()); + let storage_manager = get_storage_manager(); + let new_so_path = storage_manager.deployment_library_path(&id).unwrap(); std::fs::copy(so_path, new_so_path).unwrap(); - Built { - id, - service_name: crate_name.to_string(), - service_id: Uuid::new_v4(), - } + ( + Built { + id, + service_name: crate_name.to_string(), + service_id: Uuid::new_v4(), + tracing_context: Default::default(), + }, + storage_manager, + ) } } diff --git a/deployer/src/error.rs b/deployer/src/error.rs index f7d3ecd45..e01766f1f 100644 --- a/deployer/src/error.rs +++ b/deployer/src/error.rs @@ -6,10 +6,10 @@ use shuttle_service::loader::LoaderError; use cargo::util::errors::CargoTestError; +use crate::deployment::gateway_client; + #[derive(Error, Debug)] pub enum Error { - #[error("Streaming error: {0}")] - Streaming(#[source] axum::Error), #[error("Internal I/O error: {0}")] InputOutput(#[from] io::Error), #[error("Build error: {0}")] @@ -26,6 +26,8 @@ pub enum Error { 
SecretsSet(#[source] Box), #[error("Failed to cleanup old deployments: {0}")] OldCleanup(#[source] Box), + #[error("Gateway client error: {0}")] + GatewayClient(#[from] gateway_client::Error), } #[derive(Error, Debug)] diff --git a/deployer/src/handlers/error.rs b/deployer/src/handlers/error.rs index b09fa24d1..f0fb98a0a 100644 --- a/deployer/src/handlers/error.rs +++ b/deployer/src/handlers/error.rs @@ -9,6 +9,8 @@ use shuttle_common::models::error::ApiError; #[derive(thiserror::Error, Debug)] pub enum Error { + #[error("Streaming error: {0}")] + Streaming(#[from] axum::Error), #[error("Persistence failure: {0}")] Persistence(#[from] crate::persistence::PersistenceError), #[error("Failed to convert {from} to {to}")] @@ -19,6 +21,8 @@ pub enum Error { }, #[error("record could not be found")] NotFound, + #[error("Custom error: {0}")] + Custom(#[from] anyhow::Error), } impl Serialize for Error { diff --git a/deployer/src/handlers/mod.rs b/deployer/src/handlers/mod.rs index e3873838e..3f2a83298 100644 --- a/deployer/src/handlers/mod.rs +++ b/deployer/src/handlers/mod.rs @@ -2,18 +2,26 @@ mod error; use axum::body::{Body, BoxBody}; use axum::extract::ws::{self, WebSocket}; -use axum::extract::{Extension, Path, Query}; +use axum::extract::{Extension, MatchedPath, Path, Query}; use axum::http::{Request, Response}; -use axum::routing::{get, Router}; +use axum::middleware::from_extractor; +use axum::routing::{get, post, Router}; use axum::{extract::BodyStream, Json}; +use bytes::BufMut; use chrono::{TimeZone, Utc}; use fqdn::FQDN; -use futures::TryStreamExt; +use futures::StreamExt; +use opentelemetry::global; +use opentelemetry_http::HeaderExtractor; +use shuttle_common::backends::metrics::Metrics; use shuttle_common::models::secret; +use shuttle_common::project::ProjectName; use shuttle_common::LogItem; +use shuttle_service::loader::clean_crate; use tower_http::auth::RequireAuthorizationLayer; use tower_http::trace::TraceLayer; -use tracing::{debug, debug_span, error, field, trace, Span}; +use tracing::{debug, debug_span, error, field, instrument, trace, Span}; +use tracing_opentelemetry::OpenTelemetrySpanExt; use uuid::Uuid; use crate::deployment::{DeploymentManager, Queued}; @@ -24,19 +32,25 @@ use std::time::Duration; pub use {self::error::Error, self::error::Result}; +mod project; + pub fn make_router( persistence: Persistence, deployment_manager: DeploymentManager, proxy_fqdn: FQDN, admin_secret: String, -) -> Router { + project_name: ProjectName, +) -> Router { Router::new() .route("/projects/:project_name/services", get(list_services)) .route( "/projects/:project_name/services/:service_name", get(get_service).post(post_service).delete(delete_service), ) - .route("/projects/:project_name/services/:service_name/summary", get(get_service_summary)) + .route( + "/projects/:project_name/services/:service_name/summary", + get(get_service_summary), + ) .route( "/projects/:project_name/deployments/:deployment_id", get(get_deployment).delete(delete_deployment), @@ -45,31 +59,71 @@ pub fn make_router( "/projects/:project_name/ws/deployments/:deployment_id/logs", get(get_logs_subscribe), ) - .route("/projects/:project_name/deployments/:deployment_id/logs", get(get_logs)) + .route( + "/projects/:project_name/deployments/:deployment_id/logs", + get(get_logs), + ) .route( "/projects/:project_name/secrets/:service_name", get(get_secrets), ) + .route("/projects/:project_name/clean", post(post_clean)) .layer(Extension(persistence)) .layer(Extension(deployment_manager)) .layer(Extension(proxy_fqdn)) 
.layer(RequireAuthorizationLayer::bearer(&admin_secret)) // This route should be below the auth bearer since it does not need authentication .route("/projects/:project_name/status", get(get_status)) + .route_layer(from_extractor::()) .layer( TraceLayer::new_for_http() .make_span_with(|request: &Request| { - debug_span!("request", http.uri = %request.uri(), http.method = %request.method(), http.status_code = field::Empty, api_key = field::Empty) + let path = if let Some(path) = request.extensions().get::() { + path.as_str() + } else { + "" + }; + + let account_name = request + .headers() + .get("X-Shuttle-Account-Name") + .map(|value| value.to_str().unwrap_or_default()); + + let span = debug_span!( + "request", + http.uri = %request.uri(), + http.method = %request.method(), + http.status_code = field::Empty, + account.name = account_name, + // A bunch of extra things for metrics + // Should be able to make this clearer once `Valuable` support lands in tracing + request.path = path, + request.params.project_name = field::Empty, + request.params.service_name = field::Empty, + request.params.deployment_id = field::Empty, + ); + let parent_context = global::get_text_map_propagator(|propagator| { + propagator.extract(&HeaderExtractor(request.headers())) + }); + span.set_parent(parent_context); + + span }) .on_response( |response: &Response, latency: Duration, span: &Span| { span.record("http.status_code", response.status().as_u16()); - debug!(latency = format_args!("{} ns", latency.as_nanos()), "finished processing request"); + debug!( + latency = format_args!("{} ns", latency.as_nanos()), + "finished processing request" + ); }, ), ) + .route_layer(from_extractor::()) + .layer(Extension(project_name)) } +#[instrument(skip_all)] async fn list_services( Extension(persistence): Extension, ) -> Result>> { @@ -83,9 +137,10 @@ async fn list_services( Ok(Json(services)) } +#[instrument(skip(persistence))] async fn get_service( Extension(persistence): Extension, - Path((_project_name, service_name)): Path<(String, String)>, + Path((project_name, service_name)): Path<(String, String)>, ) -> Result> { if let Some(service) = persistence.get_service_by_name(&service_name).await? 
{ let deployments = persistence @@ -120,6 +175,7 @@ async fn get_service( } } +#[instrument(skip_all, fields(%project_name, %service_name))] async fn get_service_summary( Extension(persistence): Extension, Extension(proxy_fqdn): Extension, @@ -138,7 +194,7 @@ async fn get_service_summary( .collect(); let response = shuttle_common::models::service::Summary { - uri: format!("https://{}.{proxy_fqdn}", project_name), + uri: format!("https://{proxy_fqdn}"), name: service.name, deployment, resources, @@ -150,12 +206,13 @@ async fn get_service_summary( } } +#[instrument(skip_all, fields(%project_name, %service_name))] async fn post_service( Extension(persistence): Extension, Extension(deployment_manager): Extension, - Path((_project_name, service_name)): Path<(String, String)>, + Path((project_name, service_name)): Path<(String, String)>, Query(params): Query>, - stream: BodyStream, + mut stream: BodyStream, ) -> Result> { let service = persistence.get_or_create_service(&service_name).await?; let id = Uuid::new_v4(); @@ -168,14 +225,23 @@ async fn post_service( address: None, }; + let mut data = Vec::new(); + while let Some(buf) = stream.next().await { + let buf = buf?; + debug!("Received {} bytes", buf.len()); + data.put(buf); + } + debug!("Received a total of {} bytes", data.len()); + persistence.insert_deployment(deployment.clone()).await?; let queued = Queued { id, service_name: service.name, service_id: service.id, - data_stream: Box::pin(stream.map_err(crate::error::Error::Streaming)), + data, will_run_tests: !params.contains_key("no-test"), + tracing_context: Default::default(), }; deployment_manager.queue_push(queued).await; @@ -183,10 +249,11 @@ async fn post_service( Ok(Json(deployment.into())) } +#[instrument(skip_all, fields(%project_name, %service_name))] async fn delete_service( Extension(persistence): Extension, Extension(deployment_manager): Extension, - Path((_project_name, service_name)): Path<(String, String)>, + Path((project_name, service_name)): Path<(String, String)>, ) -> Result> { if let Some(service) = persistence.get_service_by_name(&service_name).await? { let old_deployments = persistence @@ -225,9 +292,10 @@ async fn delete_service( } } +#[instrument(skip_all, fields(%project_name, %deployment_id))] async fn get_deployment( Extension(persistence): Extension, - Path((_project_name, deployment_id)): Path<(String, Uuid)>, + Path((project_name, deployment_id)): Path<(String, Uuid)>, ) -> Result> { if let Some(deployment) = persistence.get_deployment(&deployment_id).await? { Ok(Json(deployment.into())) @@ -236,10 +304,11 @@ async fn get_deployment( } } +#[instrument(skip_all, fields(%project_name, %deployment_id))] async fn delete_deployment( Extension(deployment_manager): Extension, Extension(persistence): Extension, - Path((_project_name, deployment_id)): Path<(String, Uuid)>, + Path((project_name, deployment_id)): Path<(String, Uuid)>, ) -> Result> { if let Some(deployment) = persistence.get_deployment(&deployment_id).await? { deployment_manager.kill(deployment.id).await; @@ -250,9 +319,10 @@ async fn delete_deployment( } } +#[instrument(skip_all, fields(%project_name, %deployment_id))] async fn get_logs( Extension(persistence): Extension, - Path((_project_name, deployment_id)): Path<(String, Uuid)>, + Path((project_name, deployment_id)): Path<(String, Uuid)>, ) -> Result>> { if let Some(deployment) = persistence.get_deployment(&deployment_id).await? 
{ Ok(Json( @@ -293,7 +363,9 @@ async fn logs_websocket_handler(mut s: WebSocket, persistence: Persistence, id: return; } }; - let mut last_timestamp = Utc.timestamp(0, 0); + + // Unwrap is safe because it only returns None for out of range numbers or invalid nanosecond + let mut last_timestamp = Utc.timestamp_opt(0, 0).unwrap(); for log in backlog.into_iter() { last_timestamp = log.timestamp; @@ -327,9 +399,10 @@ async fn logs_websocket_handler(mut s: WebSocket, persistence: Persistence, id: let _ = s.close().await; } +#[instrument(skip_all, fields(%project_name, %service_name))] async fn get_secrets( Extension(persistence): Extension, - Path((_project_name, service_name)): Path<(String, String)>, + Path((project_name, service_name)): Path<(String, String)>, ) -> Result>> { if let Some(service) = persistence.get_service_by_name(&service_name).await? { let keys = persistence @@ -345,6 +418,20 @@ async fn get_secrets( } } +async fn post_clean( + Extension(deployment_manager): Extension, + Path(project_name): Path, +) -> Result>> { + let project_path = deployment_manager + .storage_manager() + .service_build_path(project_name) + .map_err(anyhow::Error::new)?; + + let lines = clean_crate(&project_path, true)?; + + Ok(Json(lines)) +} + async fn get_status() -> String { "Ok".to_string() } diff --git a/deployer/src/handlers/project.rs b/deployer/src/handlers/project.rs new file mode 100644 index 000000000..bafae003a --- /dev/null +++ b/deployer/src/handlers/project.rs @@ -0,0 +1,56 @@ +use std::collections::HashMap; + +use async_trait::async_trait; +use axum::extract::{Extension, FromRequestParts, Path}; +use axum::http::request::Parts; +use axum::RequestPartsExt; +use hyper::StatusCode; +use shuttle_common::project::ProjectName; +use tracing::error; + +/// Gaurd to ensure request are for the project served by this deployer +/// Note: this guard needs the `ProjectName` extension to be set +pub struct ProjectNameGuard; + +#[async_trait] +impl FromRequestParts for ProjectNameGuard +where + S: Send + Sync, +{ + type Rejection = StatusCode; + + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + // We expect some path parameters + let Path(path): Path> = + match Path::from_request_parts(parts, state).await { + Ok(path) => path, + Err(_) => return Err(StatusCode::NOT_FOUND), + }; + + // All our routes have the `project_name` parameter + let project_name = match path.get("project_name") { + Some(project_name) => project_name, + None => { + error!("ProjectNameGuard found no project name in path"); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + }; + + // This extractor requires the ProjectName extension to be set + let Extension(expected_project_name) = match parts.extract::>().await + { + Ok(expected) => expected, + Err(_) => { + error!("ProjectName extension is not set"); + return Err(StatusCode::INTERNAL_SERVER_ERROR); + } + }; + + if project_name == expected_project_name.as_str() { + Ok(ProjectNameGuard) + } else { + error!(project_name, "project is not served by this deployer"); + Err(StatusCode::BAD_REQUEST) + } + } +} diff --git a/deployer/src/lib.rs b/deployer/src/lib.rs index 16dcd9992..7ba32683d 100644 --- a/deployer/src/lib.rs +++ b/deployer/src/lib.rs @@ -14,6 +14,8 @@ use shuttle_proto::runtime::runtime_client::RuntimeClient; use tonic::transport::Channel; use tracing::{error, info}; +use crate::deployment::gateway_client::GatewayClient; + mod args; mod deployment; mod error; @@ -22,19 +24,23 @@ mod persistence; mod proxy; pub async fn start(persistence: 
Persistence, runtime_client: RuntimeClient, args: Args) { - let deployment_manager = DeploymentManager::new( - runtime_client, - persistence.clone(), - persistence.clone(), - persistence.clone(), - args.artifacts_path, - ); + let deployment_manager = DeploymentManager::builder() + .build_log_recorder(persistence.clone()) + .secret_recorder(persistence.clone()) + .active_deployment_getter(persistence.clone()) + .artifacts_path(args.artifacts_path) + .runtime(runtime_client) + .queue_client(GatewayClient::new(args.gateway_uri)) + .build(); - for existing_deployment in persistence.get_all_runnable_deployments().await.unwrap() { + let runnable_deployments = persistence.get_all_runnable_deployments().await.unwrap(); + info!(count = %runnable_deployments.len(), "enqueuing runnable deployments"); + for existing_deployment in runnable_deployments { let built = Built { id: existing_deployment.id, service_name: existing_deployment.service_name, service_id: existing_deployment.service_id, + tracing_context: Default::default(), }; deployment_manager.run_push(built).await; } @@ -44,9 +50,12 @@ pub async fn start(persistence: Persistence, runtime_client: RuntimeClient(service_fn(move |req| { diff --git a/deployer/src/main.rs b/deployer/src/main.rs index 0259843b8..14edba8f5 100644 --- a/deployer/src/main.rs +++ b/deployer/src/main.rs @@ -3,6 +3,7 @@ use std::process::exit; use std::time::Duration; use clap::Parser; +use opentelemetry::global; use shuttle_deployer::{start, start_proxy, Args, DeployLayer, Persistence}; use shuttle_proto::runtime::runtime_client::RuntimeClient; use shuttle_proto::runtime::SubscribeLogsRequest; @@ -20,6 +21,8 @@ async fn main() { trace!(args = ?args, "parsed args"); + global::set_text_map_propagator(opentelemetry_datadog::DatadogPropagator::new()); + let fmt_layer = fmt::layer(); let filter_layer = EnvFilter::try_from_default_env() .or_else(|_| EnvFilter::try_new("info")) @@ -62,11 +65,9 @@ async fn main() { info!("connecting runtime client"); let conn = Endpoint::new("http://127.0.0.1:6001") .unwrap() - .connect_timeout(Duration::from_secs(5)) - .connect() - .await - .unwrap(); - let mut runtime_client = RuntimeClient::new(conn); + .connect_timeout(Duration::from_secs(5)); + + let mut runtime_client = RuntimeClient::connect(conn).await.unwrap(); let sender = persistence.get_log_sender(); let mut stream = runtime_client diff --git a/deployer/src/persistence/log.rs b/deployer/src/persistence/log.rs index 822982bc7..7613d9b3d 100644 --- a/deployer/src/persistence/log.rs +++ b/deployer/src/persistence/log.rs @@ -100,15 +100,9 @@ fn extract_message(fields: &Value) -> Option { return Some(message.as_str()?.to_string()); } - if let Some(message) = map.get("message") { - match message { - Value::Object(message_object) => { - if let Some(rendered) = message_object.get("rendered") { - return Some(rendered.as_str()?.to_string()); - } - } - Value::String(mes_str) => return Some(mes_str.to_string()), - _ => {} + if let Some(Value::Object(message_object)) = map.get("message") { + if let Some(rendered) = message_object.get("rendered") { + return Some(rendered.as_str()?.to_string()); } } } diff --git a/deployer/src/persistence/mod.rs b/deployer/src/persistence/mod.rs index 9ae1d8fc9..1550759e8 100644 --- a/deployer/src/persistence/mod.rs +++ b/deployer/src/persistence/mod.rs @@ -77,7 +77,7 @@ impl Persistence { let (log_send, log_recv): (crossbeam_channel::Sender, _) = crossbeam_channel::bounded(0); - let (stream_log_send, _) = broadcast::channel(32); + let (stream_log_send, _) = 
broadcast::channel(1); let stream_log_send_clone = stream_log_send.clone(); let pool_cloned = pool.clone(); @@ -478,7 +478,7 @@ mod tests { id, service_id, state: State::Queued, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 43, 33), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 43, 33).unwrap(), address: None, }; @@ -498,7 +498,10 @@ mod tests { .unwrap(); let update = p.get_deployment(&id).await.unwrap().unwrap(); assert_eq!(update.state, State::Built); - assert_ne!(update.last_update, Utc.ymd(2022, 4, 25).and_hms(4, 43, 33)); + assert_ne!( + update.last_update, + Utc.with_ymd_and_hms(2022, 4, 25, 4, 43, 33).unwrap() + ); } #[tokio::test(flavor = "multi_thread")] @@ -512,28 +515,28 @@ mod tests { id: Uuid::new_v4(), service_id: xyz_id, state: State::Crashed, - last_update: Utc.ymd(2022, 4, 25).and_hms(7, 29, 35), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 7, 29, 35).unwrap(), address: None, }; let deployment_stopped = Deployment { id: Uuid::new_v4(), service_id: xyz_id, state: State::Stopped, - last_update: Utc.ymd(2022, 4, 25).and_hms(7, 49, 35), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 7, 49, 35).unwrap(), address: None, }; let deployment_other = Deployment { id: Uuid::new_v4(), service_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 25).and_hms(7, 39, 39), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 7, 39, 39).unwrap(), address: None, }; let deployment_running = Deployment { id: Uuid::new_v4(), service_id: xyz_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 25).and_hms(7, 48, 29), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 7, 48, 29).unwrap(), address: Some(SocketAddr::new(Ipv4Addr::LOCALHOST.into(), 9876)), }; @@ -570,35 +573,35 @@ mod tests { id: Uuid::new_v4(), service_id, state: State::Built, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 29, 33), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 29, 33).unwrap(), address: None, }, Deployment { id: id_1, service_id: foo_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 29, 44), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 29, 44).unwrap(), address: None, }, Deployment { id: id_2, service_id: bar_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 33, 48), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 33, 48).unwrap(), address: None, }, Deployment { id: Uuid::new_v4(), service_id: service_id2, state: State::Crashed, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 38, 52), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 38, 52).unwrap(), address: None, }, Deployment { id: id_3, service_id: foo_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 42, 32), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 42, 32).unwrap(), address: None, }, ] { @@ -790,14 +793,14 @@ mod tests { id, service_id, state: State::Queued, // Should be different from the state recorded below - last_update: Utc.ymd(2022, 4, 29).and_hms(2, 39, 39), + last_update: Utc.with_ymd_and_hms(2022, 4, 29, 2, 39, 39).unwrap(), address: None, }) .await .unwrap(); let state = deploy_layer::Log { id, - timestamp: Utc.ymd(2022, 4, 29).and_hms(2, 39, 59), + timestamp: Utc.with_ymd_and_hms(2022, 4, 29, 2, 39, 59).unwrap(), state: State::Running, level: Level::Info, file: None, @@ -830,7 +833,7 @@ mod tests { id, service_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 29).and_hms(2, 39, 59), + last_update: Utc.with_ymd_and_hms(2022, 4, 29, 2, 39, 59).unwrap(), address: Some(SocketAddr::new(Ipv4Addr::LOCALHOST.into(), 12345)), } 
); @@ -1006,35 +1009,35 @@ mod tests { id: Uuid::new_v4(), service_id, state: State::Built, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 29, 33), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 29, 33).unwrap(), address: None, }, Deployment { id: Uuid::new_v4(), service_id, state: State::Stopped, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 29, 44), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 29, 44).unwrap(), address: None, }, Deployment { id: id_1, service_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 33, 48), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 33, 48).unwrap(), address: None, }, Deployment { id: Uuid::new_v4(), service_id, state: State::Crashed, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 38, 52), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 38, 52).unwrap(), address: None, }, Deployment { id: id_2, service_id, state: State::Running, - last_update: Utc.ymd(2022, 4, 25).and_hms(4, 42, 32), + last_update: Utc.with_ymd_and_hms(2022, 4, 25, 4, 42, 32).unwrap(), address: None, }, ] { diff --git a/deployer/src/proxy.rs b/deployer/src/proxy.rs index 7a7dcf9f0..cbb216ac9 100644 --- a/deployer/src/proxy.rs +++ b/deployer/src/proxy.rs @@ -4,6 +4,7 @@ use std::{ }; use async_trait::async_trait; +use fqdn::FQDN; use hyper::{ client::{connect::dns::GaiResolver, HttpConnector}, header::{HeaderValue, HOST, SERVER}, @@ -11,7 +12,10 @@ use hyper::{ }; use hyper_reverse_proxy::{ProxyError, ReverseProxy}; use once_cell::sync::Lazy; +use opentelemetry::global; +use opentelemetry_http::HeaderExtractor; use tracing::{error, field, instrument, trace, Span}; +use tracing_opentelemetry::OpenTelemetrySpanExt; static PROXY_CLIENT: Lazy>> = Lazy::new(|| ReverseProxy::new(Client::new())); @@ -20,14 +24,25 @@ static SERVER_HEADER: Lazy = Lazy::new(|| "shuttle.rs".parse().unwr #[instrument(name = "proxy_request", skip(address_getter), fields(http.method = %req.method(), http.uri = %req.uri(), http.status_code = field::Empty, service = field::Empty))] pub async fn handle( remote_address: SocketAddr, - fqdn: String, + fqdn: FQDN, req: Request, address_getter: impl AddressGetter, ) -> Result, Infallible> { - let host = match req.headers().get(HOST) { - Some(host) => host.to_str().unwrap_or_default().to_owned(), + let span = Span::current(); + let parent_context = global::get_text_map_propagator(|propagator| { + propagator.extract(&HeaderExtractor(req.headers())) + }); + span.set_parent(parent_context); + + let host: FQDN = match req.headers().get(HOST) { + Some(host) => host + .to_str() + .unwrap_or_default() + .parse::() + .unwrap_or_default() + .to_owned(), None => { - trace!("proxy request has to host header"); + trace!("proxy request has no host header"); return Ok(Response::builder() .status(StatusCode::BAD_REQUEST) .body(Body::empty()) @@ -35,33 +50,42 @@ pub async fn handle( } }; - let service = match host.strip_suffix(&fqdn) { - Some(service) => service, + if host != fqdn { + trace!(?host, "proxy won't serve foreign domain"); + return Ok(Response::builder() + .status(StatusCode::BAD_REQUEST) + .body(Body::from("this domain is not served by proxy")) + .unwrap()); + } + // We only have one service per project, and its name coincides + // with that of the project + let service = match req.headers().get("X-Shuttle-Project") { + Some(project) => project.to_str().unwrap_or_default().to_owned(), None => { - trace!(host, "proxy won't serve foreign domain"); + trace!("proxy request has no X-Shuttle-Project header"); return Ok(Response::builder() 
.status(StatusCode::BAD_REQUEST) - .body(Body::from("this domain is not served by proxy")) + .body(Body::from("request has no X-Shuttle-Project header")) .unwrap()); } }; // Record current service for tracing purposes - Span::current().record("service", service); + span.record("service", &service); - let proxy_address = match address_getter.get_address_for_service(service).await { + let proxy_address = match address_getter.get_address_for_service(&service).await { Ok(Some(address)) => address, Ok(None) => { - trace!(host, "host not found on this server"); - let response_body = format!("could not find service for host: {}", host); + trace!(?host, service, "service not found on this server"); + let response_body = format!("could not find service: {}", service); return Ok(Response::builder() .status(StatusCode::NOT_FOUND) .body(response_body.into()) .unwrap()); } Err(err) => { - error!(error = %err, host, "proxy failed to find address for host"); + error!(error = %err, service, "proxy failed to find address for host"); let response_body = format!("failed to find service for host: {}", host); return Ok(Response::builder() diff --git a/deployer/tests/deploy_layer/bind-panic/Cargo.toml b/deployer/tests/deploy_layer/bind-panic/Cargo.toml index 156d730a6..3ef90aa3d 100644 --- a/deployer/tests/deploy_layer/bind-panic/Cargo.toml +++ b/deployer/tests/deploy_layer/bind-panic/Cargo.toml @@ -11,4 +11,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = "0.7.0" +shuttle-service = { version = "0.8.0", features = ["codegen"] } diff --git a/deployer/tests/deploy_layer/main-panic/Cargo.toml b/deployer/tests/deploy_layer/main-panic/Cargo.toml index 27c1c18cb..f6311505f 100644 --- a/deployer/tests/deploy_layer/main-panic/Cargo.toml +++ b/deployer/tests/deploy_layer/main-panic/Cargo.toml @@ -11,4 +11,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = "0.7.0" +shuttle-service = { version = "0.8.0", features = ["codegen"] } diff --git a/deployer/tests/deploy_layer/self-stop/Cargo.toml b/deployer/tests/deploy_layer/self-stop/Cargo.toml index 4a05ac9ba..2e80c3243 100644 --- a/deployer/tests/deploy_layer/self-stop/Cargo.toml +++ b/deployer/tests/deploy_layer/self-stop/Cargo.toml @@ -11,4 +11,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = "0.7.0" +shuttle-service = { version = "0.8.0", features = ["codegen"] } diff --git a/deployer/tests/deploy_layer/sleep-async/Cargo.toml b/deployer/tests/deploy_layer/sleep-async/Cargo.toml index b83c654da..8870d819b 100644 --- a/deployer/tests/deploy_layer/sleep-async/Cargo.toml +++ b/deployer/tests/deploy_layer/sleep-async/Cargo.toml @@ -12,4 +12,4 @@ crate-type = ["cdylib"] [dependencies] tokio = { version = "1.0", features = ["time"]} -shuttle-service = "0.7.0" +shuttle-service = { version = "0.8.0", features = ["codegen"] } diff --git a/deployer/tests/resources/bind-panic/Cargo.toml b/deployer/tests/resources/bind-panic/Cargo.toml index 0a721bd4a..807f8044a 100644 --- a/deployer/tests/resources/bind-panic/Cargo.toml +++ b/deployer/tests/resources/bind-panic/Cargo.toml @@ -11,4 +11,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = { path = "../../../../service" } +shuttle-service = { path = "../../../../service", features = ["codegen"] } diff --git a/deployer/tests/resources/main-panic/Cargo.toml b/deployer/tests/resources/main-panic/Cargo.toml index da7b5c841..76833b28c 100644 --- a/deployer/tests/resources/main-panic/Cargo.toml +++ b/deployer/tests/resources/main-panic/Cargo.toml 
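Two sketches may help make the proxy changes above concrete; neither is part of this diff. First, the request shape the reworked `handle` function now expects: the `Host` header has to equal the deployer's proxy FQDN exactly, and the `X-Shuttle-Project` header names the service to route to. The address, FQDN and project name below are made up, and in practice the header is presumably injected by the gateway rather than sent by end users.

```rust
use reqwest::blocking::Client;

fn main() -> Result<(), reqwest::Error> {
    let body = Client::new()
        .get("http://127.0.0.1:8000/hello")
        // Must match this deployer's --proxy-fqdn exactly, otherwise 400.
        .header("Host", "hello-world-rocket-app.unstable.shuttleapp.rs")
        // One service per project, so the service name coincides with the project name.
        .header("X-Shuttle-Project", "hello-world-rocket-app")
        .send()?
        .text()?;

    println!("{body}");
    Ok(())
}
```

Second, the counterpart to the `HeaderExtractor` call above: the new `tracing_context` fields on `Queued` and `Built` default to empty, and a helper like this is presumably what fills them on the sending side (assuming the field is a `HashMap<String, String>`).

```rust
use std::collections::HashMap;

use opentelemetry::global;
use tracing::Span;
use tracing_opentelemetry::OpenTelemetrySpanExt;

// Capture the current span's context into a plain map so it can travel with a
// queued deployment and be restored with `set_parent` on the other side.
fn capture_tracing_context() -> HashMap<String, String> {
    let mut map = HashMap::new();
    let cx = Span::current().context();
    global::get_text_map_propagator(|propagator| propagator.inject_context(&cx, &mut map));
    map
}
```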
@@ -11,4 +11,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = { path = "../../../../service" } +shuttle-service = { path = "../../../../service", features = ["codegen"] } diff --git a/deployer/tests/resources/sleep-async/Cargo.toml b/deployer/tests/resources/sleep-async/Cargo.toml index c0dc45479..f02de505e 100644 --- a/deployer/tests/resources/sleep-async/Cargo.toml +++ b/deployer/tests/resources/sleep-async/Cargo.toml @@ -12,4 +12,4 @@ crate-type = ["cdylib"] [dependencies] tokio = { version = "1.0", features = ["time"]} -shuttle-service = { path = "../../../../service" } +shuttle-service = { path = "../../../../service", features = ["codegen"] } diff --git a/deployer/tests/resources/tests-fail/Cargo.toml b/deployer/tests/resources/tests-fail/Cargo.toml index 18f948a67..f678cc9a8 100644 --- a/deployer/tests/resources/tests-fail/Cargo.toml +++ b/deployer/tests/resources/tests-fail/Cargo.toml @@ -9,5 +9,5 @@ crate-type = ["cdylib"] [workspace] [dependencies] -rocket = "0.5.0-rc.1" -shuttle-service = { version = "0.3.3", features = ["web-rocket"] } +rocket = "0.5.0-rc.2" +shuttle-service = { path = "../../../../service", features = ["codegen", "web-rocket"] } diff --git a/deployer/tests/resources/tests-pass/Cargo.toml b/deployer/tests/resources/tests-pass/Cargo.toml index b3e5424eb..358114dae 100644 --- a/deployer/tests/resources/tests-pass/Cargo.toml +++ b/deployer/tests/resources/tests-pass/Cargo.toml @@ -9,5 +9,5 @@ crate-type = ["cdylib"] [workspace] [dependencies] -rocket = "0.5.0-rc.1" -shuttle-service = { version = "0.3.3", features = ["web-rocket"] } +rocket = "0.5.0-rc.2" +shuttle-service = { path = "../../../../service", features = ["codegen", "web-rocket"] } diff --git a/docker-compose.yml b/docker-compose.yml index 63960ad78..10edbccbc 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,19 +2,22 @@ version: "3.7" volumes: gateway-vol: postgres-vol: + panamax-crates-vol: + panamax-io-index-vol: networks: user-net: attachable: true ipam: driver: default config: - - subnet: 10.99.0.0/24 + - subnet: 10.99.0.0/16 services: gateway: image: "${CONTAINER_REGISTRY}/gateway:${BACKEND_TAG}" depends_on: - provisioner ports: + - 7999:7999 - 8000:8000 - 8001:8001 deploy: @@ -42,16 +45,18 @@ services: environment: - RUST_LOG=${RUST_LOG} command: - - "--state=/var/lib/shuttle/gateway.sqlite" + - "--state=/var/lib/shuttle" - "start" - "--control=0.0.0.0:8001" - "--user=0.0.0.0:8000" - - "--image=${CONTAINER_REGISTRY}/deployer:${BACKEND_TAG}" + - "--bouncer=0.0.0.0:7999" + - "--image=${CONTAINER_REGISTRY}/deployer:${DEPLOYER_TAG}" - "--prefix=shuttle_" - "--network-name=${STACK}_user-net" - "--docker-host=/var/run/docker.sock" - "--provisioner-host=provisioner" - "--proxy-fqdn=${APPS_FQDN}" + - "--use-tls=${USE_TLS}" healthcheck: test: ["CMD", "curl", "-f", "http://localhost:8001"] interval: 1m @@ -131,6 +136,35 @@ services: - DD_APM_NON_LOCAL_TRAFFIC=true - DD_SITE=datadoghq.eu - DD_API_KEY=${DD_API_KEY} + - DD_ENV=${DD_ENV} + - DD_CONTAINER_LABELS_AS_TAGS={"project.name":"project_name"} + deploy: + placement: + constraints: + - node.hostname==controller + panamax: + image: "${CONTAINER_REGISTRY}/panamax:${PANAMAX_TAG}" + restart: always + networks: + user-net: + volumes: + - panamax-crates-vol:/mirror/crates + - panamax-io-index-vol:/mirror/crates.io-index + labels: + deck-chores.sync-mirror.command: panamax sync /mirror + deck-chores.sync-mirror.interval: 5 minutes + deck-chores.sync-mirror.max: 1 + deploy: + placement: + constraints: + - 
node.hostname==controller + deck-chores: + image: funkyfuture/deck-chores:1 + restart: unless-stopped + environment: + TIMEZONE: UTC + volumes: + - /var/run/docker.sock:/var/run/docker.sock deploy: placement: constraints: diff --git a/e2e/README.md b/e2e/README.md index 297ff3864..7d6caaf99 100644 --- a/e2e/README.md +++ b/e2e/README.md @@ -4,12 +4,16 @@ This crate runs all the end-to-end tests for shuttle. These tests must run again Running all the end-to-end tests may take a long time, so it is recommended to run individual tests shipped as part of each crate in the workspace first. ## Running the tests -Simply do +In the root of the repository, run: ```bash -$ SHUTTLE_API_KEY=test-key cargo test -- --nocapture +make test ``` -the `--nocapture` flag helps with logging errors as they arise instead of in one block at the end. +To run individual tests, in the root of the e2e directory run: + +```bash +POSTGRES_PASSWORD=postgres APPS_FQDN=unstable.shuttleapp.rs cargo test -- --nocapture +``` The server-side logs can be accessed with `docker compose logs`. diff --git a/e2e/tests/integration/actix_web.rs b/e2e/tests/integration/actix_web.rs new file mode 100644 index 000000000..f6a545889 --- /dev/null +++ b/e2e/tests/integration/actix_web.rs @@ -0,0 +1,23 @@ +use crossterm::style::Color; + +use crate::helpers::{self, APPS_FQDN}; + +#[test] +fn hello_world_actix_web() { + let client = helpers::Services::new_docker( + "hello-world (actix-web)", + "actix-web/hello-world", + Color::Green, + ); + client.deploy(); + + let request_text = client + .get("hello") + .header("Host", format!("hello-world-actix-web-app.{}", *APPS_FQDN)) + .send() + .unwrap() + .text() + .unwrap(); + + assert_eq!(request_text, "Hello World!"); +} diff --git a/e2e/tests/integration/axum.rs b/e2e/tests/integration/axum.rs index fc20183ce..698cd7e35 100644 --- a/e2e/tests/integration/axum.rs +++ b/e2e/tests/integration/axum.rs @@ -4,8 +4,9 @@ use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_axum() { - let client = helpers::Services::new_docker("hello-world (axum)", Color::Green); - client.deploy("axum/hello-world"); + let client = + helpers::Services::new_docker("hello-world (axum)", "axum/hello-world", Color::Green); + client.deploy(); let request_text = client .get("hello") diff --git a/e2e/tests/integration/helpers/mod.rs b/e2e/tests/integration/helpers/mod.rs index f839373d1..f888830c8 100644 --- a/e2e/tests/integration/helpers/mod.rs +++ b/e2e/tests/integration/helpers/mod.rs @@ -42,12 +42,17 @@ shuttle-service = {{ path = "{}" }} shuttle-aws-rds = {{ path = "{}" }} shuttle-persist = {{ path = "{}" }} shuttle-shared-db = {{ path = "{}" }} -shuttle-secrets = {{ path = "{}" }}"#, +shuttle-secrets = {{ path = "{}" }} +shuttle-static-folder = {{ path = "{}" }}"#, WORKSPACE_ROOT.join("service").display(), WORKSPACE_ROOT.join("resources").join("aws-rds").display(), WORKSPACE_ROOT.join("resources").join("persist").display(), WORKSPACE_ROOT.join("resources").join("shared-db").display(), WORKSPACE_ROOT.join("resources").join("secrets").display(), + WORKSPACE_ROOT + .join("resources") + .join("static-folder") + .display(), ) .unwrap(); @@ -112,10 +117,10 @@ CARGO_HOME: {} let admin_key = if let Ok(key) = env::var("SHUTTLE_API_KEY") { key } else { - "test-key".to_string() + "e2e-test-key".to_string() }; - Command::new(DOCKER.as_os_str()) + _ = Command::new(DOCKER.as_os_str()) .args([ "compose", "--file", @@ -125,15 +130,14 @@ CARGO_HOME: {} "exec", "gateway", "/usr/local/bin/service", - 
"--state=/var/lib/shuttle/gateway.sqlite", + "--state=/var/lib/shuttle", "init", "--name", - "admin", + "test", "--key", &admin_key, ]) - .output() - .ensure_success("failed to create admin user on gateway"); + .output(); }; } @@ -214,12 +218,14 @@ pub fn spawn_and_log>( pub struct Services { api_addr: SocketAddr, proxy_addr: SocketAddr, + /// Path within the examples dir to a specific example + example_path: String, target: String, color: Color, } impl Services { - fn new_free(target: D, color: C) -> Self + fn new_free(target: D, example_path: D, color: C) -> Self where D: std::fmt::Display, C: Into, @@ -229,16 +235,25 @@ impl Services { proxy_addr: "127.0.0.1:8000".parse().unwrap(), target: target.to_string(), color: color.into(), + example_path: example_path.to_string(), } } - pub fn new_docker(target: D, color: C) -> Self + /// Initializes a a test client + /// + /// # Arguments + /// + /// * `target` - A string that describes the test target + /// * `example_path` - Path to a specific example within the examples dir, this is where + /// `project new` and `deploy` will run + /// * `color` - a preferably unique `crossterm::style::Color` to distinguish test logs + pub fn new_docker(target: D, example_path: D, color: C) -> Self where D: std::fmt::Display, C: Into, { let _ = *LOCAL_UP; - let service = Self::new_free(target, color); + let service = Self::new_free(target, example_path, color); service.wait_ready(Duration::from_secs(15)); // Make sure provisioner is ready, else deployers will fail to start up @@ -330,18 +345,18 @@ impl Services { panic!("timed out while waiting for mongodb to be ready"); } - pub fn wait_deployer_ready(&self, project_path: &str, mut timeout: Duration) { + pub fn wait_deployer_ready(&self, mut timeout: Duration) { let mut now = SystemTime::now(); while !timeout.is_zero() { let mut run = Command::new(WORKSPACE_ROOT.join("target/debug/cargo-shuttle")); if env::var("SHUTTLE_API_KEY").is_err() { - run.env("SHUTTLE_API_KEY", "test-key"); + run.env("SHUTTLE_API_KEY", "e2e-test-key"); } run.env("CARGO_HOME", CARGO_HOME.path()); run.args(["project", "status"]) - .current_dir(WORKSPACE_ROOT.join("examples").join(project_path)); + .current_dir(self.get_full_project_path()); let stdout = run.output().unwrap().stdout; let stdout = String::from_utf8(stdout).unwrap(); @@ -358,39 +373,33 @@ impl Services { panic!("timed out while waiting for deployer to be ready"); } - pub fn run_client<'s, I, P>(&self, args: I, path: P) -> Child + pub fn run_client<'s, I>(&self, args: I) -> Child where - P: AsRef, I: IntoIterator, { let mut run = Command::new(WORKSPACE_ROOT.join("target/debug/cargo-shuttle")); if env::var("SHUTTLE_API_KEY").is_err() { - run.env("SHUTTLE_API_KEY", "test-key"); + run.env("SHUTTLE_API_KEY", "e2e-test-key"); } run.env("CARGO_HOME", CARGO_HOME.path()); - run.args(args).current_dir(path); + run.args(args).current_dir(self.get_full_project_path()); spawn_and_log(&mut run, &self.target, self.color) } - pub fn deploy(&self, project_path: &str) { - self.run_client( - ["project", "new"], - WORKSPACE_ROOT.join("examples").join(project_path), - ) - .wait() - .ensure_success("failed to run deploy"); - - self.wait_deployer_ready(project_path, Duration::from_secs(120)); - - self.run_client( - ["deploy", "--allow-dirty"], - WORKSPACE_ROOT.join("examples").join(project_path), - ) - .wait() - .ensure_success("failed to run deploy"); + /// Starts a project and deploys a service for the example in `self.example_path` + pub fn deploy(&self) { + self.run_client(["project", "new"]) 
+ .wait() + .ensure_success("failed to run deploy"); + + self.wait_deployer_ready(Duration::from_secs(120)); + + self.run_client(["deploy", "--allow-dirty"]) + .wait() + .ensure_success("failed to run deploy"); } pub fn get(&self, sub_path: &str) -> RequestBuilder { @@ -401,4 +410,16 @@ impl Services { pub fn post(&self, sub_path: &str) -> RequestBuilder { reqwest::blocking::Client::new().post(format!("http://{}/{}", self.proxy_addr, sub_path)) } + + /// Gets the full path: the path within examples to a specific example appended to the workspace root + pub fn get_full_project_path(&self) -> PathBuf { + WORKSPACE_ROOT.join("examples").join(&self.example_path) + } +} + +impl Drop for Services { + fn drop(&mut self) { + // Initiate project destruction on test completion + _ = self.run_client(["project", "rm"]).wait(); + } } diff --git a/e2e/tests/integration/main.rs b/e2e/tests/integration/main.rs index 95e06707c..8640c4c64 100644 --- a/e2e/tests/integration/main.rs +++ b/e2e/tests/integration/main.rs @@ -1,5 +1,6 @@ pub mod helpers; +pub mod actix_web; pub mod axum; pub mod poem; pub mod rocket; diff --git a/e2e/tests/integration/poem.rs b/e2e/tests/integration/poem.rs index 9c521b387..7f5a566bb 100644 --- a/e2e/tests/integration/poem.rs +++ b/e2e/tests/integration/poem.rs @@ -4,8 +4,9 @@ use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_poem() { - let client = helpers::Services::new_docker("hello-world (poem)", Color::Cyan); - client.deploy("poem/hello-world"); + let client = + helpers::Services::new_docker("hello-world (poem)", "poem/hello-world", Color::Cyan); + client.deploy(); let request_text = client .get("hello") @@ -20,8 +21,8 @@ fn hello_world_poem() { #[test] fn postgres_poem() { - let client = helpers::Services::new_docker("postgres (poem)", Color::Blue); - client.deploy("poem/postgres"); + let client = helpers::Services::new_docker("postgres (poem)", "poem/postgres", Color::Blue); + client.deploy(); let add_response = client .post("todo") @@ -48,8 +49,8 @@ fn postgres_poem() { #[test] fn mongodb_poem() { - let client = helpers::Services::new_docker("mongo (poem)", Color::Green); - client.deploy("poem/mongodb"); + let client = helpers::Services::new_docker("mongo (poem)", "poem/mongodb", Color::Green); + client.deploy(); // post todo and get its generated objectId let add_response = client diff --git a/e2e/tests/integration/rocket.rs b/e2e/tests/integration/rocket.rs index e0f67cc79..bbf4d0017 100644 --- a/e2e/tests/integration/rocket.rs +++ b/e2e/tests/integration/rocket.rs @@ -4,8 +4,12 @@ use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_rocket() { - let client = helpers::Services::new_docker("hello-world (rocket)", Color::DarkMagenta); - client.deploy("rocket/hello-world"); + let client = helpers::Services::new_docker( + "hello-world (rocket)", + "rocket/hello-world", + Color::DarkMagenta, + ); + client.deploy(); let request_text = client .get("hello") @@ -20,8 +24,9 @@ fn hello_world_rocket() { #[test] fn postgres_rocket() { - let client = helpers::Services::new_docker("postgres (rocket)", Color::Magenta); - client.deploy("rocket/postgres"); + let client = + helpers::Services::new_docker("postgres (rocket)", "rocket/postgres", Color::Magenta); + client.deploy(); let add_response = client .post("todo") @@ -47,8 +52,15 @@ fn postgres_rocket() { #[test] fn secrets_rocket() { - let client = helpers::Services::new_docker("secrets (rocket)", Color::Red); - client.deploy("rocket/secrets"); + let client = helpers::Services::new_docker("secrets (rocket)", 
"rocket/secrets", Color::Red); + let project_path = client.get_full_project_path(); + std::fs::copy( + project_path.join("Secrets.toml.example"), + project_path.join("Secrets.toml"), + ) + .unwrap(); + + client.deploy(); let secret_response: String = client .get("secret") .header("Host", format!("secrets-rocket-app.{}", *APPS_FQDN)) diff --git a/e2e/tests/integration/salvo.rs b/e2e/tests/integration/salvo.rs index c0749a0e4..01daf0b41 100644 --- a/e2e/tests/integration/salvo.rs +++ b/e2e/tests/integration/salvo.rs @@ -4,8 +4,9 @@ use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_salvo() { - let client = helpers::Services::new_docker("hello-world (salvo)", Color::DarkRed); - client.deploy("salvo/hello-world"); + let client = + helpers::Services::new_docker("hello-world (salvo)", "salvo/hello-world", Color::DarkRed); + client.deploy(); let request_text = client .get("hello") diff --git a/e2e/tests/integration/thruster.rs b/e2e/tests/integration/thruster.rs index eabc89ab4..ad6a20919 100644 --- a/e2e/tests/integration/thruster.rs +++ b/e2e/tests/integration/thruster.rs @@ -4,8 +4,12 @@ use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_thruster() { - let client = helpers::Services::new_docker("hello-world (thruster)", Color::DarkYellow); - client.deploy("thruster/hello-world"); + let client = helpers::Services::new_docker( + "hello-world (thruster)", + "thruster/hello-world", + Color::DarkYellow, + ); + client.deploy(); let request_text = client .get("hello") diff --git a/e2e/tests/integration/tide.rs b/e2e/tests/integration/tide.rs index 270d5c465..af4d487fe 100644 --- a/e2e/tests/integration/tide.rs +++ b/e2e/tests/integration/tide.rs @@ -4,8 +4,9 @@ use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_tide() { - let client = helpers::Services::new_docker("hello-world (tide)", Color::DarkYellow); - client.deploy("tide/hello-world"); + let client = + helpers::Services::new_docker("hello-world (tide)", "tide/hello-world", Color::DarkYellow); + client.deploy(); let request_text = client .get("hello") diff --git a/e2e/tests/integration/tower.rs b/e2e/tests/integration/tower.rs index 57945a99a..5592332aa 100644 --- a/e2e/tests/integration/tower.rs +++ b/e2e/tests/integration/tower.rs @@ -4,8 +4,12 @@ use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_tower() { - let client = helpers::Services::new_docker("hello-world (tower)", Color::DarkYellow); - client.deploy("tower/hello-world"); + let client = helpers::Services::new_docker( + "hello-world (tower)", + "tower/hello-world", + Color::DarkYellow, + ); + client.deploy(); let request_text = client .get("hello") diff --git a/e2e/tests/integration/warp.rs b/e2e/tests/integration/warp.rs index 36aef9535..802fa2733 100644 --- a/e2e/tests/integration/warp.rs +++ b/e2e/tests/integration/warp.rs @@ -1,11 +1,12 @@ -use colored::Color; +use crossterm::style::Color; use crate::helpers::{self, APPS_FQDN}; #[test] fn hello_world_warp() { - let client = helpers::Services::new_docker("hello-world (warp)", Color::Cyan); - client.deploy("warp/hello-world"); + let client = + helpers::Services::new_docker("hello-world (warp)", "warp/hello-world", Color::Cyan); + client.deploy(); let request_text = client .get("hello") @@ -15,5 +16,5 @@ fn hello_world_warp() { .text() .unwrap(); - assert_eq!(request_text, "Hello, world!"); + assert_eq!(request_text, "Hello, World!"); } diff --git a/examples b/examples new file mode 160000 index 000000000..8ada30396 --- /dev/null +++ b/examples @@ -0,0 +1 @@ +Subproject commit 
8ada303965ff7ac14873358243bfa071ed11b7e8 diff --git a/examples/README.md b/examples/README.md deleted file mode 100644 index 56e8b165f..000000000 --- a/examples/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# Examples - -Some example apps to show what you can do with shuttle. - -## How to deploy the examples - -To deploy the examples, check out the repository locally - -```bash -$ git clone https://github.com/shuttle-hq/shuttle.git -``` - -navigate to an example root folder - -```bash -$ cd examples/axum/hello-world -``` - -Pick a project name that is something unique - in shuttle, -projects are globally unique. Then run - -```bash -$ cargo shuttle project new --name=$PROJECT_NAME -$ cargo shuttle deploy --name=$PROJECT_NAME -``` diff --git a/examples/axum/hello-world/Cargo.toml b/examples/axum/hello-world/Cargo.toml deleted file mode 100644 index 2ed0f8c58..000000000 --- a/examples/axum/hello-world/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -axum = "0.5" -shuttle-service = { version = "0.7.0", features = ["web-axum"] } -sync_wrapper = "0.1" diff --git a/examples/axum/hello-world/Shuttle.toml b/examples/axum/hello-world/Shuttle.toml deleted file mode 100644 index db50aed05..000000000 --- a/examples/axum/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-axum-app" \ No newline at end of file diff --git a/examples/axum/hello-world/src/lib.rs b/examples/axum/hello-world/src/lib.rs deleted file mode 100644 index 58137a394..000000000 --- a/examples/axum/hello-world/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -use axum::{routing::get, Router}; -use sync_wrapper::SyncWrapper; - -async fn hello_world() -> &'static str { - "Hello, world!" -} - -#[shuttle_service::main] -async fn axum() -> shuttle_service::ShuttleAxum { - let router = Router::new().route("/hello", get(hello_world)); - let sync_wrapper = SyncWrapper::new(router); - - Ok(sync_wrapper) -} diff --git a/examples/axum/websocket/Cargo.toml b/examples/axum/websocket/Cargo.toml deleted file mode 100644 index be6492154..000000000 --- a/examples/axum/websocket/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "websocket" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -axum = { version = "0.5", features = ["ws"] } -chrono = { version = "0.4", features = ["serde"] } -futures = "0.3" -hyper = { version = "0.14", features = ["client", "http2"] } -hyper-tls = "0.5" -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0" -shuttle-service = { version = "0.7.0", features = ["web-axum"] } -sync_wrapper = "0.1" -tokio = { version = "1", features = ["full"] } diff --git a/examples/axum/websocket/Shuttle.toml b/examples/axum/websocket/Shuttle.toml deleted file mode 100644 index a98513a3a..000000000 --- a/examples/axum/websocket/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "websocket-axum-app" diff --git a/examples/axum/websocket/index.html b/examples/axum/websocket/index.html deleted file mode 100644 index f7fbdf5ca..000000000 --- a/examples/axum/websocket/index.html +++ /dev/null @@ -1,75 +0,0 @@ - - - - - - - Websocket status page - - - -
- [deleted index.html: a small status page with "Current API status", "Last check time", and "Clients watching" panels; its markup did not survive extraction]
- - - - - - diff --git a/examples/axum/websocket/src/lib.rs b/examples/axum/websocket/src/lib.rs deleted file mode 100644 index 2d7ad4a58..000000000 --- a/examples/axum/websocket/src/lib.rs +++ /dev/null @@ -1,132 +0,0 @@ -use std::{sync::Arc, time::Duration}; - -use axum::{ - extract::{ - ws::{Message, WebSocket}, - WebSocketUpgrade, - }, - response::{Html, IntoResponse}, - routing::get, - Extension, Router, -}; -use chrono::{DateTime, Utc}; -use futures::{SinkExt, StreamExt}; -use hyper::{Client, Uri}; -use hyper_tls::HttpsConnector; -use serde::Serialize; -use shuttle_service::ShuttleAxum; -use sync_wrapper::SyncWrapper; -use tokio::{ - sync::{watch, Mutex}, - time::sleep, -}; - -struct State { - clients_count: usize, - rx: watch::Receiver, -} - -const PAUSE_SECS: u64 = 15; -const STATUS_URI: &str = "https://api.shuttle.rs/status"; - -#[derive(Serialize)] -struct Response { - clients_count: usize, - datetime: DateTime, - is_up: bool, -} - -#[shuttle_service::main] -async fn main() -> ShuttleAxum { - let (tx, rx) = watch::channel(Message::Text("{}".to_string())); - - let state = Arc::new(Mutex::new(State { - clients_count: 0, - rx, - })); - - // Spawn a thread to continually check the status of the api - let state_send = state.clone(); - tokio::spawn(async move { - let duration = Duration::from_secs(PAUSE_SECS); - let https = HttpsConnector::new(); - let client = Client::builder().build::<_, hyper::Body>(https); - let uri: Uri = STATUS_URI.parse().unwrap(); - - loop { - let is_up = client.get(uri.clone()).await; - let is_up = is_up.is_ok(); - - let response = Response { - clients_count: state_send.lock().await.clients_count, - datetime: Utc::now(), - is_up, - }; - let msg = serde_json::to_string(&response).unwrap(); - - if tx.send(Message::Text(msg)).is_err() { - break; - } - - sleep(duration).await; - } - }); - - let router = Router::new() - .route("/", get(index)) - .route("/websocket", get(websocket_handler)) - .layer(Extension(state)); - - let sync_wrapper = SyncWrapper::new(router); - - Ok(sync_wrapper) -} - -async fn websocket_handler( - ws: WebSocketUpgrade, - Extension(state): Extension>>, -) -> impl IntoResponse { - ws.on_upgrade(|socket| websocket(socket, state)) -} - -async fn websocket(stream: WebSocket, state: Arc>) { - // By splitting we can send and receive at the same time. - let (mut sender, mut receiver) = stream.split(); - - let mut rx = { - let mut state = state.lock().await; - state.clients_count += 1; - state.rx.clone() - }; - - // This task will receive watch messages and forward it to this connected client. - let mut send_task = tokio::spawn(async move { - while let Ok(()) = rx.changed().await { - let msg = rx.borrow().clone(); - - if sender.send(msg).await.is_err() { - break; - } - } - }); - - // This task will receive messages from this client. - let mut recv_task = tokio::spawn(async move { - while let Some(Ok(Message::Text(text))) = receiver.next().await { - println!("this example does not read any messages, but got: {text}"); - } - }); - - // If any one of the tasks exit, abort the other. - tokio::select! 
{ - _ = (&mut send_task) => recv_task.abort(), - _ = (&mut recv_task) => send_task.abort(), - }; - - // This client disconnected - state.lock().await.clients_count -= 1; -} - -async fn index() -> Html<&'static str> { - Html(include_str!("../index.html")) -} diff --git a/examples/poem/hello-world/Cargo.toml b/examples/poem/hello-world/Cargo.toml deleted file mode 100644 index 74a34fded..000000000 --- a/examples/poem/hello-world/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -poem = "1.3.35" -shuttle-service = { version = "0.7.0", features = ["web-poem"] } diff --git a/examples/poem/hello-world/Shuttle.toml b/examples/poem/hello-world/Shuttle.toml deleted file mode 100644 index 83fe477b1..000000000 --- a/examples/poem/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-poem-app" diff --git a/examples/poem/hello-world/src/lib.rs b/examples/poem/hello-world/src/lib.rs deleted file mode 100644 index f7630400d..000000000 --- a/examples/poem/hello-world/src/lib.rs +++ /dev/null @@ -1,13 +0,0 @@ -use poem::{get, handler, Route}; - -#[handler] -fn hello_world() -> &'static str { - "Hello, world!" -} - -#[shuttle_service::main] -async fn main() -> shuttle_service::ShuttlePoem { - let app = Route::new().at("/hello", get(hello_world)); - - Ok(app) -} diff --git a/examples/poem/mongodb/Cargo.toml b/examples/poem/mongodb/Cargo.toml deleted file mode 100644 index 41f6389b2..000000000 --- a/examples/poem/mongodb/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "mongodb-poem-app" -version = "0.1.0" -edition = "2021" - -[dependencies] -mongodb = "2.3.0" -poem = "1.3.35" -serde = { version = "1", features = ["derive"] } -serde_json = "1" -shuttle-service = { version = "0.7.0", features = ["web-poem"] } -shuttle-shared-db = { version = "0.7.0", features = ["mongodb"] } diff --git a/examples/poem/mongodb/Shuttle.toml b/examples/poem/mongodb/Shuttle.toml deleted file mode 100644 index d1a1e3a7f..000000000 --- a/examples/poem/mongodb/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "mongodb-poem-app" diff --git a/examples/poem/mongodb/src/lib.rs b/examples/poem/mongodb/src/lib.rs deleted file mode 100644 index a247a9488..000000000 --- a/examples/poem/mongodb/src/lib.rs +++ /dev/null @@ -1,73 +0,0 @@ -use mongodb::bson::doc; -use mongodb::bson::oid::ObjectId; -use mongodb::{Collection, Database}; -use poem::{ - error::{BadRequest, NotFoundError}, - get, handler, - middleware::AddData, - post, - web::{Data, Json}, - EndpointExt, FromRequest, Request, RequestBody, Result, Route, -}; -use serde::{Deserialize, Serialize}; - -struct ObjectIdGuard(ObjectId); - -#[poem::async_trait] -impl<'a> FromRequest<'a> for ObjectIdGuard { - async fn from_request(req: &'a Request, _body: &mut RequestBody) -> Result { - let id = req.path_params::()?; - let obj_id = ObjectId::parse_str(id).map_err(BadRequest)?; - Ok(ObjectIdGuard(obj_id)) - } -} - -#[handler] -async fn retrieve( - ObjectIdGuard(id): ObjectIdGuard, - collection: Data<&Collection>, -) -> Result> { - let filter = doc! 
{"_id": id}; - let todo = collection - .find_one(filter, None) - .await - .map_err(BadRequest)?; - - match todo { - Some(todo) => Ok(Json(serde_json::json!(todo))), - None => Err(NotFoundError.into()), - } -} - -#[handler] -async fn add(Json(todo): Json, collection: Data<&Collection>) -> Result { - let todo_id = collection - .insert_one(todo, None) - .await - .map_err(BadRequest)?; - - Ok(todo_id - .inserted_id - .as_object_id() - .expect("id is objectId") - .to_string()) -} - -#[shuttle_service::main] -async fn main( - #[shuttle_shared_db::MongoDb] db: Database, -) -> shuttle_service::ShuttlePoem { - let collection = db.collection::("todos"); - - let app = Route::new() - .at("/todo", post(add)) - .at("/todo/:id", get(retrieve)) - .with(AddData::new(collection)); - - Ok(app) -} - -#[derive(Debug, Serialize, Deserialize)] -struct Todo { - pub note: String, -} diff --git a/examples/poem/postgres/Cargo.toml b/examples/poem/postgres/Cargo.toml deleted file mode 100644 index 5eb3e6b6e..000000000 --- a/examples/poem/postgres/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "postgres" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -poem = "1.3.35" -serde = "1.0" -shuttle-service = { version = "0.7.0", features = ["web-poem"] } -shuttle-shared-db = { version = "0.7.0", features = ["postgres"] } -sqlx = { version = "0.6", features = ["runtime-tokio-native-tls", "postgres"] } diff --git a/examples/poem/postgres/Shuttle.toml b/examples/poem/postgres/Shuttle.toml deleted file mode 100644 index 33ccd0784..000000000 --- a/examples/poem/postgres/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "postgres-poem-app" diff --git a/examples/poem/postgres/schema.sql b/examples/poem/postgres/schema.sql deleted file mode 100644 index 460e7c23d..000000000 --- a/examples/poem/postgres/schema.sql +++ /dev/null @@ -1,6 +0,0 @@ -DROP TABLE IF EXISTS todos; - -CREATE TABLE todos ( - id serial PRIMARY KEY, - note TEXT NOT NULL -); diff --git a/examples/poem/postgres/src/lib.rs b/examples/poem/postgres/src/lib.rs deleted file mode 100644 index d25787d98..000000000 --- a/examples/poem/postgres/src/lib.rs +++ /dev/null @@ -1,60 +0,0 @@ -use poem::{ - error::BadRequest, - get, handler, - middleware::AddData, - post, - web::{Data, Json, Path}, - EndpointExt, Result, Route, -}; -use serde::{Deserialize, Serialize}; -use shuttle_service::error::CustomError; -use sqlx::{Executor, FromRow, PgPool}; - -#[handler] -async fn retrieve(Path(id): Path, state: Data<&PgPool>) -> Result> { - let todo = sqlx::query_as("SELECT * FROM todos WHERE id = $1") - .bind(id) - .fetch_one(state.0) - .await - .map_err(BadRequest)?; - - Ok(Json(todo)) -} - -#[handler] -async fn add(Json(data): Json, state: Data<&PgPool>) -> Result> { - let todo = sqlx::query_as("INSERT INTO todos(note) VALUES ($1) RETURNING id, note") - .bind(&data.note) - .fetch_one(state.0) - .await - .map_err(BadRequest)?; - - Ok(Json(todo)) -} - -#[shuttle_service::main] -async fn main( - #[shuttle_shared_db::Postgres] pool: PgPool, -) -> shuttle_service::ShuttlePoem { - pool.execute(include_str!("../schema.sql")) - .await - .map_err(CustomError::new)?; - - let app = Route::new() - .at("/todo", post(add)) - .at("/todo/:id", get(retrieve)) - .with(AddData::new(pool)); - - Ok(app) -} - -#[derive(Deserialize)] -struct TodoNew { - pub note: String, -} - -#[derive(Serialize, FromRow)] -struct Todo { - pub id: i32, - pub note: String, -} diff --git a/examples/rocket/authentication/Cargo.toml b/examples/rocket/authentication/Cargo.toml deleted file mode 
100644 index 1b7fb7a03..000000000 --- a/examples/rocket/authentication/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "authentication" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -chrono = "0.4" -jsonwebtoken = { version = "8", default-features = false } -lazy_static = "1.4" -rocket = { version = "0.5.0-rc.2", features = ["json"] } -serde = { version = "1.0", features = ["derive"] } -shuttle-service = { version = "0.7.0", features = ["web-rocket"] } diff --git a/examples/rocket/authentication/README.md b/examples/rocket/authentication/README.md deleted file mode 100644 index b4f4f43f5..000000000 --- a/examples/rocket/authentication/README.md +++ /dev/null @@ -1,60 +0,0 @@ -# Issue and verify JWT for authentication -This example shows how to use [Rocket request guards](https://rocket.rs/v0.5-rc/guide/requests/#request-guards) for authentication with [JSON Web Tokens](https://jwt.io/) (JWT for short). -The idea is that all requests authenticate first at https://authentication-rocket-app.shuttleapp.rs/login to get a JWT. -Then the JWT is sent with all requests requiring authentication using the HTTP header `Authorization: Bearer `. - -This example uses the [`jsonwebtoken`](https://github.com/Keats/jsonwebtoken) which supports symmetric and asymmetric secret encoding, built-in validations, and most JWT algorithms. -However, this example only makes use of symmetric encoding and validation on the expiration claim. - -## Structure -This example has two files to register routes and handle JWT claims. - -### src/main.rs -Three Rocker routes are registered in this file: -1. `/public`: a route that can be called without needing any authentication. -1. `/login`: a route for posting a JSON object with a username and password to get a JWT. -1. `/private`: a route that can only be accessed with a valid JWT. - -### src/claims.rs -The bulk of this example is in this file. Most of the code can be transferred to other frameworks except for the `FromRequest` implementation, which is Rocket specific. -This file contains a `Claims` object which can be expanded with more claims. A `Claims` can be created from a `Bearer ` string using `Claims::from_authorization()`. -And a `Claims` object can also be converted to a token using `to_token()`. - -## Deploy -After logging into shuttle, use the following command to deploy this example: - -```sh -$ cargo shuttle project new -$ cargo shuttle deploy -``` - -Now make a note of the `Host` for the deploy to use in the examples below. Or just use `authentication-rocket-app.shuttleapp.rs` as the host below. - -### Seeing it in action -First, we should be able to access the public endpoint without any authentication using: - -```sh -$ curl https:///public -``` - -But trying to access the private endpoint will fail with a 403 forbidden: - -```sh -$ curl https:///private -``` - -So let's get a JWT from the login route first: - - -```sh -$ curl --request POST --data '{"username": "username", "password": "password"}' https:///login -``` - -Accessing the private endpoint with the token will now succeed: - -```sh -$ curl --header "Authorization: Bearer " https:///private -``` - -The token is set to expire in 5 minutus, so wait a while and try to access the private endpoint again. Once the token has expired, a user will need to get a new token from login. -Since tokens usually have a longer than 5 minutes expiration time, we can create a `/refresh` endpoint that takes an active token and returns a new token with a refreshed expiration time. 
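The closing suggestion in that (now removed) README maps to very little code. A hedged sketch of such a `/refresh` route, reusing the example's existing `Claims` guard and `LoginResponse` type — it is not part of the example and would still need to be added to the `routes![...]` mount in `src/lib.rs`:

```rust
/// Issue a fresh token to a caller whose current token is still valid; the
/// `Claims` request guard has already rejected missing or expired tokens.
#[post("/refresh")]
fn refresh(user: Claims) -> Result<Json<LoginResponse>, Custom<String>> {
    let claim = Claims::from_name(&user.name);
    Ok(Json(LoginResponse {
        token: claim.into_token()?,
    }))
}
```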
diff --git a/examples/rocket/authentication/Shuttle.toml b/examples/rocket/authentication/Shuttle.toml deleted file mode 100644 index 716d4370a..000000000 --- a/examples/rocket/authentication/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "authentication-rocket-app" diff --git a/examples/rocket/authentication/src/claims.rs b/examples/rocket/authentication/src/claims.rs deleted file mode 100644 index bce84a3fd..000000000 --- a/examples/rocket/authentication/src/claims.rs +++ /dev/null @@ -1,135 +0,0 @@ -use chrono::{Duration, Utc}; -use jsonwebtoken::{ - decode, encode, errors::ErrorKind, DecodingKey, EncodingKey, Header, Validation, -}; -use lazy_static::lazy_static; -use rocket::{ - http::Status, - request::{FromRequest, Outcome}, - response::status::Custom, -}; -use serde::{Deserialize, Serialize}; - -const BEARER: &str = "Bearer "; -const AUTHORIZATION: &str = "Authorization"; - -/// Key used for symmetric token encoding -const SECRET: &str = "secret"; - -lazy_static! { - /// Time before token expires (aka exp claim) - static ref TOKEN_EXPIRATION: Duration = Duration::minutes(5); -} - -// Used when decoding a token to `Claims` -#[derive(Debug, PartialEq)] -pub(crate) enum AuthenticationError { - Missing, - Decoding(String), - Expired, -} - -// Basic claim object. Only the `exp` claim (field) is required. Consult the `jsonwebtoken` documentation for other claims that can be validated. -// The `name` is a custom claim for this API -#[derive(Serialize, Deserialize, Debug)] -pub(crate) struct Claims { - pub(crate) name: String, - exp: usize, -} - -// Rocket specific request guard implementation -#[rocket::async_trait] -impl<'r> FromRequest<'r> for Claims { - type Error = AuthenticationError; - - async fn from_request(request: &'r rocket::Request<'_>) -> Outcome { - match request.headers().get_one(AUTHORIZATION) { - None => Outcome::Failure((Status::Forbidden, AuthenticationError::Missing)), - Some(value) => match Claims::from_authorization(value) { - Err(e) => Outcome::Failure((Status::Forbidden, e)), - Ok(claims) => Outcome::Success(claims), - }, - } - } -} - -impl Claims { - pub(crate) fn from_name(name: &str) -> Self { - Self { - name: name.to_string(), - exp: 0, - } - } - - /// Create a `Claims` from a 'Bearer ' value - fn from_authorization(value: &str) -> Result { - let token = value.strip_prefix(BEARER).map(str::trim); - - if token.is_none() { - return Err(AuthenticationError::Missing); - } - - // Safe to unwrap as we just confirmed it is not none - let token = token.unwrap(); - - // Use `jsonwebtoken` to get the claims from a JWT - // Consult the `jsonwebtoken` documentation for using other algorithms and validations (the default validation just checks the expiration claim) - let token = decode::( - token, - &DecodingKey::from_secret(SECRET.as_ref()), - &Validation::default(), - ) - .map_err(|e| match e.kind() { - ErrorKind::ExpiredSignature => AuthenticationError::Expired, - _ => AuthenticationError::Decoding(e.to_string()), - })?; - - Ok(token.claims) - } - - /// Converts this claims into a token string - pub(crate) fn into_token(mut self) -> Result> { - let expiration = Utc::now() - .checked_add_signed(*TOKEN_EXPIRATION) - .expect("failed to create an expiration time") - .timestamp(); - - self.exp = expiration as usize; - - // Construct and return JWT using `jsonwebtoken` - // Consult the `jsonwebtoken` documentation for using other algorithms and asymmetric keys - let token = encode( - &Header::default(), - &self, - &EncodingKey::from_secret(SECRET.as_ref()), - ) - 
.map_err(|e| Custom(Status::BadRequest, e.to_string()))?; - - Ok(token) - } -} - -#[cfg(test)] -mod tests { - use crate::claims::AuthenticationError; - - use super::Claims; - - #[test] - fn missing_bearer() { - let claim_err = Claims::from_authorization("no-Bearer-prefix").unwrap_err(); - - assert_eq!(claim_err, AuthenticationError::Missing); - } - - #[test] - fn to_token_and_back() { - let claim = Claims::from_name("test runner"); - let token = claim.into_token().unwrap(); - let token = format!("Bearer {token}"); - - let claim = Claims::from_authorization(&token).unwrap(); - - assert_eq!(claim.name, "test runner"); - } -} diff --git a/examples/rocket/authentication/src/lib.rs b/examples/rocket/authentication/src/lib.rs deleted file mode 100644 index 5fb1b103e..000000000 --- a/examples/rocket/authentication/src/lib.rs +++ /dev/null @@ -1,77 +0,0 @@ -use rocket::http::Status; -use rocket::response::status::Custom; -use rocket::serde::json::Json; -use serde::{Deserialize, Serialize}; - -mod claims; - -use claims::Claims; - -#[macro_use] -extern crate rocket; - -#[derive(Serialize)] -struct PublicResponse { - message: String, -} - -#[get("/public")] -fn public() -> Json { - Json(PublicResponse { - message: "This endpoint is open to anyone".to_string(), - }) -} - -#[derive(Serialize)] -struct PrivateResponse { - message: String, - user: String, -} - -// More details on Rocket request guards can be found here -// https://rocket.rs/v0.5-rc/guide/requests/#request-guards -#[get("/private")] -fn private(user: Claims) -> Json { - Json(PrivateResponse { - message: "The `Claims` request guard ensures only valid JWTs can access this endpoint" - .to_string(), - user: user.name, - }) -} - -#[derive(Deserialize)] -struct LoginRequest { - username: String, - password: String, -} - -#[derive(Serialize)] -struct LoginResponse { - token: String, -} - -/// Tries to authenticate a user. 
Successful authentications get a JWT -#[post("/login", data = "")] -fn login(login: Json) -> Result, Custom> { - // This should be real user validation code, but is left simple for this example - if login.username != "username" || login.password != "password" { - return Err(Custom( - Status::Unauthorized, - "account was not found".to_string(), - )); - } - - let claim = Claims::from_name(&login.username); - let response = LoginResponse { - token: claim.into_token()?, - }; - - Ok(Json(response)) -} - -#[shuttle_service::main] -async fn rocket() -> shuttle_service::ShuttleRocket { - let rocket = rocket::build().mount("/", routes![public, private, login]); - - Ok(rocket) -} diff --git a/examples/rocket/hello-world/Cargo.toml b/examples/rocket/hello-world/Cargo.toml deleted file mode 100644 index 0c179d821..000000000 --- a/examples/rocket/hello-world/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -rocket = "0.5.0-rc.2" -shuttle-service = { version = "0.7.0", features = ["web-rocket"] } diff --git a/examples/rocket/hello-world/Shuttle.toml b/examples/rocket/hello-world/Shuttle.toml deleted file mode 100644 index 6d22450a7..000000000 --- a/examples/rocket/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-rocket-app" diff --git a/examples/rocket/hello-world/src/lib.rs b/examples/rocket/hello-world/src/lib.rs deleted file mode 100644 index cb788deaf..000000000 --- a/examples/rocket/hello-world/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -#[macro_use] -extern crate rocket; - -#[get("/")] -fn index() -> &'static str { - "Hello, world!" -} - -#[shuttle_service::main] -async fn rocket() -> shuttle_service::ShuttleRocket { - let rocket = rocket::build().mount("/hello", routes![index]); - - Ok(rocket) -} diff --git a/examples/rocket/persist/Cargo.toml b/examples/rocket/persist/Cargo.toml deleted file mode 100644 index 1d893c6b6..000000000 --- a/examples/rocket/persist/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "persist" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -rocket = { version = "0.5.0-rc.1", features = ["json"] } -serde = { version = "1.0", features = ["derive"] } -shuttle-persist = "0.7.0" -shuttle-service = { version = "0.7.0", features = ["web-rocket"] } diff --git a/examples/rocket/persist/README.md b/examples/rocket/persist/README.md deleted file mode 100644 index 9a409c2ae..000000000 --- a/examples/rocket/persist/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# Persist Example - -An example app to show what you can do with shuttle. - -## How to deploy the example - -To deploy the examples, check out the repository locally - -```bash -$ git clone https://github.com/shuttle-hq/shuttle.git -``` - -navigate to the Persist root folder - -```bash -$ cd examples/rocket/persist -``` - -Pick a project name that is something unique - in shuttle, -projects are globally unique. 
Then run - -```bash -$ cargo shuttle project new --name=$PROJECT_NAME -$ cargo shuttle deploy --name=$PROJECT_NAME -``` - -Once deployed you can post to the endpoint the following values: -```bash -curl -X POST -H "Content-Type: application/json" -d '{"date":"2020-12-22", "temp_high":5, "temp_low":5, "precipitation": 5}' {$PROJECT_NAME}.shuttleapp.rs -``` - -The json data will then persist within Shuttle it can be queried with the following curl request - -```bash -curl {$PROJECT_NAME}.shuttleapp.rs/2020-12-22 -``` diff --git a/examples/rocket/persist/Shuttle.toml b/examples/rocket/persist/Shuttle.toml deleted file mode 100644 index 8ab3c88f5..000000000 --- a/examples/rocket/persist/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "persist-rocket-app" diff --git a/examples/rocket/persist/src/lib.rs b/examples/rocket/persist/src/lib.rs deleted file mode 100644 index f85bcd42f..000000000 --- a/examples/rocket/persist/src/lib.rs +++ /dev/null @@ -1,63 +0,0 @@ -#[macro_use] -extern crate rocket; - -use rocket::response::status::BadRequest; -use rocket::serde::json::Json; -use rocket::State; -use serde::{Deserialize, Serialize}; - -use shuttle_persist::PersistInstance; - -#[derive(Serialize, Deserialize, Clone)] -struct Weather { - date: String, - temp_high: f32, - temp_low: f32, - precipitation: f32, -} - -struct MyState { - persist: PersistInstance, -} - -#[post("/", data = "")] -async fn add( - data: Json, - state: &State, -) -> Result, BadRequest> { - // Change data Json to Weather - let weather: Weather = data.into_inner(); - - let _state = state - .persist - .save::( - format!("weather_{}", &weather.date.as_str()).as_str(), - weather.clone(), - ) - .map_err(|e| BadRequest(Some(e.to_string())))?; - Ok(Json(weather)) -} - -#[get("/")] -async fn retrieve( - date: String, - state: &State, -) -> Result, BadRequest> { - let weather = state - .persist - .load::(format!("weather_{}", &date).as_str()) - .map_err(|e| BadRequest(Some(e.to_string())))?; - Ok(Json(weather)) -} - -#[shuttle_service::main] -async fn rocket( - #[shuttle_persist::Persist] persist: PersistInstance, -) -> shuttle_service::ShuttleRocket { - let state = MyState { persist }; - let rocket = rocket::build() - .mount("/", routes![retrieve, add]) - .manage(state); - - Ok(rocket) -} diff --git a/examples/rocket/postgres/Cargo.toml b/examples/rocket/postgres/Cargo.toml deleted file mode 100644 index aca7ced72..000000000 --- a/examples/rocket/postgres/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "postgres" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -rocket = { version = "0.5.0-rc.1", features = ["json"] } -serde = "1.0" -shuttle-service = { version = "0.7.0", features = ["web-rocket"] } -shuttle-shared-db = { version = "0.7.0", features = ["postgres"] } -sqlx = { version = "0.6", features = ["runtime-tokio-native-tls", "postgres"] } diff --git a/examples/rocket/postgres/Shuttle.toml b/examples/rocket/postgres/Shuttle.toml deleted file mode 100644 index 539d434b3..000000000 --- a/examples/rocket/postgres/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "postgres-rocket-app" diff --git a/examples/rocket/postgres/schema.sql b/examples/rocket/postgres/schema.sql deleted file mode 100644 index 460e7c23d..000000000 --- a/examples/rocket/postgres/schema.sql +++ /dev/null @@ -1,6 +0,0 @@ -DROP TABLE IF EXISTS todos; - -CREATE TABLE todos ( - id serial PRIMARY KEY, - note TEXT NOT NULL -); diff --git a/examples/rocket/postgres/src/lib.rs b/examples/rocket/postgres/src/lib.rs deleted file mode 100644 index 
dd036fe22..000000000 --- a/examples/rocket/postgres/src/lib.rs +++ /dev/null @@ -1,63 +0,0 @@ -#[macro_use] -extern crate rocket; - -use rocket::response::status::BadRequest; -use rocket::serde::json::Json; -use rocket::State; -use serde::{Deserialize, Serialize}; -use shuttle_service::error::CustomError; -use sqlx::{Executor, FromRow, PgPool}; - -#[get("/")] -async fn retrieve(id: i32, state: &State) -> Result, BadRequest> { - let todo = sqlx::query_as("SELECT * FROM todos WHERE id = $1") - .bind(id) - .fetch_one(&state.pool) - .await - .map_err(|e| BadRequest(Some(e.to_string())))?; - - Ok(Json(todo)) -} - -#[post("/", data = "")] -async fn add( - data: Json, - state: &State, -) -> Result, BadRequest> { - let todo = sqlx::query_as("INSERT INTO todos(note) VALUES ($1) RETURNING id, note") - .bind(&data.note) - .fetch_one(&state.pool) - .await - .map_err(|e| BadRequest(Some(e.to_string())))?; - - Ok(Json(todo)) -} - -struct MyState { - pool: PgPool, -} - -#[shuttle_service::main] -async fn rocket(#[shuttle_shared_db::Postgres] pool: PgPool) -> shuttle_service::ShuttleRocket { - pool.execute(include_str!("../schema.sql")) - .await - .map_err(CustomError::new)?; - - let state = MyState { pool }; - let rocket = rocket::build() - .mount("/todo", routes![retrieve, add]) - .manage(state); - - Ok(rocket) -} - -#[derive(Deserialize)] -struct TodoNew { - pub note: String, -} - -#[derive(Serialize, FromRow)] -struct Todo { - pub id: i32, - pub note: String, -} diff --git a/examples/rocket/secrets/Cargo.toml b/examples/rocket/secrets/Cargo.toml deleted file mode 100644 index 9526d6a60..000000000 --- a/examples/rocket/secrets/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "secrets" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -anyhow = "1.0.62" -rocket = { version = "0.5.0-rc.1", features = ["json"] } -shuttle-secrets = "0.7.0" -shuttle-service = { version = "0.7.0", features = ["web-rocket"] } diff --git a/examples/rocket/secrets/Secrets.toml b/examples/rocket/secrets/Secrets.toml deleted file mode 100644 index ceedf199e..000000000 --- a/examples/rocket/secrets/Secrets.toml +++ /dev/null @@ -1 +0,0 @@ -MY_API_KEY = 'the contents of my API key' diff --git a/examples/rocket/secrets/Shuttle.toml b/examples/rocket/secrets/Shuttle.toml deleted file mode 100644 index 7febf30c9..000000000 --- a/examples/rocket/secrets/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "secrets-rocket-app" diff --git a/examples/rocket/secrets/src/lib.rs b/examples/rocket/secrets/src/lib.rs deleted file mode 100644 index 65755319f..000000000 --- a/examples/rocket/secrets/src/lib.rs +++ /dev/null @@ -1,33 +0,0 @@ -#[macro_use] -extern crate rocket; - -use anyhow::anyhow; -use rocket::response::status::BadRequest; -use rocket::State; -use shuttle_secrets::SecretStore; - -#[get("/secret")] -async fn secret(state: &State) -> Result> { - Ok(state.secret.clone()) -} - -struct MyState { - secret: String, -} - -#[shuttle_service::main] -async fn rocket( - #[shuttle_secrets::Secrets] secret_store: SecretStore, -) -> shuttle_service::ShuttleRocket { - // get secret defined in `Secrets.toml` file. 
- let secret = if let Some(secret) = secret_store.get("MY_API_KEY") { - secret - } else { - return Err(anyhow!("secret was not found").into()); - }; - - let state = MyState { secret }; - let rocket = rocket::build().mount("/", routes![secret]).manage(state); - - Ok(rocket) -} diff --git a/examples/rocket/url-shortener/Cargo.toml b/examples/rocket/url-shortener/Cargo.toml deleted file mode 100644 index d33cba841..000000000 --- a/examples/rocket/url-shortener/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "url-shortener" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -nanoid = "0.4" -rocket = { version = "0.5.0-rc.2", features = ["json"] } -serde = "1.0" -shuttle-service = { version = "0.7.0", features = ["web-rocket"] } -shuttle-shared-db = { version = "0.7.0", features = ["postgres"] } -sqlx = { version = "0.6", features = ["runtime-tokio-native-tls", "postgres"] } -url = "2.2" diff --git a/examples/rocket/url-shortener/README.md b/examples/rocket/url-shortener/README.md deleted file mode 100644 index c8bf86842..000000000 --- a/examples/rocket/url-shortener/README.md +++ /dev/null @@ -1,61 +0,0 @@ -# Url Shortener - -A URL shortener that you can use from your terminal - built with shuttle, rocket and postgres/sqlx. - -## How to use it - -You can use this URL shortener directly from your terminal. Just copy and paste this command to your terminal and replace `` with the URL that you want to shorten - -```bash -curl -X POST -d '' https://s.shuttleapp.rs -``` - -like this - -```bash -curl -X POST -d 'https://docs.rs/shuttle-service/latest/shuttle_service/' https://s.shuttleapp.rs -``` - -you will get the shortened URL back (something like this `https://s.shuttleapp.rs/RvpVU_`) - -## Project structure - -The project consists of the following files - -- `Shuttle.toml` contains the name of the app (if name is `s` domain will be `s.shuttleapp.rs`) -- `migrations` folder is for DB migration files created by [sqlx-cli](https://github.com/launchbadge/sqlx/tree/master/sqlx-cli) -- `src/lib.rs` is where all the magic happens - it creates a shuttle service with two endpoints: one for creating new short URLs and one for handling shortened URLs. - -## How to deploy - -To deploy this app, check out the repository locally - -```bash -$ git clone https://github.com/shuttle-hq/shuttle.git -``` - -navigate to `examples/rocket/url-shortener` - -```bash -$ cd examples/rocket/url-shortener -``` - -install shuttle - -```bash -$ cargo install cargo-shuttle -``` - -login to shuttle - -```bash -$ cargo shuttle login -``` - -Pick a project name that is something unique - in shuttle, -projects are globally unique. 
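Before running the deploy commands below, it may help to see the core of the shortening flow the README describes: validate the submitted URL, then mint a six-character id for it. This is only a sketch — it leans on the `nanoid` and `url` crates already listed in the example's `Cargo.toml`, and the `shorten` helper plus the in-memory `HashMap` are stand-ins of my own for the real database-backed handler in `src/lib.rs`.

```rust
use std::collections::HashMap;

use url::Url;

/// Validate `input` and mint a six-character id for it, mirroring the
/// validation + id-generation steps of the example's `shorten` handler.
/// The in-memory map stands in for the Postgres `urls` table.
fn shorten(input: &str, store: &mut HashMap<String, String>) -> Result<String, url::ParseError> {
    let parsed = Url::parse(input)?; // reject anything that is not a valid URL
    let id = nanoid::nanoid!(6);     // e.g. "RvpVU_"
    store.insert(id.clone(), parsed.to_string());
    Ok(format!("https://s.shuttleapp.rs/{id}"))
}

fn main() {
    let mut store = HashMap::new();
    println!("{}", shorten("https://docs.rs/shuttle-service", &mut store).unwrap());
    assert!(shorten("not a url", &mut store).is_err());
}
```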
Then run - -```bash -$ cargo shuttle project new --name=$PROJECT_NAME -$ cargo shuttle deploy --name=$PROJECT_NAME -``` diff --git a/examples/rocket/url-shortener/Shuttle.toml b/examples/rocket/url-shortener/Shuttle.toml deleted file mode 100644 index c7d177586..000000000 --- a/examples/rocket/url-shortener/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "s" diff --git a/examples/rocket/url-shortener/migrations/20220324143837_urls.sql b/examples/rocket/url-shortener/migrations/20220324143837_urls.sql deleted file mode 100644 index 16144095c..000000000 --- a/examples/rocket/url-shortener/migrations/20220324143837_urls.sql +++ /dev/null @@ -1,4 +0,0 @@ -CREATE TABLE urls ( - id VARCHAR(6) PRIMARY KEY, - url VARCHAR NOT NULL -); diff --git a/examples/rocket/url-shortener/src/lib.rs b/examples/rocket/url-shortener/src/lib.rs deleted file mode 100644 index fa0f17ba1..000000000 --- a/examples/rocket/url-shortener/src/lib.rs +++ /dev/null @@ -1,83 +0,0 @@ -#[macro_use] -extern crate rocket; - -use rocket::{ - http::Status, - response::{status, Redirect}, - routes, State, -}; -use serde::Serialize; -use shuttle_service::{error::CustomError, ShuttleRocket}; -use sqlx::migrate::Migrator; -use sqlx::{FromRow, PgPool}; -use url::Url; - -struct AppState { - pool: PgPool, -} - -#[derive(Serialize, FromRow)] -struct StoredURL { - pub id: String, - pub url: String, -} - -#[get("/")] -async fn redirect(id: String, state: &State) -> Result> { - let stored_url: StoredURL = sqlx::query_as("SELECT * FROM urls WHERE id = $1") - .bind(id) - .fetch_one(&state.pool) - .await - .map_err(|err| match err { - sqlx::Error::RowNotFound => status::Custom( - Status::NotFound, - "the requested shortened URL does not exist".into(), - ), - _ => status::Custom( - Status::InternalServerError, - "something went wrong, sorry 🤷".into(), - ), - })?; - - Ok(Redirect::to(stored_url.url)) -} - -#[post("/", data = "")] -async fn shorten(url: String, state: &State) -> Result> { - let id = &nanoid::nanoid!(6); - - let parsed_url = Url::parse(&url).map_err(|err| { - status::Custom( - Status::UnprocessableEntity, - format!("url validation failed: {err}"), - ) - })?; - - sqlx::query("INSERT INTO urls(id, url) VALUES ($1, $2)") - .bind(id) - .bind(parsed_url.as_str()) - .execute(&state.pool) - .await - .map_err(|_| { - status::Custom( - Status::InternalServerError, - "something went wrong, sorry 🤷".into(), - ) - })?; - - Ok(format!("https://s.shuttleapp.rs/{id}")) -} - -static MIGRATOR: Migrator = sqlx::migrate!(); - -#[shuttle_service::main] -async fn rocket(#[shuttle_shared_db::Postgres] pool: PgPool) -> ShuttleRocket { - MIGRATOR.run(&pool).await.map_err(CustomError::new)?; - - let state = AppState { pool }; - let rocket = rocket::build() - .mount("/", routes![redirect, shorten]) - .manage(state); - - Ok(rocket) -} diff --git a/examples/salvo/hello-world/Cargo.toml b/examples/salvo/hello-world/Cargo.toml deleted file mode 100644 index 951a529c1..000000000 --- a/examples/salvo/hello-world/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -salvo = "0.34.3" -shuttle-service = { version = "0.7.0", features = ["web-salvo"] } diff --git a/examples/salvo/hello-world/Shuttle.toml b/examples/salvo/hello-world/Shuttle.toml deleted file mode 100644 index 152ba13ab..000000000 --- a/examples/salvo/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-salvo-app" diff --git a/examples/salvo/hello-world/src/lib.rs 
b/examples/salvo/hello-world/src/lib.rs deleted file mode 100644 index d5ce829d4..000000000 --- a/examples/salvo/hello-world/src/lib.rs +++ /dev/null @@ -1,13 +0,0 @@ -use salvo::prelude::*; - -#[handler] -async fn hello_world(res: &mut Response) { - res.render(Text::Plain("Hello, world!")); -} - -#[shuttle_service::main] -async fn salvo() -> shuttle_service::ShuttleSalvo { - let router = Router::with_path("hello").get(hello_world); - - Ok(router) -} diff --git a/examples/serenity/hello-world/Cargo.toml b/examples/serenity/hello-world/Cargo.toml deleted file mode 100644 index 741dc89d0..000000000 --- a/examples/serenity/hello-world/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -anyhow = "1.0.62" -serenity = { version = "0.11.5", default-features = false, features = ["client", "gateway", "rustls_backend", "model"] } -shuttle-secrets = "0.7.0" -shuttle-service = { version = "0.7.0", features = ["bot-serenity"] } -tracing = "0.1.35" diff --git a/examples/serenity/hello-world/README.md b/examples/serenity/hello-world/README.md deleted file mode 100644 index 086084e4e..000000000 --- a/examples/serenity/hello-world/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# Serenity Hello World Bot with Shuttle - -In this example we will deploy a Serenity bot with Shuttle that responds to the `!hello` command with `world!`. To run this bot we need a valid Discord Token. To get started log in to the [Discord developer portal](https://discord.com/developers/applications). - -1. Click the New Application button, name your application and click Create. -2. Navigate to the Bot tab in the lefthand menu, and add a new bot. -3. On the bot page click the Reset Token button to reveal your token. Put this token in your `Secrets.toml`. It's very important that you don't reveal your token to anyone, as it can be abused. Create a `.gitignore` file to omit your `Secrets.toml` from version control. -4. For the sake of this example, you also need to scroll down on the bot page to the Message Content Intent section and enable that option. - -To add the bot to a server we need to create an invite link. - -1. On your bot's application page, open the OAuth2 page via the lefthand panel. -2. Go to the URL Generator via the lefthand panel, and select the `bot` scope as well as the `Send Messages` permission in the Bot Permissions section. -3. Copy the URL, open it in your browser and select a Discord server you wish to invite the bot to. - -For more information please refer to the [Discord docs](https://discord.com/developers/docs/getting-started) as well as the [Serenity repo](https://github.com/serenity-rs/serenity) for more examples. 
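One detail worth calling out from the README above: enabling the Message Content Intent in the developer portal only grants permission — the bot still has to request the matching gateway intents when the client is built. The flags below are the ones the example's `src/lib.rs` uses; the containment check is purely illustrative and assumes the `serenity` 0.11 dependency from the example's `Cargo.toml`.

```rust
use serenity::prelude::GatewayIntents;

fn main() {
    // The hello-world bot reads message bodies, so it requests both intents;
    // MESSAGE_CONTENT must additionally be enabled on the bot page in the portal.
    let intents = GatewayIntents::GUILD_MESSAGES | GatewayIntents::MESSAGE_CONTENT;

    assert!(intents.contains(GatewayIntents::MESSAGE_CONTENT));
    println!("requested intent bits: {}", intents.bits());
}
```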
diff --git a/examples/serenity/hello-world/Secrets.toml b/examples/serenity/hello-world/Secrets.toml deleted file mode 100644 index 966fdb0c4..000000000 --- a/examples/serenity/hello-world/Secrets.toml +++ /dev/null @@ -1 +0,0 @@ -DISCORD_TOKEN = 'the contents of my discord token' diff --git a/examples/serenity/hello-world/Shuttle.toml b/examples/serenity/hello-world/Shuttle.toml deleted file mode 100644 index 39fe09834..000000000 --- a/examples/serenity/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-serenity-bot" diff --git a/examples/serenity/hello-world/src/lib.rs b/examples/serenity/hello-world/src/lib.rs deleted file mode 100644 index 9c3716c5d..000000000 --- a/examples/serenity/hello-world/src/lib.rs +++ /dev/null @@ -1,46 +0,0 @@ -use anyhow::anyhow; -use serenity::async_trait; -use serenity::model::channel::Message; -use serenity::model::gateway::Ready; -use serenity::prelude::*; -use shuttle_secrets::SecretStore; -use tracing::{error, info}; - -struct Bot; - -#[async_trait] -impl EventHandler for Bot { - async fn message(&self, ctx: Context, msg: Message) { - if msg.content == "!hello" { - if let Err(e) = msg.channel_id.say(&ctx.http, "world!").await { - error!("Error sending message: {:?}", e); - } - } - } - - async fn ready(&self, _: Context, ready: Ready) { - info!("{} is connected!", ready.user.name); - } -} - -#[shuttle_service::main] -async fn serenity( - #[shuttle_secrets::Secrets] secret_store: SecretStore, -) -> shuttle_service::ShuttleSerenity { - // Get the discord token set in `Secrets.toml` - let token = if let Some(token) = secret_store.get("DISCORD_TOKEN") { - token - } else { - return Err(anyhow!("'DISCORD_TOKEN' was not found").into()); - }; - - // Set gateway intents, which decides what events the bot will be notified about - let intents = GatewayIntents::GUILD_MESSAGES | GatewayIntents::MESSAGE_CONTENT; - - let client = Client::builder(&token, intents) - .event_handler(Bot) - .await - .expect("Err creating client"); - - Ok(client) -} diff --git a/examples/serenity/postgres/Cargo.toml b/examples/serenity/postgres/Cargo.toml deleted file mode 100644 index 8310116f2..000000000 --- a/examples/serenity/postgres/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "serenity-postgres" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -anyhow = "1.0.62" -serde = "1.0" -serenity = { version = "0.11.5", default-features = false, features = ["client", "gateway", "rustls_backend", "model"] } -shuttle-secrets = "0.7.0" -shuttle-service = { version = "0.7.0", features = ["bot-serenity"] } -shuttle-shared-db = { version = "0.7.0", features = ["postgres"] } -sqlx = { version = "0.6", features = ["runtime-tokio-native-tls", "postgres"] } -tracing = "0.1.35" diff --git a/examples/serenity/postgres/README.md b/examples/serenity/postgres/README.md deleted file mode 100644 index 271d3ee87..000000000 --- a/examples/serenity/postgres/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# Serenity Todo List Bot with Shuttle - -In this example we will deploy a Serenity bot with Shuttle that can add, list and complete todos using [Application Commands](https://discord.com/developers/docs/interactions/application-commands). To persist the todos we need a database. We will have Shuttle provison a PostgreSQL database for us by enabling the `sqlx-postgres` feature for `shuttle-service` and passing `#[shared::Postgres] pool: PgPool` as an argument to our `main` function. - -To run this bot we need a valid Discord Token. 
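Before the Discord setup steps that follow, one clarification on the provisioning described above: in the code for this example (the full `src/lib.rs` appears further down in this diff) the attribute is written `#[shuttle_shared_db::Postgres]` from the `shuttle-shared-db` crate, together with `#[shuttle_secrets::Secrets]` for the token. Condensed to just the entry-point signature, with the elided body marked by a `todo!()` of my own:

```rust
use shuttle_secrets::SecretStore;
use sqlx::PgPool;

#[shuttle_service::main]
async fn serenity(
    #[shuttle_shared_db::Postgres] pool: PgPool,           // Postgres provisioned by Shuttle, injected as a pool
    #[shuttle_secrets::Secrets] secret_store: SecretStore, // values read from Secrets.toml
) -> shuttle_service::ShuttleSerenity {
    // ... run the schema migration and build the serenity Client, as in the
    // full example below ...
    todo!()
}
```

Declaring resources as annotated parameters like this is what lets Shuttle provision the database and inject the secrets at deploy time; the function simply receives a ready `PgPool` and `SecretStore`.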
To get started log in to the [Discord developer portal](https://discord.com/developers/applications). - -1. Click the New Application button, name your application and click Create. -2. Navigate to the Bot tab in the lefthand menu, and add a new bot. -3. On the bot page click the Reset Token button to reveal your token. Put this token in your `Secrets.toml`. It's very important that you don't reveal your token to anyone, as it can be abused. Create a `.gitignore` file to omit your `Secrets.toml` from version control. - -To add the bot to a server we need to create an invite link. - -1. On your bot's application page, open the OAuth2 page via the lefthand panel. -2. Go to the URL Generator via the lefthand panel, and select the `applications.commands` scope. -3. Copy the URL, open it in your browser and select a Discord server you wish to invite the bot to. - -For this example we also need a `GuildId`. - -1. Open your Discord client, open the User Settings and navigate to Advanced. Enable Developer Mode. -2. Right click the Discord server you'd like to use the bot in and click Copy Id. This is your Guild ID. -3. Store it in `Secrets.toml` and retrieve it like we did for the Discord Token. - -For more information please refer to the [Discord docs](https://discord.com/developers/docs/getting-started) as well as the [Serenity repo](https://github.com/serenity-rs/serenity) for more examples. diff --git a/examples/serenity/postgres/Secrets.toml b/examples/serenity/postgres/Secrets.toml deleted file mode 100644 index a3249ddae..000000000 --- a/examples/serenity/postgres/Secrets.toml +++ /dev/null @@ -1,2 +0,0 @@ -DISCORD_TOKEN = 'the contents of my discord token' -GUILD_ID = "123456789" diff --git a/examples/serenity/postgres/Shuttle.toml b/examples/serenity/postgres/Shuttle.toml deleted file mode 100644 index bc686807d..000000000 --- a/examples/serenity/postgres/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "postgres-serenity-bot" diff --git a/examples/serenity/postgres/schema.sql b/examples/serenity/postgres/schema.sql deleted file mode 100644 index 2e5ebbe0d..000000000 --- a/examples/serenity/postgres/schema.sql +++ /dev/null @@ -1,7 +0,0 @@ -DROP TABLE IF EXISTS todos; - -CREATE TABLE todos ( - id serial PRIMARY KEY, - user_id BIGINT NULL, - note TEXT NOT NULL -); diff --git a/examples/serenity/postgres/src/db.rs b/examples/serenity/postgres/src/db.rs deleted file mode 100644 index 29137b432..000000000 --- a/examples/serenity/postgres/src/db.rs +++ /dev/null @@ -1,54 +0,0 @@ -use sqlx::{FromRow, PgPool}; -use std::fmt::Write; - -#[derive(FromRow)] -struct Todo { - pub id: i32, - pub note: String, -} - -pub(crate) async fn add(pool: &PgPool, note: &str, user_id: i64) -> Result { - sqlx::query("INSERT INTO todos (note, user_id) VALUES ($1, $2)") - .bind(note) - .bind(user_id) - .execute(pool) - .await?; - - Ok(format!("Added `{}` to your todo list", note)) -} - -pub(crate) async fn complete( - pool: &PgPool, - index: &i64, - user_id: i64, -) -> Result { - let todo: Todo = sqlx::query_as( - "SELECT id, note FROM todos WHERE user_id = $1 ORDER BY id LIMIT 1 OFFSET $2", - ) - .bind(user_id) - .bind(index - 1) - .fetch_one(pool) - .await?; - - sqlx::query("DELETE FROM todos WHERE id = $1") - .bind(todo.id) - .execute(pool) - .await?; - - Ok(format!("Completed `{}`!", todo.note)) -} - -pub(crate) async fn list(pool: &PgPool, user_id: i64) -> Result { - let todos: Vec = - sqlx::query_as("SELECT note, id FROM todos WHERE user_id = $1 ORDER BY id") - .bind(user_id) - .fetch_all(pool) - 
.await?; - - let mut response = format!("You have {} pending todos:\n", todos.len()); - for (i, todo) in todos.iter().enumerate() { - writeln!(&mut response, "{}. {}", i + 1, todo.note).unwrap(); - } - - Ok(response) -} diff --git a/examples/serenity/postgres/src/lib.rs b/examples/serenity/postgres/src/lib.rs deleted file mode 100644 index 8815dcdb4..000000000 --- a/examples/serenity/postgres/src/lib.rs +++ /dev/null @@ -1,155 +0,0 @@ -use anyhow::Context as _; -use serenity::async_trait; -use serenity::model::application::command::CommandOptionType; -use serenity::model::application::interaction::application_command::CommandDataOptionValue; -use serenity::model::application::interaction::{Interaction, InteractionResponseType}; -use serenity::model::gateway::Ready; -use serenity::model::id::GuildId; -use serenity::prelude::*; -use shuttle_secrets::SecretStore; -use sqlx::{Executor, PgPool}; -use tracing::{error, info}; - -mod db; - -struct Bot { - database: PgPool, - guild_id: String, -} - -#[async_trait] -impl EventHandler for Bot { - async fn interaction_create(&self, ctx: Context, interaction: Interaction) { - let user_id: i64 = interaction - .clone() - .application_command() - .unwrap() - .user - .id - .into(); - - if let Interaction::ApplicationCommand(command) = interaction { - info!("Received command interaction: {:#?}", command); - - let content = match command.data.name.as_str() { - "todo" => { - let command = command.data.options.get(0).expect("Expected command"); - - // if the todo subcommand has a CommandOption the command is either `add` or `complete` - if let Some(subcommand) = command.options.get(0) { - match subcommand.resolved.as_ref().expect("Valid subcommand") { - CommandDataOptionValue::String(note) => { - db::add(&self.database, note, user_id).await.unwrap() - } - CommandDataOptionValue::Integer(index) => { - db::complete(&self.database, index, user_id) - .await - .unwrap_or_else(|_| { - "Please submit a valid index from your todo list" - .to_string() - }) - } - _ => "Please enter a valid todo".to_string(), - } - // if the todo subcommand doesn't have a CommandOption the command is `list` - } else { - db::list(&self.database, user_id).await.unwrap() - } - } - _ => "Command not implemented".to_string(), - }; - - if let Err(why) = command - .create_interaction_response(&ctx.http, |response| { - response - .kind(InteractionResponseType::ChannelMessageWithSource) - .interaction_response_data(|message| message.content(content)) - }) - .await - { - error!("Cannot respond to slash command: {}", why); - } - } - } - - async fn ready(&self, ctx: Context, ready: Ready) { - info!("{} is connected!", ready.user.name); - - let guild_id = GuildId(self.guild_id.parse().unwrap()); - - let _ = GuildId::set_application_commands(&guild_id, &ctx.http, |commands| { - commands.create_application_command(|command| { - command - .name("todo") - .description("Add, list and complete todos") - .create_option(|option| { - option - .name("add") - .description("Add a new todo") - .kind(CommandOptionType::SubCommand) - .create_sub_option(|option| { - option - .name("note") - .description("The todo note to add") - .kind(CommandOptionType::String) - .min_length(2) - .max_length(100) - .required(true) - }) - }) - .create_option(|option| { - option - .name("complete") - .description("The todo to complete") - .kind(CommandOptionType::SubCommand) - .create_sub_option(|option| { - option - .name("index") - .description("The index of the todo to complete") - .kind(CommandOptionType::Integer) - 
.min_int_value(1) - .required(true) - }) - }) - .create_option(|option| { - option - .name("list") - .description("List your todos") - .kind(CommandOptionType::SubCommand) - }) - }) - }) - .await; - } -} - -#[shuttle_service::main] -async fn serenity( - #[shuttle_shared_db::Postgres] pool: PgPool, - #[shuttle_secrets::Secrets] secret_store: SecretStore, -) -> shuttle_service::ShuttleSerenity { - // Get the discord token set in `Secrets.toml` - let token = secret_store - .get("DISCORD_TOKEN") - .context("'DISCORD_TOKEN' was not found")?; - // Get the guild_id set in `Secrets.toml` - let guild_id = secret_store - .get("GUILD_ID") - .context("'GUILD_ID' was not found")?; - - // Run the schema migration - pool.execute(include_str!("../schema.sql")) - .await - .context("failed to run migrations")?; - - let bot = Bot { - database: pool, - guild_id, - }; - let client = Client::builder(&token, GatewayIntents::empty()) - .event_handler(bot) - .await - .expect("Err creating client"); - - Ok(client) -} diff --git a/examples/thruster/hello-world/Cargo.toml b/examples/thruster/hello-world/Cargo.toml deleted file mode 100644 index ed56f40b7..000000000 --- a/examples/thruster/hello-world/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -shuttle-service = { version = "0.7.0", features = ["web-thruster"] } -thruster = { version = "1.2.6", features = ["hyper_server"] } diff --git a/examples/thruster/hello-world/Shuttle.toml b/examples/thruster/hello-world/Shuttle.toml deleted file mode 100644 index 8dca93fff..000000000 --- a/examples/thruster/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-thruster-app" diff --git a/examples/thruster/hello-world/src/lib.rs b/examples/thruster/hello-world/src/lib.rs deleted file mode 100644 index 1e6de6dab..000000000 --- a/examples/thruster/hello-world/src/lib.rs +++ /dev/null @@ -1,17 +0,0 @@ -use thruster::{ - context::basic_hyper_context::{generate_context, BasicHyperContext as Ctx, HyperRequest}, - m, middleware_fn, App, HyperServer, MiddlewareNext, MiddlewareResult, ThrusterServer, -}; - -#[middleware_fn] -async fn hello(mut context: Ctx, _next: MiddlewareNext) -> MiddlewareResult { - context.body("Hello, World!"); - Ok(context) -} - -#[shuttle_service::main] -async fn thruster() -> shuttle_service::ShuttleThruster> { - Ok(HyperServer::new( - App::::create(generate_context, ()).get("/hello", m![hello]), - )) -} diff --git a/examples/thruster/postgres/Cargo.toml b/examples/thruster/postgres/Cargo.toml deleted file mode 100644 index 34f22dcbf..000000000 --- a/examples/thruster/postgres/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "postgres" -version = "0.1.0" -edition = "2021" - -[lib] -crate-type = ["cdylib"] -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -hyper = "0.14.20" -serde = { version = "1.0", features = ["derive"] } -serde_json = { version = "1.0" } -shuttle-aws-rds = { version = "0.7.0", features = ["postgres"] } -shuttle-service = { version = "0.7.0", features = ["web-thruster"] } -sqlx = { version = "0.6", features = ["runtime-tokio-native-tls", "postgres"] } -thruster = { version = "1.2.6", features = ["hyper_server"] } diff --git a/examples/thruster/postgres/Shuttle.toml b/examples/thruster/postgres/Shuttle.toml deleted file mode 100644 index 4ffbeb3e5..000000000 --- a/examples/thruster/postgres/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = 
"postgres-tide-app" diff --git a/examples/thruster/postgres/schema.sql b/examples/thruster/postgres/schema.sql deleted file mode 100644 index 460e7c23d..000000000 --- a/examples/thruster/postgres/schema.sql +++ /dev/null @@ -1,6 +0,0 @@ -DROP TABLE IF EXISTS todos; - -CREATE TABLE todos ( - id serial PRIMARY KEY, - note TEXT NOT NULL -); diff --git a/examples/thruster/postgres/src/lib.rs b/examples/thruster/postgres/src/lib.rs deleted file mode 100644 index 14a1e8626..000000000 --- a/examples/thruster/postgres/src/lib.rs +++ /dev/null @@ -1,111 +0,0 @@ -use serde::{Deserialize, Serialize}; -use shuttle_service::error::CustomError; -use sqlx::{Executor, FromRow, PgPool}; -use thruster::{ - context::{hyper_request::HyperRequest, typed_hyper_context::TypedHyperContext}, - errors::{ErrorSet, ThrusterError}, - m, middleware_fn, App, Context, HyperServer, MiddlewareNext, MiddlewareResult, ThrusterServer, -}; - -type Ctx = TypedHyperContext; - -#[derive(Deserialize)] -struct TodoNew { - pub note: String, -} - -#[derive(Serialize, FromRow)] -struct Todo { - pub id: i32, - pub note: String, -} - -struct ServerConfig { - pool: PgPool, -} - -#[derive(Clone)] -struct RequestConfig { - pool: PgPool, -} - -fn generate_context(request: HyperRequest, state: &ServerConfig, _path: &str) -> Ctx { - Ctx::new( - request, - RequestConfig { - pool: state.pool.clone(), - }, - ) -} - -#[middleware_fn] -async fn retrieve(mut context: Ctx, _next: MiddlewareNext) -> MiddlewareResult { - let id: i32 = context - .query_params - .get("id") - .ok_or_else(|| { - ThrusterError::parsing_error( - Ctx::new_without_request(context.extra.clone()), - "id is required", - ) - })? - .parse() - .map_err(|_e| { - ThrusterError::parsing_error( - Ctx::new_without_request(context.extra.clone()), - "id must be a number", - ) - })?; - - let todo: Todo = sqlx::query_as("SELECT * FROM todos WHERE id = $1") - .bind(id) - .fetch_one(&context.extra.pool) - .await - .map_err(|_e| { - ThrusterError::not_found_error(Ctx::new_without_request(context.extra.clone())) - })?; - - context.set_body(serde_json::to_vec(&todo).unwrap()); - - Ok(context) -} - -#[middleware_fn] -async fn add(context: Ctx, _next: MiddlewareNext) -> MiddlewareResult { - let extra = context.extra.clone(); - - let (body, mut context) = context - .get_body() - .await - .map_err(|_e| ThrusterError::generic_error(Ctx::new_without_request(extra)))?; - let data: TodoNew = serde_json::from_str(&body).map_err(|_e| { - ThrusterError::generic_error(Ctx::new_without_request(context.extra.clone())) - })?; - - let todo: Todo = sqlx::query_as("INSERT INTO todos(note) VALUES ($1) RETURNING id, note") - .bind(&data.note) - .fetch_one(&context.extra.pool) - .await - .map_err(|_e| { - ThrusterError::generic_error(Ctx::new_without_request(context.extra.clone())) - })?; - - context.set_body(serde_json::to_vec(&todo).unwrap()); - - Ok(context) -} - -#[shuttle_service::main] -async fn thruster( - #[shuttle_aws_rds::Postgres] pool: PgPool, -) -> shuttle_service::ShuttleThruster> { - pool.execute(include_str!("../schema.sql")) - .await - .map_err(CustomError::new)?; - - Ok(HyperServer::new( - App::::create(generate_context, ServerConfig { pool }) - .post("/todos", m![add]) - .get("/todos/:id", m![retrieve]), - )) -} diff --git a/examples/tide/hello-world/Cargo.toml b/examples/tide/hello-world/Cargo.toml deleted file mode 100644 index e331caaf4..000000000 --- a/examples/tide/hello-world/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - 
-[lib] - -[dependencies] -shuttle-service = { version = "0.7.0", features = ["web-tide"] } -tide = "0.16.0" diff --git a/examples/tide/hello-world/Shuttle.toml b/examples/tide/hello-world/Shuttle.toml deleted file mode 100644 index 319271968..000000000 --- a/examples/tide/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-tide-app" diff --git a/examples/tide/hello-world/src/lib.rs b/examples/tide/hello-world/src/lib.rs deleted file mode 100644 index db95792bd..000000000 --- a/examples/tide/hello-world/src/lib.rs +++ /dev/null @@ -1,9 +0,0 @@ -#[shuttle_service::main] -async fn tide() -> shuttle_service::ShuttleTide<()> { - let mut app = tide::new(); - app.with(tide::log::LogMiddleware::new()); - - app.at("/hello").get(|_| async { Ok("Hello, world!") }); - - Ok(app) -} diff --git a/examples/tide/postgres/Cargo.toml b/examples/tide/postgres/Cargo.toml deleted file mode 100644 index 1c46c424a..000000000 --- a/examples/tide/postgres/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "postgres" -version = "0.1.0" -edition = "2021" - -[lib] -crate-type = ["cdylib"] -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -serde = { version = "1.0", features = ["derive"] } -shuttle-aws-rds = { version = "0.7.0", features = ["postgres"] } -shuttle-service = { version = "0.7.0", features = ["web-tide"] } -sqlx = { version = "0.6", features = ["runtime-tokio-native-tls", "postgres"] } -tide = "0.16.0" diff --git a/examples/tide/postgres/Shuttle.toml b/examples/tide/postgres/Shuttle.toml deleted file mode 100644 index 4ffbeb3e5..000000000 --- a/examples/tide/postgres/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "postgres-tide-app" diff --git a/examples/tide/postgres/schema.sql b/examples/tide/postgres/schema.sql deleted file mode 100644 index 460e7c23d..000000000 --- a/examples/tide/postgres/schema.sql +++ /dev/null @@ -1,6 +0,0 @@ -DROP TABLE IF EXISTS todos; - -CREATE TABLE todos ( - id serial PRIMARY KEY, - note TEXT NOT NULL -); diff --git a/examples/tide/postgres/src/lib.rs b/examples/tide/postgres/src/lib.rs deleted file mode 100644 index 0ad8dd16f..000000000 --- a/examples/tide/postgres/src/lib.rs +++ /dev/null @@ -1,56 +0,0 @@ -use serde::{Deserialize, Serialize}; -use shuttle_service::{error::CustomError, ShuttleTide}; -use sqlx::{Executor, FromRow, PgPool}; -use tide::{Body, Request}; - -async fn retrieve(req: Request) -> tide::Result { - let id: i32 = req.param("id")?.parse()?; - let todo: Todo = sqlx::query_as("SELECT * FROM todos WHERE id = $1") - .bind(id) - .fetch_one(&req.state().pool) - .await?; - - Body::from_json(&todo).map(Into::into) -} - -async fn add(mut req: Request) -> tide::Result { - let data: TodoNew = req.body_json().await?; - let todo: Todo = sqlx::query_as("INSERT INTO todos(note) VALUES ($1) RETURNING id, note") - .bind(&data.note) - .fetch_one(&req.state().pool) - .await?; - - Body::from_json(&todo).map(Into::into) -} - -#[derive(Clone)] -struct MyState { - pool: PgPool, -} - -#[shuttle_service::main] -async fn tide(#[shuttle_aws_rds::Postgres] pool: PgPool) -> ShuttleTide { - pool.execute(include_str!("../schema.sql")) - .await - .map_err(CustomError::new)?; - - let state = MyState { pool }; - let mut app = tide::with_state(state); - - app.with(tide::log::LogMiddleware::new()); - app.at("/todo").post(add); - app.at("/todo/:id").get(retrieve); - - Ok(app) -} - -#[derive(Deserialize)] -struct TodoNew { - pub note: String, -} - -#[derive(Serialize, FromRow)] -struct Todo { - 
pub id: i32, - pub note: String, -} diff --git a/examples/tower/hello-world/Cargo.toml b/examples/tower/hello-world/Cargo.toml deleted file mode 100644 index 45fc5516c..000000000 --- a/examples/tower/hello-world/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -hyper = { version = "0.14", features = ["full"] } -shuttle-service = { version = "0.7.0", features = ["web-tower"] } -tower = { version = "0.4", features = ["full"] } diff --git a/examples/tower/hello-world/Shuttle.toml b/examples/tower/hello-world/Shuttle.toml deleted file mode 100644 index b99b1f923..000000000 --- a/examples/tower/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-tower-app" diff --git a/examples/tower/hello-world/src/lib.rs b/examples/tower/hello-world/src/lib.rs deleted file mode 100644 index 20a495455..000000000 --- a/examples/tower/hello-world/src/lib.rs +++ /dev/null @@ -1,34 +0,0 @@ -use std::convert::Infallible; -use std::future::Future; -use std::pin::Pin; -use std::task::{Context, Poll}; - -#[derive(Clone)] -struct HelloWorld; - -impl tower::Service> for HelloWorld { - type Response = hyper::Response; - type Error = Infallible; - type Future = Pin> + Send + Sync>>; - - fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { - Poll::Ready(Ok(())) - } - - fn call(&mut self, _req: hyper::Request) -> Self::Future { - let body = hyper::Body::from("Hello, world!"); - let resp = hyper::Response::builder() - .status(200) - .body(body) - .expect("Unable to create the `hyper::Response` object"); - - let fut = async { Ok(resp) }; - - Box::pin(fut) - } -} - -#[shuttle_service::main] -async fn tower() -> Result { - Ok(HelloWorld) -} diff --git a/examples/warp/hello-world/Cargo.toml b/examples/warp/hello-world/Cargo.toml deleted file mode 100644 index 9bfc17ab5..000000000 --- a/examples/warp/hello-world/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "hello-world" -version = "0.1.0" -edition = "2021" - -[lib] - -[dependencies] -shuttle-service = { version = "0.7.0", features = ["web-warp"] } -warp = "0.3.2" diff --git a/examples/warp/hello-world/Shuttle.toml b/examples/warp/hello-world/Shuttle.toml deleted file mode 100644 index a8e7f48a1..000000000 --- a/examples/warp/hello-world/Shuttle.toml +++ /dev/null @@ -1 +0,0 @@ -name = "hello-world-warp-app" diff --git a/examples/warp/hello-world/src/lib.rs b/examples/warp/hello-world/src/lib.rs deleted file mode 100644 index 0b8eb6db9..000000000 --- a/examples/warp/hello-world/src/lib.rs +++ /dev/null @@ -1,8 +0,0 @@ -use warp::Filter; -use warp::Reply; - -#[shuttle_service::main] -async fn warp() -> shuttle_service::ShuttleWarp<(impl Reply,)> { - let route = warp::any().map(|| "Hello, World"); - Ok(route.boxed()) -} diff --git a/extras/panamax/Containerfile b/extras/panamax/Containerfile new file mode 100644 index 000000000..911b930f1 --- /dev/null +++ b/extras/panamax/Containerfile @@ -0,0 +1,7 @@ +ARG PANAMAX_TAG= + +FROM panamaxrs/panamax:${PANAMAX_TAG} + +COPY mirror.toml /mirror/mirror.toml + +CMD ["serve", "/mirror"] diff --git a/extras/panamax/mirror.toml b/extras/panamax/mirror.toml new file mode 100644 index 000000000..e43bbaa31 --- /dev/null +++ b/extras/panamax/mirror.toml @@ -0,0 +1,128 @@ +# This is a Panamax mirror. It is a self-contained directory made to be easily copied +# to an offline network or machine via rsync, USB, or another method. 
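Returning briefly to the tower `HelloWorld` example above (just before the Panamax files): a `tower::Service` is driven by polling `poll_ready` and then invoking `call`, and `tower::ServiceExt::oneshot` bundles the two, which makes such an implementation easy to exercise in isolation. The sketch below assumes `tokio`, `hyper`, and `tower` with its `util` feature (covered by the example's `features = ["full"]`); it rebuilds an equivalent service with `service_fn` so the snippet stays self-contained.

```rust
use std::convert::Infallible;

use tower::{service_fn, ServiceExt};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A stand-in equivalent to the example's `HelloWorld` service.
    let svc = service_fn(|_req: hyper::Request<hyper::Body>| async {
        Ok::<_, Infallible>(hyper::Response::new(hyper::Body::from("Hello, world!")))
    });

    // `oneshot` waits until the service is ready, then performs a single call.
    let response = svc.oneshot(hyper::Request::new(hyper::Body::empty())).await?;
    assert_eq!(response.status(), 200);

    let body = hyper::body::to_bytes(response.into_body()).await?;
    println!("{}", String::from_utf8_lossy(&body));
    Ok(())
}
```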
+ +# When offline, Panamax also includes a "serve" command that can be used to serve +# rustup and cargo clients from the mirror. This will also give setup instructions +# on the homepage. + +[mirror] +# Global mirror settings. + + +# Number of download retries before giving up. +retries = 5 + + +# Contact information for the user agent. +# This is entirely optional, and is not required for the crates.io CDN. +# You may want to set this if you are mirroring from somewhere else. +# contact = "your@email.com" + + +[rustup] +# These are the configuration parameters for the rustup half of the mirror. +# This will download the rustup-init files, as well as all components needed +# to run Rust on a machine. + + +# Perform rustup synchronization. Set this to false if you only want to mirror crates.io. +sync = false + + +# Whether to mirror XZ archives. These archives are more efficiently compressed +# than the GZ archives, and rustup uses them by default. +download_xz = true +# Whether to mirror GZ archives, for further backwards compatibility with rustup. +download_gz = false + + +# Number of downloads that can be ran in parallel. +download_threads = 16 + + +# Where to download rustup files from. +source = "https://static.rust-lang.org" + + +# How many historical versions of Rust to keep. +# Setting these to 1 will keep only the latest version. +# Setting these to 2 or higher will keep the latest version, as well as historical versions. +# Setting these to 0 will stop Panamax from downloading the release entirely. +# Removing the line will keep all release versions. +keep_latest_stables = 1 +keep_latest_betas = 1 +keep_latest_nightlies = 1 + + +# Pinned versions of Rust to download and keep alongside latest stable/beta/nightly +# Version specifiers should be in the rustup toolchain format: +# +# [-][-] +# +# = stable|beta|nightly|| +# = YYYY-MM-DD +# = +# +# e.g. valid versions could be "1.42", "1.42.0", and "nightly-2014-12-18" +# Uncomment the following lines to pin extra rust versions: + +#pinned_rust_versions = [ +# "1.42" +#] + + +# UNIX platforms to include in the mirror +# Uncomment the following lines to limit which platforms get downloaded. +# This affects both rustup-inits and components. + +# platforms_unix = [ +# "arm-unknown-linux-gnueabi", +# "x86_64-unknown-linux-gnu", +# "x86_64-unknown-linux-musl", +# ] + + +# Windows platforms to include in the mirror +# Uncomment the following lines to limit which platforms get downloaded. +# This affects both rustup-inits and components. + +# platforms_windows = [ +# "x86_64-pc-windows-gnu", +# "x86_64-pc-windows-msvc", +# ] + + +# Whether to download the rustc-dev component. +# This component isn't always needed, so setting this to false can save lots of space. +download_dev = false + + +[crates] +# These are the configuration parameters for the crates.io half of the mirror. +# This will download the crates.io-index, as well as the crates themselves. +# Once downloaded, it will then (optionally) rewrite the config.json to point to your mirror. + + +# Perform crates synchronization. Set this to false if you only want to mirror rustup. +sync = true + + +# Number of downloads that can be ran in parallel. +download_threads = 64 + + +# Where to download the crates from. +# The default, "https://crates.io/api/v1/crates", will actually instead use the corresponding +# url at https://static.crates.io in order to avoid a redirect and rate limiting +source = "https://crates.io/api/v1/crates" + + +# Where to clone the crates.io-index repository from. 
+source_index = "https://github.com/rust-lang/crates.io-index" + + +# URL where this mirror's crates directory can be accessed from. +# Used for rewriting crates.io-index's config.json. +# Remove this parameter to perform no rewriting. +# If removed, the `panamax rewrite` command can be used later. +base_url = "http://panamax:8080/crates" diff --git a/gateway/Cargo.toml b/gateway/Cargo.toml index 034d66282..932b2e7a8 100644 --- a/gateway/Cargo.toml +++ b/gateway/Cargo.toml @@ -1,45 +1,59 @@ [package] name = "shuttle-gateway" -version = "0.7.0" -edition = "2021" +version.workspace = true +edition.workspace = true +license.workspace = true publish = false [dependencies] -async-trait = "0.1.52" -axum = { version = "0.5.8", features = [ "headers" ] } -base64 = "0.13" -bollard = "0.13" -chrono = "0.4" -clap = { version = "4.0.0", features = [ "derive" ] } -fqdn = "0.2.2" -futures = "0.3.21" +acme2 = "0.5.1" +async-trait = { workspace = true } +axum = { workspace = true, features = [ "headers" ] } +axum-server = { version = "0.4.4", features = [ "tls-rustls" ] } +base64 = "0.13.1" +bollard = "0.13.0" +chrono = { workspace = true } +clap = { version = "4.0.27", features = [ "derive" ] } +fqdn = "0.2.3" +futures = "0.3.25" http = "0.2.8" -hyper = { version = "0.14.19", features = [ "stream" ] } +hyper = { version = "0.14.23", features = [ "stream" ] } # not great, but waiting for WebSocket changes to be merged hyper-reverse-proxy = { git = "https://github.com/chesedo/hyper-reverse-proxy", branch = "bug/host_header" } -once_cell = "1.14.0" -opentelemetry = { version = "0.17.0", features = ["rt-tokio"] } -opentelemetry-datadog = { version = "0.5.0", features = ["reqwest-client"] } +instant-acme = "0.1.1" +lazy_static = "1.4.0" +num_cpus = "1.14.0" +once_cell = { workspace = true } +opentelemetry = { version = "0.18.0", features = ["rt-tokio"] } +opentelemetry-datadog = { version = "0.6.0", features = ["reqwest-client"] } +opentelemetry-http = "0.7.0" +pem = "1.1.0" rand = "0.8.5" -regex = "1.5.5" -serde = { version = "1.0.137", features = [ "derive" ] } -serde_json = "1.0.81" -sqlx = { version = "0.5.11", features = [ "sqlite", "json", "runtime-tokio-rustls", "migrate" ] } -tokio = { version = "1.17", features = [ "full" ] } +rcgen = "0.10.0" +rustls = "0.20.7" +rustls-pemfile = "1.0.1" +serde = { workspace = true, features = [ "derive" ] } +serde_json = { workspace = true } +sqlx = { version = "0.6.2", features = [ "sqlite", "json", "runtime-tokio-native-tls", "migrate" ] } +strum = { version = "0.24.1", features = ["derive"] } +tokio = { version = "1.22.0", features = [ "full" ] } tower = { version = "0.4.13", features = [ "steer" ] } tower-http = { version = "0.3.4", features = ["trace"] } -tracing = "0.1.35" -tracing-opentelemetry = "0.17.4" -tracing-subscriber = { version = "0.3.11", features = ["env-filter"] } +tracing = { workspace = true } +tracing-opentelemetry = "0.18.0" +tracing-subscriber = { workspace = true, features = ["env-filter"] } +ttl_cache = "0.5.1" +uuid = { workspace = true, features = [ "v4" ] } [dependencies.shuttle-common] -version = "0.7.0" -path = "../common" +workspace = true +features = ["backend", "models"] [dev-dependencies] -anyhow = "1" -base64 = "0.13" -colored = "2" -portpicker = "0.1" -snailquote = "0.3" +anyhow = { workspace = true } +base64 = "0.13.1" +colored = "2.0.0" +portpicker = "0.1.1" +snailquote = "0.3.1" tempfile = "3.3.0" + diff --git a/gateway/README.md b/gateway/README.md new file mode 100644 index 000000000..8450081ab --- /dev/null 
+++ b/gateway/README.md @@ -0,0 +1,8 @@ +# shuttle-gateway + +## Tests +To run the tests for gateway, follow the steps in [contributing](https://github.com/shuttle-hq/shuttle/blob/main/CONTRIBUTING.md) to set up your local environment. Then, from the root of the repository, run: + +```bash +SHUTTLE_TESTS_RUNTIME_IMAGE=public.ecr.aws/shuttle-dev/deployer:latest SHUTTLE_TESTS_NETWORK=shuttle-dev_user-net cargo test --package shuttle-gateway --all-features -- --nocapture +``` diff --git a/gateway/migrations/0001_add_top_level_account_perms.sql b/gateway/migrations/0001_add_top_level_account_perms.sql new file mode 100644 index 000000000..15731aed2 --- /dev/null +++ b/gateway/migrations/0001_add_top_level_account_perms.sql @@ -0,0 +1 @@ +ALTER TABLE accounts ADD account_tier TEXT DEFAULT "basic" NOT NULL; diff --git a/gateway/migrations/0002_custom_domains.sql b/gateway/migrations/0002_custom_domains.sql new file mode 100644 index 000000000..d4178810d --- /dev/null +++ b/gateway/migrations/0002_custom_domains.sql @@ -0,0 +1,6 @@ +CREATE TABLE IF NOT EXISTS custom_domains ( + fqdn TEXT PRIMARY KEY, + project_name TEXT NOT NULL REFERENCES projects (project_name), + certificate TEXT NOT NULL, + private_key TEXT NOT NULL +); diff --git a/gateway/prepare.sh b/gateway/prepare.sh new file mode 100755 index 000000000..6a52d3030 --- /dev/null +++ b/gateway/prepare.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +############################################################################### +# This file is used by our common Containerfile incase the container for this # +# service might need some extra preparation steps for its final image # +############################################################################### + +# Nothing to prepare in container image here diff --git a/gateway/src/acme.rs b/gateway/src/acme.rs new file mode 100644 index 000000000..4f448df1e --- /dev/null +++ b/gateway/src/acme.rs @@ -0,0 +1,402 @@ +use std::collections::HashMap; +use std::sync::Arc; +use std::task::{Context, Poll}; +use std::time::Duration; + +use axum::body::boxed; +use axum::response::Response; +use fqdn::FQDN; +use futures::future::BoxFuture; +use hyper::server::conn::AddrStream; +use hyper::{Body, Request}; +use instant_acme::{ + Account, AccountCredentials, Authorization, AuthorizationStatus, Challenge, ChallengeType, + Identifier, KeyAuthorization, LetsEncrypt, NewAccount, NewOrder, Order, OrderStatus, +}; +use rcgen::{Certificate, CertificateParams, DistinguishedName}; +use tokio::sync::Mutex; +use tokio::time::sleep; +use tower::{Layer, Service}; +use tracing::{error, trace, warn}; + +use crate::proxy::AsResponderTo; +use crate::{Error, ProjectName}; + +const MAX_RETRIES: usize = 15; + +#[derive(Debug, Eq, PartialEq)] +pub struct CustomDomain { + pub fqdn: FQDN, + pub project_name: ProjectName, + pub certificate: String, + pub private_key: String, +} + +/// An ACME client implementation that completes Http01 challenges +/// It is safe to clone this type as it functions as a singleton +#[derive(Clone, Default)] +pub struct AcmeClient(Arc>>); + +impl AcmeClient { + pub fn new() -> Self { + Self(Arc::new(Mutex::new(HashMap::default()))) + } + + async fn add_http01_challenge_authorization(&self, token: String, key: KeyAuthorization) { + trace!(token, "saving acme http01 challenge"); + self.0.lock().await.insert(token, key); + } + + async fn get_http01_challenge_authorization(&self, token: &str) -> Option { + self.0 + .lock() + .await + .get(token) + .map(|key| key.as_str().to_owned()) + } + + async fn 
remove_http01_challenge_authorization(&self, token: &str) { + trace!(token, "removing acme http01 challenge"); + self.0.lock().await.remove(token); + } + + /// Create a new ACME account that can be restored using by deserializing the returned JSON into a [instant_acme::AccountCredentials] + pub async fn create_account( + &self, + email: &str, + acme_server: Option, + ) -> Result { + let acme_server = acme_server.unwrap_or_else(|| LetsEncrypt::Production.url().to_string()); + + trace!(email, acme_server, "creating acme account"); + + let account: NewAccount = NewAccount { + contact: &[&format!("mailto:{email}")], + terms_of_service_agreed: true, + only_return_existing: false, + }; + + let account = Account::create(&account, &acme_server) + .await + .map_err(|error| { + error!(%error, "got error while creating acme account"); + AcmeClientError::AccountCreation + })?; + + let credentials = serde_json::to_value(account.credentials()).map_err(|error| { + error!(%error, "got error while extracting credentials from acme account"); + AcmeClientError::Serializing + })?; + + Ok(credentials) + } + + /// Create an ACME-signed certificate and return it and its + /// associated PEM-encoded private key + pub async fn create_certificate( + &self, + identifier: &str, + challenge_type: ChallengeType, + credentials: AccountCredentials<'_>, + ) -> Result<(String, String), AcmeClientError> { + trace!(identifier, "requesting acme certificate"); + + let account = Account::from_credentials(credentials).map_err(|error| { + error!( + error = &error as &dyn std::error::Error, + "failed to convert acme credentials into account" + ); + AcmeClientError::AccountCreation + })?; + + let (mut order, state) = account + .new_order(&NewOrder { + identifiers: &[Identifier::Dns(identifier.to_string())], + }) + .await + .map_err(|error| { + error!(%error, "failed to order certificate"); + AcmeClientError::OrderCreation + })?; + + let authorizations = + order + .authorizations(&state.authorizations) + .await + .map_err(|error| { + error!(%error, "failed to get authorizations information"); + AcmeClientError::AuthorizationCreation + })?; + + // There should only ever be 1 authorization as we only provide 1 domain at a time + debug_assert!(authorizations.len() == 1); + let authorization = &authorizations[0]; + + trace!(?authorization, "got authorization"); + + self.complete_challenge(challenge_type, authorization, &mut order) + .await?; + + let certificate = { + let mut params = CertificateParams::new(vec![identifier.to_owned()]); + params.distinguished_name = DistinguishedName::new(); + Certificate::from_params(params).map_err(|error| { + error!(%error, "failed to create certificate"); + AcmeClientError::CertificateCreation + })? 
+ }; + let signing_request = certificate.serialize_request_der().map_err(|error| { + error!(%error, "failed to create certificate signing request"); + AcmeClientError::CertificateSigning + })?; + + let certificate_chain = order + .finalize(&signing_request, &state.finalize) + .await + .map_err(|error| { + error!(%error, "failed to finalize certificate request"); + AcmeClientError::OrderFinalizing + })?; + + Ok((certificate_chain, certificate.serialize_private_key_pem())) + } + + fn find_challenge( + ty: ChallengeType, + authorization: &Authorization, + ) -> Result<&Challenge, AcmeClientError> { + authorization + .challenges + .iter() + .find(|c| c.r#type == ty) + .ok_or_else(|| { + error!("http-01 challenge not found"); + AcmeClientError::MissingChallenge + }) + } + + async fn wait_for_termination(&self, order: &mut Order) -> Result<(), AcmeClientError> { + // Exponential backoff until order changes status + let mut tries = 1; + let mut delay = Duration::from_millis(250); + let state = loop { + sleep(delay).await; + let state = order.state().await.map_err(|error| { + error!(%error, "got error while fetching state"); + AcmeClientError::FetchingState + })?; + + trace!(?state, "order state refreshed"); + match state.status { + OrderStatus::Ready => break state, + OrderStatus::Invalid => { + return Err(AcmeClientError::ChallengeInvalid); + } + OrderStatus::Pending => { + delay *= 2; + tries += 1; + if tries < MAX_RETRIES { + trace!(?state, tries, attempt_in=?delay, "order not yet ready"); + } else { + error!(?state, tries, "order not ready in {MAX_RETRIES} tries"); + return Err(AcmeClientError::ChallengeTimeout); + } + } + _ => unreachable!(), + } + }; + + trace!(?state, "challenge completed"); + + Ok(()) + } + + async fn complete_challenge( + &self, + ty: ChallengeType, + authorization: &Authorization, + order: &mut Order, + ) -> Result<(), AcmeClientError> { + // Don't complete challenge for orders that are already valid + if let AuthorizationStatus::Valid = authorization.status { + return Ok(()); + } + let challenge = Self::find_challenge(ty, authorization)?; + match ty { + ChallengeType::Http01 => self.complete_http01_challenge(challenge, order).await, + ChallengeType::Dns01 => { + self.complete_dns01_challenge(&authorization.identifier, challenge, order) + .await + } + _ => Err(AcmeClientError::ChallengeNotSupported), + } + } + + async fn complete_dns01_challenge( + &self, + identifier: &Identifier, + challenge: &Challenge, + order: &mut Order, + ) -> Result<(), AcmeClientError> { + let Identifier::Dns(domain) = identifier; + + let digest = order.key_authorization(challenge).dns_value(); + warn!("dns-01 challenge: _acme-challenge.{domain} 300 IN TXT \"{digest}\""); + + // Wait 60 secs to insert the record manually and for it to + // propagate before moving on + sleep(Duration::from_secs(60)).await; + + order + .set_challenge_ready(&challenge.url) + .await + .map_err(|error| { + error!(%error, "failed to mark challenge as ready"); + AcmeClientError::SetReadyFailed + })?; + + self.wait_for_termination(order).await + } + + async fn complete_http01_challenge( + &self, + challenge: &Challenge, + order: &mut Order, + ) -> Result<(), AcmeClientError> { + trace!(?challenge, "will complete challenge"); + + self.add_http01_challenge_authorization( + challenge.token.clone(), + order.key_authorization(challenge), + ) + .await; + + order + .set_challenge_ready(&challenge.url) + .await + .map_err(|error| { + error!(%error, "failed to mark challenge as ready"); + AcmeClientError::SetReadyFailed + })?; + 
+ let res = self.wait_for_termination(order).await; + + self.remove_http01_challenge_authorization(&challenge.token) + .await; + + res + } +} + +#[derive(Debug, strum::Display)] +pub enum AcmeClientError { + AccountCreation, + AuthorizationCreation, + CertificateCreation, + CertificateSigning, + ChallengeInvalid, + ChallengeTimeout, + FetchingState, + OrderCreation, + OrderFinalizing, + MissingChallenge, + ChallengeNotSupported, + Serializing, + SetReadyFailed, +} + +impl std::error::Error for AcmeClientError {} + +pub struct ChallengeResponderLayer { + client: AcmeClient, +} + +impl ChallengeResponderLayer { + pub fn new(client: AcmeClient) -> Self { + Self { client } + } +} + +impl Layer for ChallengeResponderLayer { + type Service = ChallengeResponder; + + fn layer(&self, inner: S) -> Self::Service { + ChallengeResponder { + client: self.client.clone(), + inner, + } + } +} + +pub struct ChallengeResponder { + client: AcmeClient, + inner: S, +} + +impl<'r, S> AsResponderTo<&'r AddrStream> for ChallengeResponder +where + S: AsResponderTo<&'r AddrStream>, +{ + fn as_responder_to(&self, req: &'r AddrStream) -> Self { + Self { + client: self.client.clone(), + inner: self.inner.as_responder_to(req), + } + } +} + +impl Service> for ChallengeResponder +where + S: Service, Response = Response, Error = Error> + Send + 'static, + S::Future: Send + 'static, +{ + type Response = S::Response; + type Error = S::Error; + type Future = BoxFuture<'static, Result>; + + fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { + self.inner.poll_ready(cx) + } + + fn call(&mut self, req: Request) -> Self::Future { + if !req.uri().path().starts_with("/.well-known/acme-challenge/") { + let future = self.inner.call(req); + return Box::pin(async move { + let response: Response = future.await?; + Ok(response) + }); + } + + let token = match req + .uri() + .path() + .strip_prefix("/.well-known/acme-challenge/") + { + Some(token) => token.to_string(), + None => { + return Box::pin(async { + Ok(Response::builder() + .status(404) + .body(boxed(Body::empty())) + .unwrap()) + }) + } + }; + + trace!(token, "responding to certificate challenge"); + + let client = self.client.clone(); + + Box::pin(async move { + let (status, body) = match client.get_http01_challenge_authorization(&token).await { + Some(key) => (200, Body::from(key)), + None => (404, Body::empty()), + }; + + Ok(Response::builder() + .status(status) + .body(boxed(body)) + .unwrap()) + }) + } +} diff --git a/gateway/src/api/latest.rs b/gateway/src/api/latest.rs index 2d760368d..9b7093f1a 100644 --- a/gateway/src/api/latest.rs +++ b/gateway/src/api/latest.rs @@ -1,27 +1,45 @@ +use std::io::Cursor; +use std::net::SocketAddr; use std::sync::Arc; use std::time::Duration; use axum::body::{Body, BoxBody}; -use axum::extract::{Extension, Path}; +use axum::extract::{Extension, MatchedPath, Path, State}; use axum::http::Request; +use axum::middleware::from_extractor; use axum::response::Response; -use axum::routing::{any, get}; +use axum::routing::{any, get, post}; use axum::{Json as AxumJson, Router}; +use fqdn::FQDN; +use futures::Future; use http::StatusCode; +use instant_acme::{AccountCredentials, ChallengeType}; use serde::{Deserialize, Serialize}; -use shuttle_common::models::{project, user}; +use shuttle_common::backends::metrics::Metrics; +use shuttle_common::models::error::ErrorKind; +use shuttle_common::models::{project, stats, user}; use tokio::sync::mpsc::Sender; +use tokio::sync::{Mutex, MutexGuard}; use tower_http::trace::TraceLayer; -use 
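The ChallengeResponder service above only intercepts requests under /.well-known/acme-challenge/ and answers them from the client's stored key authorizations; everything else falls through to the inner service. A rough sketch of that token lookup, using a plain HashMap as a stand-in for the client's internal store (names here are illustrative, not the gateway's API):

use std::collections::HashMap;

// `store` is a stand-in for the ACME client's token -> key-authorization map.
// Returns the status code and body the responder would produce for `path`.
fn challenge_response(path: &str, store: &HashMap<String, String>) -> (u16, String) {
    match path.strip_prefix("/.well-known/acme-challenge/") {
        Some(token) => match store.get(token) {
            // Known token: answer 200 with the key authorization as the body.
            Some(key_auth) => (200, key_auth.clone()),
            None => (404, String::new()),
        },
        // Any other path would be forwarded to the wrapped service instead.
        None => (404, String::new()),
    }
}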
tracing::{debug, debug_span, field, Span}; +use tracing::{debug, debug_span, field, instrument, Span}; +use ttl_cache::TtlCache; +use uuid::Uuid; +use crate::acme::{AcmeClient, CustomDomain}; use crate::auth::{Admin, ScopedUser, User}; -use crate::worker::Work; +use crate::project::{Project, ProjectCreating}; +use crate::task::{self, BoxedTask, TaskResult}; +use crate::tls::GatewayCertResolver; +use crate::worker::WORKER_QUEUE_SIZE; use crate::{AccountName, Error, GatewayService, ProjectName}; +pub const SVC_DEGRADED_THRESHOLD: usize = 128; + #[derive(Serialize, Deserialize)] #[serde(rename_all = "lowercase")] pub enum GatewayStatus { Healthy, + Degraded, Unhealthy, } @@ -37,6 +55,12 @@ impl StatusResponse { } } + pub fn degraded() -> Self { + Self { + status: GatewayStatus::Degraded, + } + } + pub fn unhealthy() -> Self { Self { status: GatewayStatus::Unhealthy, @@ -44,18 +68,20 @@ impl StatusResponse { } } +#[instrument(skip_all, fields(%account_name))] async fn get_user( - Extension(service): Extension>, + State(RouterState { service, .. }): State, Path(account_name): Path, _: Admin, ) -> Result, Error> { - let user = service.user_from_account_name(account_name).await?; + let user = User::retrieve_from_account_name(&service, account_name).await?; Ok(AxumJson(user.into())) } +#[instrument(skip_all, fields(%account_name))] async fn post_user( - Extension(service): Extension>, + State(RouterState { service, .. }): State, Path(account_name): Path, _: Admin, ) -> Result, Error> { @@ -64,8 +90,9 @@ async fn post_user( Ok(AxumJson(user.into())) } +#[instrument(skip(service))] async fn get_project( - Extension(service): Extension>, + State(RouterState { service, .. }): State, ScopedUser { scope, .. }: ScopedUser, ) -> Result, Error> { let state = service.find_project(&scope).await?.into(); @@ -77,60 +104,82 @@ async fn get_project( Ok(AxumJson(response)) } +#[instrument(skip_all, fields(%project))] async fn post_project( - Extension(service): Extension>, - Extension(sender): Extension>, + State(RouterState { + service, sender, .. + }): State, User { name, .. }: User, Path(project): Path, ) -> Result, Error> { - let work = service.create_project(project.clone(), name).await?; - - let name = work.project_name.to_string(); - let state = work.work.clone().into(); + let state = service + .create_project(project.clone(), name.clone()) + .await?; - sender.send(work).await?; + service + .new_task() + .project(project.clone()) + .send(&sender) + .await?; - let response = project::Response { name, state }; + let response = project::Response { + name: project.to_string(), + state: state.into(), + }; Ok(AxumJson(response)) } +#[instrument(skip_all, fields(%project))] async fn delete_project( - Extension(service): Extension>, - Extension(sender): Extension>, - ScopedUser { - scope: _, - user: User { name, .. }, - }: ScopedUser, - Path(project): Path, + State(RouterState { + service, sender, .. + }): State, + ScopedUser { scope: project, .. 
}: ScopedUser, ) -> Result, Error> { - let work = service.destroy_project(project, name).await?; + let state = service.find_project(&project).await?; + + let mut response = project::Response { + name: project.to_string(), + state: state.into(), + }; + + if response.state == shuttle_common::models::project::State::Destroyed { + return Ok(AxumJson(response)); + } - let name = work.project_name.to_string(); - let state = work.work.clone().into(); + // if project exists and isn't `Destroyed`, send destroy task + service + .new_task() + .project(project) + .and_then(task::destroy()) + .send(&sender) + .await?; - sender.send(work).await?; + response.state = shuttle_common::models::project::State::Destroying; - let response = project::Response { name, state }; Ok(AxumJson(response)) } +#[instrument(skip_all, fields(scope = %scoped_user.scope))] async fn route_project( - Extension(service): Extension>, - ScopedUser { scope, .. }: ScopedUser, + State(RouterState { service, .. }): State, + scoped_user: ScopedUser, req: Request, ) -> Result, Error> { - service.route(&scope, req).await + service.route(&scoped_user, req).await } -async fn get_status(Extension(sender): Extension>) -> Response { - let (status, body) = if !sender.is_closed() && sender.capacity() > 0 { - (StatusCode::OK, StatusResponse::healthy()) - } else { +async fn get_status(State(RouterState { sender, .. }): State) -> Response { + let (status, body) = if sender.is_closed() || sender.capacity() == 0 { ( StatusCode::INTERNAL_SERVER_ERROR, StatusResponse::unhealthy(), ) + } else if sender.capacity() < WORKER_QUEUE_SIZE - SVC_DEGRADED_THRESHOLD { + (StatusCode::OK, StatusResponse::degraded()) + } else { + (StatusCode::OK, StatusResponse::healthy()) }; let body = serde_json::to_vec(&body).unwrap(); @@ -140,33 +189,314 @@ async fn get_status(Extension(sender): Extension>) -> Response, sender: Sender) -> Router { - debug!("making api route"); - Router::::new() - .route( - "/", - get(get_status) - ) - .route( - "/projects/:project", - get(get_project).delete(delete_project).post(post_project) - ) - .route("/users/:account_name", get(get_user).post(post_user)) - .route("/projects/:project/*any", any(route_project)) - .layer(Extension(service)) - .layer(Extension(sender)) - .layer( +#[instrument(skip_all)] +async fn post_load( + State(RouterState { running_builds, .. }): State, + AxumJson(build): AxumJson, +) -> Result, Error> { + let mut running_builds = running_builds.lock().await; + let mut load = calculate_capacity(&mut running_builds); + + if load.has_capacity + && running_builds + .insert(build.id, (), Duration::from_secs(60 * 10)) + .is_none() + { + // Only increase when an item was not already in the queue + load.builds_count += 1; + } + + Ok(AxumJson(load)) +} + +#[instrument(skip_all)] +async fn delete_load( + State(RouterState { running_builds, .. }): State, + AxumJson(build): AxumJson, +) -> Result, Error> { + let mut running_builds = running_builds.lock().await; + running_builds.remove(&build.id); + + let load = calculate_capacity(&mut running_builds); + + Ok(AxumJson(load)) +} + +#[instrument(skip_all)] +async fn get_load_admin( + _: Admin, + State(RouterState { running_builds, .. }): State, +) -> Result, Error> { + let mut running_builds = running_builds.lock().await; + + let load = calculate_capacity(&mut running_builds); + + Ok(AxumJson(load)) +} + +#[instrument(skip_all)] +async fn delete_load_admin( + _: Admin, + State(RouterState { running_builds, .. 
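get_status above derives the gateway's health from the task channel: a closed or full channel is unhealthy, a channel whose free capacity has dropped below WORKER_QUEUE_SIZE - SVC_DEGRADED_THRESHOLD is degraded, and anything else is healthy. A pure-function sketch of that decision, with the queue size and threshold passed in as parameters (their concrete values live elsewhere in the crate) and assuming, as the source does, that the threshold is smaller than the queue size:

#[derive(Debug, PartialEq, Eq)]
enum Health {
    Healthy,
    Degraded,
    Unhealthy,
}

// `queue_size` and `degraded_threshold` stand in for WORKER_QUEUE_SIZE and
// SVC_DEGRADED_THRESHOLD; the branch order mirrors `get_status`.
fn classify(closed: bool, capacity: usize, queue_size: usize, degraded_threshold: usize) -> Health {
    if closed || capacity == 0 {
        Health::Unhealthy
    } else if capacity < queue_size - degraded_threshold {
        Health::Degraded
    } else {
        Health::Healthy
    }
}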
}): State, +) -> Result, Error> { + let mut running_builds = running_builds.lock().await; + running_builds.clear(); + + let load = calculate_capacity(&mut running_builds); + + Ok(AxumJson(load)) +} + +fn calculate_capacity(running_builds: &mut MutexGuard>) -> stats::LoadResponse { + let active = running_builds.iter().count(); + let capacity = running_builds.capacity(); + let has_capacity = active < capacity; + + stats::LoadResponse { + builds_count: active, + has_capacity, + } +} + +#[instrument(skip_all)] +async fn revive_projects( + _: Admin, + State(RouterState { + service, sender, .. + }): State, +) -> Result<(), Error> { + crate::project::exec::revive(service, sender) + .await + .map_err(|_| Error::from_kind(ErrorKind::Internal)) +} + +#[instrument(skip_all, fields(%email, ?acme_server))] +async fn create_acme_account( + _: Admin, + Extension(acme_client): Extension, + Path(email): Path, + AxumJson(acme_server): AxumJson>, +) -> Result, Error> { + let res = acme_client.create_account(&email, acme_server).await?; + + Ok(AxumJson(res)) +} + +#[instrument(skip_all, fields(%project_name, %fqdn))] +async fn request_acme_certificate( + _: Admin, + State(RouterState { + service, sender, .. + }): State, + Extension(acme_client): Extension, + Extension(resolver): Extension>, + Path((project_name, fqdn)): Path<(ProjectName, String)>, + AxumJson(credentials): AxumJson>, +) -> Result { + let fqdn: FQDN = fqdn + .parse() + .map_err(|_err| Error::from(ErrorKind::InvalidCustomDomain))?; + + let (certs, private_key) = match service.project_details_for_custom_domain(&fqdn).await { + Ok(CustomDomain { + certificate, + private_key, + .. + }) => (certificate, private_key), + Err(err) if err.kind() == ErrorKind::CustomDomainNotFound => { + let (certs, private_key) = acme_client + .create_certificate(&fqdn.to_string(), ChallengeType::Http01, credentials) + .await?; + service + .create_custom_domain(project_name.clone(), &fqdn, &certs, &private_key) + .await?; + (certs, private_key) + } + Err(err) => return Err(err), + }; + + // destroy and recreate the project with the new domain + service + .new_task() + .project(project_name) + .and_then(task::destroy()) + .and_then(task::run_until_done()) + .and_then(task::run({ + let fqdn = fqdn.to_string(); + move |ctx| { + let fqdn = fqdn.clone(); + async move { + let creating = ProjectCreating::new_with_random_initial_key(ctx.project_name) + .with_fqdn(fqdn); + TaskResult::Done(Project::Creating(creating)) + } + } + })) + .send(&sender) + .await?; + + let mut buf = Vec::new(); + buf.extend(certs.as_bytes()); + buf.extend(private_key.as_bytes()); + resolver + .serve_pem(&fqdn.to_string(), Cursor::new(buf)) + .await?; + + Ok("certificate created".to_string()) +} + +async fn get_projects( + _: Admin, + State(RouterState { service, .. }): State, +) -> Result>, Error> { + let projects = service + .iter_projects_detailed() + .await? 
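calculate_capacity above just counts live entries in the TTL cache of running builds and compares that against the cache's capacity. A small std-only sketch of the same bookkeeping, using a HashMap of expiry instants in place of the ttl_cache crate; the RunningBuilds type and u128 build ids are stand-ins:

use std::collections::HashMap;
use std::time::{Duration, Instant};

// Stand-in for the TtlCache of running builds: build id -> expiry instant.
struct RunningBuilds {
    deadlines: HashMap<u128, Instant>,
    capacity: usize,
}

impl RunningBuilds {
    // Record a build with a time-to-live, like `insert(id, (), ttl)` above.
    fn start(&mut self, id: u128, ttl: Duration) {
        self.deadlines.insert(id, Instant::now() + ttl);
    }

    // Count live builds and report whether there is room for one more.
    fn load(&mut self) -> (usize, bool) {
        let now = Instant::now();
        self.deadlines.retain(|_, deadline| *deadline > now);

        let active = self.deadlines.len();
        (active, active < self.capacity)
    }
}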
+ .into_iter() + .map(Into::into) + .collect(); + + Ok(AxumJson(projects)) +} + +#[derive(Clone)] +pub(crate) struct RouterState { + pub service: Arc, + pub sender: Sender, + pub running_builds: Arc>>, +} + +pub struct ApiBuilder { + router: Router, + service: Option>, + sender: Option>, + bind: Option, +} + +impl Default for ApiBuilder { + fn default() -> Self { + Self::new() + } +} + +impl ApiBuilder { + pub fn new() -> Self { + Self { + router: Router::new(), + service: None, + sender: None, + bind: None, + } + } + + pub fn with_acme(mut self, acme: AcmeClient, resolver: Arc) -> Self { + self.router = self + .router + .route("/admin/acme/:email", post(create_acme_account)) + .route( + "/admin/acme/request/:project_name/:fqdn", + post(request_acme_certificate), + ) + .layer(Extension(acme)) + .layer(Extension(resolver)); + self + } + + pub fn with_service(mut self, service: Arc) -> Self { + self.service = Some(service); + self + } + + pub fn with_sender(mut self, sender: Sender) -> Self { + self.sender = Some(sender); + self + } + + pub fn binding_to(mut self, addr: SocketAddr) -> Self { + self.bind = Some(addr); + self + } + + pub fn with_default_traces(mut self) -> Self { + self.router = self.router.route_layer(from_extractor::()).layer( TraceLayer::new_for_http() .make_span_with(|request: &Request| { - debug_span!("request", http.uri = %request.uri(), http.method = %request.method(), http.status_code = field::Empty, api_key = field::Empty) + let path = if let Some(path) = request.extensions().get::() { + path.as_str() + } else { + "" + }; + + debug_span!( + "request", + http.uri = %request.uri(), + http.method = %request.method(), + http.status_code = field::Empty, + account.name = field::Empty, + // A bunch of extra things for metrics + // Should be able to make this clearer once `Valuable` support lands in tracing + request.path = path, + request.params.project_name = field::Empty, + request.params.account_name = field::Empty, + ) }) .on_response( |response: &Response, latency: Duration, span: &Span| { span.record("http.status_code", response.status().as_u16()); - debug!(latency = format_args!("{} ns", latency.as_nanos()), "finished processing request"); + debug!( + latency = format_args!("{} ns", latency.as_nanos()), + "finished processing request" + ); }, ), - ) + ); + self + } + + pub fn with_default_routes(mut self) -> Self { + self.router = self + .router + .route("/", get(get_status)) + .route( + "/projects/:project_name", + get(get_project).delete(delete_project).post(post_project), + ) + .route("/users/:account_name", get(get_user).post(post_user)) + .route("/projects/:project_name/*any", any(route_project)) + .route("/stats/load", post(post_load).delete(delete_load)) + .route("/admin/projects", get(get_projects)) + .route("/admin/revive", post(revive_projects)) + .route( + "/admin/stats/load", + get(get_load_admin).delete(delete_load_admin), + ); + self + } + + pub fn into_router(self) -> Router { + let service = self.service.expect("a GatewayService is required"); + let sender = self.sender.expect("a task Sender is required"); + + // Allow about 4 cores per build + let mut concurrent_builds = num_cpus::get() / 4; + if concurrent_builds < 1 { + concurrent_builds = 1; + } + + let running_builds = Arc::new(Mutex::new(TtlCache::new(concurrent_builds))); + + self.router.with_state(RouterState { + service, + sender, + running_builds, + }) + } + + pub fn serve(self) -> impl Future> { + let bind = self.bind.expect("a socket address to bind to is required"); + let router = 
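The concurrency sizing in into_router above (roughly one concurrent build per four cores, with a floor of one) can also be written with max; a tiny equivalent sketch, still relying on the num_cpus crate the gateway already uses:

// Equivalent to the if-block above: one concurrent build per ~4 cores, floor of 1.
fn concurrent_builds() -> usize {
    (num_cpus::get() / 4).max(1)
}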
self.into_router(); + axum::Server::bind(&bind).serve(router.into_make_service()) + } } #[cfg(test)] @@ -185,22 +515,24 @@ pub mod tests { use super::*; use crate::service::GatewayService; use crate::tests::{RequestBuilderExt, World}; - use crate::worker::Work; #[tokio::test] async fn api_create_get_delete_projects() -> anyhow::Result<()> { let world = World::new().await; - let service = - Arc::new(GatewayService::init(world.args(), world.fqdn(), world.pool()).await); + let service = Arc::new(GatewayService::init(world.args(), world.pool()).await); - let (sender, mut receiver) = channel::(256); + let (sender, mut receiver) = channel::(256); tokio::spawn(async move { while receiver.recv().await.is_some() { // do not do any work with inbound requests } }); - let mut router = make_api(Arc::clone(&service), sender); + let mut router = ApiBuilder::new() + .with_service(Arc::clone(&service)) + .with_sender(sender) + .with_default_routes() + .into_router(); let neo = service.create_user("neo".parse().unwrap()).await?; @@ -320,23 +652,35 @@ pub mod tests { .await .unwrap(); + // delete returns 404 for project that doesn't exist + router + .call(delete_project("resurrections").with_header(&authorization)) + .map_ok(|resp| { + assert_eq!(resp.status(), StatusCode::NOT_FOUND); + }) + .await + .unwrap(); + Ok(()) } #[tokio::test] async fn api_create_get_users() -> anyhow::Result<()> { let world = World::new().await; - let service = - Arc::new(GatewayService::init(world.args(), world.fqdn(), world.pool()).await); + let service = Arc::new(GatewayService::init(world.args(), world.pool()).await); - let (sender, mut receiver) = channel::(256); + let (sender, mut receiver) = channel::(256); tokio::spawn(async move { while receiver.recv().await.is_some() { // do not do any work with inbound requests } }); - let mut router = make_api(Arc::clone(&service), sender); + let mut router = ApiBuilder::new() + .with_service(Arc::clone(&service)) + .with_sender(sender) + .with_default_routes() + .into_router(); let get_neo = || { Request::builder() @@ -416,10 +760,9 @@ pub mod tests { #[tokio::test(flavor = "multi_thread")] async fn status() { let world = World::new().await; - let service = - Arc::new(GatewayService::init(world.args(), world.fqdn(), world.pool()).await); + let service = Arc::new(GatewayService::init(world.args(), world.pool()).await); - let (sender, mut receiver) = channel::(1); + let (sender, mut receiver) = channel::(1); let (ctl_send, ctl_recv) = oneshot::channel(); let (done_send, done_recv) = oneshot::channel(); let worker = tokio::spawn(async move { @@ -433,7 +776,11 @@ pub mod tests { } }); - let mut router = make_api(Arc::clone(&service), sender.clone()); + let mut router = ApiBuilder::new() + .with_service(Arc::clone(&service)) + .with_sender(sender) + .with_default_routes() + .into_router(); let get_status = || { Request::builder() @@ -471,6 +818,7 @@ pub mod tests { assert_eq!(resp.status(), StatusCode::OK); worker.abort(); + let _ = worker.await; let resp = router.call(get_status()).await.unwrap(); assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR); diff --git a/gateway/src/api/mod.rs b/gateway/src/api/mod.rs index e7eb5861f..27f571e54 100644 --- a/gateway/src/api/mod.rs +++ b/gateway/src/api/mod.rs @@ -1,2 +1 @@ pub mod latest; -pub use latest::make_api; diff --git a/gateway/src/args.rs b/gateway/src/args.rs index 5f65a7480..fd720025b 100644 --- a/gateway/src/args.rs +++ b/gateway/src/args.rs @@ -1,20 +1,26 @@ -use std::net::SocketAddr; +use std::{net::SocketAddr, 
path::PathBuf}; -use clap::{Parser, Subcommand}; +use clap::{Parser, Subcommand, ValueEnum}; use fqdn::FQDN; use crate::auth::Key; #[derive(Parser, Debug)] pub struct Args { - /// Uri to the `.sqlite` file used to store state - #[arg(long, default_value = "./gateway.sqlite")] - pub state: String, + /// Where to store gateway state (such as sqlite state, and certs) + #[arg(long, default_value = "./")] + pub state: PathBuf, #[command(subcommand)] pub command: Commands, } +#[derive(Debug, Clone, Copy, ValueEnum)] +pub enum UseTls { + Disable, + Enable, +} + #[derive(Subcommand, Debug)] pub enum Commands { Start(StartArgs), @@ -26,9 +32,31 @@ pub struct StartArgs { /// Address to bind the control plane to #[arg(long, default_value = "127.0.0.1:8001")] pub control: SocketAddr, - /// Address to bind the user plane to + /// Address to bind the bouncer service to + #[arg(long, default_value = "127.0.0.1:7999")] + pub bouncer: SocketAddr, + /// Address to bind the user proxy to #[arg(long, default_value = "127.0.0.1:8000")] pub user: SocketAddr, + /// Allows to disable the use of TLS in the user proxy service (DANGEROUS) + #[arg(long, default_value = "enable")] + pub use_tls: UseTls, + #[command(flatten)] + pub context: ContextArgs, +} + +#[derive(clap::Args, Debug, Clone)] +pub struct InitArgs { + /// Name of initial account to create + #[arg(long)] + pub name: String, + /// Key to assign to initial account + #[arg(long)] + pub key: Option, +} + +#[derive(clap::Args, Debug, Clone)] +pub struct ContextArgs { /// Default image to deploy user runtimes into #[arg(long, default_value = "public.ecr.aws/shuttle/deployer:latest")] pub image: String, @@ -40,23 +68,13 @@ pub struct StartArgs { /// the provisioner service #[arg(long, default_value = "provisioner")] pub provisioner_host: String, - /// The path to the docker daemon socket - #[arg(long, default_value = "/var/run/docker.sock")] - pub docker_host: String, /// The Docker Network name in which to deploy user runtimes #[arg(long, default_value = "shuttle_default")] pub network_name: String, /// FQDN where the proxy can be reached at - #[arg(long)] + #[arg(long, default_value = "shuttleapp.rs")] pub proxy_fqdn: FQDN, -} - -#[derive(clap::Args, Debug, Clone)] -pub struct InitArgs { - /// Name of initial account to create - #[arg(long)] - pub name: String, - /// Key to assign to initial account - #[arg(long)] - pub key: Option, + /// The path to the docker daemon socket + #[arg(long, default_value = "/var/run/docker.sock")] + pub docker_host: String, } diff --git a/gateway/src/auth.rs b/gateway/src/auth.rs index 5c66cceee..2e1b84b19 100644 --- a/gateway/src/auth.rs +++ b/gateway/src/auth.rs @@ -1,13 +1,15 @@ -use std::fmt::Formatter; +use std::fmt::{Debug, Formatter}; use std::str::FromStr; -use std::sync::Arc; -use axum::extract::{Extension, FromRequest, Path, RequestParts, TypedHeader}; +use axum::extract::{FromRef, FromRequestParts, Path, TypedHeader}; use axum::headers::authorization::Bearer; use axum::headers::Authorization; +use axum::http::request::Parts; use rand::distributions::{Alphanumeric, DistString}; use serde::{Deserialize, Serialize}; +use tracing::{trace, Span}; +use crate::api::latest::RouterState; use crate::service::GatewayService; use crate::{AccountName, Error, ErrorKind, ProjectName}; @@ -23,17 +25,21 @@ impl Key { } #[async_trait] -impl FromRequest for Key +impl FromRequestParts for Key where - B: Send, + S: Send + Sync, { type Rejection = Error; - async fn from_request(req: &mut RequestParts) -> Result { - 
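The UseTls flag above is a clap ValueEnum with a default of enable, so TLS stays on unless it is explicitly disabled on the command line. A self-contained sketch of how such a flag parses, using a hypothetical DemoArgs struct rather than the gateway's StartArgs:

use clap::{Parser, ValueEnum};

#[derive(Debug, Clone, Copy, ValueEnum)]
enum UseTls {
    Disable,
    Enable,
}

// DemoArgs is hypothetical; it only mirrors the shape of the flag above.
#[derive(Parser, Debug)]
struct DemoArgs {
    /// TLS stays enabled unless explicitly disabled (DANGEROUS outside tests)
    #[arg(long, default_value = "enable")]
    use_tls: UseTls,
}

fn main() {
    // The accepted values ("enable"/"disable") are derived from the variant names.
    let args = DemoArgs::parse_from(["demo", "--use-tls", "disable"]);
    println!("{args:?}");
}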
TypedHeader::>::from_request(req) + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + let key = TypedHeader::>::from_request_parts(parts, state) .await .map_err(|_| Error::from(ErrorKind::KeyMissing)) - .and_then(|TypedHeader(Authorization(bearer))| bearer.token().trim().parse()) + .and_then(|TypedHeader(Authorization(bearer))| bearer.token().trim().parse())?; + + trace!(%key, "got bearer key"); + + Ok(key) } } @@ -57,8 +63,6 @@ impl Key { } } -const FALSE: fn() -> bool = || false; - /// A wrapper for a guard that verifies an API key is associated with a /// valid user. /// @@ -70,28 +74,136 @@ pub struct User { pub name: AccountName, pub key: Key, pub projects: Vec, - #[serde(skip_serializing_if = "std::ops::Not::not")] - #[serde(default = "FALSE")] + pub permissions: Permissions, +} + +impl User { + pub fn is_super_user(&self) -> bool { + self.permissions.is_super_user() + } + + pub fn new_with_defaults(name: AccountName, key: Key) -> Self { + Self { + name, + key, + projects: Vec::new(), + permissions: Permissions::default(), + } + } + + pub async fn retrieve_from_account_name( + svc: &GatewayService, + name: AccountName, + ) -> Result { + let key = svc.key_from_account_name(&name).await?; + let permissions = svc.get_permissions(&name).await?; + let projects = svc.iter_user_projects(&name).await?.collect(); + Ok(User { + name, + key, + projects, + permissions, + }) + } + + pub async fn retrieve_from_key(svc: &GatewayService, key: Key) -> Result { + let name = svc.account_name_from_key(&key).await?; + trace!(%name, "got account name from key"); + + let permissions = svc.get_permissions(&name).await?; + let projects = svc.iter_user_projects(&name).await?.collect(); + Ok(User { + name, + key, + projects, + permissions, + }) + } +} + +#[derive(Clone, Copy, Deserialize, PartialEq, Eq, Serialize, Debug, sqlx::Type)] +#[sqlx(rename_all = "lowercase")] +pub enum AccountTier { + Basic, + Pro, + Team, +} + +#[derive(Default)] +pub struct PermissionsBuilder { + tier: Option, + super_user: Option, +} + +impl PermissionsBuilder { + pub fn super_user(mut self, is_super_user: bool) -> Self { + self.super_user = Some(is_super_user); + self + } + + pub fn tier(mut self, tier: AccountTier) -> Self { + self.tier = Some(tier); + self + } + + pub fn build(self) -> Permissions { + Permissions { + tier: self.tier.unwrap_or(AccountTier::Basic), + super_user: self.super_user.unwrap_or_default(), + } + } +} + +#[derive(Clone, Deserialize, PartialEq, Eq, Serialize, Debug)] +pub struct Permissions { + pub tier: AccountTier, pub super_user: bool, } +impl Default for Permissions { + fn default() -> Self { + Self { + tier: AccountTier::Basic, + super_user: false, + } + } +} + +impl Permissions { + pub fn builder() -> PermissionsBuilder { + PermissionsBuilder::default() + } + + pub fn tier(&self) -> &AccountTier { + &self.tier + } + + pub fn is_super_user(&self) -> bool { + self.super_user + } +} + #[async_trait] -impl FromRequest for User +impl FromRequestParts for User where - B: Send, + S: Send + Sync, + RouterState: FromRef, { type Rejection = Error; - async fn from_request(req: &mut RequestParts) -> Result { - let key = Key::from_request(req).await?; - let Extension(service) = Extension::>::from_request(req) - .await - .unwrap(); - let user = service - .user_from_key(key) + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + let key = Key::from_request_parts(parts, state).await?; + + let RouterState { service, .. 
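The PermissionsBuilder above keeps both fields optional and falls back to the Basic tier and a non-super-user in build(). A short usage sketch of that builder (illustrative only; it assumes the Permissions and AccountTier items defined above are in scope):

// Illustrative usage only; assumes the items above are in scope.
fn example_permissions() {
    let admin = Permissions::builder()
        .tier(AccountTier::Team)
        .super_user(true)
        .build();
    assert!(admin.is_super_user());
    assert_eq!(*admin.tier(), AccountTier::Team);

    // Unset fields fall back to the defaults chosen in build(): Basic, not super.
    let default_user = Permissions::builder().build();
    assert!(!default_user.is_super_user());
}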
} = RouterState::from_ref(state); + + let user = User::retrieve_from_key(&service, key) .await // Absord any error into `Unauthorized` .map_err(|e| Error::source(ErrorKind::Unauthorized, e))?; + + // Record current account name for tracing purposes + Span::current().record("account.name", &user.name.to_string()); + Ok(user) } } @@ -121,22 +233,25 @@ pub struct ScopedUser { } #[async_trait] -impl FromRequest for ScopedUser +impl FromRequestParts for ScopedUser where - B: Send, + S: Send + Sync, + RouterState: FromRef, { type Rejection = Error; - async fn from_request(req: &mut RequestParts) -> Result { - let user = User::from_request(req).await?; - let scope = match Path::::from_request(req).await { + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + let user = User::from_request_parts(parts, state).await?; + + let scope = match Path::::from_request_parts(parts, state).await { Ok(Path(p)) => p, - Err(_) => Path::<(ProjectName, String)>::from_request(req) + Err(_) => Path::<(ProjectName, String)>::from_request_parts(parts, state) .await .map(|Path((p, _))| p) .unwrap(), }; - if user.super_user || user.projects.contains(&scope) { + + if user.is_super_user() || user.projects.contains(&scope) { Ok(Self { user, scope }) } else { Err(Error::from(ErrorKind::ProjectNotFound)) @@ -149,15 +264,17 @@ pub struct Admin { } #[async_trait] -impl FromRequest for Admin +impl FromRequestParts for Admin where - B: Send, + S: Send + Sync, + RouterState: FromRef, { type Rejection = Error; - async fn from_request(req: &mut RequestParts) -> Result { - let user = User::from_request(req).await?; - if user.super_user { + async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { + let user = User::from_request_parts(parts, state).await?; + + if user.is_super_user() { Ok(Self { user }) } else { Err(Error::from(ErrorKind::Forbidden)) diff --git a/gateway/src/lib.rs b/gateway/src/lib.rs index d5d2980bd..09c60b79d 100644 --- a/gateway/src/lib.rs +++ b/gateway/src/lib.rs @@ -8,29 +8,29 @@ use std::io; use std::pin::Pin; use std::str::FromStr; +use acme::AcmeClientError; use axum::response::{IntoResponse, Response}; use axum::Json; use bollard::Docker; use futures::prelude::*; -use once_cell::sync::Lazy; -use regex::Regex; use serde::{Deserialize, Deserializer, Serialize}; use shuttle_common::models::error::{ApiError, ErrorKind}; use tokio::sync::mpsc::error::SendError; use tracing::error; +pub mod acme; pub mod api; pub mod args; pub mod auth; pub mod project; pub mod proxy; pub mod service; +pub mod task; +pub mod tls; pub mod worker; use crate::service::{ContainerSettings, GatewayService}; -static PROJECT_REGEX: Lazy = Lazy::new(|| Regex::new("^[a-zA-Z0-9\\-_]{3,64}$").unwrap()); - /// Server-side errors that do not have to do with the user runtime /// should be [`Error`]s. 
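The ScopedUser extractor above grants access when the caller is a super user or the requested project appears in their project list. The same check as a stand-alone predicate, with plain strings in place of ProjectName (local names only, not the gateway's API):

// Super users see every project; everyone else only projects they own.
fn can_access(is_super_user: bool, user_projects: &[String], scope: &str) -> bool {
    is_super_user || user_projects.iter().any(|p| p == scope)
}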
/// @@ -80,7 +80,19 @@ impl From for Error { impl From> for Error { fn from(_: SendError) -> Self { - Self::from(ErrorKind::NotReady) + Self::from(ErrorKind::ServiceUnavailable) + } +} + +impl From for Error { + fn from(_: io::Error) -> Self { + Self::from(ErrorKind::Internal) + } +} + +impl From for Error { + fn from(error: AcmeClientError) -> Self { + Self::source(ErrorKind::Internal, error) } } @@ -107,10 +119,33 @@ impl std::fmt::Display for Error { impl StdError for Error {} -#[derive(Debug, sqlx::Type, Serialize, Clone, PartialEq, Eq)] +#[derive(Debug, sqlx::Type, Serialize, Clone, PartialEq, Eq, Hash)] #[sqlx(transparent)] pub struct ProjectName(String); +impl ProjectName { + pub fn as_str(&self) -> &str { + self.0.as_str() + } + + pub fn is_valid(&self) -> bool { + let name = self.0.clone(); + + fn is_valid_char(byte: u8) -> bool { + matches!(byte, b'a'..=b'z' | b'0'..=b'9' | b'-') + } + + // each label in a hostname can be between 1 and 63 chars + let is_invalid_length = name.len() > 63; + + !(name.bytes().any(|byte| !is_valid_char(byte)) + || name.ends_with('-') + || name.starts_with('-') + || name.is_empty() + || is_invalid_length) + } +} + impl<'de> Deserialize<'de> for ProjectName { fn deserialize(deserializer: D) -> Result where @@ -126,11 +161,9 @@ impl FromStr for ProjectName { type Err = Error; fn from_str(s: &str) -> Result { - if PROJECT_REGEX.is_match(s) { - Ok(Self(s.to_string())) - } else { - Err(Error::from_kind(ErrorKind::InvalidProjectName)) - } + s.parse::() + .map_err(|_| Error::from_kind(ErrorKind::InvalidProjectName)) + .map(|pn| Self(pn.to_string())) } } @@ -169,22 +202,37 @@ impl<'de> Deserialize<'de> for AccountName { } } -pub trait Context<'c>: Send + Sync { - fn docker(&self) -> &'c Docker; +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ProjectDetails { + pub project_name: ProjectName, + pub account_name: AccountName, +} + +impl From for shuttle_common::models::project::AdminResponse { + fn from(project: ProjectDetails) -> Self { + Self { + project_name: project.project_name.to_string(), + account_name: project.account_name.to_string(), + } + } +} + +pub trait DockerContext: Send + Sync { + fn docker(&self) -> &Docker; - fn container_settings(&self) -> &'c ContainerSettings; + fn container_settings(&self) -> &ContainerSettings; } #[async_trait] -pub trait Service<'c> { - type Context: Context<'c>; +pub trait Service { + type Context; - type State: EndState<'c>; + type State: EndState; type Error; /// Asks for the latest available context for task execution - fn context(&'c self) -> Self::Context; + fn context(&self) -> Self::Context; /// Commit a state update to persistence async fn update(&self, state: &Self::State) -> Result<(), Self::Error>; @@ -193,42 +241,39 @@ pub trait Service<'c> { /// A generic state which can, when provided with a [`Context`], do /// some work and advance itself #[async_trait] -pub trait State<'c>: Send + Sized + Clone { +pub trait State: Send { type Next; type Error; - async fn next>(self, ctx: &C) -> Result; + async fn next(self, ctx: &Ctx) -> Result; } -/// A [`State`] which contains all its transitions, including -/// failures -pub trait EndState<'c> +pub type StateTryStream<'c, St, Err> = Pin> + Send + 'c>>; + +pub trait EndState where - Self: State<'c, Error = Infallible, Next = Self>, + Self: State, { - type ErrorVariant; - fn is_done(&self) -> bool; - - fn into_result(self) -> Result; } -pub type StateTryStream<'c, St, Err> = Pin> + Send + 'c>>; - -pub trait EndStateExt<'c>: EndState<'c> { +pub trait EndStateExt: 
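ProjectName::is_valid above enforces DNS-label-style rules: 1 to 63 bytes drawn from lowercase letters, digits, and '-', with no leading or trailing hyphen. A stand-alone restatement of that rule with a few example inputs (is_valid_label is a local name, not part of the crate):

// Local restatement of the rule in `is_valid`, not part of the crate.
fn is_valid_label(name: &str) -> bool {
    let ok_char = |b: u8| matches!(b, b'a'..=b'z' | b'0'..=b'9' | b'-');
    !name.is_empty()
        && name.len() <= 63
        && name.bytes().all(ok_char)
        && !name.starts_with('-')
        && !name.ends_with('-')
}

fn examples() {
    assert!(is_valid_label("matrix"));
    assert!(is_valid_label("my-project-1"));
    assert!(!is_valid_label("-matrix")); // leading hyphen
    assert!(!is_valid_label("Matrix")); // uppercase is rejected
    assert!(!is_valid_label("has_underscore"));
}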
TryState + EndState +where + Ctx: Sync, + Self: Clone, +{ /// Convert the state into a [`TryStream`] that yields /// the generated states. /// /// This stream will not end. - fn into_stream(self, ctx: Ctx) -> StateTryStream<'c, Self, Self::ErrorVariant> + fn into_stream<'c>(self, ctx: &'c Ctx) -> StateTryStream<'c, Self, Self::ErrorVariant> where Self: 'c, - Ctx: 'c + Context<'c>, { Box::pin(stream::try_unfold((self, ctx), |(state, ctx)| async move { state - .next(&ctx) + .next(ctx) .await .unwrap() // EndState's `next` is Infallible .into_result() @@ -237,29 +282,42 @@ pub trait EndStateExt<'c>: EndState<'c> { } } -impl<'c, S> EndStateExt<'c> for S where S: EndState<'c> {} +impl EndStateExt for S +where + S: Clone + TryState + EndState, + Ctx: Send + Sync, +{ +} + +/// A [`State`] which contains all its transitions, including +/// failures +pub trait TryState: Sized { + type ErrorVariant; + + fn into_result(self) -> Result; +} -pub trait IntoEndState<'c, E> +pub trait IntoTryState where - E: EndState<'c>, + S: TryState, { - fn into_end_state(self) -> Result; + fn into_try_state(self) -> Result; } -impl<'c, E, S, Err> IntoEndState<'c, E> for Result +impl IntoTryState for Result where - E: EndState<'c> + From + From, + S: TryState + From + From, { - fn into_end_state(self) -> Result { - self.map(|s| E::from(s)).or_else(|err| Ok(E::from(err))) + fn into_try_state(self) -> Result { + self.map(|s| S::from(s)).or_else(|err| Ok(S::from(err))) } } #[async_trait] -pub trait Refresh: Sized { +pub trait Refresh: Sized { type Error: StdError; - async fn refresh<'c, C: Context<'c>>(self, ctx: &C) -> Result; + async fn refresh(self, ctx: &Ctx) -> Result; } #[cfg(test)] @@ -281,18 +339,18 @@ pub mod tests { use hyper::http::Uri; use hyper::{Body, Client as HyperClient, Request, Response, StatusCode}; use rand::distributions::{Alphanumeric, DistString, Distribution, Uniform}; - use shuttle_common::models::{project, service}; + use shuttle_common::models::{project, service, user}; use sqlx::SqlitePool; use tokio::sync::mpsc::channel; - use tracing::info; - use crate::api::make_api; - use crate::args::StartArgs; + use crate::acme::AcmeClient; + use crate::api::latest::ApiBuilder; + use crate::args::{ContextArgs, StartArgs, UseTls}; use crate::auth::User; - use crate::proxy::make_proxy; + use crate::proxy::UserServiceBuilder; use crate::service::{ContainerSettings, GatewayService, MIGRATIONS}; use crate::worker::Worker; - use crate::Context; + use crate::DockerContext; macro_rules! value_block_helper { ($next:ident, $block:block) => { @@ -355,7 +413,7 @@ pub mod tests { $($(#[$($meta:tt)*])* $($patterns:pat_param)|+ $(if $guards:expr)? $(=> $mores:block)?,)+ } => {{ let state = $state; - let mut stream = crate::EndStateExt::into_stream(state, $ctx); + let mut stream = crate::EndStateExt::into_stream(state, &$ctx); assert_stream_matches!( stream, $($(#[$($meta)*])* $($patterns)|+ $(if $guards)? 
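EndStateExt::into_stream above repeatedly calls State::next and yields each intermediate state until EndState::is_done holds. A toy, synchronous illustration of that drive-until-done shape, using a local enum instead of the gateway's Project state machine:

// A toy state machine driven to completion, not the gateway's Project type.
#[derive(Debug)]
enum Toy {
    Counting(u32),
    Done,
}

impl Toy {
    fn next(self) -> Self {
        match self {
            Toy::Counting(0) => Toy::Done,
            Toy::Counting(n) => Toy::Counting(n - 1),
            Toy::Done => Toy::Done,
        }
    }

    fn is_done(&self) -> bool {
        matches!(self, Toy::Done)
    }
}

// Same shape as polling `State::next` until `EndState::is_done` holds.
fn drive(mut state: Toy) -> Toy {
    while !state.is_done() {
        state = state.next();
    }
    state
}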
$(=> $mores)?,)+ @@ -485,21 +543,19 @@ pub mod tests { args: StartArgs, hyper: HyperClient, pool: SqlitePool, - fqdn: String, + acme_client: AcmeClient, } - #[derive(Clone, Copy)] - pub struct WorldContext<'c> { - pub docker: &'c Docker, - pub container_settings: &'c ContainerSettings, - pub hyper: &'c HyperClient, - pub fqdn: &'c str, + #[derive(Clone)] + pub struct WorldContext { + pub docker: Docker, + pub container_settings: ContainerSettings, + pub hyper: HyperClient, } impl World { pub async fn new() -> Self { let docker = Docker::connect_with_local_defaults().unwrap(); - let fqdn = "test.shuttleapp.rs".to_string(); docker .list_images::<&str>(None) @@ -509,8 +565,10 @@ pub mod tests { let control: i16 = Uniform::from(9000..10000).sample(&mut rand::thread_rng()); let user = control + 1; + let bouncer = user + 1; let control = format!("127.0.0.1:{control}").parse().unwrap(); let user = format!("127.0.0.1:{user}").parse().unwrap(); + let bouncer = format!("127.0.0.1:{bouncer}").parse().unwrap(); let prefix = format!( "shuttle_test_{}_", @@ -529,17 +587,21 @@ pub mod tests { let args = StartArgs { control, - docker_host, user, - image, - prefix, - provisioner_host, - network_name, - proxy_fqdn: FQDN::from_str(&fqdn).unwrap(), + bouncer, + use_tls: UseTls::Disable, + context: ContextArgs { + docker_host, + image, + prefix, + provisioner_host, + network_name, + proxy_fqdn: FQDN::from_str("test.shuttleapp.rs").unwrap(), + }, }; - let settings = ContainerSettings::builder(&docker, fqdn.clone()) - .from_args(&args) + let settings = ContainerSettings::builder(&docker) + .from_args(&args.context) .await; let hyper = HyperClient::builder().build(HttpConnector::new()); @@ -547,18 +609,20 @@ pub mod tests { let pool = SqlitePool::connect("sqlite::memory:").await.unwrap(); MIGRATIONS.run(&pool).await.unwrap(); + let acme_client = AcmeClient::new(); + Self { docker, settings, args, hyper, pool, - fqdn, + acme_client, } } - pub fn args(&self) -> StartArgs { - self.args.clone() + pub fn args(&self) -> ContextArgs { + self.args.context.clone() } pub fn pool(&self) -> SqlitePool { @@ -569,48 +633,52 @@ pub mod tests { Client::new(addr).with_hyper_client(self.hyper.clone()) } - pub fn fqdn(&self) -> String { - self.fqdn.clone() + pub fn fqdn(&self) -> FQDN { + self.args().proxy_fqdn + } + + pub fn acme_client(&self) -> AcmeClient { + self.acme_client.clone() } } impl World { pub fn context(&self) -> WorldContext { WorldContext { - docker: &self.docker, - container_settings: &self.settings, - hyper: &self.hyper, - fqdn: &self.fqdn, + docker: self.docker.clone(), + container_settings: self.settings.clone(), + hyper: self.hyper.clone(), } } } - impl<'c> Context<'c> for WorldContext<'c> { - fn docker(&self) -> &'c Docker { - self.docker + impl DockerContext for WorldContext { + fn docker(&self) -> &Docker { + &self.docker } - fn container_settings(&self) -> &'c ContainerSettings { - self.container_settings + fn container_settings(&self) -> &ContainerSettings { + &self.container_settings } } #[tokio::test] async fn end_to_end() { let world = World::new().await; - let service = - Arc::new(GatewayService::init(world.args(), world.fqdn(), world.pool()).await); - let worker = Worker::new(Arc::clone(&service)); + let service = Arc::new(GatewayService::init(world.args(), world.pool()).await); + let worker = Worker::new(); let (log_out, mut log_in) = channel(256); tokio::spawn({ let sender = worker.sender(); async move { while let Some(work) = log_in.recv().await { - info!("work: {work:?}"); - 
sender.send(work).await.unwrap() + sender + .send(work) + .await + .map_err(|_| "could not send work") + .unwrap(); } - info!("work channel closed"); } }); @@ -621,28 +689,33 @@ pub mod tests { } }; - let api = make_api(Arc::clone(&service), log_out); let api_addr = format!("127.0.0.1:{}", base_port).parse().unwrap(); - let serve_api = hyper::Server::bind(&api_addr).serve(api.into_make_service()); let api_client = world.client(api_addr); - - let proxy = make_proxy(Arc::clone(&service), world.fqdn()); - let proxy_addr = format!("127.0.0.1:{}", base_port + 1).parse().unwrap(); - let serve_proxy = hyper::Server::bind(&proxy_addr).serve(proxy); - let proxy_client = world.client(proxy_addr); + let api = ApiBuilder::new() + .with_service(Arc::clone(&service)) + .with_sender(log_out) + .with_default_routes() + .binding_to(api_addr); + + let user_addr: SocketAddr = format!("127.0.0.1:{}", base_port + 1).parse().unwrap(); + let proxy_client = world.client(user_addr); + let user = UserServiceBuilder::new() + .with_service(Arc::clone(&service)) + .with_public(world.fqdn()) + .with_user_proxy_binding_to(user_addr); let _gateway = tokio::spawn(async move { tokio::select! { _ = worker.start() => {}, - _ = serve_api => {}, - _ = serve_proxy => {} + _ = api.serve() => {}, + _ = user.serve() => {} } }); let User { key, name, .. } = service.create_user("neo".parse().unwrap()).await.unwrap(); service.set_super_user(&name, true).await.unwrap(); - let User { key, .. } = api_client + let user::Response { key, .. } = api_client .request( Request::post("/users/trinity") .with_header(&Authorization::bearer(key.as_str()).unwrap()) @@ -742,6 +815,7 @@ pub mod tests { .request( Request::get("/hello") .header("Host", "matrix.test.shuttleapp.rs") + .header("x-shuttle-project", "matrix") .body(Body::empty()) .unwrap(), ) @@ -777,10 +851,27 @@ pub mod tests { ) .await .unwrap(); - println!("{resp:?}"); - if matches!(resp.status(), StatusCode::NOT_FOUND) { + let resp = serde_json::from_slice::(resp.body().as_slice()).unwrap(); + if matches!(resp.state, project::State::Destroyed) { break; } }); + + // Attempting to delete already Destroyed project will return Destroyed + api_client + .request( + Request::delete("/projects/matrix") + .with_header(&authorization) + .body(Body::empty()) + .unwrap(), + ) + .map_ok(|resp| { + assert_eq!(resp.status(), StatusCode::OK); + let resp = + serde_json::from_slice::(resp.body().as_slice()).unwrap(); + assert_eq!(resp.state, project::State::Destroyed); + }) + .await + .unwrap(); } } diff --git a/gateway/src/main.rs b/gateway/src/main.rs index 5a01f5e48..b2912034d 100644 --- a/gateway/src/main.rs +++ b/gateway/src/main.rs @@ -1,18 +1,25 @@ use clap::Parser; +use fqdn::FQDN; use futures::prelude::*; -use shuttle_gateway::args::{Args, Commands, InitArgs}; +use instant_acme::{AccountCredentials, ChallengeType}; +use opentelemetry::global; +use shuttle_gateway::acme::{AcmeClient, CustomDomain}; +use shuttle_gateway::api::latest::{ApiBuilder, SVC_DEGRADED_THRESHOLD}; +use shuttle_gateway::args::StartArgs; +use shuttle_gateway::args::{Args, Commands, InitArgs, UseTls}; use shuttle_gateway::auth::Key; -use shuttle_gateway::proxy::make_proxy; +use shuttle_gateway::proxy::UserServiceBuilder; use shuttle_gateway::service::{GatewayService, MIGRATIONS}; -use shuttle_gateway::worker::{Work, Worker}; -use shuttle_gateway::{api::make_api, args::StartArgs}; -use shuttle_gateway::{Refresh, Service}; +use shuttle_gateway::task; +use shuttle_gateway::tls::{make_tls_acceptor, ChainAndPrivateKey}; +use 
shuttle_gateway::worker::{Worker, WORKER_QUEUE_SIZE}; use sqlx::migrate::MigrateDatabase; use sqlx::{query, Sqlite, SqlitePool}; -use std::io; -use std::path::Path; +use std::io::{self, Cursor}; +use std::path::{Path, PathBuf}; use std::sync::Arc; -use tracing::{debug, error, info, trace}; +use std::time::Duration; +use tracing::{debug, error, info, info_span, trace, warn, Instrument}; use tracing_subscriber::{fmt, prelude::*, EnvFilter}; #[tokio::main(flavor = "multi_thread")] @@ -21,6 +28,8 @@ async fn main() -> io::Result<()> { trace!(args = ?args, "parsed args"); + global::set_text_map_propagator(opentelemetry_datadog::DatadogPropagator::new()); + let fmt_layer = fmt::layer(); let filter_layer = EnvFilter::try_from_default_env() .or_else(|_| EnvFilter::try_new("info")) @@ -38,8 +47,11 @@ async fn main() -> io::Result<()> { .with(opentelemetry) .init(); - if !Path::new(&args.state).exists() { - Sqlite::create_database(&args.state).await.unwrap(); + let db_path = args.state.join("gateway.sqlite"); + let db_uri = db_path.to_str().unwrap(); + + if !db_path.exists() { + Sqlite::create_database(db_uri).await.unwrap(); } info!( @@ -48,61 +60,38 @@ async fn main() -> io::Result<()> { .unwrap() .to_string_lossy() ); - let db = SqlitePool::connect(&args.state).await.unwrap(); + let db = SqlitePool::connect(db_uri).await.unwrap(); MIGRATIONS.run(&db).await.unwrap(); match args.command { - Commands::Start(start_args) => start(db, start_args).await, + Commands::Start(start_args) => start(db, args.state, start_args).await, Commands::Init(init_args) => init(db, init_args).await, } } -async fn start(db: SqlitePool, args: StartArgs) -> io::Result<()> { - let fqdn = args - .proxy_fqdn - .to_string() - .trim_end_matches('.') - .to_string(); - let gateway = Arc::new(GatewayService::init(args.clone(), fqdn.clone(), db).await); +async fn start(db: SqlitePool, fs: PathBuf, args: StartArgs) -> io::Result<()> { + let gateway = Arc::new(GatewayService::init(args.context.clone(), db).await); - let worker = Worker::new(Arc::clone(&gateway)); + let worker = Worker::new(); let sender = worker.sender(); - let gateway_clone = gateway.clone(); - let sender_clone = sender.clone(); - - tokio::spawn(async move { - for Work { - project_name, - account_name, - work, - } in gateway_clone - .iter_projects() + for (project_name, _) in gateway + .iter_projects() + .await + .expect("could not list projects") + { + gateway + .clone() + .new_task() + .project(project_name) + .and_then(task::refresh()) + .send(&sender) .await - .expect("could not list projects") - { - match work.refresh(&gateway_clone.context()).await { - Ok(work) => sender_clone - .send(Work { - account_name, - project_name, - work, - }) - .await - .unwrap(), - Err(err) => { - error!( - error = %err, - %account_name, - %project_name, - "could not refresh state. 
Skipping it for now.", - ); - } - } - } - }); + .ok() + .unwrap(); + } let worker_handle = tokio::spawn( worker @@ -111,20 +100,114 @@ async fn start(db: SqlitePool, args: StartArgs) -> io::Result<()> { .map_err(|err| error!("worker error: {}", err)), ); - let api = make_api(Arc::clone(&gateway), sender); + // Every 60secs go over all `::Ready` projects and check their + // health + let ambulance_handle = tokio::spawn({ + let gateway = Arc::clone(&gateway); + let sender = sender.clone(); + async move { + loop { + tokio::time::sleep(Duration::from_secs(60)).await; + if sender.capacity() < WORKER_QUEUE_SIZE - SVC_DEGRADED_THRESHOLD { + // if degraded, don't stack more health checks + warn!( + sender.capacity = sender.capacity(), + "skipping health checks" + ); + continue; + } + + if let Ok(projects) = gateway.iter_projects().await { + let span = info_span!( + "running health checks", + healthcheck.num_projects = projects.len() + ); + + let gateway = gateway.clone(); + let sender = sender.clone(); + async move { + for (project_name, _) in projects { + if let Ok(handle) = gateway + .new_task() + .project(project_name) + .and_then(task::check_health()) + .send(&sender) + .await + { + // we wait for the check to be done before + // queuing up the next one + handle.await + } + } + } + .instrument(span) + .await; + } + } + } + }); + + let acme_client = AcmeClient::new(); + + let mut api_builder = ApiBuilder::new() + .with_service(Arc::clone(&gateway)) + .with_sender(sender) + .binding_to(args.control); - let api_handle = tokio::spawn(axum::Server::bind(&args.control).serve(api.into_make_service())); + let mut user_builder = UserServiceBuilder::new() + .with_service(Arc::clone(&gateway)) + .with_public(args.context.proxy_fqdn.clone()) + .with_user_proxy_binding_to(args.user) + .with_bouncer(args.bouncer); - let proxy = make_proxy(gateway, fqdn); + if let UseTls::Enable = args.use_tls { + let (resolver, tls_acceptor) = make_tls_acceptor(); - let proxy_handle = tokio::spawn(hyper::Server::bind(&args.user).serve(proxy)); + user_builder = user_builder + .with_acme(acme_client.clone()) + .with_tls(tls_acceptor); + + api_builder = api_builder.with_acme(acme_client.clone(), resolver.clone()); + + for CustomDomain { + fqdn, + certificate, + private_key, + .. + } in gateway.iter_custom_domains().await.unwrap() + { + let mut buf = Vec::new(); + buf.extend(certificate.as_bytes()); + buf.extend(private_key.as_bytes()); + resolver + .serve_pem(&fqdn.to_string(), Cursor::new(buf)) + .await + .unwrap(); + } + + tokio::spawn(async move { + // make sure we have a certificate for ourselves + let certs = init_certs(fs, args.context.proxy_fqdn.clone(), acme_client.clone()).await; + resolver.serve_default_der(certs).await.unwrap(); + }); + } else { + warn!("TLS is disabled in the proxy service. 
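The health-check task above wakes every 60 seconds, skips the round entirely when the worker queue already looks congested, and otherwise runs one check per project, waiting for each to finish before queuing the next. A reduced sketch of that loop, where is_congested and run_checks are placeholders for the gateway's capacity test and per-project tasks:

use std::time::Duration;
use tokio::time::sleep;

// `is_congested` and `run_checks` are placeholders, not the gateway's API.
async fn ambulance<C, F, Fut>(mut is_congested: C, mut run_checks: F)
where
    C: FnMut() -> bool,
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = ()>,
{
    loop {
        sleep(Duration::from_secs(60)).await;
        if is_congested() {
            // Don't stack more health checks onto an already degraded queue.
            continue;
        }
        run_checks().await;
    }
}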
This is only acceptable in testing, and should *never* be used in deployments."); + }; + + let api_handle = api_builder + .with_default_routes() + .with_default_traces() + .serve(); + + let user_handle = user_builder.serve(); debug!("starting up all services"); tokio::select!( _ = worker_handle => info!("worker handle finished"), - _ = api_handle => info!("api handle finished"), - _ = proxy_handle => info!("proxy handle finished"), + _ = api_handle => error!("api handle finished"), + _ = user_handle => error!("user handle finished"), + _ = ambulance_handle => error!("ambulance handle finished"), ); Ok(()) @@ -146,3 +229,47 @@ async fn init(db: SqlitePool, args: InitArgs) -> io::Result<()> { println!("`{}` created as super user with key: {key}", args.name); Ok(()) } + +async fn init_certs>(fs: P, public: FQDN, acme: AcmeClient) -> ChainAndPrivateKey { + let tls_path = fs.as_ref().join("ssl.pem"); + + match ChainAndPrivateKey::load_pem(&tls_path) { + Ok(valid) => valid, + Err(_) => { + let creds_path = fs.as_ref().join("acme.json"); + warn!( + "no valid certificate found at {}, creating one...", + tls_path.display() + ); + + if !creds_path.exists() { + panic!( + "no ACME credentials found at {}, cannot continue with certificate creation", + creds_path.display() + ); + } + + let creds = std::fs::File::open(creds_path).unwrap(); + let creds: AccountCredentials = serde_json::from_reader(&creds).unwrap(); + + let identifier = format!("*.{public}"); + + // Use ::Dns01 challenge because that's the only supported + // challenge type for wildcard domains + let (chain, private_key) = acme + .create_certificate(&identifier, ChallengeType::Dns01, creds) + .await + .unwrap(); + + let mut buf = Vec::new(); + buf.extend(chain.as_bytes()); + buf.extend(private_key.as_bytes()); + + let certs = ChainAndPrivateKey::parse_pem(Cursor::new(buf)).unwrap(); + + certs.clone().save_pem(&tls_path).unwrap(); + + certs + } + } +} diff --git a/gateway/src/project.rs b/gateway/src/project.rs index 18a841889..e77f4a62e 100644 --- a/gateway/src/project.rs +++ b/gateway/src/project.rs @@ -1,4 +1,5 @@ -use std::convert::Infallible; +use std::collections::HashMap; +use std::convert::{identity, Infallible}; use std::net::{IpAddr, SocketAddr}; use std::time::Duration; @@ -6,17 +7,23 @@ use bollard::container::{ Config, CreateContainerOptions, RemoveContainerOptions, StopContainerOptions, }; use bollard::errors::Error as DockerError; -use bollard::models::{ - ContainerConfig, ContainerInspectResponse, ContainerStateStatusEnum, HealthStatusEnum, -}; +use bollard::models::{ContainerInspectResponse, ContainerStateStatusEnum}; +use bollard::system::EventsOptions; +use fqdn::FQDN; use futures::prelude::*; +use http::uri::InvalidUri; +use http::Uri; +use hyper::client::HttpConnector; +use hyper::Client; +use once_cell::sync::Lazy; +use rand::distributions::{Alphanumeric, DistString}; use serde::{Deserialize, Serialize}; -use tokio::time; -use tracing::{debug, error}; +use tokio::time::{self, timeout}; +use tracing::{debug, error, instrument}; use crate::{ - ContainerSettings, Context, EndState, Error, ErrorKind, IntoEndState, ProjectName, Refresh, - State, + ContainerSettings, DockerContext, EndState, Error, ErrorKind, IntoTryState, ProjectName, + Refresh, State, TryState, }; macro_rules! safe_unwrap { @@ -55,17 +62,79 @@ macro_rules! 
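init_certs above follows a load-or-create pattern: reuse the PEM on disk if it parses, otherwise request a fresh certificate and persist it next to the gateway state. The same pattern reduced to plain files (build_new stands in for the ACME request; this is a sketch, not the gateway's code):

use std::fs;
use std::path::Path;

// `build_new` stands in for requesting a certificate over ACME.
fn load_or_create(path: &Path, build_new: impl FnOnce() -> String) -> std::io::Result<String> {
    match fs::read_to_string(path) {
        // Cached artifact is usable: reuse it.
        Ok(existing) => Ok(existing),
        // Otherwise build a fresh one and persist it for the next start-up.
        Err(_) => {
            let fresh = build_new();
            fs::write(path, &fresh)?;
            Ok(fresh)
        }
    }
}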
impl_from_variant { } const RUNTIME_API_PORT: u16 = 8001; +const MAX_RESTARTS: usize = 3; + +// Client used for health checks +static CLIENT: Lazy> = Lazy::new(Client::new); +// Health check must succeed within 10 seconds +static IS_HEALTHY_TIMEOUT: Duration = Duration::from_secs(10); #[async_trait] -impl Refresh for ContainerInspectResponse { +impl Refresh for ContainerInspectResponse +where + Ctx: DockerContext, +{ type Error = DockerError; - async fn refresh<'c, C: Context<'c>>(self, ctx: &C) -> Result { + async fn refresh(self, ctx: &Ctx) -> Result { ctx.docker() .inspect_container(self.id.as_ref().unwrap(), None) .await } } +pub trait ContainerInspectResponseExt { + fn container(&self) -> &ContainerInspectResponse; + + fn project_name(&self, prefix: &str) -> Result { + // This version can't be enabled while there are active + // deployers before v0.8.0 since the don't have this label + // TODO: switch to this version when you notice all deployers + // are greater than v0.8.0 + // let name = safe_unwrap!(container.config.labels.get("project.name")).to_string(); + + let container = self.container(); + let container_name = safe_unwrap!(container.name.strip_prefix("/")).to_string(); + safe_unwrap!(container_name.strip_prefix(prefix).strip_suffix("_run")) + .parse::() + .map_err(|_| ProjectError::internal("invalid project name")) + } + + fn find_arg_and_then<'s, F, O>(&'s self, find: &str, and_then: F) -> Result + where + F: FnOnce(&'s str) -> O, + O: 's, + { + let mut args = self.args()?.iter(); + let out = if args.any(|arg| arg.as_str() == find) { + args.next().map(|s| and_then(s.as_str())) + } else { + None + }; + out.ok_or_else(|| ProjectError::internal(format!("no such argument: {find}"))) + } + + fn args(&self) -> Result<&Vec, ProjectError> { + let container = self.container(); + Ok(safe_unwrap!(container.args)) + } + + fn fqdn(&self) -> Result { + self.find_arg_and_then("--proxy-fqdn", identity)? 
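find_arg_and_then above scans the container's argv for a flag and hands the element after it to the caller, which is how the deployer's --proxy-fqdn and --admin-secret values are recovered from a running container. A small stand-alone version of that scan over a plain slice (arg_value is a local name):

// Walk the argv; when `flag` is found, hand back the element that follows it.
fn arg_value<'a>(args: &'a [String], flag: &str) -> Option<&'a str> {
    let mut iter = args.iter();
    while let Some(arg) = iter.next() {
        if arg == flag {
            return iter.next().map(String::as_str);
        }
    }
    None
}

// e.g. ["deployer", "--proxy-fqdn", "shuttleapp.rs"] -> Some("shuttleapp.rs")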
+ .parse() + .map_err(|_| ProjectError::internal("invalid value for --proxy-fqdn")) + } + + fn initial_key(&self) -> Result { + self.find_arg_and_then("--admin-secret", str::to_owned) + } +} + +impl ContainerInspectResponseExt for ContainerInspectResponse { + fn container(&self) -> &ContainerInspectResponse { + self + } +} + impl From for Error { fn from(err: DockerError) -> Self { error!(error = %err, "internal Docker error"); @@ -110,6 +179,10 @@ impl Project { } } + pub fn create(project_name: ProjectName) -> Self { + Self::Creating(ProjectCreating::new_with_random_initial_key(project_name)) + } + pub fn destroy(self) -> Result { if let Some(container) = self.container() { Ok(Self::Destroying(ProjectDestroying { container })) @@ -118,6 +191,14 @@ impl Project { } } + pub fn is_ready(&self) -> bool { + matches!(self, Self::Ready(_)) + } + + pub fn is_destroyed(&self) -> bool { + matches!(self, Self::Destroyed(_)) + } + pub fn target_ip(&self) -> Result, Error> { match self.clone() { Self::Ready(project_ready) => Ok(Some(*project_ready.target_ip())), @@ -158,6 +239,14 @@ impl Project { } } + pub fn initial_key(&self) -> Option<&str> { + if let Self::Creating(creating) = self { + Some(creating.initial_key()) + } else { + None + } + } + pub fn container_id(&self) -> Option { self.container().and_then(|container| container.id) } @@ -180,27 +269,31 @@ impl From for shuttle_common::models::project::State { } #[async_trait] -impl<'c> State<'c> for Project { +impl State for Project +where + Ctx: DockerContext, +{ type Next = Self; type Error = Infallible; - async fn next>(self, ctx: &C) -> Result { + #[instrument(skip_all, fields(state = %self.state()))] + async fn next(self, ctx: &Ctx) -> Result { let previous = self.clone(); let previous_state = previous.state(); let mut new = match self { - Self::Creating(creating) => creating.next(ctx).await.into_end_state(), - Self::Starting(ready) => ready.next(ctx).await.into_end_state(), + Self::Creating(creating) => creating.next(ctx).await.into_try_state(), + Self::Starting(ready) => ready.next(ctx).await.into_try_state(), Self::Started(started) => match started.next(ctx).await { Ok(ProjectReadying::Ready(ready)) => Ok(ready.into()), Ok(ProjectReadying::Started(started)) => Ok(started.into()), Err(err) => Ok(Self::Errored(err)), }, - Self::Ready(ready) => ready.next(ctx).await.into_end_state(), - Self::Stopped(stopped) => stopped.next(ctx).await.into_end_state(), - Self::Stopping(stopping) => stopping.next(ctx).await.into_end_state(), - Self::Destroying(destroying) => destroying.next(ctx).await.into_end_state(), - Self::Destroyed(destroyed) => destroyed.next(ctx).await.into_end_state(), + Self::Ready(ready) => ready.next(ctx).await.into_try_state(), + Self::Stopped(stopped) => stopped.next(ctx).await.into_try_state(), + Self::Stopping(stopping) => stopping.next(ctx).await.into_try_state(), + Self::Destroying(destroying) => destroying.next(ctx).await.into_try_state(), + Self::Destroyed(destroyed) => destroyed.next(ctx).await.into_try_state(), Self::Errored(errored) => Ok(Self::Errored(errored)), }; @@ -222,15 +315,17 @@ impl<'c> State<'c> for Project { } } -impl<'c> EndState<'c> for Project { - type ErrorVariant = ProjectError; - +impl EndState for Project +where + Ctx: DockerContext, +{ fn is_done(&self) -> bool { - matches!( - self, - Self::Errored(_) | Self::Ready(_) | Self::Stopped(_) | Self::Destroyed(_) - ) + matches!(self, Self::Errored(_) | Self::Ready(_) | Self::Destroyed(_)) } +} + +impl TryState for Project { + type ErrorVariant = 
ProjectError; fn into_result(self) -> Result { match self { @@ -241,7 +336,10 @@ impl<'c> EndState<'c> for Project { } #[async_trait] -impl Refresh for Project { +impl Refresh for Project +where + Ctx: DockerContext, +{ type Error = Error; /// TODO: we could be a bit more clever than this by using the @@ -249,24 +347,21 @@ impl Refresh for Project { /// state which is probably prone to erroneously setting the /// project into the wrong state if the docker is transitioning /// the state of its resources under us - async fn refresh<'c, C: Context<'c>>(self, ctx: &C) -> Result { - let _container = if let Some(container_id) = self.container_id() { - Some(ctx.docker().inspect_container(&container_id, None).await?) - } else { - None - }; - + async fn refresh(self, ctx: &Ctx) -> Result { let refreshed = match self { Self::Creating(creating) => Self::Creating(creating), Self::Starting(ProjectStarting { container }) | Self::Started(ProjectStarted { container, .. }) | Self::Ready(ProjectReady { container, .. }) | Self::Stopping(ProjectStopping { container }) - | Self::Stopped(ProjectStopped { container }) => { - let container = container.refresh(ctx).await?; - match container.state.as_ref().unwrap().status.as_ref().unwrap() { + | Self::Stopped(ProjectStopped { container }) => match container + .clone() + .refresh(ctx) + .await + { + Ok(container) => match container.state.as_ref().unwrap().status.as_ref().unwrap() { ContainerStateStatusEnum::RUNNING => { - Self::Started(ProjectStarted { container }) + Self::Started(ProjectStarted::new(container)) } ContainerStateStatusEnum::CREATED => { Self::Starting(ProjectStarting { container }) @@ -278,8 +373,19 @@ impl Refresh for Project { "container resource has drifted out of sync: cannot recover", )) } + }, + Err(DockerError::DockerResponseServerError { + status_code: 404, .. 
+ }) => { + // container not found, let's try to recreate it + // with the same image + let project_name = container.project_name(&ctx.container_settings().prefix)?; + let initial_key = container.initial_key()?; + let creating = ProjectCreating::new(project_name, initial_key).from(container); + Self::Creating(creating) } - } + Err(err) => return Err(err.into()), + }, Self::Destroying(destroying) => Self::Destroying(destroying), Self::Destroyed(destroyed) => Self::Destroyed(destroyed), Self::Errored(err) => Self::Errored(err), @@ -288,10 +394,18 @@ impl Refresh for Project { } } -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProjectCreating { project_name: ProjectName, + /// The admin secret with which to start the deployer initial_key: String, + /// Override the default fqdn (`${project_name}.${public}`) + fqdn: Option, + /// Override the default image (specified in the args to this gateway) + image: Option, + /// Configuration will be extracted from there if specified (will + /// take precedence over other overrides) + from: Option, } impl ProjectCreating { @@ -299,14 +413,41 @@ impl ProjectCreating { Self { project_name, initial_key, + fqdn: None, + image: None, + from: None, } } + pub fn from(mut self, from: ContainerInspectResponse) -> Self { + self.from = Some(from); + self + } + + pub fn with_fqdn(mut self, fqdn: String) -> Self { + self.fqdn = Some(fqdn); + self + } + + pub fn new_with_random_initial_key(project_name: ProjectName) -> Self { + let initial_key = Alphanumeric.sample_string(&mut rand::thread_rng(), 32); + Self::new(project_name, initial_key) + } + + pub fn with_image(mut self, image: String) -> Self { + self.image = Some(image); + self + } + pub fn project_name(&self) -> &ProjectName { &self.project_name } - fn container_name<'c, C: Context<'c>>(&self, ctx: &C) -> String { + pub fn initial_key(&self) -> &str { + &self.initial_key + } + + fn container_name(&self, ctx: &C) -> String { + let prefix = &ctx.container_settings().prefix; let Self { project_name, .. } = &self; @@ -314,23 +455,25 @@ impl ProjectCreating { format!("{prefix}{project_name}_run") } - fn generate_container_config<'c, C: Context<'c>>( + fn generate_container_config( &self, ctx: &C, ) -> (CreateContainerOptions, Config) { let ContainerSettings { - image, + image: default_image, prefix, provisioner_host, network_name, network_id, - fqdn, + fqdn: public, .. } = ctx.container_settings(); let Self { initial_key, project_name, + fqdn, + image, ..
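// Hedged usage sketch for the builder-style overrides introduced on
// `ProjectCreating` above; the project name and image tag are placeholders,
// and this assumes it runs inside the gateway crate where these types live.
fn example_creating() -> ProjectCreating {
    let project_name: ProjectName = "my-project".parse().expect("valid project name");
    ProjectCreating::new_with_random_initial_key(project_name)
        // Pin a specific deployer image instead of the gateway-wide default.
        .with_image("example.registry.invalid/deployer:v0.8.0".to_string())
        // Serve the project under a custom FQDN instead of `{project_name}.{public}`.
        .with_fqdn("my-project.example.test".to_string())
}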
} = &self; @@ -338,39 +481,43 @@ impl ProjectCreating { name: self.container_name(ctx), }; - let container_config: ContainerConfig = deserialize_json!({ - "Image": image, - "Hostname": format!("{prefix}{project_name}"), - "Labels": { - "shuttle_prefix": prefix - }, - "Cmd": [ - "--admin-secret", - initial_key, - "--api-address", - format!("0.0.0.0:{RUNTIME_API_PORT}"), - "--provisioner-address", - provisioner_host, - "--provisioner-port", - "8000", - "--proxy-address", - "0.0.0.0:8000", - "--proxy-fqdn", - fqdn, - "--artifacts-path", - "/opt/shuttle", - "--state", - "/opt/shuttle/deployer.sqlite", - ], - "Env": [ - "RUST_LOG=debug", - ], - "Healthcheck": { - "Interval": 60_000_000_000i64, // Every minute - "Timeout": 15_000_000_000i64, // 15 seconds - "Test": ["CMD", "curl", format!("localhost:8001/projects/{project_name}/status")], - }, - }); + let container_config = self + .from + .as_ref() + .and_then(|container| container.config.clone()) + .unwrap_or_else(|| { + deserialize_json!({ + "Image": image.as_ref().unwrap_or(default_image), + "Hostname": format!("{prefix}{project_name}"), + "Labels": { + "shuttle.prefix": prefix, + "shuttle.project": project_name, + }, + "Cmd": [ + "--admin-secret", + initial_key, + "--project", + project_name, + "--api-address", + format!("0.0.0.0:{RUNTIME_API_PORT}"), + "--provisioner-address", + provisioner_host, + "--provisioner-port", + "8000", + "--proxy-address", + "0.0.0.0:8000", + "--proxy-fqdn", + fqdn.clone().unwrap_or(format!("{project_name}.{public}")), + "--artifacts-path", + "/opt/shuttle", + "--state", + "/opt/shuttle/deployer.sqlite", + ], + "Env": [ + "RUST_LOG=debug", + ] + }) + }); let mut config = Config::::from(container_config); @@ -387,7 +534,13 @@ impl ProjectCreating { "Target": "/opt/shuttle", "Source": format!("{prefix}{project_name}_vol"), "Type": "volume" - }] + }], + // https://docs.docker.com/config/containers/resource_constraints/#memory + "Memory": 6442450000i64, // 6 GiB hard limit + "MemoryReservation": 4295000000i64, // 4 GiB soft limit, applied if host is low on memory + // https://docs.docker.com/config/containers/resource_constraints/#cpu + "CpuPeriod": 100000i64, + "CpuQuota": 400000i64 }); debug!( @@ -402,11 +555,15 @@ Config: {config:#?} } #[async_trait] -impl<'c> State<'c> for ProjectCreating { +impl State for ProjectCreating +where + Ctx: DockerContext, +{ type Next = ProjectStarting; type Error = ProjectError; - async fn next>(self, ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(self, ctx: &Ctx) -> Result { let container_name = self.container_name(ctx); let container = ctx .docker() @@ -435,11 +592,15 @@ pub struct ProjectStarting { } #[async_trait] -impl<'c> State<'c> for ProjectStarting { +impl State for ProjectStarting +where + Ctx: DockerContext, +{ type Next = ProjectStarted; type Error = ProjectError; - async fn next>(self, ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(self, ctx: &Ctx) -> Result { let container_id = self.container.id.as_ref().unwrap(); ctx.docker() .start_container::(container_id, None) @@ -453,15 +614,25 @@ impl<'c> State<'c> for ProjectStarting { } })?; - Ok(Self::Next { - container: self.container.refresh(ctx).await?, - }) + let container = self.container.refresh(ctx).await?; + + Ok(Self::Next::new(container)) } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct ProjectStarted { container: ContainerInspectResponse, + service: Option, +} + +impl ProjectStarted { + pub fn new(container: ContainerInspectResponse) -> Self { + Self { + 
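// The host-config limits set by `generate_container_config` above, restated as
// named constants (illustrative only, not code from the diff). Docker grants
// `CpuQuota / CpuPeriod` CPUs worth of runtime per scheduling period, and the
// memory values are plain byte counts.
const CPU_PERIOD_US: i64 = 100_000; // 100 ms scheduling period
const CPU_QUOTA_US: i64 = 400_000; // 400 ms of CPU time per period => 4 CPUs
const MEMORY_HARD_LIMIT_BYTES: i64 = 6_442_450_000; // ~6 GiB hard cap
const MEMORY_SOFT_LIMIT_BYTES: i64 = 4_295_000_000; // ~4 GiB reservation, applied when the host runs low

fn deployer_cpu_limit() -> f64 {
    CPU_QUOTA_US as f64 / CPU_PERIOD_US as f64 // = 4.0
}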
container, + service: None, + } + } } #[derive(Clone, Debug, Serialize, Deserialize)] @@ -471,32 +642,42 @@ pub enum ProjectReadying { } #[async_trait] -impl<'c> State<'c> for ProjectStarted { +impl State for ProjectStarted +where + Ctx: DockerContext, +{ type Next = ProjectReadying; type Error = ProjectError; - async fn next>(self, ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(self, ctx: &Ctx) -> Result { time::sleep(Duration::from_secs(1)).await; + let container = self.container.refresh(ctx).await?; - if matches!( - safe_unwrap!(container.state.health.status), - HealthStatusEnum::HEALTHY - ) { - let service = Service::from_container(container.clone())?; + let mut service = match self.service { + Some(service) => service, + None => Service::from_container(ctx, container.clone())?, + }; + + if service.is_healthy().await { Ok(Self::Next::Ready(ProjectReady { container, service })) } else { - let created = chrono::DateTime::parse_from_rfc3339(safe_unwrap!(container.created)) - .map_err(|_err| { - ProjectError::internal("invalid `created` response from Docker daemon") - })?; + let started_at = + chrono::DateTime::parse_from_rfc3339(safe_unwrap!(container.state.started_at)) + .map_err(|_err| { + ProjectError::internal("invalid `started_at` response from Docker daemon") + })?; let now = chrono::offset::Utc::now(); - if created + chrono::Duration::seconds(120) < now { + if started_at + chrono::Duration::seconds(120) < now { return Err(ProjectError::internal( "project did not become healthy in time", )); } - Ok(Self::Next::Started(ProjectStarted { container })) + Ok(Self::Next::Started(ProjectStarted { + container, + service: Some(service), + })) } } } @@ -508,48 +689,91 @@ pub struct ProjectReady { } #[async_trait] -impl<'c> State<'c> for ProjectReady { +impl State for ProjectReady +where + Ctx: DockerContext, +{ type Next = Self; type Error = ProjectError; - async fn next>(self, _ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(mut self, _ctx: &Ctx) -> Result { Ok(self) } } impl ProjectReady { - pub fn name(&self) -> &str { + pub fn name(&self) -> &ProjectName { &self.service.name } pub fn target_ip(&self) -> &IpAddr { &self.service.target } + + pub async fn is_healthy(&mut self) -> bool { + self.service.is_healthy().await + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +pub struct HealthCheckRecord { + at: chrono::DateTime, + is_healthy: bool, +} + +impl HealthCheckRecord { + pub fn new(is_healthy: bool) -> Self { + Self { + at: chrono::Utc::now(), + is_healthy, + } + } } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct Service { - name: String, + name: ProjectName, target: IpAddr, + last_check: Option, } impl Service { - pub fn from_container(container: ContainerInspectResponse) -> Result { - let container_name = safe_unwrap!(container.name.strip_prefix("/")).to_string(); - - let resource_name = safe_unwrap!(container_name.strip_suffix("_run")).to_string(); + pub fn from_container( + ctx: &Ctx, + container: ContainerInspectResponse, + ) -> Result { + let resource_name = container.project_name(&ctx.container_settings().prefix)?; let network = safe_unwrap!(container.network_settings.networks) .values() .next() .ok_or_else(|| ProjectError::internal("project was not linked to a network"))?; - let target = safe_unwrap!(network.ip_address).parse().unwrap(); + + let target = safe_unwrap!(network.ip_address) + .parse() + .map_err(|_| ProjectError::internal("project did not join the network"))?; Ok(Self { name: 
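// A self-contained sketch of the readiness loop that `ProjectStarted::next`
// above implements one tick at a time: probe roughly once per second and give
// up two minutes after the container started. The real code measures the
// deadline from Docker's `started_at` timestamp and performs a single probe
// per state-machine transition; `is_healthy` here is a stand-in closure, not
// `Service::is_healthy`.
use std::future::Future;
use std::time::Duration;
use tokio::time::{sleep, Instant};

async fn wait_until_ready<F, Fut>(mut is_healthy: F) -> Result<(), &'static str>
where
    F: FnMut() -> Fut,
    Fut: Future<Output = bool>,
{
    let deadline = Instant::now() + Duration::from_secs(120);
    loop {
        if is_healthy().await {
            return Ok(());
        }
        if Instant::now() > deadline {
            return Err("project did not become healthy in time");
        }
        sleep(Duration::from_secs(1)).await;
    }
}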
resource_name, target, + last_check: None, }) } + + pub fn uri>(&self, path: S) -> Result { + format!("http://{}:8001{}", self.target, path.as_ref()) + .parse::() + .map_err(|err| err.into()) + } + + pub async fn is_healthy(&mut self) -> bool { + let uri = self.uri(format!("/projects/{}/status", self.name)).unwrap(); + let resp = timeout(IS_HEALTHY_TIMEOUT, CLIENT.get(uri)).await; + let is_healthy = matches!(resp, Ok(Ok(res)) if res.status().is_success()); + self.last_check = Some(HealthCheckRecord::new(is_healthy)); + is_healthy + } } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] @@ -558,12 +782,16 @@ pub struct ProjectStopping { } #[async_trait] -impl<'c> State<'c> for ProjectStopping { +impl State for ProjectStopping +where + Ctx: DockerContext, +{ type Next = ProjectStopped; type Error = ProjectError; - async fn next>(self, ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(self, ctx: &Ctx) -> Result { let Self { container } = self; ctx.docker() .stop_container( @@ -583,15 +811,50 @@ pub struct ProjectStopped { } #[async_trait] -impl<'c> State<'c> for ProjectStopped { +impl State for ProjectStopped +where + Ctx: DockerContext, +{ type Next = ProjectStarting; type Error = ProjectError; - async fn next>(self, _ctx: &C) -> Result { - // If stopped, try to restart - Ok(ProjectStarting { - container: self.container, - }) + #[instrument(skip_all)] + async fn next(self, ctx: &Ctx) -> Result { + let container = self.container; + + let since = (chrono::Utc::now() - chrono::Duration::minutes(15)) + .timestamp() + .to_string(); + let until = chrono::Utc::now().timestamp().to_string(); + + // Filter and collect `start` events for this project in the last 15 minutes + let start_events = ctx + .docker() + .events(Some(EventsOptions::<&str> { + since: Some(since), + until: Some(until), + filters: HashMap::from([ + ("container", vec![safe_unwrap!(container.id).as_str()]), + ("event", vec!["start"]), + ]), + })) + .try_collect::>() + .await?; + + let start_event_count = start_events.len(); + debug!( + "project started {} times in the last 15 minutes", + start_event_count + ); + + // If stopped, and has not restarted too much, try to restart + if start_event_count < MAX_RESTARTS { + Ok(ProjectStarting { container }) + } else { + Err(ProjectError::internal( + "too many restarts in the last 15 minutes", + )) + } } } @@ -601,11 +864,15 @@ pub struct ProjectDestroying { } #[async_trait] -impl<'c> State<'c> for ProjectDestroying { +impl State for ProjectDestroying +where + Ctx: DockerContext, +{ type Next = ProjectDestroyed; type Error = ProjectError; - async fn next>(self, ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(self, ctx: &Ctx) -> Result { let container_id = self.container.id.as_ref().unwrap(); ctx.docker() .stop_container(container_id, Some(StopContainerOptions { t: 1 })) @@ -633,11 +900,15 @@ pub struct ProjectDestroyed { } #[async_trait] -impl<'c> State<'c> for ProjectDestroyed { +impl State for ProjectDestroyed +where + Ctx: DockerContext, +{ type Next = ProjectDestroyed; type Error = ProjectError; - async fn next>(self, _ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(self, _ctx: &Ctx) -> Result { Ok(self) } } @@ -684,6 +955,30 @@ impl From for ProjectError { } } +impl From for ProjectError { + fn from(uri: InvalidUri) -> Self { + error!(%uri, "failed to create a health check URI"); + + Self { + kind: ProjectErrorKind::Internal, + message: uri.to_string(), + ctx: None, + } + } +} + +impl From for ProjectError { + fn from(err: hyper::Error) 
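// Sketch of the bounded health probe behind `Service::is_healthy` above: a GET
// against the deployer's status endpoint only counts as healthy if it resolves
// with a success status within the 10 second budget. A fresh `hyper` client is
// built here for self-containment; the gateway reuses the shared `CLIENT`.
use std::time::Duration;
use hyper::{Client, Uri};
use tokio::time::timeout;

async fn probe_status(endpoint: Uri) -> bool {
    let client = Client::new();
    matches!(
        timeout(Duration::from_secs(10), client.get(endpoint)).await,
        Ok(Ok(resp)) if resp.status().is_success()
    )
}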
-> Self { + error!(error = %err, "failed to check project's health"); + + Self { + kind: ProjectErrorKind::Internal, + message: err.to_string(), + ctx: None, + } + } +} + impl From for Error { fn from(err: ProjectError) -> Self { Self::source(ErrorKind::Internal, err) @@ -691,19 +986,82 @@ impl From for Error { } #[async_trait] -impl<'c> State<'c> for ProjectError { +impl State for ProjectError +where + Ctx: DockerContext, +{ type Next = Self; type Error = Infallible; - async fn next>(self, _ctx: &C) -> Result { + #[instrument(skip_all)] + async fn next(self, _ctx: &Ctx) -> Result { Ok(self) } } +pub mod exec { + + use std::sync::Arc; + + use bollard::service::ContainerState; + use tokio::sync::mpsc::Sender; + + use crate::{ + service::GatewayService, + task::{self, BoxedTask, TaskResult}, + }; + + use super::*; + + pub async fn revive( + gateway: Arc, + sender: Sender, + ) -> Result<(), ProjectError> { + for (project_name, _) in gateway + .iter_projects() + .await + .expect("could not list projects") + { + if let Project::Errored(ProjectError { ctx: Some(ctx), .. }) = + gateway.find_project(&project_name).await.unwrap() + { + if let Some(container) = ctx.container() { + if let Ok(container) = gateway + .context() + .docker() + .inspect_container(safe_unwrap!(container.id), None) + .await + { + if let Some(ContainerState { + status: Some(ContainerStateStatusEnum::EXITED), + .. + }) = container.state + { + debug!("{} will be revived", project_name.clone()); + _ = gateway + .new_task() + .project(project_name) + .and_then(task::run(|ctx| async move { + TaskResult::Done(Project::Stopped(ProjectStopped { + container: ctx.state.container().unwrap(), + })) + })) + .send(&sender) + .await; + } + } + } + } + } + + Ok(()) + } +} + #[cfg(test)] pub mod tests { - use bollard::models::{ContainerState, Health}; + use bollard::models::ContainerState; use futures::prelude::*; use hyper::{Body, Request, StatusCode}; @@ -722,6 +1080,9 @@ pub mod tests { Project::Creating(ProjectCreating { project_name: "my-project-test".parse().unwrap(), initial_key: "test".to_string(), + fqdn: None, + image: None, + from: None, }), #[assertion = "Container created, assigned an `id`"] Ok(Project::Starting(ProjectStarting { @@ -752,20 +1113,17 @@ pub mod tests { futures::pin_mut!(delay); let mut project_readying = project_started .unwrap() - .into_stream(ctx) + .into_stream(&ctx) .take_until(delay) .try_skip_while(|state| future::ready(Ok(!matches!(state, Project::Ready(_))))); let project_ready = assert_stream_matches!( project_readying, - #[assertion = "Container is ready, in a healthy state"] + #[assertion = "Container is ready"] Ok(Project::Ready(ProjectReady { container: ContainerInspectResponse { state: Some(ContainerState { - health: Some(Health { - status: Some(HealthStatusEnum::HEALTHY), - .. - }), + status: Some(ContainerStateStatusEnum::RUNNING), .. }), .. 
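// Tying the gateway/src/project.rs changes together: `exec::revive` above
// sweeps every stored project and, when an `Errored` project still has an
// EXITED container behind it, queues a task that rewinds the state machine to
// `Stopped` so the normal Stopped -> Starting -> Started -> Ready path (with
// its 15-minute restart budget) brings it back. A hedged sketch of scheduling
// one such revival by hand, mirroring the closure used in `revive`; the
// `gateway` handle and worker `sender` are assumed to exist in the caller.
async fn revive_one(
    gateway: std::sync::Arc<GatewayService>,
    sender: tokio::sync::mpsc::Sender<BoxedTask>,
    project_name: ProjectName,
) -> Result<TaskHandle, Error> {
    gateway
        .new_task()
        .project(project_name)
        .and_then(task::run(|ctx| async move {
            // Force the state back to Stopped; the worker then restarts it.
            TaskResult::Done(Project::Stopped(ProjectStopped {
                container: ctx.state.container().unwrap(),
            }))
        }))
        .send(&sender)
        .await
}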
diff --git a/gateway/src/proxy.rs b/gateway/src/proxy.rs index 012604ed4..aa51b0e9c 100644 --- a/gateway/src/proxy.rs +++ b/gateway/src/proxy.rs @@ -1,35 +1,178 @@ +use std::convert::Infallible; use std::future::Future; +use std::io; use std::net::SocketAddr; use std::pin::Pin; use std::sync::Arc; use std::task::{Context, Poll}; -use axum::body::HttpBody; +use axum::headers::{Error as HeaderError, Header, HeaderMapExt, HeaderName, HeaderValue, Host}; use axum::response::{IntoResponse, Response}; +use axum_server::accept::DefaultAcceptor; +use axum_server::tls_rustls::RustlsAcceptor; +use fqdn::{fqdn, FQDN}; +use futures::future::{ready, Ready}; use futures::prelude::*; -use hyper::body::Body; +use hyper::body::{Body, HttpBody}; use hyper::client::connect::dns::GaiResolver; use hyper::client::HttpConnector; use hyper::server::conn::AddrStream; use hyper::{Client, Request}; use hyper_reverse_proxy::ReverseProxy; use once_cell::sync::Lazy; -use tower::Service; -use tracing::debug; +use opentelemetry::global; +use opentelemetry_http::HeaderInjector; +use tower::{Service, ServiceBuilder}; +use tracing::{debug_span, error, field, trace}; +use tracing_opentelemetry::OpenTelemetrySpanExt; +use crate::acme::{AcmeClient, ChallengeResponderLayer, CustomDomain}; use crate::service::GatewayService; use crate::{Error, ErrorKind, ProjectName}; static PROXY_CLIENT: Lazy>> = Lazy::new(|| ReverseProxy::new(Client::new())); -pub struct ProxyService { +pub trait AsResponderTo { + fn as_responder_to(&self, req: R) -> Self; + + fn into_make_service(self) -> ResponderMakeService + where + Self: Sized, + { + ResponderMakeService { inner: self } + } +} + +pub struct ResponderMakeService { + inner: S, +} + +impl<'r, S> Service<&'r AddrStream> for ResponderMakeService +where + S: AsResponderTo<&'r AddrStream>, +{ + type Response = S; + type Error = Infallible; + type Future = Ready>; + + fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, req: &'r AddrStream) -> Self::Future { + ready(Ok(self.inner.as_responder_to(req))) + } +} + +lazy_static::lazy_static! 
{ + pub static ref X_SHUTTLE_PROJECT: HeaderName = HeaderName::from_static("x-shuttle-project"); +} + +pub struct XShuttleProject(ProjectName); + +impl Header for XShuttleProject { + fn name() -> &'static HeaderName { + &X_SHUTTLE_PROJECT + } + + fn encode>(&self, values: &mut E) { + values.extend(std::iter::once( + HeaderValue::from_str(self.0.as_str()).unwrap(), + )); + } + + fn decode<'i, I>(values: &mut I) -> Result + where + Self: Sized, + I: Iterator, + { + values + .last() + .and_then(|value| value.to_str().ok()) + .and_then(|value| value.parse().ok()) + .map(Self) + .ok_or_else(HeaderError::invalid) + } +} + +#[derive(Clone)] +pub struct UserProxy { gateway: Arc, remote_addr: SocketAddr, - fqdn: String, + public: FQDN, +} + +impl<'r> AsResponderTo<&'r AddrStream> for UserProxy { + fn as_responder_to(&self, addr_stream: &'r AddrStream) -> Self { + let mut responder = self.clone(); + responder.remote_addr = addr_stream.remote_addr(); + responder + } } -impl Service> for ProxyService { +impl UserProxy { + async fn proxy(self, mut req: Request) -> Result { + let span = debug_span!("proxy", http.method = %req.method(), http.host = ?req.headers().get("Host"), http.uri = %req.uri(), http.status_code = field::Empty, project = field::Empty); + trace!(?req, "serving proxy request"); + + let fqdn = req + .headers() + .typed_get::() + .map(|host| fqdn!(host.hostname())) + .ok_or_else(|| Error::from_kind(ErrorKind::ProjectNotFound))?; + + let project_name = + if fqdn.is_subdomain_of(&self.public) && fqdn.depth() - self.public.depth() == 1 { + fqdn.labels() + .next() + .unwrap() + .to_owned() + .parse() + .map_err(|_| Error::from_kind(ErrorKind::ProjectNotFound))? + } else if let Ok(CustomDomain { project_name, .. }) = + self.gateway.project_details_for_custom_domain(&fqdn).await + { + project_name + } else { + return Err(Error::from_kind(ErrorKind::ProjectNotFound)); + }; + + req.headers_mut() + .typed_insert(XShuttleProject(project_name.clone())); + + let project = self.gateway.find_project(&project_name).await?; + + // Record current project for tracing purposes + span.record("project", &project_name.to_string()); + + let target_ip = project + .target_ip()? 
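// Sketch of the host-to-project resolution that `UserProxy::proxy` above
// performs, isolated from the proxy plumbing: if the request host sits exactly
// one label below the public gateway domain, that label is the project name;
// anything else must match a stored custom domain (lookup not shown here).
use fqdn::{fqdn, FQDN};

fn project_label(host: &str, public: &FQDN) -> Option<String> {
    let host = fqdn!(host);
    if host.is_subdomain_of(public) && host.depth() - public.depth() == 1 {
        host.labels().next().map(|label| label.to_owned())
    } else {
        None
    }
}

// e.g. project_label("hello-world.shuttleapp.rs", &fqdn!("shuttleapp.rs"))
//      returns Some("hello-world".to_string()).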
+ .ok_or_else(|| Error::from_kind(ErrorKind::ProjectNotReady))?; + + let target_url = format!("http://{}:{}", target_ip, 8000); + + let cx = span.context(); + + global::get_text_map_propagator(|propagator| { + propagator.inject_context(&cx, &mut HeaderInjector(req.headers_mut())) + }); + + let proxy = PROXY_CLIENT + .call(self.remote_addr.ip(), &target_url, req) + .await + .map_err(|_| Error::from_kind(ErrorKind::ProjectUnavailable))?; + + let (parts, body) = proxy.into_parts(); + let body = ::map_err(body, axum::Error::new).boxed_unsync(); + + span.record("http.status_code", parts.status.as_u16()); + + Ok(Response::from_parts(parts, body)) + } +} + +impl Service> for UserProxy { type Response = Response; type Error = Error; type Future = @@ -40,51 +183,57 @@ impl Service> for ProxyService { } fn call(&mut self, req: Request) -> Self::Future { - let remote_addr = self.remote_addr.ip(); - let gateway = Arc::clone(&self.gateway); - let fqdn = self.fqdn.clone(); - Box::pin( - async move { - let project_str = req - .headers() - .get("Host") - .map(|head| head.to_str().unwrap()) - .and_then(|host| host.strip_suffix('.').unwrap_or(host).strip_suffix(&fqdn)) - .ok_or_else(|| Error::from_kind(ErrorKind::ProjectNotFound))?; - - let project_name: ProjectName = project_str - .parse() - .map_err(|_| Error::from_kind(ErrorKind::InvalidProjectName))?; + self.clone() + .proxy(req) + .or_else(|err: Error| future::ready(Ok(err.into_response()))) + .boxed() + } +} - let project = gateway.find_project(&project_name).await?; +#[derive(Clone)] +pub struct Bouncer { + gateway: Arc, + public: FQDN, +} - let target_ip = project - .target_ip()? - .ok_or_else(|| Error::from_kind(ErrorKind::ProjectNotReady))?; +impl<'r> AsResponderTo<&'r AddrStream> for Bouncer { + fn as_responder_to(&self, _req: &'r AddrStream) -> Self { + self.clone() + } +} - let target_url = format!("http://{}:{}", target_ip, 8000); +impl Bouncer { + async fn bounce(self, req: Request) -> Result { + let mut resp = Response::builder(); - let proxy = PROXY_CLIENT - .call(remote_addr, &target_url, req) - .await - .map_err(|_| Error::from_kind(ErrorKind::ProjectUnavailable))?; + let host = req.headers().typed_get::().unwrap(); + let hostname = host.hostname(); + let fqdn = fqdn!(hostname); - let (parts, body) = proxy.into_parts(); - let body = ::map_err(body, axum::Error::new).boxed_unsync(); - Ok(Response::from_parts(parts, body)) - } - .or_else(|err: Error| future::ready(Ok(err.into_response()))), - ) - } -} + let path = req.uri(); -pub struct MakeProxyService { - gateway: Arc, - fqdn: String, + if fqdn.is_subdomain_of(&self.public) + || self + .gateway + .project_details_for_custom_domain(&fqdn) + .await + .is_ok() + { + resp = resp + .status(301) + .header("Location", format!("https://{hostname}{path}")); + } else { + resp = resp.status(404); + } + + let body = ::map_err(Body::empty(), axum::Error::new).boxed_unsync(); + + Ok(resp.body(body).unwrap()) + } } -impl<'r> Service<&'r AddrStream> for MakeProxyService { - type Response = ProxyService; +impl Service> for Bouncer { + type Response = Response; type Error = Error; type Future = Pin> + Send + 'static>>; @@ -93,25 +242,134 @@ impl<'r> Service<&'r AddrStream> for MakeProxyService { Poll::Ready(Ok(())) } - fn call(&mut self, target: &'r AddrStream) -> Self::Future { - let gateway = Arc::clone(&self.gateway); - let remote_addr = target.remote_addr(); - let fqdn = self.fqdn.clone(); - Box::pin(async move { - Ok(ProxyService { - remote_addr, - gateway, - fqdn, - }) - }) + fn call(&mut self, req: 
Request) -> Self::Future { + self.clone().bounce(req).boxed() } } -pub fn make_proxy(gateway: Arc, fqdn: String) -> MakeProxyService { - debug!("making proxy"); +pub struct UserServiceBuilder { + service: Option>, + acme: Option, + tls_acceptor: Option>, + bouncer_binds_to: Option, + user_binds_to: Option, + public: Option, +} + +impl Default for UserServiceBuilder { + fn default() -> Self { + Self::new() + } +} + +impl UserServiceBuilder { + pub fn new() -> Self { + Self { + service: None, + public: None, + acme: None, + tls_acceptor: None, + bouncer_binds_to: None, + user_binds_to: None, + } + } + + pub fn with_public(mut self, public: FQDN) -> Self { + self.public = Some(public); + self + } - MakeProxyService { - gateway, - fqdn: format!(".{fqdn}"), + pub fn with_service(mut self, service: Arc) -> Self { + self.service = Some(service); + self + } + + pub fn with_bouncer(mut self, bound_to: SocketAddr) -> Self { + self.bouncer_binds_to = Some(bound_to); + self + } + + pub fn with_user_proxy_binding_to(mut self, bound_to: SocketAddr) -> Self { + self.user_binds_to = Some(bound_to); + self + } + + pub fn with_acme(mut self, acme: AcmeClient) -> Self { + self.acme = Some(acme); + self + } + + pub fn with_tls(mut self, acceptor: RustlsAcceptor) -> Self { + self.tls_acceptor = Some(acceptor); + self + } + + pub fn serve(self) -> impl Future> { + let service = self.service.expect("a GatewayService is required"); + let public = self.public.expect("a public FQDN is required"); + let user_binds_to = self + .user_binds_to + .expect("a socket address to bind to is required"); + + let user_proxy = UserProxy { + gateway: service.clone(), + remote_addr: "127.0.0.1:80".parse().unwrap(), + public: public.clone(), + }; + + let bouncer = self.bouncer_binds_to.as_ref().map(|_| Bouncer { + gateway: service.clone(), + public: public.clone(), + }); + + let mut futs = Vec::new(); + if let Some(tls_acceptor) = self.tls_acceptor { + // TLS is enabled + let bouncer = bouncer.expect("TLS cannot be enabled without a bouncer"); + let bouncer_binds_to = self.bouncer_binds_to.unwrap(); + + let acme = self + .acme + .expect("TLS cannot be enabled without an ACME client"); + + let bouncer = ServiceBuilder::new() + .layer(ChallengeResponderLayer::new(acme)) + .service(bouncer); + + let bouncer = axum_server::Server::bind(bouncer_binds_to) + .serve(bouncer.into_make_service()) + .map(|handle| ("bouncer (with challenge responder)", handle)) + .boxed(); + + futs.push(bouncer); + + let user_with_tls = axum_server::Server::bind(user_binds_to) + .acceptor(tls_acceptor) + .serve(user_proxy.into_make_service()) + .map(|handle| ("user proxy (with TLS)", handle)) + .boxed(); + futs.push(user_with_tls); + } else { + if let Some(bouncer) = bouncer { + // bouncer is enabled + let bouncer_binds_to = self.bouncer_binds_to.unwrap(); + let bouncer = axum_server::Server::bind(bouncer_binds_to) + .serve(bouncer.into_make_service()) + .map(|handle| ("bouncer (without challenge responder)", handle)) + .boxed(); + futs.push(bouncer); + } + + let user_without_tls = axum_server::Server::bind(user_binds_to) + .serve(user_proxy.into_make_service()) + .map(|handle| ("user proxy (no TLS)", handle)) + .boxed(); + futs.push(user_without_tls); + } + + future::select_all(futs.into_iter()).map(|((name, resolved), _, _)| { + error!(service = %name, "exited early"); + resolved + }) } } diff --git a/gateway/src/service.rs b/gateway/src/service.rs index 2cae0e7bf..20ca92a05 100644 --- a/gateway/src/service.rs +++ b/gateway/src/service.rs @@ -7,24 +7,30 @@ 
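// Hedged usage sketch for the `UserServiceBuilder` that closes out
// gateway/src/proxy.rs above: without `.with_bouncer(..)`, `.with_acme(..)`
// and `.with_tls(..)` only the plain-HTTP user proxy is bound; adding them
// turns on the HTTP->HTTPS bouncer and the TLS acceptor. The address and
// domain below are placeholders.
async fn serve_user_proxy(service: std::sync::Arc<GatewayService>) -> std::io::Result<()> {
    UserServiceBuilder::new()
        .with_service(service)
        .with_public("shuttleapp.rs".parse().expect("valid public FQDN"))
        .with_user_proxy_binding_to("0.0.0.0:8000".parse().unwrap())
        .serve()
        .await
}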
use axum::http::Request; use axum::response::Response; use bollard::network::ListNetworksOptions; use bollard::{Docker, API_DEFAULT_VERSION}; +use fqdn::Fqdn; +use http::HeaderValue; use hyper::client::connect::dns::GaiResolver; use hyper::client::HttpConnector; use hyper::Client; use hyper_reverse_proxy::ReverseProxy; use once_cell::sync::Lazy; -use rand::distributions::{Alphanumeric, DistString}; +use opentelemetry::global; +use opentelemetry_http::HeaderInjector; use sqlx::error::DatabaseError; use sqlx::migrate::Migrator; use sqlx::sqlite::SqlitePool; use sqlx::types::Json as SqlxJson; use sqlx::{query, Error as SqlxError, Row}; -use tracing::debug; +use tracing::{debug, Span}; +use tracing_opentelemetry::OpenTelemetrySpanExt; -use crate::args::StartArgs; -use crate::auth::{Key, User}; -use crate::project::{self, Project}; -use crate::worker::Work; -use crate::{AccountName, Context, Error, ErrorKind, ProjectName, Service}; +use crate::acme::CustomDomain; +use crate::args::ContextArgs; +use crate::auth::{Key, Permissions, ScopedUser, User}; +use crate::project::Project; +use crate::task::{BoxedTask, TaskBuilder}; +use crate::worker::TaskRouter; +use crate::{AccountName, DockerContext, Error, ErrorKind, ProjectDetails, ProjectName}; pub static MIGRATIONS: Migrator = sqlx::migrate!("./migrations"); static PROXY_CLIENT: Lazy>> = @@ -43,33 +49,35 @@ pub struct ContainerSettingsBuilder<'d> { image: Option, provisioner: Option, network_name: Option, - fqdn: String, + fqdn: Option, } impl<'d> ContainerSettingsBuilder<'d> { - pub fn new(docker: &'d Docker, fqdn: String) -> Self { + pub fn new(docker: &'d Docker) -> Self { Self { docker, prefix: None, image: None, provisioner: None, network_name: None, - fqdn, + fqdn: None, } } - pub async fn from_args(self, args: &StartArgs) -> ContainerSettings { - let StartArgs { + pub async fn from_args(self, args: &ContextArgs) -> ContainerSettings { + let ContextArgs { prefix, network_name, provisioner_host, image, + proxy_fqdn, .. } = args; self.prefix(prefix) .image(image) .provisioner_host(provisioner_host) .network_name(network_name) + .fqdn(proxy_fqdn) .build() .await } @@ -94,6 +102,11 @@ impl<'d> ContainerSettingsBuilder<'d> { self } + pub fn fqdn(mut self, fqdn: S) -> Self { + self.fqdn = Some(fqdn.to_string().trim_end_matches('.').to_string()); + self + } + /// Resolves the Docker network ID for the given network name. 
/// /// # Panics @@ -125,7 +138,7 @@ impl<'d> ContainerSettingsBuilder<'d> { let network_name = self.network_name.take().unwrap(); let network_id = self.resolve_network_id(&network_name).await; - let fqdn = self.fqdn; + let fqdn = self.fqdn.take().unwrap(); ContainerSettings { prefix, @@ -138,6 +151,7 @@ impl<'d> ContainerSettingsBuilder<'d> { } } +#[derive(Clone)] pub struct ContainerSettings { pub prefix: String, pub image: String, @@ -148,8 +162,8 @@ pub struct ContainerSettings { } impl ContainerSettings { - pub fn builder(docker: &Docker, fqdn: String) -> ContainerSettingsBuilder { - ContainerSettingsBuilder::new(docker, fqdn) + pub fn builder(docker: &Docker) -> ContainerSettingsBuilder { + ContainerSettingsBuilder::new(docker) } } @@ -165,8 +179,8 @@ impl GatewayContextProvider { pub fn context(&self) -> GatewayContext { GatewayContext { - docker: &self.docker, - settings: &self.settings, + docker: self.docker.clone(), + settings: self.settings.clone(), } } } @@ -174,6 +188,7 @@ impl GatewayContextProvider { pub struct GatewayService { provider: GatewayContextProvider, db: SqlitePool, + task_router: TaskRouter, } impl GatewayService { @@ -181,23 +196,28 @@ impl GatewayService { /// /// * `args` - The [`Args`] with which the service was /// started. Will be passed as [`Context`] to workers and state. - pub async fn init(args: StartArgs, fqdn: String, db: SqlitePool) -> Self { + pub async fn init(args: ContextArgs, db: SqlitePool) -> Self { let docker = Docker::connect_with_unix(&args.docker_host, 60, API_DEFAULT_VERSION).unwrap(); - let container_settings = ContainerSettings::builder(&docker, fqdn) - .from_args(&args) - .await; + let container_settings = ContainerSettings::builder(&docker).from_args(&args).await; let provider = GatewayContextProvider::new(docker, container_settings); - Self { provider, db } + let task_router = TaskRouter::new(); + + Self { + provider, + db, + task_router, + } } pub async fn route( &self, - project_name: &ProjectName, + scoped_user: &ScopedUser, mut req: Request, ) -> Result, Error> { + let project_name = &scoped_user.scope; let target_ip = self .find_project(project_name) .await? @@ -213,6 +233,17 @@ impl GatewayService { debug!(target_url, "routing control"); + let headers = req.headers_mut(); + headers.append( + "X-Shuttle-Account-Name", + HeaderValue::from_str(&scoped_user.user.name.to_string()).unwrap(), + ); + + let cx = Span::current().context(); + global::get_text_map_propagator(|propagator| { + propagator.inject_context(&cx, &mut HeaderInjector(headers)) + }); + let resp = PROXY_CLIENT .call("127.0.0.1".parse().unwrap(), &target_url, req) .await @@ -221,16 +252,14 @@ impl GatewayService { Ok(resp) } - pub async fn iter_projects(&self) -> Result, Error> { - let iter = query("SELECT * FROM projects") + pub async fn iter_projects( + &self, + ) -> Result, Error> { + let iter = query("SELECT project_name, account_name FROM projects") .fetch_all(&self.db) .await? 
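// Sketch of the trace-context propagation used by `GatewayService::route`
// above: the current `tracing` span is bridged to an OpenTelemetry context and
// injected into the outgoing request's headers so the deployer can join the
// same trace. Assumes the `opentelemetry`, `opentelemetry-http`, `tracing` and
// `tracing-opentelemetry` crates already used in this diff.
use hyper::{Body, Request};
use opentelemetry::global;
use opentelemetry_http::HeaderInjector;
use tracing::Span;
use tracing_opentelemetry::OpenTelemetrySpanExt;

fn inject_trace_context(req: &mut Request<Body>) {
    let cx = Span::current().context();
    global::get_text_map_propagator(|propagator| {
        propagator.inject_context(&cx, &mut HeaderInjector(req.headers_mut()))
    });
}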
.into_iter() - .map(|row| Work { - project_name: row.get("project_name"), - work: row.get::, _>("project_state").0, - account_name: row.get("account_name"), - }); + .map(|row| (row.get("project_name"), row.get("account_name"))); Ok(iter) } @@ -247,24 +276,38 @@ impl GatewayService { .ok_or_else(|| Error::from_kind(ErrorKind::ProjectNotFound)) } - async fn update_project( + pub async fn update_project( &self, project_name: &ProjectName, project: &Project, ) -> Result<(), Error> { let query = match project { - Project::Destroyed(_) => { - query("DELETE FROM projects WHERE project_name = ?1").bind(project_name) - } + Project::Creating(state) => query( + "UPDATE projects SET initial_key = ?1, project_state = ?2 WHERE project_name = ?3", + ) + .bind(state.initial_key()) + .bind(SqlxJson(project)) + .bind(project_name), _ => query("UPDATE projects SET project_state = ?1 WHERE project_name = ?2") .bind(SqlxJson(project)) .bind(project_name), }; - query.execute(&self.db).await?; Ok(()) } + pub async fn account_name_from_project( + &self, + project_name: &ProjectName, + ) -> Result { + query("SELECT account_name FROM projects WHERE project_name = ?1") + .bind(project_name) + .fetch_optional(&self.db) + .await? + .map(|row| row.get("account_name")) + .ok_or_else(|| Error::from(ErrorKind::ProjectNotFound)) + } + pub async fn key_from_account_name(&self, account_name: &AccountName) -> Result { let key = query("SELECT key FROM accounts WHERE account_name = ?1") .bind(account_name) @@ -298,30 +341,6 @@ impl GatewayService { Ok(control_key) } - pub async fn user_from_account_name(&self, name: AccountName) -> Result { - let key = self.key_from_account_name(&name).await?; - let super_user = self.is_super_user(&name).await?; - let projects = self.iter_user_projects(&name).await?.collect(); - Ok(User { - name, - key, - projects, - super_user, - }) - } - - pub async fn user_from_key(&self, key: Key) -> Result { - let name = self.account_name_from_key(&key).await?; - let super_user = self.is_super_user(&name).await?; - let projects = self.iter_user_projects(&name).await?.collect(); - Ok(User { - name, - key, - projects, - super_user, - }) - } - pub async fn create_user(&self, name: AccountName) -> Result { let key = Key::new_random(); query("INSERT INTO accounts (account_name, key) VALUES (?1, ?2)") @@ -341,38 +360,53 @@ impl GatewayService { // Otherwise this is internal err.into() })?; - Ok(User { - name, - key, - projects: Vec::default(), - super_user: false, - }) + Ok(User::new_with_defaults(name, key)) + } + + pub async fn get_permissions(&self, account_name: &AccountName) -> Result { + let permissions = + query("SELECT super_user, account_tier FROM accounts WHERE account_name = ?1") + .bind(account_name) + .fetch_optional(&self.db) + .await? + .map(|row| { + Permissions::builder() + .super_user(row.try_get("super_user").unwrap()) + .tier(row.try_get("account_tier").unwrap()) + .build() + }) + .unwrap_or_default(); // defaults to `false` (i.e. not super user) + Ok(permissions) } - pub async fn is_super_user(&self, account_name: &AccountName) -> Result { - let is_super_user = query("SELECT super_user FROM accounts WHERE account_name = ?1") + pub async fn set_super_user( + &self, + account_name: &AccountName, + super_user: bool, + ) -> Result<(), Error> { + query("UPDATE accounts SET super_user = ?1 WHERE account_name = ?2") + .bind(super_user) .bind(account_name) - .fetch_optional(&self.db) - .await? - .map(|row| row.try_get("super_user").unwrap()) - .unwrap_or(false); // defaults to `false` (i.e. 
not super user) - Ok(is_super_user) + .execute(&self.db) + .await?; + Ok(()) } - pub async fn set_super_user( + pub async fn set_permissions( &self, account_name: &AccountName, - value: bool, + permissions: &Permissions, ) -> Result<(), Error> { - query("UPDATE accounts SET super_user = ?1 WHERE account_name = ?2") - .bind(value) + query("UPDATE accounts SET super_user = ?1, account_tier = ?2 WHERE account_name = ?3") + .bind(permissions.super_user) + .bind(permissions.tier) .bind(account_name) .execute(&self.db) .await?; Ok(()) } - async fn iter_user_projects( + pub async fn iter_user_projects( &self, AccountName(account_name): &AccountName, ) -> Result, Error> { @@ -389,18 +423,51 @@ impl GatewayService { &self, project_name: ProjectName, account_name: AccountName, - ) -> Result { - let initial_key = Alphanumeric.sample_string(&mut rand::thread_rng(), 32); + ) -> Result { + if let Some(row) = query("SELECT project_name, account_name, initial_key, project_state FROM projects WHERE project_name = ?1 AND account_name = ?2") + .bind(&project_name) + .bind(&account_name) + .fetch_optional(&self.db) + .await? + { + // If the project already exists and belongs to this account + let project = row.get::, _>("project_state").0; + if project.is_destroyed() { + // But is in `::Destroyed` state, recreate it + let project = Project::create(project_name.clone()); + self.update_project(&project_name, &project).await?; + Ok(project) + } else { + // Otherwise it already exists + Err(Error::from_kind(ErrorKind::ProjectAlreadyExists)) + } + } else { + // Check if project name is valid according to new rules if it + // doesn't exist. + // TODO: remove this check when we update the project name rules + // in shuttle-common + if project_name.is_valid() { + // Otherwise attempt to create a new one. This will fail + // outright if the project already exists (this happens if + // it belongs to another account). 
+ self.insert_project(project_name, account_name).await + } else { + Err(Error::from_kind(ErrorKind::InvalidProjectName)) + } + } + } - let project = SqlxJson(Project::Creating(project::ProjectCreating::new( - project_name.clone(), - initial_key.clone(), - ))); + pub async fn insert_project( + &self, + project_name: ProjectName, + account_name: AccountName, + ) -> Result { + let project = SqlxJson(Project::create(project_name.clone())); query("INSERT INTO projects (project_name, account_name, initial_key, project_state) VALUES (?1, ?2, ?3, ?4)") .bind(&project_name) .bind(&account_name) - .bind(&initial_key) + .bind(project.initial_key().unwrap()) .bind(&project) .execute(&self.db) .await @@ -418,66 +485,103 @@ impl GatewayService { let project = project.0; - Ok(Work { - project_name, - account_name, - work: project, - }) + Ok(project) } - pub async fn destroy_project( + pub async fn create_custom_domain( &self, project_name: ProjectName, - account_name: AccountName, - ) -> Result { - let project = self.find_project(&project_name).await?.destroy()?; + fqdn: &Fqdn, + certs: &str, + private_key: &str, + ) -> Result<(), Error> { + query("INSERT OR REPLACE INTO custom_domains (fqdn, project_name, certificate, private_key) VALUES (?1, ?2, ?3, ?4)") + .bind(fqdn.to_string()) + .bind(&project_name) + .bind(certs) + .bind(private_key) + .execute(&self.db) + .await?; - Ok(Work { - project_name, - account_name, - work: project, - }) + Ok(()) } - fn context(&self) -> GatewayContext { - self.provider.context() + pub async fn iter_custom_domains(&self) -> Result, Error> { + query("SELECT fqdn, project_name, certificate, private_key FROM custom_domains") + .fetch_all(&self.db) + .await + .map(|res| { + res.into_iter().map(|row| CustomDomain { + fqdn: row.get::<&str, _>("fqdn").parse().unwrap(), + project_name: row.try_get("project_name").unwrap(), + certificate: row.get("certificate"), + private_key: row.get("private_key"), + }) + }) + .map_err(|_| Error::from_kind(ErrorKind::Internal)) } -} -#[async_trait] -impl<'c> Service<'c> for Arc { - type Context = GatewayContext<'c>; + pub async fn project_details_for_custom_domain( + &self, + fqdn: &Fqdn, + ) -> Result { + let custom_domain = query( + "SELECT fqdn, project_name, certificate, private_key FROM custom_domains WHERE fqdn = ?1", + ) + .bind(fqdn.to_string()) + .fetch_optional(&self.db) + .await? + .map(|row| CustomDomain { + fqdn: row.get::<&str, _>("fqdn").parse().unwrap(), + project_name: row.try_get("project_name").unwrap(), + certificate: row.get("certificate"), + private_key: row.get("private_key"), + }) + .ok_or_else(|| Error::from(ErrorKind::CustomDomainNotFound))?; + Ok(custom_domain) + } - type State = Work; + pub async fn iter_projects_detailed( + &self, + ) -> Result, Error> { + let iter = query("SELECT project_name, account_name FROM projects") + .fetch_all(&self.db) + .await? + .into_iter() + .map(|row| ProjectDetails { + project_name: row.try_get("project_name").unwrap(), + account_name: row.try_get("account_name").unwrap(), + }); + Ok(iter) + } - type Error = Error; + pub fn context(&self) -> GatewayContext { + self.provider.context() + } - fn context(&'c self) -> Self::Context { - GatewayService::context(self) + /// Create a builder for a new [ProjectTask] + pub fn new_task(self: &Arc) -> TaskBuilder { + TaskBuilder::new(self.clone()) } - async fn update( - &self, - Work { - project_name, work, .. 
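// The project-creation policy implemented by `create_project` above, restated
// as a small decision helper (illustrative only; the real method also runs the
// SQL shown in the diff, and an insert that collides with another account's
// project surfaces as `ProjectAlreadyExists` via the unique constraint).
enum CreateDecision {
    /// No row for this name under this account: insert a fresh `Project::Creating`.
    InsertNew,
    /// The account already owns the name but the project is destroyed: recreate it in place.
    Recreate,
    /// The account already owns the name and the project is live: refuse.
    AlreadyExists,
    /// A brand-new name that fails the stricter validation rules.
    InvalidName,
}

fn decide(existing: Option<&Project>, name_is_valid: bool) -> CreateDecision {
    match existing {
        Some(project) if project.is_destroyed() => CreateDecision::Recreate,
        Some(_) => CreateDecision::AlreadyExists,
        None if name_is_valid => CreateDecision::InsertNew,
        None => CreateDecision::InvalidName,
    }
}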
- }: &Self::State, - ) -> Result<(), Self::Error> { - self.update_project(project_name, work).await + pub fn task_router(&self) -> TaskRouter { + self.task_router.clone() } } -pub struct GatewayContext<'c> { - docker: &'c Docker, - settings: &'c ContainerSettings, +#[derive(Clone)] +pub struct GatewayContext { + docker: Docker, + settings: ContainerSettings, } -impl<'c> Context<'c> for GatewayContext<'c> { - fn docker(&self) -> &'c Docker { - self.docker +impl DockerContext for GatewayContext { + fn docker(&self) -> &Docker { + &self.docker } - fn container_settings(&self) -> &'c ContainerSettings { - self.settings + fn container_settings(&self) -> &ContainerSettings { + &self.settings } } @@ -486,45 +590,50 @@ pub mod tests { use std::str::FromStr; + use fqdn::FQDN; + use super::*; + use crate::auth::AccountTier; + use crate::task::{self, TaskResult}; use crate::tests::{assert_err_kind, World}; + use crate::{Error, ErrorKind}; #[tokio::test] async fn service_create_find_user() -> anyhow::Result<()> { let world = World::new().await; - let svc = GatewayService::init(world.args(), world.fqdn(), world.pool()).await; + let svc = GatewayService::init(world.args(), world.pool()).await; let account_name: AccountName = "test_user_123".parse()?; assert_err_kind!( - svc.user_from_account_name(account_name.clone()).await, + User::retrieve_from_account_name(&svc, account_name.clone()).await, ErrorKind::UserNotFound ); assert_err_kind!( - svc.user_from_key(Key::from_str("123").unwrap()).await, + User::retrieve_from_key(&svc, Key::from_str("123").unwrap()).await, ErrorKind::UserNotFound ); let user = svc.create_user(account_name.clone()).await?; assert_eq!( - svc.user_from_account_name(account_name.clone()).await?, + User::retrieve_from_account_name(&svc, account_name.clone()).await?, user ); - assert!(!svc.is_super_user(&account_name).await?); - let User { name, key, projects, - super_user, + permissions, } = user; assert!(projects.is_empty()); - assert!(!super_user); + assert!(!permissions.is_super_user()); + + assert_eq!(*permissions.tier(), AccountTier::Basic); assert_eq!(name, account_name); @@ -543,9 +652,10 @@ pub mod tests { #[tokio::test] async fn service_create_find_delete_project() -> anyhow::Result<()> { let world = World::new().await; - let svc = Arc::new(GatewayService::init(world.args(), world.fqdn(), world.pool()).await); + let svc = Arc::new(GatewayService::init(world.args(), world.pool()).await); let neo: AccountName = "neo".parse().unwrap(); + let trinity: AccountName = "trinity".parse().unwrap(); let matrix: ProjectName = "matrix".parse().unwrap(); let creating_same_project_name = |project: &Project, project_name: &ProjectName| { @@ -556,35 +666,160 @@ pub mod tests { }; svc.create_user(neo.clone()).await.unwrap(); + svc.create_user(trinity.clone()).await.unwrap(); - let work = svc + let project = svc .create_project(matrix.clone(), neo.clone()) .await .unwrap(); - // work work work work - let project = work.work; - assert!(creating_same_project_name(&project, &matrix)); assert_eq!(svc.find_project(&matrix).await.unwrap(), project); + assert_eq!( + svc.iter_projects_detailed() + .await + .unwrap() + .next() + .expect("to get one project with its user"), + ProjectDetails { + project_name: matrix.clone(), + account_name: neo.clone(), + } + ); - let work = svc.destroy_project(matrix.clone(), neo).await.unwrap(); - - let project = work.work; - - assert!(matches!(&project, Project::Destroyed(_))); + let mut work = svc + .new_task() + .project(matrix.clone()) + .and_then(task::destroy()) + 
.build(); - svc.update_project(&matrix, &project).await.unwrap(); + while let TaskResult::Pending(_) = work.poll(()).await {} + assert!(matches!(work.poll(()).await, TaskResult::Done(()))); + // After project has been destroyed... assert!(matches!( svc.find_project(&matrix).await, + Ok(Project::Destroyed(_)) + )); + + // If recreated by a different user + assert!(matches!( + svc.create_project(matrix.clone(), trinity.clone()).await, Err(Error { - kind: ErrorKind::ProjectNotFound, + kind: ErrorKind::ProjectAlreadyExists, .. }) )); + // If recreated by the same user + assert!(matches!( + svc.create_project(matrix, neo).await, + Ok(Project::Creating(_)) + )); + + Ok(()) + } + + #[tokio::test] + async fn service_create_ready_kill_restart_docker() -> anyhow::Result<()> { + let world = World::new().await; + let svc = Arc::new(GatewayService::init(world.args(), world.pool()).await); + + let neo: AccountName = "neo".parse().unwrap(); + let matrix: ProjectName = "matrix".parse().unwrap(); + + svc.create_user(neo.clone()).await.unwrap(); + svc.create_project(matrix.clone(), neo.clone()) + .await + .unwrap(); + + let mut task = svc.new_task().project(matrix.clone()).build(); + + while let TaskResult::Pending(_) = task.poll(()).await { + // keep polling + } + + let project = svc.find_project(&matrix).await.unwrap(); + println!("{:?}", project); + assert!(project.is_ready()); + + let container = project.container().unwrap(); + svc.context() + .docker() + .kill_container::(container.name.unwrap().strip_prefix('/').unwrap(), None) + .await + .unwrap(); + + println!("killed container"); + + let mut ambulance_task = svc + .new_task() + .project(matrix.clone()) + .and_then(task::check_health()) + .build(); + + // the first poll will trigger a refresh + let _ = ambulance_task.poll(()).await; + + let project = svc.find_project(&matrix).await.unwrap(); + println!("{:?}", project); + assert!(!project.is_ready()); + + // the subsequent will trigger a restart task + while let TaskResult::Pending(_) = ambulance_task.poll(()).await { + // keep polling + } + + let project = svc.find_project(&matrix).await.unwrap(); + println!("{:?}", project); + assert!(project.is_ready()); + + Ok(()) + } + + #[tokio::test] + async fn service_create_find_custom_domain() -> anyhow::Result<()> { + let world = World::new().await; + let svc = Arc::new(GatewayService::init(world.args(), world.pool()).await); + + let account: AccountName = "neo".parse().unwrap(); + let project_name: ProjectName = "matrix".parse().unwrap(); + let domain: FQDN = "neo.the.matrix".parse().unwrap(); + let certificate = "dummy certificate"; + let private_key = "dummy private key"; + + svc.create_user(account.clone()).await.unwrap(); + + assert_err_kind!( + svc.project_details_for_custom_domain(&domain).await, + ErrorKind::CustomDomainNotFound + ); + + let _ = svc + .create_project(project_name.clone(), account.clone()) + .await + .unwrap(); + + svc.create_custom_domain(project_name.clone(), &domain, certificate, private_key) + .await + .unwrap(); + + let custom_domain = svc + .project_details_for_custom_domain(&domain) + .await + .unwrap(); + + assert_eq!(custom_domain.project_name, project_name); + assert_eq!(custom_domain.certificate, certificate); + assert_eq!(custom_domain.private_key, private_key); + + assert_err_kind!( + svc.create_custom_domain(project_name.clone(), &domain, certificate, private_key) + .await, + ErrorKind::CustomDomainAlreadyExists + ); + Ok(()) } } diff --git a/gateway/src/task.rs b/gateway/src/task.rs new file mode 100644 index 
000000000..dc506306a --- /dev/null +++ b/gateway/src/task.rs @@ -0,0 +1,556 @@ +use futures::Future; +use std::collections::VecDeque; +use std::marker::PhantomData; +use std::pin::Pin; +use std::sync::Arc; +use std::time::{Duration, Instant}; +use tokio::sync::mpsc::Sender; +use tokio::sync::oneshot; +use tokio::time::{sleep, timeout}; +use tracing::{error, info, info_span, warn}; +use uuid::Uuid; + +use crate::project::*; +use crate::service::{GatewayContext, GatewayService}; +use crate::worker::TaskRouter; +use crate::{AccountName, EndState, Error, ErrorKind, ProjectName, Refresh, State}; + +// Default maximum _total_ time a task is allowed to run +pub const DEFAULT_TIMEOUT: Duration = Duration::from_secs(300); +// Maximum time we'll wait for a task to successfully be sent down the channel +pub const TASK_SEND_TIMEOUT: Duration = Duration::from_secs(9); +// Maximum time before a task is considered degraded +pub const PROJECT_TASK_MAX_IDLE_TIMEOUT: Duration = Duration::from_secs(60); + +#[async_trait] +pub trait Task: Send { + type Output; + + type Error; + + async fn poll(&mut self, ctx: Ctx) -> TaskResult; +} + +#[async_trait] +impl Task for Box +where + Ctx: Send + 'static, + T: Task + ?Sized, +{ + type Output = T::Output; + + type Error = T::Error; + + async fn poll(&mut self, ctx: Ctx) -> TaskResult { + self.as_mut().poll(ctx).await + } +} + +#[must_use] +#[derive(Debug, PartialEq, Eq)] +pub enum TaskResult { + /// More work needs to be done + Pending(R), + /// No further work needed + Done(R), + /// Try again later + TryAgain, + /// Task has been cancelled + Cancelled, + /// Task has failed + Err(E), +} + +impl TaskResult { + pub fn ok(self) -> Option { + match self { + Self::Pending(r) | Self::Done(r) => Some(r), + _ => None, + } + } + + pub fn to_str(&self) -> &str { + match self { + Self::Pending(_) => "pending", + Self::Done(_) => "done", + Self::TryAgain => "try again", + Self::Cancelled => "cancelled", + Self::Err(_) => "error", + } + } + + pub fn is_done(&self) -> bool { + match self { + Self::Done(_) | Self::Cancelled | Self::Err(_) => true, + Self::TryAgain | Self::Pending(_) => false, + } + } + + pub fn as_ref(&self) -> TaskResult<&R, &E> { + match self { + Self::Pending(r) => TaskResult::Pending(r), + Self::Done(r) => TaskResult::Done(r), + Self::TryAgain => TaskResult::TryAgain, + Self::Cancelled => TaskResult::Cancelled, + Self::Err(e) => TaskResult::Err(e), + } + } +} + +pub fn run(f: F) -> impl Task +where + F: FnMut(ProjectContext) -> Fut + Send + 'static, + Fut: Future> + Send + 'static, +{ + RunFn { + f, + _output: PhantomData, + } +} + +pub fn refresh() -> impl Task { + run(|ctx: ProjectContext| async move { + match ctx.state.refresh(&ctx.gateway).await { + Ok(new) => TaskResult::Done(new), + Err(err) => TaskResult::Err(err), + } + }) +} + +pub fn destroy() -> impl Task { + run(|ctx| async move { + match ctx.state.destroy() { + Ok(state) => TaskResult::Done(state), + Err(err) => TaskResult::Err(err), + } + }) +} + +pub fn check_health() -> impl Task { + run(|ctx| async move { + match ctx.state.refresh(&ctx.gateway).await { + Ok(Project::Ready(mut ready)) => { + if ready.is_healthy().await { + TaskResult::Done(Project::Ready(ready)) + } else { + TaskResult::Done(Project::Ready(ready).stop().unwrap()) + } + } + Ok(update) => TaskResult::Done(update), + Err(err) => TaskResult::Err(err), + } + }) +} + +pub fn run_until_done() -> impl Task { + RunUntilDone +} + +pub struct TaskBuilder { + project_name: Option, + service: Arc, + timeout: Option, + tasks: VecDeque>, +} + 
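// A hedged, self-contained illustration of the `Task` / `TaskResult` contract
// defined above: a task is polled repeatedly, reporting `Pending` until it is
// `Done` (or fails / is cancelled). `Countdown` is an invented example, not
// part of the gateway; the explicit `async_trait` import may already be
// provided at the crate root.
use async_trait::async_trait;

struct Countdown(usize);

#[async_trait]
impl Task<()> for Countdown {
    type Output = ();
    type Error = ();

    async fn poll(&mut self, _ctx: ()) -> TaskResult<Self::Output, Self::Error> {
        if self.0 == 0 {
            TaskResult::Done(())
        } else {
            self.0 -= 1;
            TaskResult::Pending(())
        }
    }
}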
+impl TaskBuilder { + pub fn new(service: Arc) -> Self { + Self { + service, + project_name: None, + timeout: None, + tasks: VecDeque::new(), + } + } +} + +impl TaskBuilder { + pub fn project(mut self, name: ProjectName) -> Self { + self.project_name = Some(name); + self + } + + pub fn and_then(mut self, task: T) -> Self + where + T: Task + 'static, + { + self.tasks.push_back(Box::new(task)); + self + } + + pub fn with_timeout(mut self, duration: Duration) -> Self { + self.timeout = Some(duration); + self + } + + pub fn build(mut self) -> BoxedTask { + self.tasks.push_back(Box::new(RunUntilDone)); + + let timeout = self.timeout.unwrap_or(DEFAULT_TIMEOUT); + + Box::new(WithTimeout::on( + timeout, + ProjectTask { + uuid: Uuid::new_v4(), + project_name: self.project_name.expect("project_name is required"), + service: self.service, + tasks: self.tasks, + }, + )) + } + + pub async fn send(self, sender: &Sender) -> Result { + let project_name = self.project_name.clone().expect("project_name is required"); + let task_router = self.service.task_router(); + let (task, handle) = AndThenNotify::after(self.build()); + let task = Route::::to(project_name, Box::new(task), task_router); + match timeout(TASK_SEND_TIMEOUT, sender.send(Box::new(task))).await { + Ok(Ok(_)) => Ok(handle), + _ => Err(Error::from_kind(ErrorKind::ServiceUnavailable)), + } + } +} + +pub struct Route { + project_name: ProjectName, + inner: Option, + router: TaskRouter, +} + +impl Route { + pub fn to(project_name: ProjectName, what: T, router: TaskRouter) -> Self { + Self { + project_name, + inner: Some(what), + router, + } + } +} + +#[async_trait] +impl Task<()> for Route { + type Output = (); + + type Error = Error; + + async fn poll(&mut self, _ctx: ()) -> TaskResult { + if let Some(task) = self.inner.take() { + match self.router.route(&self.project_name, task).await { + Ok(_) => TaskResult::Done(()), + Err(_) => TaskResult::Err(Error::from_kind(ErrorKind::Internal)), + } + } else { + TaskResult::Done(()) + } + } +} + +pub struct RunFn { + f: F, + _output: PhantomData, +} + +#[async_trait] +impl Task for RunFn +where + F: FnMut(ProjectContext) -> Fut + Send, + Fut: Future> + Send, +{ + type Output = Project; + + type Error = Error; + + async fn poll(&mut self, ctx: ProjectContext) -> TaskResult { + (self.f)(ctx).await + } +} + +/// Advance a project's state until it's returning `is_done` +pub struct RunUntilDone; + +#[async_trait] +impl Task for RunUntilDone { + type Output = Project; + + type Error = Error; + + async fn poll(&mut self, ctx: ProjectContext) -> TaskResult { + if !>::is_done(&ctx.state) { + TaskResult::Pending(ctx.state.next(&ctx.gateway).await.unwrap()) + } else { + TaskResult::Done(ctx.state) + } + } +} + +pub struct TaskHandle { + rx: oneshot::Receiver<()>, +} + +impl Future for TaskHandle { + type Output = (); + + fn poll( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll { + Pin::new(&mut self.rx).poll(cx).map(|_| ()) + } +} + +pub struct AndThenNotify { + inner: T, + notify: Option>, +} + +impl AndThenNotify { + pub fn after(task: T) -> (Self, TaskHandle) { + let (tx, rx) = oneshot::channel(); + ( + Self { + inner: task, + notify: Some(tx), + }, + TaskHandle { rx }, + ) + } +} + +#[async_trait] +impl Task for AndThenNotify +where + Ctx: Send + 'static, + T: Task, +{ + type Output = T::Output; + + type Error = T::Error; + + async fn poll(&mut self, ctx: Ctx) -> TaskResult { + let out = self.inner.poll(ctx).await; + + if out.is_done() { + let _ = 
self.notify.take().unwrap().send(()); + } + + out + } +} + +pub struct WithTimeout { + inner: T, + start: Option, + timeout: Duration, +} + +impl WithTimeout { + pub fn on(timeout: Duration, inner: T) -> Self { + Self { + inner, + start: None, + timeout, + } + } +} + +#[async_trait] +impl Task for WithTimeout +where + Ctx: Send + 'static, + T: Task, +{ + type Output = T::Output; + + type Error = T::Error; + + async fn poll(&mut self, ctx: Ctx) -> TaskResult { + if self.start.is_none() { + self.start = Some(Instant::now()); + } + + if Instant::now() - *self.start.as_ref().unwrap() > self.timeout { + warn!( + "task has timed out: was running for more than {}s", + self.timeout.as_secs() + ); + return TaskResult::Cancelled; + } + + self.inner.poll(ctx).await + } +} + +/// A collection of tasks scoped to a specific project. +/// +/// All the tasks in the collection are run to completion. If an error +/// is encountered, the `ProjectTask` completes early passing through +/// the error. The value returned by the inner tasks upon their +/// completion is committed back to persistence through +/// [GatewayService]. +pub struct ProjectTask { + uuid: Uuid, + project_name: ProjectName, + service: Arc, + tasks: VecDeque, +} + +impl ProjectTask { + pub fn uuid(&self) -> &Uuid { + &self.uuid + } +} + +/// A context for tasks which are scoped to a specific project. +/// +/// This will be always instantiated with the latest known state of +/// the project and gives access to the broader gateway context. +#[derive(Clone)] +pub struct ProjectContext { + /// The name of the project this task is about + pub project_name: ProjectName, + /// The name of the user the project belongs to + pub account_name: AccountName, + /// The gateway context in which this task is running + pub gateway: GatewayContext, + /// The last known state of the project + pub state: Project, +} + +pub type BoxedTask = Box>; + +#[async_trait] +impl Task<()> for ProjectTask +where + T: Task, +{ + type Output = (); + + type Error = Error; + + async fn poll(&mut self, _: ()) -> TaskResult { + if self.tasks.is_empty() { + return TaskResult::Done(()); + } + + let ctx = self.service.context(); + + let project = match self.service.find_project(&self.project_name).await { + Ok(project) => project, + Err(err) => return TaskResult::Err(err), + }; + + let account_name = match self + .service + .account_name_from_project(&self.project_name) + .await + { + Ok(account_name) => account_name, + Err(err) => return TaskResult::Err(err), + }; + + let project_ctx = ProjectContext { + project_name: self.project_name.clone(), + account_name: account_name.clone(), + gateway: ctx, + state: project, + }; + + let span = info_span!( + "polling project", + ctx.project = ?project_ctx.project_name, + ctx.account = ?project_ctx.account_name, + ctx.state = project_ctx.state.state() + ); + let _ = span.enter(); + + let task = self.tasks.front_mut().unwrap(); + + let timeout = sleep(PROJECT_TASK_MAX_IDLE_TIMEOUT); + let res = { + let mut poll = task.poll(project_ctx); + tokio::select! 
{ + res = &mut poll => res, + _ = timeout => { + warn!( + project_name = ?self.project_name, + account_name = ?account_name, + "a task has been idling for a long time" + ); + poll.await + } + } + }; + + if let Some(update) = res.as_ref().ok() { + info!(new_state = ?update.state(), "new state"); + match self + .service + .update_project(&self.project_name, update) + .await + { + Ok(_) => { + info!(new_state = ?update.state(), "successfully updated project state"); + } + Err(err) => { + error!(err = %err, "could not update project state"); + return TaskResult::Err(err); + } + } + } + + info!(result = res.to_str(), "poll result"); + + match res { + TaskResult::Pending(_) => TaskResult::Pending(()), + TaskResult::TryAgain => TaskResult::TryAgain, + TaskResult::Done(_) => { + let _ = self.tasks.pop_front().unwrap(); + if self.tasks.is_empty() { + TaskResult::Done(()) + } else { + TaskResult::Pending(()) + } + } + TaskResult::Cancelled => TaskResult::Cancelled, + TaskResult::Err(err) => { + error!(err = %err, "project task failure"); + TaskResult::Err(err) + } + } + } +} + +#[cfg(test)] +pub mod tests { + use super::*; + + struct NeverEnding; + + #[async_trait] + impl Task<()> for NeverEnding { + type Output = (); + + type Error = (); + + async fn poll(&mut self, _ctx: ()) -> TaskResult { + TaskResult::Pending(()) + } + } + + #[tokio::test] + async fn task_with_timeout() -> anyhow::Result<()> { + let timeout = Duration::from_secs(1); + + let mut task_with_timeout = WithTimeout::on(timeout, NeverEnding); + + let start = Instant::now(); + + while let TaskResult::Pending(()) = task_with_timeout.poll(()).await { + assert!(Instant::now() - start <= timeout + Duration::from_secs(1)); + } + + assert_eq!(task_with_timeout.poll(()).await, TaskResult::Cancelled); + + Ok(()) + } +} diff --git a/gateway/src/tls.rs b/gateway/src/tls.rs new file mode 100644 index 000000000..6e3ee8292 --- /dev/null +++ b/gateway/src/tls.rs @@ -0,0 +1,165 @@ +use std::collections::HashMap; +use std::fs::File; +use std::io::{BufReader, Read, Write}; +use std::path::Path; +use std::sync::Arc; + +use axum_server::accept::DefaultAcceptor; +use axum_server::tls_rustls::{RustlsAcceptor, RustlsConfig}; +use futures::executor::block_on; +use pem::Pem; +use rustls::server::{ClientHello, ResolvesServerCert}; +use rustls::sign::{self, CertifiedKey}; +use rustls::{Certificate, PrivateKey, ServerConfig}; +use rustls_pemfile::Item; +use shuttle_common::models::error::ErrorKind; +use tokio::runtime::Handle; +use tokio::sync::RwLock; + +use crate::Error; + +#[derive(Clone)] +pub struct ChainAndPrivateKey { + chain: Vec, + private_key: PrivateKey, +} + +impl ChainAndPrivateKey { + pub fn parse_pem(rd: R) -> Result { + let mut private_key = None; + let mut chain = Vec::new(); + + for item in rustls_pemfile::read_all(&mut BufReader::new(rd)) + .map_err(|_| Error::from_kind(ErrorKind::Internal))? 
+ { + match item { + Item::X509Certificate(cert) => chain.push(Certificate(cert)), + Item::ECKey(key) | Item::PKCS8Key(key) | Item::RSAKey(key) => { + private_key = Some(PrivateKey(key)) + } + _ => return Err(Error::from_kind(ErrorKind::Internal)), + } + } + + Ok(Self { + chain, + private_key: private_key.unwrap(), + }) + } + + pub fn load_pem>(path: P) -> Result { + let rd = File::open(path)?; + Self::parse_pem(rd) + } + + pub fn into_pem(self) -> Result { + let mut pems = Vec::new(); + for cert in self.chain { + pems.push(Pem { + tag: "CERTIFICATE".to_string(), + contents: cert.0, + }); + } + + pems.push(Pem { + tag: "PRIVATE KEY".to_string(), + contents: self.private_key.0, + }); + + Ok(pem::encode_many(&pems)) + } + + pub fn into_certified_key(self) -> Result { + let signing_key = sign::any_supported_type(&self.private_key) + .map_err(|_| Error::from_kind(ErrorKind::Internal))?; + Ok(CertifiedKey::new(self.chain, signing_key)) + } + + pub fn save_pem>(self, path: P) -> Result<(), Error> { + let as_pem = self.into_pem()?; + let mut f = File::create(path)?; + f.write_all(as_pem.as_bytes())?; + Ok(()) + } +} + +pub struct GatewayCertResolver { + keys: RwLock>>, + default: RwLock>>, +} + +impl Default for GatewayCertResolver { + fn default() -> Self { + Self::new() + } +} + +impl GatewayCertResolver { + pub fn new() -> Self { + Self { + keys: RwLock::new(HashMap::default()), + default: RwLock::new(None), + } + } + + /// Get the loaded [CertifiedKey] associated with the given + /// domain. + pub async fn get(&self, sni: &str) -> Option> { + self.keys.read().await.get(sni).map(Arc::clone) + } + + pub async fn serve_default_der(&self, certs: ChainAndPrivateKey) -> Result<(), Error> { + *self.default.write().await = Some(Arc::new(certs.into_certified_key()?)); + Ok(()) + } + + pub async fn serve_default_pem(&self, rd: R) -> Result<(), Error> { + let certs = ChainAndPrivateKey::parse_pem(rd)?; + self.serve_default_der(certs).await + } + + /// Load a new certificate chain and private key to serve when + /// receiving incoming TLS connections for the given domain. 
+ pub async fn serve_der(&self, sni: &str, certs: ChainAndPrivateKey) -> Result<(), Error> { + let certified_key = certs.into_certified_key()?; + self.keys + .write() + .await + .insert(sni.to_string(), Arc::new(certified_key)); + Ok(()) + } + + pub async fn serve_pem(&self, sni: &str, rd: R) -> Result<(), Error> { + let certs = ChainAndPrivateKey::parse_pem(rd)?; + self.serve_der(sni, certs).await + } +} + +impl ResolvesServerCert for GatewayCertResolver { + fn resolve(&self, client_hello: ClientHello) -> Option> { + let sni = client_hello.server_name()?; + let handle = Handle::current(); + let _ = handle.enter(); + block_on(async move { + if let Some(cert) = self.get(sni).await { + Some(cert) + } else { + self.default.read().await.clone() + } + }) + } +} + +pub fn make_tls_acceptor() -> (Arc, RustlsAcceptor) { + let resolver = Arc::new(GatewayCertResolver::new()); + + let mut server_config = ServerConfig::builder() + .with_safe_defaults() + .with_no_client_auth() + .with_cert_resolver(Arc::clone(&resolver) as Arc); + server_config.alpn_protocols = vec![b"http/1.1".to_vec()]; + + let rustls_config = RustlsConfig::from_config(Arc::new(server_config)); + + (resolver, RustlsAcceptor::new(rustls_config)) +} diff --git a/gateway/src/worker.rs b/gateway/src/worker.rs index ee71dfd8a..b81bb1ad0 100644 --- a/gateway/src/worker.rs +++ b/gateway/src/worker.rs @@ -1,88 +1,37 @@ -use std::fmt::Debug; +use std::collections::HashMap; use std::sync::Arc; +use tokio::sync::mpsc::error::SendError; use tokio::sync::mpsc::{channel, Receiver, Sender}; +use tokio::sync::RwLock; use tracing::{debug, info}; -use crate::project::Project; -use crate::service::GatewayService; -use crate::{AccountName, Context, EndState, Error, ProjectName, Refresh, Service, State}; +use crate::task::{BoxedTask, TaskResult}; +use crate::{Error, ProjectName}; -#[must_use] -#[derive(Debug, Clone)] -pub struct Work { - pub project_name: ProjectName, - pub account_name: AccountName, - pub work: W, -} - -#[async_trait] -impl Refresh for Work -where - W: Refresh + Send, -{ - type Error = W::Error; +pub const WORKER_QUEUE_SIZE: usize = 2048; - async fn refresh<'c, C: Context<'c>>(self, ctx: &C) -> Result { - Ok(Self { - project_name: self.project_name, - account_name: self.account_name, - work: self.work.refresh(ctx).await?, - }) - } -} - -#[async_trait] -impl<'c, W> State<'c> for Work -where - W: State<'c>, -{ - type Next = Work; - - type Error = W::Error; - - async fn next>(self, ctx: &C) -> Result { - Ok(Work:: { - project_name: self.project_name, - account_name: self.account_name, - work: self.work.next(ctx).await?, - }) - } +pub struct Worker { + send: Option>, + recv: Receiver, } -impl<'c, W> EndState<'c> for Work +impl Default for Worker where - W: EndState<'c>, + W: Send, { - type ErrorVariant = W::ErrorVariant; - - fn is_done(&self) -> bool { - self.work.is_done() - } - - fn into_result(self) -> Result { - Ok(Self { - project_name: self.project_name, - account_name: self.account_name, - work: self.work.into_result()?, - }) + fn default() -> Self { + Self::new() } } -pub struct Worker, W = Work> { - service: Svc, - send: Option>, - recv: Receiver, -} - -impl Worker +impl Worker where W: Send, { - pub fn new(service: Svc) -> Self { - let (send, recv) = channel(32); + pub fn new() -> Self { + let (send, recv) = channel(WORKER_QUEUE_SIZE); Self { - service, send: Some(send), recv, } @@ -97,11 +46,7 @@ where } } -impl Worker -where - Svc: for<'c> Service<'c, State = W, Error = Error>, - W: Debug + Send + for<'c> EndState<'c>, -{ +impl 
Worker { /// Starts the worker, waiting and processing elements from the /// queue until the last sending end for the channel is dropped, /// at which point this future resolves. @@ -116,24 +61,14 @@ where debug!("starting worker"); while let Some(mut work) = self.recv.recv().await { - debug!(?work, "received work"); loop { - work = { - let context = self.service.context(); - - // Safety: EndState's transitions are Infallible - work.next(&context).await.unwrap() - }; - - match self.service.update(&work).await { - Ok(_) => {} - Err(err) => info!("failed to update a state: {}\nstate: {:?}", err, work), - }; - - if work.is_done() { - break; - } else { - debug!(?work, "work not done yet"); + match work.poll(()).await { + TaskResult::Done(_) | TaskResult::Cancelled => break, + TaskResult::Pending(_) | TaskResult::TryAgain => continue, + TaskResult::Err(err) => { + info!("task failed: {err}"); + break; + } } } } @@ -142,121 +77,52 @@ where } } -#[cfg(test)] -pub mod tests { - use std::convert::Infallible; - - use anyhow::anyhow; - use tokio::sync::Mutex; - - use super::*; - use crate::tests::{World, WorldContext}; - - pub struct DummyService { - world: World, - state: Mutex>, - } - - impl DummyService<()> { - pub async fn new() -> DummyService { - let world = World::new().await; - DummyService { - world, - state: Mutex::new(None), - } - } - } - - #[async_trait] - impl<'c, S> Service<'c> for DummyService - where - S: EndState<'c> + Sync, - { - type Context = WorldContext<'c>; - - type State = S; - - type Error = Error; - - fn context(&'c self) -> Self::Context { - self.world.context() - } +pub struct TaskRouter { + table: Arc>>>, +} - async fn update(&self, state: &Self::State) -> Result<(), Self::Error> { - let mut lock = self.state.lock().await; - *lock = Some(Self::State::clone(state)); - Ok(()) +impl Clone for TaskRouter { + fn clone(&self) -> Self { + Self { + table: self.table.clone(), } } +} - #[derive(Debug, PartialEq, Eq, Clone)] - pub struct FiniteState { - count: usize, - max_count: usize, - } - - #[async_trait] - impl<'c> State<'c> for FiniteState { - type Next = Self; - - type Error = Infallible; - - async fn next>(mut self, _ctx: &C) -> Result { - if self.count < self.max_count { - self.count += 1; - } - Ok(self) - } +impl Default for TaskRouter { + fn default() -> Self { + Self::new() } +} - impl<'c> EndState<'c> for FiniteState { - type ErrorVariant = anyhow::Error; - - fn is_done(&self) -> bool { - self.count == self.max_count - } - - fn into_result(self) -> Result { - if self.count > self.max_count { - Err(anyhow!( - "count is over max_count: {} > {}", - self.count, - self.max_count - )) - } else { - Ok(self) - } +impl TaskRouter { + pub fn new() -> Self { + Self { + table: Arc::new(RwLock::new(HashMap::new())), } } +} - #[tokio::test] - async fn worker_queue_and_proceed_until_done() { - let svc = DummyService::new::().await; - - let worker = Worker::new(svc); - - { +impl TaskRouter { + pub async fn route( + &self, + name: &ProjectName, + task: BoxedTask, + ) -> Result<(), SendError> { + let mut table = self.table.write().await; + if let Some(sender) = table.get(name) { + sender.send(task).await + } else { + let worker = Worker::new(); let sender = worker.sender(); - let state = FiniteState { - count: 0, - max_count: 42, - }; + tokio::spawn(worker.start()); - sender.send(state).await.unwrap(); - } + let res = sender.send(task).await; - let Worker { - service: DummyService { state, .. }, - .. 
- } = worker.start().await.unwrap(); + table.insert(name.clone(), sender); - assert_eq!( - *state.lock().await, - Some(FiniteState { - count: 42, - max_count: 42 - }) - ); + res + } } } diff --git a/gateway/tests/hello_world.crate b/gateway/tests/hello_world.crate index d4f72b6be..038d4d03e 100644 Binary files a/gateway/tests/hello_world.crate and b/gateway/tests/hello_world.crate differ diff --git a/migrator/.gitignore b/migrator/.gitignore deleted file mode 100644 index a063a0a46..000000000 --- a/migrator/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -users.toml -users.sql -dump.sql diff --git a/migrator/Cargo.toml b/migrator/Cargo.toml deleted file mode 100644 index a3f4c56fc..000000000 --- a/migrator/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "migrator" -version = "0.1.0" -edition = "2021" - -[workspace] - -[dependencies] -rand = "0.8.5" -shuttle-common = { path = "../common" } -toml = "0.5.9" diff --git a/migrator/db_script.sh b/migrator/db_script.sh deleted file mode 100755 index d92bbc275..000000000 --- a/migrator/db_script.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env sh - - -ssh ubuntu@18.133.52.140 "docker exec pg pg_dumpall -U postgres > dump.sql" -scp ubuntu@18.133.52.140:~/dump.sql dump.sql - -scp dump.sql database.shuttle.internal:~/dump.sql - -# docker cp dump.sql 123:/dump.sql -# docker exec 123 psql -f dump.sql -U postgres diff --git a/migrator/script.sh b/migrator/script.sh deleted file mode 100755 index 68b80976c..000000000 --- a/migrator/script.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env sh - -scp ubuntu@18.133.52.140:/opt/shuttle/user-data/users/users.toml users.toml -cargo run -- users.toml > users.sql - - -scp users.sql controller.shuttle.internal:~/users.sql -ssh controller.shuttle.internal "cat ~/users.sql | sudo sqlite3 /var/lib/docker/volumes/shuttle-dev_gateway-vol/_data/gateway.sqlite" diff --git a/migrator/src/main.rs b/migrator/src/main.rs deleted file mode 100644 index 0ddab79a5..000000000 --- a/migrator/src/main.rs +++ /dev/null @@ -1,185 +0,0 @@ -use rand::distributions::{Alphanumeric, DistString}; -use shuttle_common::project::ProjectName; -use std::{ - env::args, - fmt::{Display, Formatter}, - fs, - str::FromStr, -}; - -fn main() { - let mut args = args(); - let _ = args.next(); - let file = args - .next() - .expect("expected a users.toml file to convert to .sql"); - let data = fs::read_to_string(file).expect("to read data file"); - let toml = toml::from_str(&data).expect("to parse data file"); - - let users = parse_value(toml); - - for user in users { - println!("{user}"); - } -} - -#[derive(Eq, PartialEq, Debug)] -struct User { - key: String, - name: String, - projects: Vec, -} - -impl From<(&String, &toml::Value)> for User { - fn from((key, value): (&String, &toml::Value)) -> User { - let (name, projects) = match value { - toml::Value::Table(table) => { - let name = table - .get("name") - .expect("user to have a name") - .as_str() - .expect("name to be a string") - .to_string(); - let projects = table - .get("projects") - .expect("user to have projects") - .as_array() - .expect("projects to be an array") - .iter() - .map(|value| value.as_str().expect("project to be a string")) - .filter_map(|project_name| match project_name.len() { - 3..=64 => match ProjectName::from_str(project_name) { - Ok(project_name) => Some(project_name), - Err(err) => { - eprintln!("{err}"); - None - } - }, - _ => { - eprintln!("project name is too long/short: {project_name}"); - None - } - }) - .collect(); - - (name, projects) - } - other => 
panic!("unexpected '{other}' at user level"), - }; - - Self { - key: key.to_string(), - name, - projects, - } - } -} - -impl Display for User { - fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { - write!( - f, - "INSERT INTO accounts(account_name, key, super_user) VALUES('{}', '{}', 0);", - self.name, self.key - )?; - - for project in self.projects.iter() { - let initial_key = Alphanumeric.sample_string(&mut rand::thread_rng(), 32); - let state = format!("{{\"creating\": {{\"project_name\": \"{project}\", \"initial_key\": \"{initial_key}\"}}}}"); - - write!( - f, - "\nINSERT INTO projects(project_name, account_name, project_state, initial_key) VALUES('{}', '{}', '{}', '{}');", - project, - self.name, - state, - initial_key, - )?; - } - - write!(f, "\n") - } -} - -fn parse_value(value: toml::Value) -> Vec { - match value { - toml::Value::Table(table) => table.iter().map(Into::into).collect(), - _ => vec![], - } -} - -#[cfg(test)] -mod tests { - use super::User; - - fn get_dummy() -> toml::Value { - r#" -[key1] -name = 'name1' -projects = [] - -[key2] -name = 'name2' -projects = [ - 'project1', - 'project2', -] -"# - .parse() - .unwrap() - } - - #[test] - fn parse_value() { - let value = get_dummy(); - let actual = super::parse_value(value); - - let expected = vec![ - User { - key: "key1".to_string(), - name: "name1".to_string(), - projects: vec![], - }, - User { - key: "key2".to_string(), - name: "name2".to_string(), - projects: vec!["project1".parse().unwrap(), "project2".parse().unwrap()], - }, - ]; - - assert_eq!(actual, expected); - } - - #[test] - fn display() { - let input = User { - key: "key".to_string(), - name: "name".to_string(), - projects: vec!["project1".parse().unwrap(), "project2".parse().unwrap()], - }; - - let actual = input.to_string(); - - assert!( - actual.starts_with( - "INSERT INTO accounts(account_name, key, super_user) VALUES('name', 'key', 0);" - ), - "got: {}", - actual - ); - assert!( - actual.contains( - "INSERT INTO projects(project_name, account_name, project_state, initial_key) VALUES('project1', 'name', '{\"creating\": {\"project_name\": \"project1\", \"initial_key\": " - ), - "got: {}", - actual - ); - assert!( - actual.contains( - "INSERT INTO projects(project_name, account_name, project_state, initial_key) VALUES('project2', 'name', '{\"creating\": {\"project_name\": \"project2\", \"initial_key\": " - ), - "got: {}", - actual - ); - } -} diff --git a/proto/Cargo.toml b/proto/Cargo.toml index bfe263cc4..4f92e4e55 100644 --- a/proto/Cargo.toml +++ b/proto/Cargo.toml @@ -1,18 +1,19 @@ [package] name = "shuttle-proto" -version = "0.7.0" -edition = "2021" +version.workspace = true +edition.workspace = true +license.workspace = true publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -prost = "0.11.0" +prost = "0.11.2" prost-types = "0.11.0" -tonic = "0.8.2" +tonic = "0.8.3" [dependencies.shuttle-common] -version = "0.7.0" -path = "../common" +workspace = true +features = ["models"] [build-dependencies] -tonic-build = "0.8.0" +tonic-build = "0.8.3" diff --git a/provisioner/Cargo.toml b/provisioner/Cargo.toml index 35fd0791a..91b1bff91 100644 --- a/provisioner/Cargo.toml +++ b/provisioner/Cargo.toml @@ -1,36 +1,35 @@ [package] name = "shuttle-provisioner" -version = "0.7.0" -edition = "2021" +version.workspace = true +edition.workspace = true +license.workspace = true description = "Service responsible for provisioning and managing resources for services" publish = 
false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -aws-config = "0.47.0" -aws-sdk-rds = "0.17.0" -aws-smithy-types = "0.47.0" +aws-config = "0.51.0" +aws-sdk-rds = "0.21.0" clap = { version = "3.2.17", features = ["derive", "env"] } -fqdn = "0.2.2" -mongodb = "2.3.0" -prost = "0.11.0" +fqdn = "0.2.3" +mongodb = "2.3.1" +prost = "0.11.2" rand = "0.8.5" -sqlx = { version = "0.6.1", features = ["postgres", "runtime-tokio-native-tls"] } -thiserror = "1.0.32" -tokio = { version = "1.20.1", features = ["macros", "rt-multi-thread"] } -tonic = "0.8.2" -tracing = "0.1.36" -tracing-subscriber = "0.3.15" +sqlx = { version = "0.6.2", features = ["postgres", "runtime-tokio-native-tls"] } +thiserror = { workspace = true } +tokio = { version = "1.22.0", features = ["macros", "rt-multi-thread"] } +tonic = "0.8.3" +tracing = { workspace = true } +tracing-subscriber = { workspace = true } [dependencies.shuttle-proto] -version = "0.7.0" -path = "../proto" +workspace = true [dev-dependencies] -ctor = "0.1.23" -once_cell = "1.13.1" +ctor = "0.1.26" +once_cell = { workspace = true } portpicker = "0.1.1" -serde_json = "1.0.83" +serde_json = { workspace = true } [build-dependencies] -tonic-build = "0.8.0" +tonic-build = "0.8.3" diff --git a/provisioner/prepare.sh b/provisioner/prepare.sh new file mode 100755 index 000000000..6a52d3030 --- /dev/null +++ b/provisioner/prepare.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env sh + +############################################################################### +# This file is used by our common Containerfile in case the container for this # +# service might need some extra preparation steps for its final image # +############################################################################### + +# Nothing to prepare in container image here diff --git a/provisioner/src/lib.rs b/provisioner/src/lib.rs index 7c8b08a16..a877a6dd9 100644 --- a/provisioner/src/lib.rs +++ b/provisioner/src/lib.rs @@ -3,7 +3,6 @@ use std::time::Duration; pub use args::Args; use aws_config::timeout; use aws_sdk_rds::{error::ModifyDBInstanceErrorKind, model::DbInstance, types::SdkError, Client}; -use aws_smithy_types::tristate::TriState; pub use error::Error; use mongodb::{bson::doc, options::ClientOptions}; use rand::Rng; @@ -51,10 +50,10 @@ impl MyProvisioner { let mongodb_client = mongodb::Client::with_options(mongodb_options)?; // Default timeout is too long so lowering it - let api_timeout_config = timeout::Api::new() - .with_call_timeout(TriState::Set(Duration::from_secs(120))) - .with_call_attempt_timeout(TriState::Set(Duration::from_secs(120))); - let timeout_config = timeout::Config::new().with_api_timeouts(api_timeout_config); + let timeout_config = timeout::TimeoutConfig::builder() + .operation_timeout(Duration::from_secs(120)) + .operation_attempt_timeout(Duration::from_secs(120)) + .build(); let aws_config = aws_config::from_env() .timeout_config(timeout_config) diff --git a/provisioner/src/main.rs b/provisioner/src/main.rs index 329348c83..3c42acf90 100644 --- a/provisioner/src/main.rs +++ b/provisioner/src/main.rs @@ -1,4 +1,4 @@ -use std::net::SocketAddr; +use std::{net::SocketAddr, time::Duration}; use clap::Parser; use shuttle_provisioner::{Args, MyProvisioner, ProvisionerServer}; @@ -31,6 +31,7 @@ async fn main() -> Result<(), Box> { println!("starting provisioner on {}", addr); Server::builder() + .http2_keepalive_interval(Some(Duration::from_secs(30))) // Prevent deployer clients from losing connection #ENG-219
.add_service(ProvisionerServer::new(provisioner)) .serve(addr) .await?; diff --git a/resources/aws-rds/Cargo.toml b/resources/aws-rds/Cargo.toml index 1b17ac11a..2403eeaad 100644 --- a/resources/aws-rds/Cargo.toml +++ b/resources/aws-rds/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "shuttle-aws-rds" -version = "0.7.0" +version = "0.8.0" edition = "2021" license = "Apache-2.0" description = "Plugin to provision AWS RDS resources" @@ -10,8 +10,8 @@ keywords = ["shuttle-service", "rds"] [dependencies] async-trait = "0.1.56" paste = "1.0.7" -shuttle-service = { path = "../../service", version = "0.7.0", default-features = false } -sqlx = { version = "0.6.0", features = ["runtime-tokio-native-tls"] } +shuttle-service = { path = "../../service", version = "0.8.0" } +sqlx = { version = "0.6.2", features = ["runtime-tokio-native-tls"] } tokio = { version = "1.19.2", features = ["rt"] } [features] diff --git a/resources/aws-rds/README.md b/resources/aws-rds/README.md index 519c9ec15..b48228fe1 100644 --- a/resources/aws-rds/README.md +++ b/resources/aws-rds/README.md @@ -13,5 +13,5 @@ Add `shuttle-aws-rds` to the dependencies for your service. Every engine is behi | MySql | mysql | shuttle_aws_rds::MySql | | MariaDB | mariadb | shuttle_aws_rds::MariaDB | -An example using the Tide framework can be found on [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/tide/postgres) +An example using the Tide framework can be found on [GitHub](https://github.com/shuttle-hq/examples/tree/main/tide/postgres) diff --git a/resources/persist/Cargo.toml b/resources/persist/Cargo.toml index ee13d37f3..837caf372 100644 --- a/resources/persist/Cargo.toml +++ b/resources/persist/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "shuttle-persist" -version = "0.7.0" +version = "0.8.0" edition = "2021" license = "Apache-2.0" description = "Plugin for persist objects" @@ -11,7 +11,7 @@ keywords = ["shuttle-service", "persistence"] async-trait = "0.1.56" bincode = "1.2.1" serde = { version = "1.0.0", features = ["derive"] } -shuttle-common = { path = "../../common", version = "0.7.0" } -shuttle-service = { path = "../../service", version = "0.7.0", default-features = false } +shuttle-common = { path = "../../common", version = "0.8.0" } +shuttle-service = { path = "../../service", version = "0.8.0" } thiserror = "1.0.32" tokio = { version = "1.19.2", features = ["rt"] } diff --git a/resources/persist/README.md b/resources/persist/README.md index 1f7638a2c..333bf75ba 100644 --- a/resources/persist/README.md +++ b/resources/persist/README.md @@ -4,5 +4,5 @@ This plugin allows persisting struct that implement `serde::Serialize` and loadi ## Usage Add `shuttle-persist` to the dependencies for your service. You can get this resource using the `shuttle-persist::Persist` attribute to get a `PersistInstance`. Object can now be saved using `PersistInstance.save()` and loaded again using `PersistInstance.load()`. 
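To make the `PersistInstance` description above concrete, a minimal handler might look like the sketch below (illustrative only, not part of the diff). It assumes the `web-rocket` feature of `shuttle-service` and the `save`/`load` method shapes implied by the paragraph above (`save(key, value)` and `load::<T>(key)`, both returning `Result`s).

```rust
use rocket::{get, routes};
use serde::{Deserialize, Serialize};
use shuttle_persist::PersistInstance;

#[derive(Serialize, Deserialize)]
struct Counter {
    hits: u32,
}

#[get("/")]
fn index() -> &'static str {
    "hello"
}

#[shuttle_service::main]
async fn rocket(
    #[shuttle_persist::Persist] persist: PersistInstance,
) -> shuttle_service::ShuttleRocket {
    // Load the last saved value (falling back to zero on the first deploy),
    // bump it, and save it again for the next start-up.
    let counter = persist
        .load::<Counter>("counter")
        .unwrap_or(Counter { hits: 0 });
    persist
        .save("counter", Counter { hits: counter.hits + 1 })
        .expect("to save counter");

    Ok(rocket::build().mount("/", routes![index]))
}
```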
-An example using the Rocket framework can be found on [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/rocket/persist) +An example using the Rocket framework can be found on [GitHub](https://github.com/shuttle-hq/examples/tree/main/rocket/persist) diff --git a/resources/secrets/Cargo.toml b/resources/secrets/Cargo.toml index 0bbecd62c..f5b1b713e 100644 --- a/resources/secrets/Cargo.toml +++ b/resources/secrets/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "shuttle-secrets" -version = "0.7.0" +version = "0.8.0" edition = "2021" license = "Apache-2.0" description = "Plugin to for managing secrets on shuttle" @@ -9,5 +9,5 @@ keywords = ["shuttle-service", "secrets"] [dependencies] async-trait = "0.1.56" -shuttle-service = { path = "../../service", version = "0.7.0", default-features = false } +shuttle-service = { path = "../../service", version = "0.8.0" } tokio = { version = "1.19.2", features = ["rt"] } diff --git a/resources/secrets/README.md b/resources/secrets/README.md index 157bd2b16..08e7912a5 100644 --- a/resources/secrets/README.md +++ b/resources/secrets/README.md @@ -8,4 +8,4 @@ with the secrets you'd like to store. Make sure to add `Secrets.toml` to a `.git Next, pass `#[shuttle_secrets::Secrets] secret_store: SecretStore` as an argument to your `shuttle_service::main` function. `SecretStore::get` can now be called to retrieve your API keys and other secrets at runtime. -An example using the Rocket framework can be found on [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/rocket/secrets) +An example using the Rocket framework can be found on [GitHub](https://github.com/shuttle-hq/examples/tree/main/rocket/secrets) diff --git a/resources/shared-db/Cargo.toml b/resources/shared-db/Cargo.toml index fce73e2cf..6e205e6aa 100644 --- a/resources/shared-db/Cargo.toml +++ b/resources/shared-db/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "shuttle-shared-db" -version = "0.7.0" +version = "0.8.0" edition = "2021" license = "Apache-2.0" description = "Plugin for managing shared databases on shuttle" @@ -10,8 +10,8 @@ keywords = ["shuttle-service", "database"] [dependencies] async-trait = "0.1.56" mongodb = { version = "2.3.0", optional = true } -shuttle-service = { path = "../../service", version = "0.7.0", default-features = false } -sqlx = { version = "0.6.1", features = ["runtime-tokio-native-tls"], optional = true } +shuttle-service = { path = "../../service", version = "0.8.0" } +sqlx = { version = "0.6.2", features = ["runtime-tokio-native-tls"], optional = true } tokio = { version = "1.19.2", features = ["rt"] } [features] diff --git a/resources/shared-db/README.md b/resources/shared-db/README.md index 5b3e4029e..a3b1c877c 100644 --- a/resources/shared-db/README.md +++ b/resources/shared-db/README.md @@ -9,5 +9,5 @@ Add `shuttle-shared-db` to the dependencies for your service. 
Every type of shar | Postgres | postgres | shuttle_shared_db::Postgres | | MongoDB | mongodb | shuttle_shared_db::MongoDb | -An example using the Rocket framework can be found on [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/rocket/postgres) +An example using the Rocket framework can be found on [GitHub](https://github.com/shuttle-hq/examples/tree/main/rocket/postgres) diff --git a/resources/static-folder/Cargo.toml b/resources/static-folder/Cargo.toml new file mode 100644 index 000000000..4ac0f8671 --- /dev/null +++ b/resources/static-folder/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "shuttle-static-folder" +version = "0.8.0" +edition = "2021" +license = "Apache-2.0" +description = "Plugin to get a static folder at runtime on shuttle" +keywords = ["shuttle-service", "static-folder"] +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +async-trait = "0.1.56" +shuttle-service = { path = "../../service", version = "0.8.0" } +tokio = { version = "1.19.2", features = ["rt"] } + +[dev-dependencies] +tempdir = "0.3.7" +tokio = { version = "1.19.2", features = ["macros"] } diff --git a/resources/static-folder/README.md b/resources/static-folder/README.md new file mode 100644 index 000000000..48e27ba32 --- /dev/null +++ b/resources/static-folder/README.md @@ -0,0 +1,29 @@ +# Shuttle Static Folder +This plugin allows services to get the path to a static folder at runtime + +## Usage +Add `shuttle-static-folder` to the dependencies for your service. This resource can be used via the `shuttle_static_folder::StaticFolder` attribute to get a `PathBuf` with the location of the static folder. + +An example using the Axum framework can be found on [GitHub](https://github.com/shuttle-hq/examples/tree/main/axum/websocket) + +``` rust +#[shuttle_service::main] +async fn main( + #[shuttle_static_folder::StaticFolder] static_folder: PathBuf, +) -> __ { ... } +``` + +### Parameters +| Parameter | Type | Default | Description | +|-----------|------|----------|--------------------------------------------------------------------| +| folder | str | `static` | The relative path, from the crate root, to the directory containing static files to deploy | + +### Example: Using the public folder instead +Since this plugin defaults to the `static` folder, the `folder` parameter can be used to serve the `public` folder instead. + +``` rust +#[shuttle_service::main] +async fn main( + #[shuttle_static_folder::StaticFolder(folder = "public")] public_folder: PathBuf, +) -> __ { ... } +``` diff --git a/resources/static-folder/src/lib.rs b/resources/static-folder/src/lib.rs new file mode 100644 index 000000000..259288dd9 --- /dev/null +++ b/resources/static-folder/src/lib.rs @@ -0,0 +1,229 @@ +use async_trait::async_trait; +use shuttle_service::{ + error::{CustomError, Error as ShuttleError}, + Factory, ResourceBuilder, +}; +use std::{ + fs::rename, + path::{Path, PathBuf}, +}; +use tokio::runtime::Runtime; + +pub struct StaticFolder<'a> { + /// The folder to reach at runtime.
Defaults to `static` + folder: &'a str, +} + +pub enum Error { + AbsolutePath, + TransversedUp, +} + +impl<'a> StaticFolder<'a> { + pub fn folder(mut self, folder: &'a str) -> Self { + self.folder = folder; + + self + } +} + +#[async_trait] +impl<'a> ResourceBuilder for StaticFolder<'a> { + fn new() -> Self { + Self { folder: "static" } + } + + async fn build( + self, + factory: &mut dyn Factory, + _runtime: &Runtime, + ) -> Result { + let folder = Path::new(self.folder); + + // Prevent users from users from reading anything outside of their crate's build folder + if folder.is_absolute() { + return Err(Error::AbsolutePath)?; + } + + let input_dir = factory.get_build_path()?.join(self.folder); + + match input_dir.canonicalize() { + Ok(canonical_path) if canonical_path != input_dir => return Err(Error::TransversedUp)?, + Ok(_) => { + // The path did not change to outside the crate's build folder + } + Err(err) => return Err(err)?, + } + + let output_dir = factory.get_storage_path()?.join(self.folder); + + rename(input_dir, output_dir.clone())?; + + Ok(output_dir) + } +} + +impl From for shuttle_service::Error { + fn from(error: Error) -> Self { + let msg = match error { + Error::AbsolutePath => "Cannot use an absolute path for a static folder", + Error::TransversedUp => "Cannot transverse out of crate for a static folder", + }; + + ShuttleError::Custom(CustomError::msg(msg)) + } +} + +#[cfg(test)] +mod tests { + use std::fs::{self}; + use std::path::PathBuf; + + use async_trait::async_trait; + use shuttle_service::{Factory, ResourceBuilder}; + use tempdir::TempDir; + + use crate::StaticFolder; + + struct MockFactory { + temp_dir: TempDir, + } + + // Will have this tree across all the tests + // . + // ├── build + // │   └── static + // │    └── note.txt + // ├── storage + // │   └── static + // │    └── note.txt + // └── escape + //    └── passwd + impl MockFactory { + fn new() -> Self { + Self { + temp_dir: TempDir::new("static_folder").unwrap(), + } + } + + fn build_path(&self) -> PathBuf { + self.get_path("build") + } + + fn storage_path(&self) -> PathBuf { + self.get_path("storage") + } + + fn escape_path(&self) -> PathBuf { + self.get_path("escape") + } + + fn get_path(&self, folder: &str) -> PathBuf { + let path = self.temp_dir.path().join(folder); + + if !path.exists() { + fs::create_dir(&path).unwrap(); + } + + path + } + } + + #[async_trait] + impl Factory for MockFactory { + async fn get_db_connection_string( + &mut self, + _db_type: shuttle_service::database::Type, + ) -> Result { + panic!("no static folder test should try to get a db connection string") + } + + async fn get_secrets( + &mut self, + ) -> Result, shuttle_service::Error> { + panic!("no static folder test should try to get secrets") + } + + fn get_service_name(&self) -> shuttle_service::ServiceName { + panic!("no static folder test should try to get the service name") + } + + fn get_build_path(&self) -> Result { + Ok(self.build_path()) + } + + fn get_storage_path(&self) -> Result { + Ok(self.storage_path()) + } + } + + #[tokio::test] + async fn copies_folder() { + let mut factory = MockFactory::new(); + + let input_file_path = factory.build_path().join("static").join("note.txt"); + fs::create_dir_all(input_file_path.parent().unwrap()).unwrap(); + fs::write(input_file_path, "Hello, test!").unwrap(); + + let expected_file = factory.storage_path().join("static").join("note.txt"); + assert!(!expected_file.exists(), "input file should not exist yet"); + + // Call plugin + let static_folder = StaticFolder::new(); + + let 
runtime = tokio::runtime::Runtime::new().unwrap(); + let actual_folder = static_folder.build(&mut factory, &runtime).await.unwrap(); + + assert_eq!( + actual_folder, + factory.storage_path().join("static"), + "expect path to the static folder" + ); + assert!(expected_file.exists(), "expected input file to be created"); + assert_eq!( + fs::read_to_string(expected_file).unwrap(), + "Hello, test!", + "expected file content to match" + ); + + runtime.shutdown_background(); + } + + #[tokio::test] + #[should_panic(expected = "Cannot use an absolute path for a static folder")] + async fn cannot_use_absolute_path() { + let mut factory = MockFactory::new(); + let static_folder = StaticFolder::new(); + let runtime = tokio::runtime::Runtime::new().unwrap(); + + let _ = static_folder + .folder("/etc") + .build(&mut factory, &runtime) + .await + .unwrap(); + + runtime.shutdown_background(); + } + + #[tokio::test] + #[should_panic(expected = "Cannot transverse out of crate for a static folder")] + async fn cannot_transverse_up() { + let mut factory = MockFactory::new(); + + let password_file_path = factory.escape_path().join("passwd"); + fs::create_dir_all(password_file_path.parent().unwrap()).unwrap(); + fs::write(password_file_path, "qwerty").unwrap(); + + // Call plugin + let static_folder = StaticFolder::new(); + + let runtime = tokio::runtime::Runtime::new().unwrap(); + let _ = static_folder + .folder("../escape") + .build(&mut factory, &runtime) + .await + .unwrap(); + + runtime.shutdown_background(); + } +} diff --git a/runtime/Cargo.toml b/runtime/Cargo.toml index 2aea4596c..a790bd94a 100644 --- a/runtime/Cargo.toml +++ b/runtime/Cargo.toml @@ -1,40 +1,38 @@ [package] name = "shuttle-runtime" version = "0.1.0" -edition = "2021" +edition.workspace = true +license.workspace = true publish = false # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -anyhow = "1.0.62" -async-trait = "0.1.58" +anyhow = { workspace = true } +async-trait = { workspace = true } cap-std = "0.26.0" clap ={ version = "4.0.18", features = ["derive"] } hyper = { version = "0.14.23", features = ["full"] } rmp-serde = { version = "1.1.1" } serenity = { version = "0.11.5", default-features = false, features = ["client", "gateway", "rustls_backend", "model"] } -thiserror = "1.0.37" -tokio = { version = "=1.20.1", features = ["full"] } +thiserror = { workspace = true } +tokio = { version = "=1.22.0", features = ["full"] } tokio-stream = "0.1.11" tonic = "0.8.2" -tracing = "0.1.37" -tracing-subscriber = { version = "0.3.16", features = ["env-filter"] } -uuid = { version = "1.1.2", features = ["v4"] } +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter"] } +uuid = { workspace = true, features = ["v4"] } wasi-common = "2.0.0" wasmtime = "2.0.0" wasmtime-wasi = "2.0.0" [dependencies.shuttle-common] -version = "0.7.0" -path = "../common" +workspace = true features = ["axum-wasm"] [dependencies.shuttle-proto] -version = "0.7.0" -path = "../proto" +workspace = true [dependencies.shuttle-service] -version = "0.7.0" -default-features = false +workspace = true +version = "0.8.0" features = ["loader"] -path = "../service" diff --git a/runtime/src/legacy/mod.rs b/runtime/src/legacy/mod.rs index 0960136ce..18e0f8147 100644 --- a/runtime/src/legacy/mod.rs +++ b/runtime/src/legacy/mod.rs @@ -8,7 +8,7 @@ use std::{ use anyhow::anyhow; use async_trait::async_trait; -use shuttle_common::LogItem; +use 
shuttle_common::{storage_manager::StorageManager, LogItem}; use shuttle_proto::{ provisioner::provisioner_client::ProvisionerClient, runtime::{ @@ -76,25 +76,11 @@ impl Runtime for Legacy { let service_port = 7001; let service_address = SocketAddr::new(Ipv4Addr::LOCALHOST.into(), service_port); - let request = request.into_inner(); - let provisioner_client = ProvisionerClient::connect(self.provisioner_address.clone()) .await .expect("failed to connect to provisioner"); let abstract_factory = AbstractProvisionerFactory::new(provisioner_client); - let service_name = ServiceName::from_str(request.service_name.as_str()) - .map_err(|err| Status::from_error(Box::new(err)))?; - - let mut factory = abstract_factory.get_factory(service_name); - - let logs_tx = self.logs_tx.lock().unwrap().clone(); - - let deployment_id = - Uuid::from_str(std::str::from_utf8(&request.deployment_id).unwrap()).unwrap(); - - let logger = Logger::new(logs_tx, deployment_id); - let so_path = self .so_path .lock() @@ -106,6 +92,25 @@ impl Runtime for Legacy { .map_err(|err| Status::from_error(Box::new(err)))? .clone(); + let storage_manager = StorageManager::new(so_path.clone()); + + let StartRequest { + deployment_id, + service_name, + } = request.into_inner(); + + let service_name = ServiceName::from_str(service_name.as_str()) + .map_err(|err| Status::from_error(Box::new(err)))?; + + let deployment_id = Uuid::from_str(std::str::from_utf8(&deployment_id).unwrap()).unwrap(); + + let mut factory = + abstract_factory.get_factory(service_name, deployment_id, storage_manager); + + let logs_tx = self.logs_tx.lock().unwrap().clone(); + + let logger = Logger::new(logs_tx, deployment_id); + trace!(%service_address, "starting"); let service = load_service(service_address, so_path, &mut factory, logger) .await diff --git a/runtime/src/provisioner_factory.rs b/runtime/src/provisioner_factory.rs index 6a99d6d4b..28bd9d7c9 100644 --- a/runtime/src/provisioner_factory.rs +++ b/runtime/src/provisioner_factory.rs @@ -1,20 +1,26 @@ -use std::collections::BTreeMap; +use std::{collections::BTreeMap, path::PathBuf}; use async_trait::async_trait; -use shuttle_common::{database, DatabaseReadyInfo}; +use shuttle_common::{database, storage_manager::StorageManager, DatabaseReadyInfo}; use shuttle_proto::provisioner::{ database_request::DbType, provisioner_client::ProvisionerClient, DatabaseRequest, }; use shuttle_service::{Factory, ServiceName}; use tonic::{transport::Channel, Request}; use tracing::{debug, info, trace}; +use uuid::Uuid; /// Trait to make it easy to get a factory (service locator) for each service being started pub trait AbstractFactory: Send + 'static { type Output: Factory; /// Get a factory for a specific service - fn get_factory(&self, service_name: ServiceName) -> Self::Output; + fn get_factory( + &self, + service_name: ServiceName, + deployment_id: Uuid, + storage_manager: StorageManager, + ) -> Self::Output; } /// An abstract factory that makes factories which uses provisioner @@ -26,8 +32,18 @@ pub struct AbstractProvisionerFactory { impl AbstractFactory for AbstractProvisionerFactory { type Output = ProvisionerFactory; - fn get_factory(&self, service_name: ServiceName) -> Self::Output { - ProvisionerFactory::new(self.provisioner_client.clone(), service_name) + fn get_factory( + &self, + service_name: ServiceName, + deployment_id: Uuid, + storage_manager: StorageManager, + ) -> Self::Output { + ProvisionerFactory::new( + self.provisioner_client.clone(), + service_name, + deployment_id, + storage_manager, + ) } } @@ -40,6 
+56,8 @@ impl AbstractProvisionerFactory { /// A factory (service locator) which goes through the provisioner crate pub struct ProvisionerFactory { service_name: ServiceName, + deployment_id: Uuid, + storage_manager: StorageManager, provisioner_client: ProvisionerClient, info: Option, secrets: Option>, @@ -49,10 +67,14 @@ impl ProvisionerFactory { pub(crate) fn new( provisioner_client: ProvisionerClient, service_name: ServiceName, + deployment_id: Uuid, + storage_manager: StorageManager, ) -> Self { Self { provisioner_client, service_name, + deployment_id, + storage_manager, info: None, secrets: None, } @@ -108,4 +130,16 @@ impl Factory for ProvisionerFactory { fn get_service_name(&self) -> ServiceName { self.service_name.clone() } + + fn get_build_path(&self) -> Result { + self.storage_manager + .service_build_path(self.service_name.as_str()) + .map_err(Into::into) + } + + fn get_storage_path(&self) -> Result { + self.storage_manager + .deployment_storage_path(self.service_name.as_str(), &self.deployment_id) + .map_err(Into::into) + } } diff --git a/scripts/publish.sh b/scripts/publish.sh deleted file mode 100755 index a879a795d..000000000 --- a/scripts/publish.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash -# -# Little script to publish to crates.io -# Usage: publish.sh x.y.z -# -# Dependencies: git, ripgrep - -set -ueo pipefail - -function publish-folder() -{ - local folder=$1 - - echo "Publishing $folder" - cd $folder - cargo publish - cd .. - - sleep 5 -} - -function main() -{ - version=$1 - - echo $version | rg "\d+\.\d+\.\d+" || { echo "first argument must be in the form x.y.z"; exit 1; } - - publish-folder "common" - publish-folder "codegen" - publish-folder "service" - publish-folder "resources/secrets" - publish-folder "cargo-shuttle" - - publish-folder "resources/aws-rds" - publish-folder "resources/persist" - publish-folder "resources/shared-db" - - git tag "v$version" - git push upstream "v$version" - - echo "Success!! Now tell about it on Discord :D" -} - -main "${1-*}" diff --git a/scripts/release.sh b/scripts/release.sh deleted file mode 100755 index b7f21fd6e..000000000 --- a/scripts/release.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env bash -# -# Little script to release a new version. 
-# Usage: release.sh x.y.z -# -# Dependencies: git, cargo-edit, ripgrep - -set -ueo pipefail - -function update-cargo-versions() -{ - local version=$1 - - cargo set-version --workspace $version - git commit -am "chore: v$version" -} - -function update-examples-versions() -{ - local version=$1 - - for d in examples/*/*/; - do - cd "$d" - - if [[ -f Cargo.toml ]] - then - cargo add shuttle-service@$version - fi - - cd ../../../ - done - - # Update docs in service and README - rg "shuttle-service = \{ version" --files-with-matches service/ | xargs sed -i "s/shuttle-service = { version = \"[[:digit:]]*.[[:digit:]]*.[[:digit:]]*\"/shuttle-service = { version = \"$version\"/g" - sed -i "s/shuttle-service = { version = \"[[:digit:]]*.[[:digit:]]*.[[:digit:]]*\"/shuttle-service = { version = \"$version\"/g" README.md - - git commit -am "docs: v$version" -} - -function update-resources-versions() -{ - local version=$1 - - for d in resources/*/; - do - cd "$d" - - if [[ -f Cargo.toml ]] - then - cargo add shuttle-service@$version - fi - - cd ../../ - done - - git commit -am "chore: resources v$version" -} - -function main() -{ - version=$1 - - echo $version | rg "\d+\.\d+\.\d+" || { echo "first argument must be in the form x.y.z"; exit 1; } - - echo "Will try to update to version $version" - git checkout -b "chore/v$version" - - update-cargo-versions $version - update-resources-versions $version - update-examples-versions $version - - echo "Success!! You can now merge this branch" - echo "" - echo "Thereafter run:" - echo "./publish.sh $version" -} - -main "${1-*}" diff --git a/service/Cargo.toml b/service/Cargo.toml index 5b1695b93..08512f6e7 100644 --- a/service/Cargo.toml +++ b/service/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "shuttle-service" -version = "0.7.0" -edition = "2021" -license = "Apache-2.0" +version = "0.8.0" +edition.workspace = true +license.workspace = true description = "Service traits and macros to deploy on the shuttle platform (https://www.shuttle.rs/)" homepage = "https://www.shuttle.rs" @@ -10,33 +10,36 @@ homepage = "https://www.shuttle.rs" doctest = false [dependencies] -anyhow = "1.0.62" -async-trait = "0.1.57" -axum = { version = "0.5.15", optional = true } -bincode = { version = "1.2.1", optional = true } -cargo = { version = "0.64.0", optional = true } -cargo_metadata = "0.15.0" -chrono = "=0.4.22" +actix-web = { version = "4.2.1", optional = true } +anyhow = { workspace = true } +async-trait = { workspace = true } +axum = { workspace = true, optional = true } +bincode = { version = "1.3.3", optional = true } +# TODO: debug the libgit2-sys conflict with cargo-edit when upgrading cargo to 0.66 +cargo = { version = "0.65.0", optional = true } +cargo_metadata = "0.15.2" +chrono = { workspace = true } crossbeam-channel = "0.5.6" -futures = { version = "0.3.23", features = ["std"] } -hyper = { version = "0.14.20", features = ["server", "tcp", "http1"], optional = true } -libloading = { version = "0.7.3", optional = true } +futures = { version = "0.3.25", features = ["std"] } +hyper = { version = "0.14.23", features = ["server", "tcp", "http1"], optional = true } +libloading = { version = "0.7.4", optional = true } +num_cpus = { version = "1.14.0", optional = true } pipe = "0.4.0" -poem = { version = "1.3.40", optional = true } +poem = { version = "1.3.49", optional = true } rocket = { version = "0.5.0-rc.2", optional = true } -salvo = { version = "0.34.3", optional = true } -serde_json = "1.0.83" +salvo = { version = "0.37.5", optional = true } +serde_json = { workspace 
= true } serenity = { version = "0.11.5", default-features = false, features = ["client", "gateway", "rustls_backend", "model"], optional = true } sync_wrapper = { version = "0.1.1", optional = true } -thiserror = "1.0.32" -thruster = { version = "1.2.6", optional = true } +thiserror = { workspace = true } +thruster = { version = "1.3.0", optional = true } tide = { version = "0.16.0", optional = true } -tokio = { version = "=1.20.1", features = ["rt", "rt-multi-thread", "sync"] } +tokio = { version = "=1.22.0", features = ["rt", "rt-multi-thread", "sync"] } tower = { version = "0.4.13", features = ["make"], optional = true } -tracing = "0.1.36" -tracing-subscriber = { version = "0.3.15", features = ["env-filter"] } -uuid = { version = "1.1.2", features = ["v4"] } -warp = { version = "0.3.2", optional = true } +tracing = { workspace = true } +tracing-subscriber = { workspace = true, features = ["env-filter"] } +uuid = { workspace = true, features = ["v4"] } +warp = { version = "0.3.3", optional = true } # Tide does not have tokio support. So make sure async-std is compatible with tokio # https://github.com/http-rs/tide/issues/791 @@ -46,26 +49,23 @@ optional = true features = ["tokio1"] [dependencies.shuttle-codegen] -version = "0.7.0" -path = "../codegen" +workspace = true optional = true +[dependencies.shuttle-common] +workspace = true + [dev-dependencies] portpicker = "0.1.1" -sqlx = { version = "0.6.1", features = ["runtime-tokio-native-tls", "postgres"] } -tokio = { version = "1.20.1", features = ["macros"] } -uuid = { version = "1.1.2", features = ["v4"] } - -[dependencies.shuttle-common] -version = "0.7.0" -default-features = false -path = "../common" +sqlx = { version = "0.6.2", features = ["runtime-tokio-native-tls", "postgres"] } +tokio = { version = "1.22.0", features = ["macros"] } +uuid = { workspace = true, features = ["v4"] } [features] -default = ["codegen"] codegen = ["shuttle-codegen"] loader = ["cargo", "libloading"] +web-actix-web = ["actix-web", "num_cpus"] web-axum = ["axum", "sync_wrapper"] web-rocket = ["rocket"] web-thruster = ["thruster"] diff --git a/service/src/lib.rs b/service/src/lib.rs index ce39ce9f0..aafe1f887 100644 --- a/service/src/lib.rs +++ b/service/src/lib.rs @@ -27,7 +27,7 @@ //! be a library crate with a `shuttle-service` dependency with the `web-rocket` feature on the `shuttle-service` dependency. //! //! ```toml -//! shuttle-service = { version = "0.7.0", features = ["web-rocket"] } +//! shuttle-service = { version = "0.8.0", features = ["web-rocket"] } //! ``` //! //! A boilerplate code for your rocket project can also be found in `src/lib.rs`: @@ -52,7 +52,7 @@ //! ``` //! //! See the [shuttle_service::main][main] macro for more information on supported services - such as `axum`. -//! Or look at more complete examples [in the repository](https://github.com/shuttle-hq/shuttle/tree/main/examples), but +//! Or look at [more complete examples](https://github.com/shuttle-hq/examples), but //! take note that the examples may update before official releases. //! //! ## Running locally @@ -108,8 +108,8 @@ //! Add `shuttle-shared-db` as a dependency with the `postgres` feature, and add `sqlx` as a dependency with the `runtime-tokio-native-tls` and `postgres` features inside `Cargo.toml`: //! //! ```toml -//! shuttle-shared-db = { version = "0.7.0", features = ["postgres"] } -//! sqlx = { version = "0.6.1", features = ["runtime-tokio-native-tls", "postgres"] } +//! 
shuttle-shared-db = { version = "0.8.0", features = ["postgres"] } +//! sqlx = { version = "0.6.2", features = ["runtime-tokio-native-tls", "postgres"] } //! ``` //! //! Now update the `#[shuttle_service::main]` function to take in a `PgPool`: @@ -213,11 +213,13 @@ use std::collections::BTreeMap; use std::future::Future; use std::net::SocketAddr; +use std::path::PathBuf; use std::pin::Pin; pub use async_trait::async_trait; // Pub uses by `codegen` +pub use anyhow::Context; pub use tokio::runtime::Runtime; pub use tracing; pub use tracing_subscriber; @@ -255,13 +257,15 @@ extern crate shuttle_codegen; /// /// | Return type | Feature flag | Service | Version | Example | /// | ------------------------------------- | ------------ | ------------------------------------------- | ---------- | ----------------------------------------------------------------------------------- | -/// | `ShuttleRocket` | web-rocket | [rocket](https://docs.rs/rocket/0.5.0-rc.2) | 0.5.0-rc.2 | [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/rocket/hello-world) | -/// | `ShuttleAxum` | web-axum | [axum](https://docs.rs/axum/0.5) | 0.5 | [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/axum/hello-world) | -/// | `ShuttleSalvo` | web-salvo | [salvo](https://docs.rs/salvo/0.34.3) | 0.34.3 | [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/salvo/hello-world) | -/// | `ShuttleTide` | web-tide | [tide](https://docs.rs/tide/0.16.0) | 0.16.0 | [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/tide/hello-world) | -/// | `ShuttlePoem` | web-poem | [poem](https://docs.rs/poem/1.3.35) | 1.3.35 | [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/poem/hello-world) | -/// | `Result` | web-tower | [tower](https://docs.rs/tower/0.4.12) | 0.14.12 | [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/tower/hello-world) | -/// | `ShuttleSerenity` | bot-serenity | [serenity](https://docs.rs/serenity/0.11.5) | 0.11.5 | [GitHub](https://github.com/shuttle-hq/shuttle/tree/main/examples/serenity/hello-world) | +/// | `ShuttleRocket` | web-rocket | [rocket](https://docs.rs/rocket/0.5.0-rc.2) | 0.5.0-rc.2 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/rocket/hello-world) | +/// | `ShuttleAxum` | web-axum | [axum](https://docs.rs/axum/0.5) | 0.5 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/axum/hello-world) | +/// | `ShuttleSalvo` | web-salvo | [salvo](https://docs.rs/salvo/0.34.3) | 0.34.3 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/salvo/hello-world) | +/// | `ShuttleTide` | web-tide | [tide](https://docs.rs/tide/0.16.0) | 0.16.0 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/tide/hello-world) | +/// | `ShuttlePoem` | web-poem | [poem](https://docs.rs/poem/1.3.35) | 1.3.35 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/poem/hello-world) | +/// | `Result` | web-tower | [tower](https://docs.rs/tower/0.4.12) | 0.14.12 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/tower/hello-world) | +/// | `ShuttleSerenity` | bot-serenity | [serenity](https://docs.rs/serenity/0.11.5) | 0.11.5 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/serenity/hello-world) | +/// | `ShuttleActixWeb` | web-actix-web| [actix-web](https://docs.rs/actix-web/4.2.1)| 4.2.1 | [GitHub](https://github.com/shuttle-hq/examples/tree/main/actix-web/hello-world) | + /// /// # Getting shuttle 
managed resources /// Shuttle is able to manage resource dependencies for you. These resources are passed in as inputs to your `#[shuttle_service::main]` function and are configured using attributes: @@ -309,6 +313,12 @@ pub trait Factory: Send + Sync { /// Get the name for the service being deployed fn get_service_name(&self) -> ServiceName; + + /// Get the path where the build files are stored for this service + fn get_build_path(&self) -> Result; + + /// Get the path where files can be stored for this deployment + fn get_storage_path(&self) -> Result; } /// Used to get resources of type `T` from factories. @@ -543,6 +553,28 @@ impl Service for sync_wrapper::SyncWrapper { } } +#[cfg(feature = "web-actix-web")] +#[async_trait] +impl Service for F +where + F: FnOnce(&mut actix_web::web::ServiceConfig) + Sync + Send + Clone + 'static, +{ + async fn bind(mut self: Box, addr: SocketAddr) -> Result<(), Error> { + // Start a worker for each cpu, but no more than 4. + let worker_count = num_cpus::get().max(4); + + let srv = actix_web::HttpServer::new(move || actix_web::App::new().configure(self.clone())) + .workers(worker_count) + .bind(addr)? + .run(); + srv.await.map_err(error::CustomError::new)?; + + Ok(()) + } +} +#[cfg(feature = "web-actix-web")] +pub type ShuttleActixWeb = Result; + #[cfg(feature = "web-axum")] pub type ShuttleAxum = Result, Error>; @@ -550,7 +582,7 @@ pub type ShuttleAxum = Result, Error>; #[async_trait] impl Service for salvo::Router { async fn bind(mut self: Box, addr: SocketAddr) -> Result<(), error::Error> { - salvo::Server::new(salvo::listener::TcpListener::bind(&addr)) + salvo::Server::new(salvo::listener::TcpListener::bind(addr)) .serve(self) .await; diff --git a/service/src/loader.rs b/service/src/loader.rs index 109b714c7..b57bffc70 100644 --- a/service/src/loader.rs +++ b/service/src/loader.rs @@ -7,7 +7,7 @@ use std::path::{Path, PathBuf}; use anyhow::{anyhow, Context}; use cargo::core::compiler::{CompileKind, CompileMode, CompileTarget, MessageFormat}; use cargo::core::{Manifest, PackageId, Shell, Summary, Verbosity, Workspace}; -use cargo::ops::{compile, CompileOptions}; +use cargo::ops::{clean, compile, CleanOptions, CompileOptions}; use cargo::util::interning::InternedString; use cargo::util::{homedir, ToSemver}; use cargo::Config; @@ -50,7 +50,7 @@ impl Loader { /// function called `ENTRYPOINT_SYMBOL_NAME`, likely automatically generated /// using the [`shuttle_service::main`][crate::main] macro. 
pub fn from_so_file>(so_path: P) -> Result { - trace!("loading {:?}", so_path.as_ref().to_str()); + trace!(so_path = so_path.as_ref().to_str(), "loading .so path"); unsafe { let lib = Library::new(so_path).map_err(LoaderError::Load)?; @@ -117,34 +117,8 @@ pub async fn build_crate( let (read, write) = pipe::pipe(); let project_path = project_path.to_owned(); - let handle = tokio::spawn(async move { - trace!("started thread to build crate"); - let config = get_config(write)?; - let manifest_path = project_path.join("Cargo.toml"); - let mut ws = Workspace::new(&manifest_path, &config)?; - - let current = ws.current_mut().map_err(|_| anyhow!("A Shuttle project cannot have a virtual manifest file - please ensure your Cargo.toml file specifies it as a library."))?; - let manifest = current.manifest_mut(); - ensure_cdylib(manifest)?; - - let summary = current.manifest_mut().summary_mut(); - make_name_unique(summary, deployment_id); - check_version(summary)?; - check_no_panic(&ws)?; - - let opts = get_compile_options(&config, release_mode, wasm)?; - let compilation = compile(&ws, &opts); - - let path = compilation?.cdylibs[0].path.clone(); - Ok(if wasm { - Runtime::Next(path) - } else { - Runtime::Legacy(path) - }) - }); - // This needs to be on a separate thread, else deployer will block (reason currently unknown :D) - tokio::spawn(async move { + tokio::task::spawn_blocking(move || { trace!("started thread to to capture build output stream"); for message in Message::parse_stream(read) { trace!(?message, "parsed cargo message"); @@ -161,7 +135,73 @@ pub async fn build_crate( } }); - handle.await? + let config = get_config(write)?; + let manifest_path = project_path.join("Cargo.toml"); + let mut ws = Workspace::new(&manifest_path, &config)?; + + let current = ws.current_mut().map_err(|_| anyhow!("A Shuttle project cannot have a virtual manifest file - please ensure your Cargo.toml file specifies it as a library."))?; + let manifest = current.manifest_mut(); + ensure_cdylib(manifest)?; + + let summary = current.manifest_mut().summary_mut(); + make_name_unique(summary, deployment_id); + check_version(summary)?; + check_no_panic(&ws)?; + + let opts = get_compile_options(&config, release_mode, wasm)?; + let compilation = compile(&ws, &opts); + + let path = compilation?.cdylibs[0].path.clone(); + Ok(if wasm { + Runtime::Next(path) + } else { + Runtime::Legacy(path) + }) +} + +pub fn clean_crate(project_path: &Path, release_mode: bool) -> anyhow::Result> { + let (read, write) = pipe::pipe(); + let project_path = project_path.to_owned(); + + tokio::task::spawn_blocking(move || { + let config = get_config(write).unwrap(); + let manifest_path = project_path.join("Cargo.toml"); + let ws = Workspace::new(&manifest_path, &config).unwrap(); + + let requested_profile = if release_mode { + InternedString::new("release") + } else { + InternedString::new("dev") + }; + + let opts = CleanOptions { + config: &config, + spec: Vec::new(), + targets: Vec::new(), + requested_profile, + profile_specified: true, + doc: false, + }; + + clean(&ws, &opts).unwrap(); + }); + + let mut lines = Vec::new(); + + for message in Message::parse_stream(read) { + trace!(?message, "parsed cargo message"); + match message { + Ok(Message::TextLine(line)) => { + lines.push(line); + } + Ok(_) => {} + Err(error) => { + error!("failed to parse cargo message: {error}"); + } + } + } + + Ok(lines) } /// Get the default compile config with output redirected to writer @@ -199,6 +239,12 @@ fn get_compile_options( InternedString::new("dev") }; + // 
This sets the max workers for cargo build to 4 for release mode (aka deployment), + // but leaves it as default (num cpus) for local runs + if release_mode { + opts.build_config.jobs = 4 + }; + opts.build_config.requested_kinds = vec![if wasm { CompileKind::Target(CompileTarget::new("wasm32-unknown-unknown")?) } else { diff --git a/service/tests/integration/loader.rs b/service/tests/integration/loader.rs index 352b5edf4..ba42c3807 100644 --- a/service/tests/integration/loader.rs +++ b/service/tests/integration/loader.rs @@ -60,6 +60,14 @@ impl Factory for DummyFactory { async fn get_secrets(&mut self) -> Result, Error> { panic!("did not expect any loader test to get secrets") } + + fn get_build_path(&self) -> Result { + panic!("did not expect any loader test to get the build path") + } + + fn get_storage_path(&self) -> Result { + panic!("did not expect any loader test to get the storage path") + } } #[test] diff --git a/service/tests/resources/bind-panic/Cargo.toml b/service/tests/resources/bind-panic/Cargo.toml index eb3331848..f48c8442b 100644 --- a/service/tests/resources/bind-panic/Cargo.toml +++ b/service/tests/resources/bind-panic/Cargo.toml @@ -9,4 +9,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = { path = "../../../" } +shuttle-service = { path = "../../../", features = ["codegen"] } diff --git a/service/tests/resources/build-panic/Cargo.toml b/service/tests/resources/build-panic/Cargo.toml index 93a0b14e8..8f8cb8167 100644 --- a/service/tests/resources/build-panic/Cargo.toml +++ b/service/tests/resources/build-panic/Cargo.toml @@ -9,4 +9,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = { path = "../../../" } +shuttle-service = { path = "../../../", features = ["codegen"] } diff --git a/service/tests/resources/is-cdylib/Cargo.toml b/service/tests/resources/is-cdylib/Cargo.toml index 99083de80..903e50335 100644 --- a/service/tests/resources/is-cdylib/Cargo.toml +++ b/service/tests/resources/is-cdylib/Cargo.toml @@ -10,4 +10,4 @@ crate-type = ["cdylib", "staticlib"] [dependencies] rocket = "0.5.0-rc.2" -shuttle-service = { path = "../../../", features = ["web-rocket"] } +shuttle-service = { path = "../../../", features = ["codegen", "web-rocket"] } diff --git a/service/tests/resources/not-cdylib/Cargo.toml b/service/tests/resources/not-cdylib/Cargo.toml index 47a2afa53..09fc3c69c 100644 --- a/service/tests/resources/not-cdylib/Cargo.toml +++ b/service/tests/resources/not-cdylib/Cargo.toml @@ -10,4 +10,4 @@ crate-type = ["staticlib"] [dependencies] rocket = "0.5.0-rc.2" -shuttle-service = { path = "../../../", features = ["web-rocket"] } +shuttle-service = { path = "../../../", features = ["codegen", "web-rocket"] } diff --git a/service/tests/resources/not-shuttle/Cargo.toml b/service/tests/resources/not-shuttle/Cargo.toml index 99bfa8a3c..b7fe25901 100644 --- a/service/tests/resources/not-shuttle/Cargo.toml +++ b/service/tests/resources/not-shuttle/Cargo.toml @@ -9,4 +9,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = "0.7.0" +shuttle-service = "0.8.0" diff --git a/service/tests/resources/sleep-async/Cargo.toml b/service/tests/resources/sleep-async/Cargo.toml index 84f976cce..9b4d8ca16 100644 --- a/service/tests/resources/sleep-async/Cargo.toml +++ b/service/tests/resources/sleep-async/Cargo.toml @@ -9,5 +9,5 @@ crate-type = ["cdylib"] [workspace] [dependencies] -tokio = { version = "1.0", features = ["time"]} -shuttle-service = { path = "../../../" } +tokio = { version = "1.22.0", features = ["time"] } 
+shuttle-service = { path = "../../../", features = ["codegen"] } diff --git a/service/tests/resources/sleep/Cargo.toml b/service/tests/resources/sleep/Cargo.toml index 40b0f6d1f..4bc2899dd 100644 --- a/service/tests/resources/sleep/Cargo.toml +++ b/service/tests/resources/sleep/Cargo.toml @@ -9,4 +9,4 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = { path = "../../../" } +shuttle-service = { path = "../../../", features = ["codegen"] } diff --git a/service/tests/resources/sqlx-pool/Cargo.toml b/service/tests/resources/sqlx-pool/Cargo.toml index e3672cfe1..0f6163f37 100644 --- a/service/tests/resources/sqlx-pool/Cargo.toml +++ b/service/tests/resources/sqlx-pool/Cargo.toml @@ -9,6 +9,6 @@ crate-type = ["cdylib"] [workspace] [dependencies] -shuttle-service = { path = "../../../" } +shuttle-service = { path = "../../../", features = ["codegen"] } shuttle-shared-db = { path = "../../../../resources/shared-db", features = ["postgres"] } -sqlx = { version = "0.6", features = [ "runtime-tokio-native-tls" ] } +sqlx = { version = "0.6.2", features = [ "runtime-tokio-native-tls" ] } diff --git a/shell.nix b/shell.nix index 3dd8886d1..6cce93e2c 100644 --- a/shell.nix +++ b/shell.nix @@ -1,7 +1,7 @@ let moz_overlay = import (builtins.fetchTarball https://github.com/mozilla/nixpkgs-mozilla/archive/master.tar.gz); # Pin to stable from https://status.nixos.org/ - nixpkgs = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/3d47bbaa26e7a771059d828eecf3bd8bf28a8b0f.tar.gz") { overlays = [ moz_overlay ]; }; + nixpkgs = import (fetchTarball "https://github.com/NixOS/nixpkgs/archive/596a8e828c5dfa504f91918d0fa4152db3ab5502.tar.gz") { overlays = [ moz_overlay ]; }; in with nixpkgs; stdenv.mkDerivation { @@ -11,10 +11,10 @@ in pkg-config ]; buildInputs = with nixpkgs; [ - ((rustChannelOf{ channel = "1.63.0"; }).rust.override { + ((rustChannelOf{ channel = "1.65.0"; }).rust.override { targets = ["wasm32-wasi"]; + extensions = ["rust-src"]; }) - rust-analyzer cargo-watch terraform awscli2 @@ -27,6 +27,8 @@ in datadog-agent sccache sqlite + fastmod + pebble ]; PROTOC = "${protobuf}/bin/protoc"; diff --git a/terraform/.gitignore b/terraform/.gitignore deleted file mode 100644 index 3f5ca68ad..000000000 --- a/terraform/.gitignore +++ /dev/null @@ -1 +0,0 @@ -terraform.tfvars diff --git a/terraform/backend.tf b/terraform/backend.tf deleted file mode 100644 index ba4a24881..000000000 --- a/terraform/backend.tf +++ /dev/null @@ -1,40 +0,0 @@ -terraform { - backend "s3" { - bucket = "unveil-terraform-state" - key = "unveil.tfstate" - region = "eu-west-2" - } - - required_providers { - aws = { - source = "hashicorp/aws" - version = "~> 4.0" - } - cloudinit = { - source = "hashicorp/cloudinit" - version = "~> 2.0" - } - random = { - source = "hashicorp/random" - version = "~> 3.0" - } - } - - required_version = ">= 0.14.9" -} - -provider "aws" { - region = "eu-west-2" -} - -module "shuttle" { - source = "./modules/shuttle" - - api_fqdn = "api.shuttle.rs" - db_fqdn = "db.shuttle.rs" - proxy_fqdn = "shuttleapp.rs" - postgres_password = var.postgres_password - mongodb_password = var.mongodb_password - shuttle_admin_secret = var.shuttle_admin_secret - instance_type = "c6g.4xlarge" -} diff --git a/terraform/containers.tf b/terraform/containers.tf deleted file mode 100644 index de61d8ad9..000000000 --- a/terraform/containers.tf +++ /dev/null @@ -1,15 +0,0 @@ -provider "aws" { - alias = "us_east_1" - region = "us-east-1" -} - -resource "aws_ecrpublic_repository" 
"backend" { - provider = aws.us_east_1 - - repository_name = "backend" - - catalog_data { - architectures = ["x86-64"] - operating_systems = ["Linux"] - } -} diff --git a/terraform/modules/shuttle/README.md b/terraform/modules/shuttle/README.md deleted file mode 100644 index aff97c744..000000000 --- a/terraform/modules/shuttle/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# AWS shuttle module -This module contains all the resources needed to deploy shuttle on AWS. The basic architecture is to create: -1. A single EC2 instance to run shuttle and PostgresDB -1. Two Route53 zones - one for the shuttle api; another to reach user services hosted by shuttle (called the proxy) -1. Three Load Balancers - one for the api, proxy, and PostgresDB respectively - -## Usage guide -The following terraform can be used as a starting point for using this module: - -```tf -module "shuttle" { - source = "github.com/shuttle-hq/shuttle/terraform/modules/shuttle" - - api_fqdn = "api.test.shuttle.rs" - db_fqdn = "db.test.shuttle.rs" - proxy_fqdn = "test.shuttleapp.rs" - postgres_password = "password" - shuttle_admin_secret = "12345" -} - -output "api_name_servers" { - value = module.shuttle.api_name_servers -} - -output "db_name_servers" { - value = module.shuttle.db_name_servers -} - -output "user_name_servers" { - value = module.shuttle.user_name_servers -} - -output "initial_user_key" { - value = module.shuttle.initial_user_key - description = "Key given to the initial shuttle user" -} -``` - -The shuttle api will be reachable at `api_fqdn` while hosted services will be subdomains of `proxy_fqdn`. The `postgres_password` sets the root password for Postgres and `shuttle_admin_secret` will be the secret needed to add more user keys to shuttle by an admin user. Shuttle does create the first user key though. This key is stored in the `initial_user_key` output variable. - -Just running `terraform apply` for the first time will fail since SSl certificates will be created for the api and proxy domains which will be verified. This verification will fail since it uses DNS that will be missing on first setup. So for first setups rather run the following: - -``` sh -terraform apply --target module.shuttle.aws_route53_zone.user --target module.shuttle.aws_route53_zone.api --target module.shuttle.aws_route53_zone.db -``` - -This command will create just the DNS zones needed for the api and proxy. Now use the `api_name_servers`, `db_name_servers` and `user_name_servers` outputs from this module to manually add NS records for `api_fqdn`, `db_fqdn` and `proxy_fqdn` in your DNS provider respectively. - -Once these records have propagated, a `terraform apply` command will succeed. - -## Allowing outside connections to the shared PostgresDB instance -Currently, the shared DB is started within a Docker container with a default configuration. This configuration does not allow external connections. Therefore you will need to manually whitelist external IP addresses which should be allowed to connect to the instance. - -First login to the VM instance, then edit the file at `/opt/shuttle/conf/postgres/pg_hba.conf` by adding a line similar to the following. - -``` -host all all 0.0.0.0/0 md5 -``` - -The `0.0.0.0/0` address can be replaced with a more target IP subset. 
- -Finally, restart the shuttle service using - -``` sh -sudo systemctl status shuttle-backend.service -``` - diff --git a/terraform/modules/shuttle/api.tf b/terraform/modules/shuttle/api.tf deleted file mode 100644 index 17713a4fe..000000000 --- a/terraform/modules/shuttle/api.tf +++ /dev/null @@ -1,68 +0,0 @@ -resource "aws_apigatewayv2_api" "backend" { - name = "shuttle-api-gateway" - protocol_type = "HTTP" - disable_execute_api_endpoint = true -} - -resource "aws_apigatewayv2_domain_name" "backend" { - domain_name = aws_acm_certificate.api.domain_name - - domain_name_configuration { - certificate_arn = aws_acm_certificate.api.arn - endpoint_type = "REGIONAL" - security_policy = "TLS_1_2" - } -} - -resource "aws_apigatewayv2_api_mapping" "backend" { - api_id = aws_apigatewayv2_api.backend.id - domain_name = aws_apigatewayv2_domain_name.backend.id - stage = aws_apigatewayv2_stage.alpha.id -} - -resource "aws_apigatewayv2_vpc_link" "private" { - name = "shuttle-api-gateway-vpc-link" - - security_group_ids = [aws_default_security_group.default.id] - subnet_ids = [aws_subnet.backend_a.id, aws_subnet.backend_b.id] -} - -resource "aws_apigatewayv2_integration" "backend" { - api_id = aws_apigatewayv2_api.backend.id - - integration_type = "HTTP_PROXY" - integration_uri = aws_lb_listener.api.arn - integration_method = "ANY" - - request_parameters = { - "overwrite:path" = "$request.path" - } - - connection_type = "VPC_LINK" - connection_id = aws_apigatewayv2_vpc_link.private.id -} - -resource "aws_apigatewayv2_stage" "alpha" { - api_id = aws_apigatewayv2_api.backend.id - - name = "valpha" - - auto_deploy = true - - access_log_settings { - destination_arn = aws_cloudwatch_log_group.api_gateway.arn - format = < { - name = dvo.resource_record_name - record = dvo.resource_record_value - type = dvo.resource_record_type - zone_id = aws_route53_zone.user.zone_id - } - } - - allow_overwrite = true - name = each.value.name - records = [each.value.record] - ttl = 60 - type = each.value.type - zone_id = each.value.zone_id -} - -resource "aws_route53_record" "user_alias" { - zone_id = aws_route53_zone.user.zone_id - name = "*.${var.proxy_fqdn}" - type = "A" - - alias { - name = aws_lb.user.dns_name - zone_id = aws_lb.user.zone_id - evaluate_target_health = true - } -} - -resource "aws_acm_certificate_validation" "user" { - certificate_arn = aws_acm_certificate.user.arn - validation_record_fqdns = [for record in aws_route53_record.user : record.fqdn] -} - -resource "aws_route53_zone" "api" { - name = var.api_fqdn -} - -resource "aws_acm_certificate" "api" { - domain_name = var.api_fqdn - - validation_method = "DNS" - - lifecycle { - create_before_destroy = true - } -} - -resource "aws_route53_record" "api" { - for_each = { - for dvo in aws_acm_certificate.api.domain_validation_options : dvo.domain_name => { - name = dvo.resource_record_name - record = dvo.resource_record_value - type = dvo.resource_record_type - zone_id = aws_route53_zone.api.zone_id - } - } - - allow_overwrite = true - name = each.value.name - records = [each.value.record] - ttl = 60 - type = each.value.type - zone_id = each.value.zone_id -} - -resource "aws_route53_record" "api_alias" { - zone_id = aws_route53_zone.api.zone_id - name = aws_apigatewayv2_domain_name.backend.domain_name - type = "A" - - alias { - name = aws_apigatewayv2_domain_name.backend.domain_name_configuration[0].target_domain_name - zone_id = aws_apigatewayv2_domain_name.backend.domain_name_configuration[0].hosted_zone_id - evaluate_target_health = true - } -} - 
-resource "aws_acm_certificate_validation" "api" { - certificate_arn = aws_acm_certificate.api.arn - validation_record_fqdns = [for record in aws_route53_record.api : record.fqdn] -} - -resource "aws_route53_zone" "db" { - name = var.db_fqdn -} - -resource "aws_acm_certificate" "db" { - domain_name = var.db_fqdn - - validation_method = "DNS" - - lifecycle { - create_before_destroy = true - } -} - -resource "aws_route53_record" "db" { - for_each = { - for dvo in aws_acm_certificate.db.domain_validation_options : dvo.domain_name => { - name = dvo.resource_record_name - record = dvo.resource_record_value - type = dvo.resource_record_type - zone_id = aws_route53_zone.db.zone_id - } - } - - allow_overwrite = true - name = each.value.name - records = [each.value.record] - ttl = 60 - type = each.value.type - zone_id = each.value.zone_id -} - -resource "aws_route53_record" "db_alias" { - zone_id = aws_route53_zone.db.zone_id - name = "" - type = "A" - - alias { - name = aws_lb.db.dns_name - zone_id = aws_lb.db.zone_id - evaluate_target_health = true - } -} - -resource "aws_acm_certificate_validation" "db" { - certificate_arn = aws_acm_certificate.db.arn - validation_record_fqdns = [for record in aws_route53_record.db : record.fqdn] -} diff --git a/terraform/modules/shuttle/load-balancing.tf b/terraform/modules/shuttle/load-balancing.tf deleted file mode 100644 index 87809ab94..000000000 --- a/terraform/modules/shuttle/load-balancing.tf +++ /dev/null @@ -1,197 +0,0 @@ -resource "aws_lb" "api" { - name = "shuttle" - - internal = true - - load_balancer_type = "application" - - security_groups = [aws_default_security_group.default.id] - subnets = [aws_subnet.backend_a.id, aws_subnet.backend_b.id] - - access_logs { - bucket = aws_s3_bucket.logs.bucket - prefix = "shuttle-lb" - enabled = true - } -} - -resource "aws_lb" "db" { - name = "db" - - internal = false - - load_balancer_type = "network" - - //security_groups = [aws_default_security_group.default.id] - subnets = [aws_subnet.backend_a.id, aws_subnet.backend_b.id] - - access_logs { - bucket = aws_s3_bucket.logs.bucket - prefix = "db-lb" - enabled = true - } -} - -resource "aws_lb_target_group" "api" { - name = "shuttle-lb-tg-http" - - health_check { - enabled = true - path = "/status" - port = var.api_container_port - } - - port = var.api_container_port - - protocol = "HTTP" - - vpc_id = aws_vpc.backend.id - - target_type = "instance" -} - -resource "aws_lb_listener" "api" { - load_balancer_arn = aws_lb.api.arn - - port = "80" - - protocol = "HTTP" - - default_action { - type = "forward" - target_group_arn = aws_lb_target_group.api.arn - } -} - -resource "aws_lb_listener" "postgres" { - load_balancer_arn = aws_lb.db.arn - - port = "5432" - - protocol = "TCP" - - default_action { - type = "forward" - target_group_arn = aws_lb_target_group.postgres.arn - } -} - -resource "aws_lb_listener" "mongodb" { - load_balancer_arn = aws_lb.db.arn - - port = "27017" - - protocol = "TCP" - - default_action { - type = "forward" - target_group_arn = aws_lb_target_group.mongodb.arn - } -} - -resource "aws_lb" "user" { - name = "shuttleapp" - - internal = false - - load_balancer_type = "application" - - security_groups = [aws_default_security_group.default.id] - subnets = [aws_subnet.backend_a.id, aws_subnet.backend_b.id] - - access_logs { - bucket = aws_s3_bucket.logs.bucket - prefix = "shuttle-user-lb" - enabled = true - } -} - -resource "aws_lb_listener" "user" { - load_balancer_arn = aws_lb.user.arn - - port = "80" - - protocol = "HTTP" - - default_action { - 
type = "redirect" - - redirect { - status_code = "HTTP_301" - port = "443" - protocol = "HTTPS" - } - } -} - -resource "aws_lb_listener" "user_tls" { - load_balancer_arn = aws_lb.user.arn - - port = "443" - - protocol = "HTTPS" - - ssl_policy = "ELBSecurityPolicy-2016-08" - certificate_arn = aws_acm_certificate.user.arn - - default_action { - type = "forward" - target_group_arn = aws_lb_target_group.user.arn - } -} - -resource "aws_lb_target_group" "user" { - name = "shuttle-user-lb-tg-http" - - health_check { - enabled = true - path = "/status" - port = var.api_container_port - } - - port = var.proxy_container_port - - protocol = "HTTP" - - vpc_id = aws_vpc.backend.id - - target_type = "instance" -} - -resource "aws_lb_target_group" "postgres" { - name = "shuttle-postgres-lb-tg-tcp" - - // TODO: change me - health_check { - enabled = true - path = "/status" - port = var.api_container_port - } - - port = var.postgres_container_port - - protocol = "TCP" - - vpc_id = aws_vpc.backend.id - - target_type = "instance" -} - -resource "aws_lb_target_group" "mongodb" { - name = "shuttle-mongodb-lb-tg-tcp" - - // TODO: change me - health_check { - enabled = true - path = "/status" - port = var.api_container_port - } - - port = var.mongodb_container_port - - protocol = "TCP" - - vpc_id = aws_vpc.backend.id - - target_type = "instance" -} diff --git a/terraform/modules/shuttle/locals.tf b/terraform/modules/shuttle/locals.tf deleted file mode 100644 index 2240a88e5..000000000 --- a/terraform/modules/shuttle/locals.tf +++ /dev/null @@ -1,8 +0,0 @@ -data "aws_caller_identity" "current" {} - -locals { - account_id = data.aws_caller_identity.current.account_id - data_dir = "/opt/shuttle" - docker_backend_image = "public.ecr.aws/shuttle/api" - docker_provisioner_image = "public.ecr.aws/shuttle/provisioner" -} diff --git a/terraform/modules/shuttle/main.tf b/terraform/modules/shuttle/main.tf deleted file mode 100644 index f80aab2be..000000000 --- a/terraform/modules/shuttle/main.tf +++ /dev/null @@ -1,16 +0,0 @@ -terraform { - required_providers { - aws = { - source = "hashicorp/aws" - version = ">= 4.0" - } - cloudinit = { - source = "hashicorp/cloudinit" - version = ">= 2.0" - } - random = { - source = "hashicorp/random" - version = ">= 3.0" - } - } -} diff --git a/terraform/modules/shuttle/misc/cloud-config.yaml b/terraform/modules/shuttle/misc/cloud-config.yaml deleted file mode 100644 index c0d8b5d29..000000000 --- a/terraform/modules/shuttle/misc/cloud-config.yaml +++ /dev/null @@ -1,85 +0,0 @@ -#cloud-config - -# Install needed packages -packages: - - docker.io - - nfs-common - -# Create the docker group -groups: - - docker - -# Add default auto-created user to docker group -system_info: - default_user: - groups: [docker] - -# Make sure files are written every time -cloud_final_modules: - - package-update-upgrade-install - - [runcmd, always] - - [write-files, always] - - fan - - landscape - - lxd - - ubuntu-drivers - - write-files-deferred - - puppet - - chef - - mcollective - - salt-minion - - reset_rmc - - refresh_rmc_and_interface - - rightscale_userdata - - scripts-vendor - - scripts-per-once - - scripts-per-boot - - scripts-per-instance - - [scripts-user, always] - - ssh-authkey-fingerprints - - keys-to-console - - install-hotplug - - phone-home - - final-message - - power-state-change - -# Create our systemd files -write_files: - - encoding: b64 - content: ${opt_shuttle_content} - path: /lib/systemd/system/opt-shuttle.mount - owner: root:root - permissions: "0644" - - encoding: b64 - content: 
${shuttle_backend_content} - path: /lib/systemd/system/shuttle-backend.service - owner: root:root - permissions: "0644" - - encoding: b64 - content: ${shuttle_provisioner_content} - path: /lib/systemd/system/shuttle-provisioner.service - owner: root:root - permissions: "0644" - - encoding: b64 - content: ${shuttle_pg_content} - path: /lib/systemd/system/shuttle-pg.service - owner: root:root - permissions: "0644" - - encoding: b64 - content: ${shuttle_mongodb_content} - path: /lib/systemd/system/shuttle-mongodb.service - owner: root:root - permissions: "0644" - -power_state: - mode: reboot - -# Up services on every boot -runcmd: - - docker network inspect shuttle-net || docker network create --driver bridge shuttle-net - - [systemctl, daemon-reload] - - [systemctl, enable, "opt-shuttle.mount"] - - [systemctl, enable, "shuttle-pg.service"] - - [systemctl, enable, "shuttle-mongodb.service"] - - [systemctl, enable, "shuttle-provisioner.service"] - - [systemctl, enable, "shuttle-backend.service"] diff --git a/terraform/modules/shuttle/networking.tf b/terraform/modules/shuttle/networking.tf deleted file mode 100644 index b03b1bc4a..000000000 --- a/terraform/modules/shuttle/networking.tf +++ /dev/null @@ -1,72 +0,0 @@ -resource "aws_vpc" "backend" { - cidr_block = "10.0.0.0/16" - - enable_dns_hostnames = true -} - -resource "aws_internet_gateway" "public" { - vpc_id = aws_vpc.backend.id -} - -resource "aws_network_acl_rule" "postgres" { - network_acl_id = aws_vpc.backend.default_network_acl_id - rule_number = 10 - egress = false - protocol = "tcp" - rule_action = "allow" - cidr_block = "0.0.0.0/0" - from_port = 5432 - to_port = 5432 -} - -resource "aws_network_acl_rule" "mysql" { - network_acl_id = aws_vpc.backend.default_network_acl_id - rule_number = 11 - egress = false - protocol = "tcp" - rule_action = "allow" - cidr_block = "0.0.0.0/0" - from_port = 3306 - to_port = 3306 -} - -resource "aws_default_security_group" "default" { - vpc_id = aws_vpc.backend.id - - ingress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - - egress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } -} - -resource "aws_default_route_table" "backend" { - default_route_table_id = aws_vpc.backend.default_route_table_id - - route { - cidr_block = "0.0.0.0/0" - gateway_id = aws_internet_gateway.public.id - } -} - -resource "aws_subnet" "backend_a" { - vpc_id = aws_vpc.backend.id - - availability_zone = "eu-west-2a" - cidr_block = "10.0.10.0/24" -} - -resource "aws_subnet" "backend_b" { - vpc_id = aws_vpc.backend.id - - availability_zone = "eu-west-2b" - cidr_block = "10.0.20.0/24" -} diff --git a/terraform/modules/shuttle/output.tf b/terraform/modules/shuttle/output.tf deleted file mode 100644 index 5fd6db4eb..000000000 --- a/terraform/modules/shuttle/output.tf +++ /dev/null @@ -1,29 +0,0 @@ -output "api_url" { - value = aws_apigatewayv2_domain_name.backend.id - description = "URL to connect to the api" -} - -output "api_name_servers" { - value = aws_route53_zone.api.name_servers - description = "Name servers (NS) for api zone" -} - -output "db_name_servers" { - value = aws_route53_zone.db.name_servers - description = "Name servers (NS) for pg zone" -} - -output "user_name_servers" { - value = aws_route53_zone.user.name_servers - description = "Name servers (NS) for proxy zone" -} - -output "api_content_host" { - value = aws_lb.api.dns_name - description = "URL for api load balancer" -} - -output "user_content_host" { - value = aws_lb.user.dns_name - 
description = "URL for user proxy load balancer" -} diff --git a/terraform/modules/shuttle/rds.tf b/terraform/modules/shuttle/rds.tf deleted file mode 100644 index 7c5ffd1e3..000000000 --- a/terraform/modules/shuttle/rds.tf +++ /dev/null @@ -1,5 +0,0 @@ -resource "aws_db_subnet_group" "managed" { - name = "shuttle_rds" - description = "Subnet for RDS instances managed by shuttle" - subnet_ids = [aws_subnet.backend_a.id, aws_subnet.backend_b.id] -} diff --git a/terraform/modules/shuttle/service.tf b/terraform/modules/shuttle/service.tf deleted file mode 100644 index a8d88db67..000000000 --- a/terraform/modules/shuttle/service.tf +++ /dev/null @@ -1,170 +0,0 @@ -resource "aws_network_interface" "backend" { - subnet_id = aws_subnet.backend_b.id -} - -resource "aws_eip" "backend" { - vpc = true - network_interface = aws_network_interface.backend.id -} - -resource "aws_iam_instance_profile" "backend" { - name = "backend-profile" - role = aws_iam_role.backend.name -} - -resource "aws_iam_role" "backend" { - name = "BackendAPIRole" - path = "/" - description = "Allows EC2 instances to call AWS services on your behalf." - - assume_role_policy = < &'static str { - "Hello, world!" -} - -#[shuttle_service::main] -async fn init() -> Result, shuttle_service::Error> { - let rocket = rocket::build().mount("/", routes![hello]); - - Ok(rocket) -} -``` - -My IDE violently lights up with red syntax highlighting as I realise I haven't imported anything. The realities of software engineering hit me as I eye the bottle of whiskey next to me. 18 year old scotch. It turns out I'm grossly overpaid for the value I offer society. I grab a coffee mug and pour myself a small shot - liquid courage. - -Next I import all of the dependencies to get shuttle to work with Rocket - pretty simple. I open up `Cargo.toml` add a couple of lines: - -```toml -[package] -name = "url-shortener" -version = "0.1.0" -edition = "2021" - -[lib] -crate-type = ["cdylib"] - -[dependencies] -rocket = { version = "0.5.0-rc.1", features = ["json"] } -shuttle-service = { version = "0.2", features = ["sqlx-postgres", "web-rocket"] } -``` - -My IDE quietens down as dependencies are resolved and a wave of relief washes over me. Let's deploy this thing. - -```bash -$ cargo shuttle deploy - Packaging url-shortener v0.1.0 (/private/shuttle/examples/url-shortener) - Archiving Cargo.toml - Archiving Cargo.toml.orig - Archiving src/lib.rs - Compiling tracing-attributes v0.1.20 - Compiling tokio-util v0.6.9 - Compiling multer v2.0.2 - Compiling hyper v0.14.18 - Compiling rocket_http v0.5.0-rc.1 - Compiling rocket_codegen v0.5.0-rc.1 - Compiling rocket v0.5.0-rc.1 - Compiling shuttle-service v0.2.5 - Compiling url-shortener v0.1.0 (/opt/shuttle/crates/url-shortener) - Finished dev [unoptimized + debuginfo] target(s) in 1m 01s - - Project: url-shortener - Deployment Id: 3d08ac34-ad63-41c1-836b-99afdc90af9f - Deployment Status: DEPLOYED - Host: url-shortener.shuttleapp.rs - Created At: 2022-04-13 03:07:34.412602556 UTC -``` - -Ok... this seemed a little too easy, let's see if it works. - -``` -$ curl -X https://url-shortener.shuttleapp.rs/hello -Hello, world! -``` - -Hm, not bad. I pour myself another shot... - -## Adding Postgres - 07:03 minutes remaining - -This is the part of my journey where I usually get a little flustered. I've set up databases before but it's always a pain. 
You need to provision a VM, make sure storage isn't ephemeral, install and spin up the database, create an account with the correct privileges and secure password, store the password in some sort of secrets manager in CI, add your IP address and your VM's IP address to the list of acceptable hosts etc etc etc. Oof that sounds like a lot of work. - -`shuttle` does a lot of this stuff for you - I just didn't remember how. I quickly head over to the [shuttle / sqlx](https://docs.rs/shuttle-service/0.2.5/shuttle_service/#using-sqlx) section in the docs. I added the `sqlx` dependency to `Cargo.toml` and change _one line_ in `lib.rs`: - -```rust -#[shuttle_service::main] -async fn rocket(pool: PgPool) -> Result, shuttle_service::Error> { -``` - -By adding a parameter to the main `rocket` function, `shuttle` will automatically provision a Postgres database for you, create an account and hand you back an authenticated connection pool which is usable from your application code. - -Let's deploy it and see what happens: - -```bash -$ cargo shuttle deploy -... - Finished dev [unoptimized + debuginfo] target(s) in 19.50s - - Project: url-shortener - Deployment Id: 538e41cf-44a9-4158-94f1-3760b42619a3 - Deployment Status: DEPLOYED - Host: url-shortener.shuttleapp.rs - Created At: 2022-04-13 03:08:30.412602556 UTC - Database URI: postgres://***:***@pg.shuttle.rs/db-url-shortener -``` - -I have a database! I couldn't help but chuckle a little bit. So far so good. - -## Setting up the Schema - 06:30 minutes remaining - -The database provisioned by `shuttle` is completely empty - I'm going to need to either connect to Postgres and create the schema myself, or write some sort of code to automatically perform the migration. As I start to ponder this seemingly existential question I decide not to overthink it. I'm just going to go with whatever is easiest. - -I connect to the database provisioned by shuttle using [pgAdmin](https://www.pgadmin.org/) using the provided database URI and run the following script: - -```sql -CREATE TABLE urls ( - id VARCHAR(6) PRIMARY KEY, - url VARCHAR NOT NULL -); -``` - -As I was ready to Google 'how to create index postgres' I realised that since the `id` used for the url lookup is a primary key, which is implicitly a 'unique' constraint, Postgres would create the index for me. Cool. - -## Writing the Endpoints - 05:17 remaining - -The app's going to need two endpoints - one to `shorten` URLs and one to retrieve URLs and `redirect` the user. - -I quickly created two stubs for the endpoints while I thought about the actual implementation: - -```rust -#[get("/")] -async fn redirect(id: String, pool: &State) -> Result { - unimplemented!() -} - -#[post("/", data = "")] -async fn shorten(url: String, pool: &State) -> Result { - unimplemented!() -} -``` - -I decided to start with the shorten method. The simplest implementation I could think of is to generate a unique id on the fly using the [`nanoid`](https://crates.io/crates/nanoid) crate and then running an `INSERT` statement. Hm - what about duplicates? I decided not to overthink it -🤷. 
- -```rust -#[post("/", data = "")] -async fn shorten(url: String, pool: &State) -> Result { - let id = &nanoid::nanoid!(6); - let p_url = Url::parse(&url).map_err(|_| Status::UnprocessableEntity)?; - sqlx::query("INSERT INTO urls(id, url) VALUES ($1, $2)") - .bind(id) - .bind(p_url.as_str()) - .execute(&**pool) - .await - .map_err(|_| Status::InternalServerError)?; - Ok(format!("https://url-shortener.shuttleapp.rs/{id}")) -} -``` - -Next I implemented the `redirect` method in a similar spirit. At this point I started to panic as it was really getting close to the 10 minute mark. I'll do a `SELECT *` and pull the first url that matches with the query id. If the id does not exist, you get back a `404`: - -```rust -#[get("/")] -async fn redirect(id: String, pool: &State) -> Result { - let url: (String,) = sqlx::query_as("SELECT url FROM urls WHERE id = $1") - .bind(id) - .fetch_one(&**pool) - .await - .map_err(|e| match e { - Error::RowNotFound => Status::NotFound, - _ => Status::InternalServerError - })?; - Ok(Redirect::to(url.0)) -} -``` - -Whoops there's a typo in the SQL query. - -After I fixed my typo and sorted out the various unresolved dependencies by letting my IDE do the heavy lifting for me, I deployed to shuttle for the last time. - -## Moment of truth - 00:25 minutes remaining - -Feeling like an off-brand Tom Cruise in mission impossible I stared intently at the clock counting down as shuttle deployed my url-shortener. 19.3 seconds and we're live. As soon as the `DEPLOYED` dialog came up, I instantly tested it out: - -```bash -$ curl -X POST -d "https://google.com" https://url-shortener.shuttleapp.rs -https://s.shuttleapp.rs/XDlrTB⏎ -``` - -I then copy/pasted the shortened URL to my browser and, lo an behold, was redirected to Google. - -I did it. - -## Retrospective - 00:00 minutes remaining - -With a sigh of relief I pushed myself back from my desk. I refilled my mug, picked it up and headed to my derelict balcony. As I slid open the the windows and the cold air flowed into my apartment, I took two steps forward to rest my elbows and mug on the railing. - -I sat there for a while reflecting on what had just happened. I _had_ succeeded. I'd successfully built a somewhat trivial app quickly without needing to worry about provisioning databases or networking or any of that jazz. - -But how would this measure up in the real world? Real software engineering is complex, involving collaboration across different teams with different skill-sets. The entire world of software is barely keeping it together. Is it really feasible to replace our existing, tried and tested cloud paradigms with a new paradigm of not having to deal with infrastructure at all? What I knew for sure is I wasn't going to get to the bottom of this one tonight. - -As I went back to my bedroom and laid once more in bed, I noticed I was grinning. There's a chance we really can do better. Maybe we're not exactly there yet, but my experience tonight had given me a certain optimism that we aren't as far as I once thought. With the promise of a brighter tomorrow, I turned on my side and fell asleep. 
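For completeness, the entry point that ties `shorten` and `redirect` together is just the earlier `rocket` function with both routes mounted and the pool handed to Rocket as managed state. A rough sketch - assuming the handlers above, the `PgPool` argument from earlier, and a `Rocket<Build>` return type - looks roughly like this:

```rust
use rocket::{routes, Build, Rocket};
use sqlx::PgPool;

#[shuttle_service::main]
async fn rocket(pool: PgPool) -> Result<Rocket<Build>, shuttle_service::Error> {
    // `.manage(pool)` is what lets the handlers borrow the pool as `&State<PgPool>`.
    let rocket = rocket::build()
        .mount("/", routes![redirect, shorten])
        .manage(pool);

    Ok(rocket)
}
```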
diff --git a/www/_blog/2022-04-22-dev-log-0.mdx b/www/_blog/2022-04-22-dev-log-0.mdx deleted file mode 100644 index ee72e944c..000000000 --- a/www/_blog/2022-04-22-dev-log-0.mdx +++ /dev/null @@ -1,78 +0,0 @@ ---- -title: "DevLog[0]: Building a serverless platform for Rust in 4 weeks" -description: DevLog[0] is the first in a series of posts about how we built the shuttle MVP -author: christoshadjiaslanis -tags: [rust, startup, devlog] -thumb: shuttle-logo-square-rocket.png -cover: shuttle-logo-rectangle.png -coverAspectRatio: 2624/832 -date: "2022-04-22T15:00:00" ---- - -Put yourself in this situation. Your startup company has come across a pretty obvious gap in the market. It's ambitious, maybe even a little crazy. You're going to toe-to-toe with AWS, Heroku, Google etc. You have 4 weeks to prove the concept. Go. - -In January we spent countless hours interviewing software engineers. A striking pattern emerged - no one liked dealing with the cloud. It is of course, much better than having physical servers in your basement or driving 45 minutes to your local datacenter to patch a service. But since AWS came along c. 2007, there was a longing for things to be done better. Most engineers (myself included) don't want to deal with infrastructure, we just want to write code that scales and focus on product. Infrastructure is a pre-requisite but not sufficient to build a great product. The folks at Heroku had this insight and essentially developed PaaS along with the beginnings of containerisation tech. Then Hashicorp built declarative abstractions to make the business of managing your infrastructure less of a headache. Then the serverless movement promised to be the final chapter of this saga; devs could wrap their business logic in neat functions which would scale for you. Yet here we are again, in the winter of our discontent. - -At shuttle we think there is a better paradigm for building applications. We call it Infrastructure From Code (IFC). - -IFC uses application code as the source of truth for provisioning infrastructure. No longer are your applications and servers decoupled, the two go hand in hand. Our plan was to achieve this by doing static analysis of user code and generating the corresponding infrastructure in real time. A bit like this: - -```rust -#[get("/hello")] -fn index() -> &'static str { - "Hello, world!" -} - -#[shuttle_service::main] -async fn rocket( - pool: PgPool, // This will spin up a Postgres database, create an account and hand you an authenticated connection pool - redis: redis::Client // This will spin up a Redis instance and hand you back a client -) -> Result<...> { - // Application Code -} - -``` - -This isn't going to be everyone's cup of tea and this paradigm is probably not sufficient for every use-case. However we believe there exists a large class of products and teams which will benefit substantially from IFC. - -We have 4 weeks to prove the concept, let's get started. - -## Developer Experience - -Our primary focus with the MVP was to provide the best possible developer experience. We wanted the end user experience to be as simple as possible. - -1. A single annotation can transform your web app into a shuttle app: `#[shuttle_service::main]` -2. You can get started with a single cargo command: `$ cargo shuttle deploy` -3. Your shuttle app is automatically provisioned a subdomain `my-app.shuttleapp.rs` - -## Engineering Design - -Our primary focus was simplicity - we didn't want anything too complicated to start with. 
For example, we decided to ditch Kubernetes for our API and deployment servers. We simply didn't need that scale until we proved the concept and the complexity overhead would have been detrimental to development velocity. - -The deployment process is the core piece of engineering we spent the most time on. It looks something like this: - -1. `$ cargo shuttle deploy` runs a `cargo package` under the hood, zipping up the current cargo project into a tarball and shipping it to our API under the `/deploy` endpoint with a bearer token for authentication -2. The API receives the tarball and holds it in memory. The build is added to a job processor which acts as a build queue. -3. The job processor unpacks the tarball and writes it to disk, say under `/projects/my-app`. The build system is triggered to compile the unpacked cargo project -4. The output of the build process is a shared object file ('.so') which is then dynamically loaded by the API with its own runtime. The newly born web-server is assigned a free port which is not exposed to the outside world. -5. We update the routing table of our reverse proxy such that requests coming in with the host `my-app.shuttleapp.rs` are forwarded to the aforementioned port. - -And that's it! It turns out there are more than a few devils in the details here - but that was our plan in all it's glory. - -## $ cargo shuttle deploy - -The cargo subcommand [`cargo shuttle`](https://github.com/getsynth/shuttle/tree/main/cargo-shuttle) seemed like the obvious place to start. - -To create a third-party cargo subcommand, the binary needs to be named `cargo-${command}` and it needs to be stored in `~./.cargo/bin`. The easiest way to do this is to create a binary called `cargo-shuttle` and publish it to `crates.io`. Then, `cargo install cargo-shuttle` will place it in `~./.cargo/bin`. Pretty simple. - -`cargo-shuttle` also needs an HTTP client to make requests against the API, as well as some config logic to hold API keys. Finally, `cargo-shuttle` needs to use the `cargo` crate to programmatically run cargo commands like `cargo package`. - -We had underestimated how easy this would be - it turns out even though the `cargo` binary has world-class documentation, the same is not true for the crate. - -After a day of grappling and digging into the `cargo` source code, `cargo-shuttle` was happily packaging up cargo projects and serializing them nicely into the body of a POST request. - -We had built the bare bones of our client, next up was build system. - -## Next Steps - -In the next devlog we'll be exploring how we hacked together a build system to compile `cdylib`s for them to be dynamically linked to the API runtime. In the meantime, if you want to try out shuttle head over to the [getting started](https://docs.rs/shuttle-service/0.2.6/shuttle_service/) section! It's completely free while shuttle is still in Alpha. diff --git a/www/_blog/2022-04-27-dev-log-1.mdx b/www/_blog/2022-04-27-dev-log-1.mdx deleted file mode 100644 index b294fdce5..000000000 --- a/www/_blog/2022-04-27-dev-log-1.mdx +++ /dev/null @@ -1,162 +0,0 @@ ---- -title: "DevLog[1]: Building a serverless platform for Rust in 4 weeks - part deux" -description: Designing and building a deployment system as a state machine -author: christoshadjiaslanis -tags: [rust, startup, devlog] -thumb: shuttle-logo-square-rocket.png -cover: shuttle-logo-rectangle.png -coverAspectRatio: 2624/832 -date: "2022-04-27T15:00:00" ---- - -`shuttle` is a serverless platform built for Rust. 
The goal of shuttle is to create the best possible developer experience for deploying Rust apps. Also, shuttle introduces a new paradigm for developing on the cloud called Infrastructure From Code (IFC). - -IFC uses application code as the source of truth for provisioning infrastructure. No longer are your applications and servers decoupled, the two go hand in hand. shuttle does this by doing static analysis of user code and generating the corresponding infrastructure in real time. A bit like this: - -

- -

- -In the [previous DevLog](https://www.shuttle.rs/blog/2022/04/22/dev-log-0) we started the journey of building the shuttle MVP. We went over the design and implementation of the `cargo` subcommand which deploys cargo projects to shuttle. This has been a race against the clock, so corners were cut and tradeoffs were made. A similar theme emerges in this DevLog, which covers the **deployment state machine**. We're going to think about compiling and deploying user code, while also covering one of my favourite design patterns in Rust. - -## Deployment State - -shuttle exposes an HTTP endpoint under `POST /deploy`. This endpoint receives a series of bytes, from [`cargo shuttle`](https://crates.io/crates/cargo-shuttle), which corresponds to a packaged cargo project (basically a compressed tarball with a bunch of `.rs` files). - -The aim of the game is to convert that series of bytes into a deployed web service - how do we go about doing that? - -The deployment process is broken into 4 stages: - -1. `Queued` - the cargo project is received and waiting to be compiled -2. `Built` - the cargo project is compiled successfully -3. `Loaded` - the output of the compilation is loaded as a dynamically-linked library -4. `Deployed` - the app inside the DLL is running and listening for connections - -Then life happens so you need a couple more states: - -5. `Error` - there was an issue anywhere in the build process -6. `Deleted` - user-initiated deletion of the deployment - -Which corresponds to: - -![State Machine](/images/blog/state-machine.jpeg) - -All this can be expressed nicely in an enum since all these states are mutually exclusive: - -```rust -enum DeploymentState { - Queued, - Built, - Loaded, - Deployed, - Error, - Deleted -} -``` - -Even though we have a nice representation of our states - these states don't actually hold any data yet and the state transitions are not defined. We would like the `DeploymentState` to own all the data that corresponds to the specific stage in its deployment. We'll create some structs to hold the data required for each stage. - -First, the `QueuedState` just has a vector of bytes from the packaged cargo project that was received from `cargo-shuttle`: - -```rust -struct QueuedState { - crate_bytes: Vec, -} -``` - -When a deployment is queued, the shuttle build system writes the `crate_bytes` (just a tarball of a cargo project) to the file system. It then extracts the tarball and starts the compilation process by running `cargo::ops::compile`. - -The output of the build process is an `.so` file which is held in the next stage - the `BuiltState`: - -```rust -struct BuiltState { - so_path: PathBuf, -} -``` - -So far so good. At this point we have a pointer to a compiled shared object file - next we need to load it into memory. - -`shuttle` uses the [`libloading`](https://crates.io/crates/libloading) crate to dynamically load from a `.so` file a value of a type implementing the [`Service`](https://docs.rs/shuttle-service/0.2.6/shuttle_service/trait.Service.html) trait. The `Service` trait is code-generated for the user via the `#[shuttle_service::main]` annotation and it's how shuttle interfaces with client apps. - -```rust -pub struct LoadedState { - service: Box, - so: Library, -} -``` - -We keep the `Library` struct around since `Box` is just a pointer to data loaded and managed by `Library`. `Library` going out of scope deallocates that data, meaning `service` will be pointing to deallocated memory and hence we get a `segfault`.
So it's important to keep `Library` around for the lifetime of the deployment. - -Finally we find a free port, spin up a new tokio runtime (we keep the handle so that we can kill it in the future) and bind the service to the port. We'll be covering this stuff in depth on a future DevLog but if you're insatiably curious you can check out the [source](https://github.com/shuttle-hq/shuttle/tree/main/service). - -All of this is put into the `DeployedState` and we're done! - -```rust -struct DeployedState { - so: Library, // remember if we drop this, weird undefined behaviour - port: Port, - handle: ServeHandle, -} -``` - -To tie it all together, we modify our initial `DeploymentState` own the various states corresponding to the stages of the deployment process: - -```rust -enum DeploymentState { - Queued(QueuedState), - Built(BuiltState), - Loaded(LoadedState), - Deployed(DeployedState), - Error(anyhow::Error), - Deleted // doesn't have any state -} -``` - -We also wrote a really light `impl` to define the state transitions: - -```rust -impl DeploymentState { - fn queued(crate_bytes: Vec) -> Self { - Self::Queued(QueuedState { crate_bytes }) - } - - fn built(build: Build) -> Self { - Self::Built(BuiltState { build }) - } - - fn loaded(loader: Loader) -> Self { - Self::Loaded(loader) - } - - fn deployed( - so: Library, - port: Port, - handle: ServeHandle - ) -> Self { - Self::Deployed(DeployedState { - so, - port, - handle, - }) - } -} -``` - -You'll also notice that there is no mutation happening here. We found it cleaner to simply drop the old state and construct a new one (although we did try). - -## Conclusion - -In the case of shuttle, using enum variants and structs to represent states in a state machine seemed like the natural thing to do. The states were distinct and clear, and for the most part the transitions are clean and self-contained. - -So what do you think about enum variants as states in a state machine? What would you have done differently? - -## Next Steps - -In the next DevLog we'll be looking at the implementation of our reverse proxy and routing table - how we keep a ledger of deployed services and route network calls appropriately. - -In the meantime, if you want to try out shuttle head over to the [getting started](https://docs.rs/shuttle-service/0.2.6/shuttle_service/) section! It's completely free while shuttle is still in Alpha. diff --git a/www/_blog/2022-05-09-ifc.mdx b/www/_blog/2022-05-09-ifc.mdx deleted file mode 100644 index 1b00db9ec..000000000 --- a/www/_blog/2022-05-09-ifc.mdx +++ /dev/null @@ -1,60 +0,0 @@ ---- -title: Infrastructure From Code -description: A new paradigm for building on the cloud -author: nodar -tags: [infra-from-code, rust, startup] -thumb: infrastructure-from-code-trans.png -cover: infrastructure-from-code-trans.png -date: "2022-05-09T15:00:00" ---- - -In the early days of Facebook (back when it was still called `thefacebook.com`), Mark Zuckerberg hosted it on Harvard’s university servers. Back then companies used to buy or rent physical servers to run their software on. The advent of the cloud in the mid-2000s changed the game. The elasticity that this enabled has in big part enabled the rapid progress that we’ve all enjoyed since then. What we demand from software has increased tremendously, and correspondingly its architecture has become much more elaborate. The power of flexibility came at a price though - the complexity of wiring code with infrastructure. That price is even higher today. 
- -### The Container Hero - -Heroku became part of the cloud-native lore as the first incredibly successful attempt at tackling this complexity. They led the first crusade to rid software developers of the infrastructure complexity dragon. People loved it. Heroku pioneered the wildly popular container-based approach to deployment that abstracted away the burden of managing virtual machines. By being opinionated with the use of containers, Heroku was able to appeal to a broad set of customers looking to quickly build apps. Containers are mutually isolated processes, wired together by third-party configuration which does not belong in the application’s code base - this design choice results in a lack of elasticity and granular control of your system. This results in a conservative outlook of dealing with infrastructure, constantly over-provisioning and hence overpaying to account for potential future load. - -Furthermore, infrastructure is still treated separately from code - the two worlds live separately and don’t really know much about each other. There is much less wiring to do than with AWS for example, but what is left to do - and there’s a lot of it - you still have to do yourself. Heroku trades off AWS’s elasticity for ready-made building block components that are statically wired up together through a combination of CLI commands and dashboard operations. Of course, Heroku is limited by its founding principle: static containers as building blocks of applications. With Heroku, it is true you do not have to think about infrastructure - but only in the beginning. Once your application scales, your bills stack up and you’re left without a choice: go back to AWS. - -### The Serverless Conundrum - -We need to talk about serverless. Serverless (think AWS Lambda) was a new cloud computing execution model where machine allocation happens on-demand and the user is primarily abstracted away from the underlying servers. With it came a familiar promise - developers not needing to think about infrastructure at all. Despite its somewhat counterintuitive name (because, of course, there are always servers running somewhere), serverless sounds like a great ideal to strive towards. This is simple, developers want to spend as much time as possible on delivering business value by writing code, while companies would like to avoid spending fortunes on DevOps. This seems to be the holy grail, but there’s a catch. You might ask, “if you say serverless is so great, why have we all not switched yet”? - -Well, serverless forces you to write application business logic as functions, rather than the more traditional idiom of stateful processes. To reap the benefits of serverless, you have to build your application as a multitude of stateless request or event handlers, often requiring a bottoms-up redesign of your system. For some use-cases the serverless paradigm works, but in many cases breaking things into discrete, decoupled functions may not be optimal or even feasible. The next question is, can we have our cake and eat it too? Can we maintain the paradigm of stateful processes and abstract away the underlying infrastructure and orchestration? - -### Infrastructure from Code - -At shuttle we want to empower engineers by creating the best possible developer experience. - -We've already developed an annotation based system that enables Rust apps to be deployed with a one-liner, as well as dependencies like databases being provisioned through static analysis in real-time. 
- -```rust -#[shuttle_service::main] -async fn rocket( - pool: PgPool, // automatic db provisioning + hands you back an authenticated connection pool -) -> Result<...> { - // application code -} -``` - -Building on the phenomenal engineering done before us, we see a better future. One where developers don’t need to do any “wiring” whatsoever when it comes to code and infrastructure. - -In this future, infrastructure can be defined directly from code. Not in the “Infrastructure as Code” kind of way though, but in the way that the code that developers write implicitly defines infrastructure. What your code actually needs in terms of infrastructure should be inferred as you build your application, instead of you having to think upfront about what infrastructure piece is needed and how to wire it up. - -This setup should also break the boundaries that keep containers isolated from each other (and thus make it difficult to orchestrate them), without necessarily getting rid of the paradigm of containers. It should not force you into any specific way of writing applications, but just be an extension of your workflow. - -### Having your cake and eating it too - -When looking back at Heroku’s success, it becomes apparent that focusing on one language, Ruby, which was becoming quite popular at the time - was a remarkable strategy. It enabled their team to focus acutely and produce an unparalleled experience for their users. - -At shuttle we are convinced Rust is the best language to start this journey with. It’s been [the most loved](https://www.cantorsparadise.com/the-most-loved-programming-language-in-the-world-5220475fcc22) language by developers for many years in a row (as well as one of the fastest-growing languages). If you want to create the best developer experience - it makes sense to start with the most loved language. Indeed, Rust is the first language packed with such a powerful set of tools for static analysis and code generation, that are required to create the best developer experience when it comes to _Infrastructure ~~as~~ from Code_. - -Removing the burden of dealing with DevOps from developers, many of whom find it daunting and stressful, not only do we stand to make development more enjoyable and efficient, but also enable far more people to write and ship applications. - -From inception, all of us shared affection for open source software, not only from a philosophical standpoint. We have seen in practice that the best way to build software is together with the end-users. It all goes back to the idea of creating the best developer experience - so for us, this is a no-brainer. - -Our community is just as important to us, as our vision is, so if any of this resonates with you - [join us on discord](https://discord.gg/shuttle). - -Or check out our [jobs board](https://www.workatastartup.com/companies/shuttle). - -Also, if you’re curious to learn more about _how_ we are building this - [check out our GitHub](https://github.com/getsynth/shuttle). 
diff --git a/www/_blog/2022-06-01-hyper-vs-rocket.mdx b/www/_blog/2022-06-01-hyper-vs-rocket.mdx deleted file mode 100644 index 779c4415b..000000000 --- a/www/_blog/2022-06-01-hyper-vs-rocket.mdx +++ /dev/null @@ -1,251 +0,0 @@ ---- -title: Hyper vs Rocket - Low Level vs Batteries included -description: A comparison of using the low-level HTTP framework 'hyper' vs a batteries included framework like 'Rocket' -author: ben -tags: [rust, rocket, hyper, comparison] -thumb: hyper-vs-rocket.png -cover: hyper-vs-rocket.png -date: "2022-05-09T15:00:00" ---- - -In this post we're going to be comparing two popular Rust libraries used for building web applications. We'll be writing an example in each and compare their ergonomics and how they perform. - -The first library [Hyper](https://github.com/hyperium/hyper) is a low level HTTP library which contains the primitives for building server applications. The second library [Rocket](https://rocket.rs/) comes with more "batteries included" and provides a more declarative approach to building web applications. - -## The Demo - -We're going to build a simple site to showcase how each libraries implements: - -### Routing - -Routing decides what to respond for a given URL. Some paths are fixed, in our example we will have a fixed route `/` which returns `Hello World`. Some paths are dynamic and can have parameters. In the example we will have `/hello/*name*` which will response `Hello *name*` which will have *name* substituted in each response. - -### Shared state - -We want to have a central state for the application. - -In this demo we will have central site visitor counter which counts the number of requests. This number can be viewed as JSON on the `/counter.json` route. In this example we will be storing the counter in application memory. However if you were storing it in a database the shared state would be a database client. - -The are lots of other functionality necessary for a site such as handling HTTP methods, receiving data, rendering templates and error handling. But for the scope of this post and example we will only be comparing these two features. - -### The rules - -The rules for this demonstration is to only use the specific library and any of its re-exported dependencies. So no additional libraries (except in the hyper example we need a `tokio::main`). - -## Hyper - -Hyper's readme describes hyper as a "A fast and correct HTTP implementation for Rust with client and server APIs". For this demo we will be using the server side of the library. It has **9.7k** stars on GitHub and **48M** crates downloads. It is used as a often a dependency and many other libraries such as [reqwest and tonic](https://crates.io/crates/hyper/reverse_dependencies) build on top of it. - -In this example we see how far we can get with just using the library. This demo uses Hyper 0.14[^hyper-deps]. 
Below is the full code for the site: - -```rust -use hyper::server::conn::AddrStream; -use hyper::service::{make_service_fn, service_fn}; -use hyper::{Body, Request, Response, Server}; -use std::convert::Infallible; -use std::sync::{atomic::AtomicUsize, Arc}; - -#[derive(Clone)] -struct AppContext { - pub counter: Arc, -} - -async fn handle(context: AppContext, req: Request) -> Result, Infallible> { - // Increment the visit count - let new_count = context - .counter - .fetch_add(1, std::sync::atomic::Ordering::SeqCst); - - if req.method().as_str() != "GET" { - return Ok(Response::builder().status(406).body(Body::empty()).unwrap()); - } - - let path = req.uri().path(); - let response = if path == "/" { - Response::new(Body::from("Hello World")) - } else if path == "/counter.json" { - let data = format!("{{\"counter\":{}}}", new_count); - Response::builder() - .header("Content-Type", "application/json") - .body(Body::from(data)) - .unwrap() - } else if let Some(name) = path.strip_prefix("/hello/") { - Response::new(Body::from(format!("Hello, {}!", name))) - } else { - Response::builder().status(404).body(Body::empty()).unwrap() - }; - Ok(response) -} - -#[tokio::main] -async fn main() { - let context = AppContext { - counter: Arc::new(AtomicUsize::new(0)), - }; - - let make_service = make_service_fn(move |_conn: &AddrStream| { - let context = context.clone(); - let service = service_fn(move |req| handle(context.clone(), req)); - async move { Ok::<_, Infallible>(service) } - }); - - let server = Server::bind(&"127.0.0.1:3000".parse().unwrap()) - .serve(make_service) - .await; - - if let Err(e) = server { - eprintln!("server error: {}", e); - } -} -``` - -At the top we define a `handle` function which processes all the requests. - -Routing is done through the chain of ifs and elses in the `handle` function. First the path of the request (e.g `/` for the index) is extracted using `req.uri().path()`. Fixed routes are easy to branch on using string comparison like `path == "/"`. For routes which match multiple paths such as the `/hello/` route it uses [`str::strip_prefix`](https://doc.rust-lang.org/std/primitive.str.html#method.strip_prefix) which returns a `None` if the path doesn't -start with the prefix or `Some` if the path starts with the prefix along with a slice that proceeds the prefix. - -```rust -"/".strip_prefix("/hello/") == None -"/test".strip_prefix("/hello/") == None -"/hello/jack".strip_prefix("/hello/") == Some("jack") -``` - -The function has a early return for requests with a method other than GET because there are no POST routes or others for this example. If the site accepted different requests types and had to add additional guards then we could additional clauses to the if statement. Although you could see how expand on the if chain would get more complex and verbose. - -To return a response, Hyper re-exports [`Response`](https://docs.rs/hyper/0.14.19/hyper/struct.Response.html) (from the [http crate](https://docs.rs/http/latest/http/)). It has a nice simple builder pattern for building the responses. The serialization code is hand written using `format!`. Of course we could import serde but that's against the rules. - -The counter is done by creating a struct in the initializing code and cloning it on every request to send to the handler function. Without going into the details it uses `Arc` instead of a `usize` as the atomic variant has special properties for when multiple handlers are using and mutating it. 
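
As a quick standalone sketch (not part of the Hyper example above), this is the property the handler relies on: every clone of the `Arc` points at the same `AtomicUsize`, so concurrent tasks all see and update one shared count:

```rust
use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};

#[tokio::main]
async fn main() {
    let counter = Arc::new(AtomicUsize::new(0));

    // Each task gets its own clone of the `Arc`, but they all share the same counter.
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let counter = Arc::clone(&counter);
            tokio::spawn(async move {
                counter.fetch_add(1, Ordering::SeqCst);
            })
        })
        .collect();

    for handle in handles {
        handle.await.unwrap();
    }

    assert_eq!(counter.load(Ordering::SeqCst), 4);
}
```

Back in the Hyper example itself: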
The code increments the visitor counter before anything else in the handler function so that a visit is recorded for all requests. - -### Hyper Verdict - -In terms of development (on a low end machine we used for profiling[^profile-machine]), a debug build (without any of the build artifacts) takes **79.0s**. After the initial compilation, incremental compilation takes only **1.9s**. For building a `release` build with further optimizations (on top of the debug build artifacts) it takes **32.5s**. - -The initialization code was take from [Hyper's server docs](https://docs.rs/hyper/latest/hyper/server/index.html) and is quite verbose and out of the box for Hyper there are no logs or server information. - -In terms of runtime performance over three 30 second connections Hyper responded to on average **74,563** requests per second on the index route on the above code. Which is incredible quick! - -## Rocket - -Rocket is a "web framework for Rust with a focus on ease-of-use, expressibility, and speed". It has **17.4k** github stars and **1.7M** crates downloads. Rocket internally uses Hyper. - -For this demo we are using the `0.5.0-rc2` version of Rocket[^rocket-deps] which builds on Rust stable. - -```rust -use rocket::{ - fairing::{Fairing, Info, Kind}, - get, launch, routes, - serde::{json::Json, Serialize}, - Config, Data, Request, State, -}; -use std::sync::atomic::AtomicUsize; - -#[derive(Serialize, Default)] -#[serde(crate = "rocket::serde")] -struct AppContext { - pub counter: AtomicUsize, -} - -#[launch] -fn rocket() -> _ { - let config = Config { - port: 3000, - ..Config::debug_default() - }; - - rocket::custom(&config) - .attach(CounterFairing) - .manage(AppContext::default()) - .mount("/", routes![hello1, hello2, counter]) -} - -struct CounterFairing; - -#[rocket::async_trait] -impl Fairing for CounterFairing { - fn info(&self) -> Info { - Info { - name: "Request Counter", - kind: Kind::Request, - } - } - - async fn on_request(&self, request: &mut Request<'_>, _: &mut Data<'_>) { - request - .rocket() - .state::() - .unwrap() - .counter - .fetch_add(1, std::sync::atomic::Ordering::SeqCst); - } -} - -#[get("/")] -fn hello1() -> &'static str { - "Hello World" -} - -#[get("/hello/")] -fn hello2(name: &str) -> String { - format!("Hello, {}!", name) -} - -#[get("/counter.json")] -fn counter(state: &State) -> Json<&AppContext> { - Json(state.inner()) -} -``` - -In Rocket we describe each endpoint using a function. The `get` macro attribute handles path routing and http method constraint. No need to add early returns for methods and dealing with raw string slices. It takes the declarative approach, `#[get("/hello/")]` is more descriptive and less verbose than `if let Some(name) = path.strip_prefix("/hello/")`. The functions are registered using `.mount("/", routes![hello1, hello2, counter])`. - -The application has a state defined here: - -```rust -#[derive(Serialize, Default)] -#[serde(crate = "rocket::serde")] -struct AppContext { - pub counter: AtomicUsize, -} -``` - -And it is created and registered using `.manage(AppContext::default())`. Rocket re-exports the serialization library serde so we can use `#[derive(Serialize)]` to generate serialization logic for the counter state, so no hand writing the serialization code unlike first method. - -In Rocket endpoint functions can just return `String` or `str` slices and Rocket handles it automatically. 
Rocket also comes with a `Json` return type, and because `AppContext` implements `Serialize` we can freely build a `Json` response from it. The `Json` wrapper also sets the `Content-Type` header automatically for us.

Rocket has a middleware implementation which it calls ["fairings"](https://rocket.rs/v0.5-rc/guide/fairings/#fairings). The example defines a `CounterFairing` which updates the counter state on every request. The initialization code is really slim: it sets up a config, and the Rocket instance is created using a builder pattern. Annotating the main function with `#[launch]` helps Rocket find the entry point and abstracts away how the server is spun up. Rocket also has really nice built-in logs which are great during development.

### Rocket Verdict

Since Rocket has more dependencies and requires more macro expansion, it takes a bit longer to build: **141.9s** (2m 21.9s) to compile from a cold start. A release build on top of the debug artifacts takes **147.0s** (2m 27.0s) to compile. Incremental builds are still fast, taking **3.3s** to compile after a small change to the response of an endpoint.

Using the same benchmark as Hyper, Rocket returned on average **43,899** requests per second in a release build with logging disabled - roughly **60%** of Hyper's throughput.

## Conclusion

Writing both of these examples was fun and there weren't any frustrations or problems using either library. Both are plenty fast enough that performance shouldn't be a concern.

Rocket's documentation is very good and explanatory. All of Hyper's API is well documented on its [docs.rs page](https://docs.rs/hyper/latest/hyper/). Both libraries are actively developed, with many commits and pull requests made in the last month.

Do you prefer the control and speed of Hyper, or the expressiveness of Rocket?

## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust

Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process.

If you want a batteries included and ops-free experience, [try out Shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/).
- -[^profile-machine]: The build and request profile machine is a vm with 2 cores and 7 GB RAM. Take the numbers with a grain of salt - -[^hyper-deps]: The dependencies for building the project `hyper = { version = "0.14", features = ["server", "tcp", "http1"] }` and `tokio = { version = "1.18.2", features = ["rt", "macros", "rt-multi-thread"] }` - -[^rocket-deps]: The dependencies for building the project `rocket = { version = "0.5.0-rc.2", features = ["json"] }` diff --git a/www/_blog/2022-06-09-the-builder-pattern.mdx b/www/_blog/2022-06-09-the-builder-pattern.mdx deleted file mode 100644 index a83b9606e..000000000 --- a/www/_blog/2022-06-09-the-builder-pattern.mdx +++ /dev/null @@ -1,495 +0,0 @@ ---- -title: Builders in Rust -description: In this post we do a deep dive into the builder pattern - an easy way to write cleaner and more readable code. -author: ben -tags: [rust, tutorial] -thumb: crab-builder.png -cover: crab-builder.png -date: "2022-06-09T15:00:00" ---- -This blog post is powered by shuttle! The serverless platform built for Rust. - -In this post, we'll be going over the "builder pattern". The builder pattern is an API design pattern for constructing instances of Rust structures. We'll be going over where it makes sense to use it and some of the benefits of applying it to your structs. - -## Examples - -Here are some examples of the builder pattern in common Rust crates: - -[`Command`](https://doc.rust-lang.org/std/process/struct.Command.html) from the Rust standard library -```rust -Command::new("cmd") - .args(["/C", "echo hello"]) - .output() -``` - -[`Rocket`](https://api.rocket.rs/v0.5-rc/rocket/struct.Rocket.html) in Rocket -```rust -rocket::build() - .mount("/hello", routes![world]) - .launch() -``` - -[`Response`](https://docs.rs/http/latest/http/response/struct.Response.html#method.builder) in the HTTP crate -```rust -Response::builder() - .status(200) - .header("X-Custom-Foo", "Bar") - .header("Set-Cookie", "key=2") - .body(()) - .unwrap(); -``` - -[Cargo uses the pattern internally for tests](https://github.com/rust-lang/cargo/blob/c6745a3d7fcea3a949c3e13e682b8ddcbd213add/tests/testsuite/build.rs#L74-L91) - -Ok - so let's dive into *what* the builder pattern actually is. - -## What is the builder pattern - -Given the following struct representation: -```rust -struct Message { - from: String, - content: String, - attachment: Option -} -``` -Using struct initialization syntax: -```rust -Message { - from: "John Smith".into(), - content: "Hello!".into(), - attachment: None -} -``` -Using a builder pattern: -```rust -Message::builder() - .from("John Smith".into()) - .content("Hello!".into()) - .build() -``` - -The builder pattern consists of: -- A function that generates a *intermediate builder structure* (`Message::builder()`) -- A chain of methods which set values on the builder: (`.from("John Smith".into()).content("Hello!".into())`) -- A final method which builds the final value from the intermediate structure `.build()` - -The structure of the builder pattern follows the functional programming design and has likeness of building iterators. - -The setting methods take a mutable reference to the builder and return the same reference (thus for chaining to work). 
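
As a minimal sketch of what those setters can look like for the `Message` example above (the exact `MessageBuilder` fields here are an assumption; the post walks through a full hand-written builder later on):

```rust
struct MessageBuilder {
    from: Option<String>,
    content: Option<String>,
    attachment: Option<String>,
}

impl MessageBuilder {
    // Each setter stores its value and hands back the same `&mut Self`,
    // which is what allows calls to be chained one after another.
    fn from(&mut self, from: String) -> &mut Self {
        self.from = Some(from);
        self
    }

    fn content(&mut self, content: String) -> &mut Self {
        self.content = Some(content);
        self
    }
}
```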
The handy part about working with mutable references is that it can be shared around between functions and if statements: - -```rust -fn build_message_from_console_input( - builder: &mut MessageBuilder -) -> Result<(), Box> { - let mut buffer = String::new(); - let mut stdin = std::io::stdin(); - stdin.read_line(&mut buffer).unwrap(); - - let split = buffer.rsplit_once("with attachment: "); - if let Some((message, attachment_path)) = split { - let attachment = - std::fs::read_to_string(attachment_path).unwrap(); - builder - .content(message.into()); - .attachment(attachment); - } else { - builder.text_filter(buffer); - } -} -``` - -Next we'll explore some places where the builder pattern can offer a lot of benefits. - -#### Constraints and computed data - -Given the following struct which represents running a certain function at a certain time: - -```rust -struct FutureRequest { - at: chrono::DateTime, - func: T -} -``` - -We don't want the program to be able to create a `FutureRequest` for a time in the past. - -With regular struct initialisation and public fields there isn't a good way to constrain the values being given to the struct[^type_constraints] - -```rust -let fq = FutureRequest { - at: chrono::DateTime::from_utc( - chrono::NaiveDate::from_ymd(-112, 2, 18) - .and_hms(11, 5, 6), - Utc - ), - func: || println!("𓅥𓃶𓀫"), -} -``` - -However with the builder pattern and a method for setting the time we can validate the value before it is assigned - -```rust -#[derive(Debug)] -struct SchedulingInPastError; - -impl ()> FutureRequestBuilder { - fn at( - &mut self, - date_time: chrono::DateTime - ) -> Result<&mut Self, SchedulingInPastError> { - if date_time < Utc::now() { - Err(SchedulingInPastError) - } else { - self.at = date_time; - Ok(self) - } - } -} -``` - -Maybe we don't even want an absolute time - but a relative time at some point in the future. - -```rust -impl ()> FutureRequestBuilder { - fn after(&mut self, duration: std::time::Duration) -> &mut Self { - self.at = Utc::now() + chrono::Duration::from_std(duration).unwrap(); - self - } -} -``` - -#### Encapsulating data - -Sometimes - we want to keep some fields hidden from the user: - -```rust -struct Query { - pub on_database: String, - // ... -} - -fn foo(query: &mut Query) { - // You want mutable access to call mutable methods on the query - // but want to prevent against: - query.on_database.drain(..); -} -``` - -So you could make the fields private and create a function which constructs the value (known as a constructor): - -```rust -impl Query { - fn new( - fields: Vec, - text_filter: String, - database: String, - table: String, - fixed_amount: Option, - descending: bool, - ) -> Self { - unimplemented!() - } -} - -let query = Query::new( - vec!["title".into()], - "Morbius 2".into(), - "imdb".into(), - "films".into(), - None, - false -); -``` -But this causes confusion at the call site. Its not clear whether "imdb" is the database, the table or the text_filter? [^vscode-inlay-hints]. 
- -The builder pattern makes it much easier to read and understand what's happening during initialisation: - -```rust -let query = Query::builder() - .fields(vec!["title".into()]), - .text_filter("Morbius 2".into()), - .database("imdb".into()), - .table("films".into()), - .fixed_amount(None), - .descending(false) - .build(); -``` - -#### Enums and nested data - -So far we've just discussed structs - let's talk about enums: - -```rust -enum HTMLNode { - Text(String), - Comment(String), - Element(HTMLElement) -} - -struct HTMLElement { - tag_name: String, - attributes: HashMap>, - children: Vec -} -``` - -Here there is builder associated with each variant: - -```rust -HTMLNode::text_builder() - .text("Some text".into()) - .build() - -// vs - -HTMLNode::Text("Some text".into()) - -// -- - -HTMLNode::element_builder() - .tag_name("p".into()) - .attribute("class".into(), "big quote".into()) - .attribute("tabindex".into(), "5".into()) - .content("Some text") - -// vs - -HTMLNode::Element(HTMLElement { - tag_name: "p".into(), - attributes: [ - ("class".into(), "big quote".into()), - ("tabindex".into(), "5".into()) - ].into_iter(), - children: vec![HTMLNode::Text("Some text".into())] -}) -``` - -## Building our own builder pattern - -Now let's build our own builders (no pun intended). In this example we have some users: - -```rust -#[derive(Debug)] -struct User { - username: String, - birthday: NaiveDate, -} - -struct UserBuilder { - username: Option, - birthday: Option, -} - -#[derive(Debug)] -struct InvalidUsername; - -#[derive(Debug)] -enum IncompleteUserBuild { - NoUsername, - NoCreatedOn, -} - -impl UserBuilder { - fn new() -> Self { - Self { - username: None, - birthday: None, - } - } - - fn set_username(&mut self, username: String) -> Result<&mut Self, InvalidUsername> { - // true if every character is number of lowercase letter in English alphabet - let valid = username - .chars() - .all(|chr| matches!(chr, 'a'..='z' | '0'..='9')); - - if valid { - self.username = Some(username); - Ok(self) - } else { - Err(InvalidUsername) - } - } - - fn set_birthday(&mut self, date: NaiveDate) -> &mut Self { - self.birthday = Some(date); - self - } - - fn build(&self) -> Result { - if let Some(username) = self.username.clone() { - if let Some(birthday) = self.birthday.clone() { - Ok(User { username, birthday }) - } else { - Err(IncompleteUserBuild::NoCreatedOn) - } - } else { - Err(IncompleteUserBuild::NoUsername) - } - } -} -``` - -Some things to look out for: -- Every set method must take a mutable reference in order to add the data to the backer -- The method must then return the mutable reference it has to allow for them to be chained. - -There are clones in the `build` method but if that method is only called once then it is optimized out by Rust. - -## Automatic approaches - -Similar to how Clone and Debug work, crates can create there own derive macros. [There are a lot of crates which can help with generating the builder pattern](https://lib.rs/keywords/builder). Let's take a look at a few: - -### [derive_builder](https://lib.rs/crates/derive_builder) - -```rust -#[derive(Debug, derive_builder::Builder)] -#[builder(build_fn(validate = "Self::validate"))] -struct Query { - fields: Vec, - text_filter: String, - database: String, - table: String, - fixed_amount: Option, - descending: bool, -} - -// Usage same as described patterns: -let query = Query::builder() - .table("...".into()) - // ... 
- .build() - .unwrap(); -``` - -This derive macro generates a new struct named the same as the original structure but postfixed with `Builder` (in this case `QueryBuilder`). - -Derive builder has the downside of a whole object validation rather than per field. As well as the error variant of construction being a `String`, which makes it harder to match on the error or return error data compared to a error enum: - -```rust -impl Query { - fn validate(&self) -> Result<(), String> { - let valid = self - .database - .as_ref() - .map(|value| value == "pg_roles") - .unwrap_or_default(); - - if valid { - Ok(()) - } else { - Err("Cannot construct Query on 'pg_roles'".into()) - } - } -} -``` - -### [typed-builder](https://lib.rs/crates/typed-builder) - -Typed-builder solves two problems with `derive_builder`: - -With `derive_builder` you can set a field twice (or more) -```rust -Query::builder() - .database("imdb".into()) - // ... - .database("fishbase".into()) -``` - -Which takes the value of the last set field which is likely a mistake. Although Rust can optimize out a write without a read it is very difficult to have a linter error for this mistake. `derive_builder` also delegates the check to whether all the required fields have been set to runtime. - -With `typed-builder` it has a very similar implementation but has a different output which Rust can reason about and check that they are no duplicate sets and the build is well formed (all the required fields have been set). - -The downside here is that it takes longer to expand the macros as there is more to generate. The added complexity also makes it more complicated to pass the builder around. - -### [Buildstructor](https://lib.rs/crates/buildstructor) - -Buildstructor is a annotation for an existing impl block. Rather than using the fields on a structure (as seen in the previous two) to generate code it builds wrappers around existing constructor functions: - -```rust -struct MyStruct { - sum: usize -} - -#[buildstructor::buildstructor] -impl MyStruct { - #[builder] - fn new(a: usize, b: usize) -> MyStruct { - Self { sum: a + b } - } -} - -MyStruct::builder().a(1).b(2).build(); -``` - -Similar to `typed-builder` it generates intermediate staging structs for building which has the benefits of compile time checking that all the fields exist. However that comes again with the drawback of slower compile time and less flexibility when passing it around. - -Typed builder looks to be more compatible with the Rust language which allows it to support async builders! It's definitely the more interesting one of the bunch and I will be looking to play with with it future projects. - -### Alternative patterns - -If you just want to build a struct which has a large amount of default fields, using `..` (base syntax) with the [Default](https://doc.rust-lang.org/std/default/trait.Default.html) trait (whether a custom implementation or the default one with `#[derive(Default)]`) will do: - -```rust -#[derive(Default)] -struct X { - a: u32, - b: i32, - c: bool, -} - -X { a: 10, ..Default::default() } -``` - -If you want computation, constraints, encapsulation and named fields you could create a intermediate struct which can be passed to a constructor: - -```rust -struct Report { - title: String, - on: chrono::DateTime - // ... -} - -struct ReportArguments { - title: String, - on: Option - // ... -} - -impl Report { - fn new_from_arguments(ReportArguments { title, on }: ReportArguments) -> Result { - if title. 
- .chars() - .all(|chr| matches!(chr, 'a'..='z' | '0'..='9')) - { - Ok(Self { - title, - on: chrono.unwrap_or_else(|| todo!()) - }) - } else { - Err("Invalid report name") - } - } -} -``` - -However both of these don't the use the nice chaining syntax. - -## Conclusion - -The builder pattern can help you write cleaner, more readable APIs, and it turn help the consumers of your APIs write better code. We can apply constraints to make sure that our structs are initialised correctly with a clean API enforcing the contract. - -One thing to remember is that code is read *much* more than it's written - so it's worth going out of our way to make our code just that little bit more pleasant to read. - -## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust - -Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process. - -If you want a batteries included and ops-free experience, [try out Shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/). - -
- -[^type_constraints]: I partially agree with this, there are ways to design your types to be constrained. Here we could create a `struct FutureEvent(chrono::DateTime)` structure where the constraint is constructing the `FutureEvent` type rather than leaving the constraint to the field. But there are lots of scenarios where that isn't the case. - -[^vscode-inlay-hints]: With vscode and rust analyzer there is a feature called [inlay hints](https://rust-analyzer.github.io/manual.html#inlay-hints) which shows the names of parameters in the editor. While this is great this is a feature specific to vscode at the moment. You won't see the hints on GitHub diffs and in other text editors. diff --git a/www/_blog/2022-06-16-a-short-introduction-to-async-rust.mdx b/www/_blog/2022-06-16-a-short-introduction-to-async-rust.mdx deleted file mode 100644 index c031ad81a..000000000 --- a/www/_blog/2022-06-16-a-short-introduction-to-async-rust.mdx +++ /dev/null @@ -1,169 +0,0 @@ ---- -title: A short introduction to async Rust -description: Async code can be mystifying - this posts goes over the basics and helps us get comfortable with some of the core concepts of asynchronous Rust -author: ben -tags: [rust, tutorial, async] -thumb: introduction-to-async-snippet.png -cover: introduction-to-async-snippet.png -date: "2022-06-16T15:00:00" ---- - -This blog post is powered by shuttle! The serverless platform built for Rust. - -In this post, we'll be going over the state of async code. Before this post the only async Rust I had written had been copied and pasted from Stack Overflow so I wanted to dig deeper into what async code is and how to start writing it. - -## What is asynchronous code? - -To understand what asynchronous code is - let's first talk about synchronous code. - -In synchronous code, statements run in a sequential order: - -```rust -println!("Hello World"); -let cargo_toml_content = std::fs::read_to_string("Cargo.toml").unwrap(); -println!("'Cargo.toml':\n{}", cargo_toml_content); -``` - -The above statements are executed in a well defined order - one after the other, top to bottom. `Hello World` is printed, followed by the contents of `Cargo.toml` being read and then printed. - -This paradigm is perfectly fine under normal operation - but sometimes our code requires the current context to stop while it *waits* for something else - this is generally known as **blocking**. - -```rust -for index in 1..=100 { - let result = sync_http_client.get(format!("www.example.com/items/{}", index)); -} -``` - -In the above example, every loop iteration a request is made to the infamous `example.com`. - -The problem here is that `sync_http_client.get` is blocking. Blocking can occur for lots of reasons: - -- waiting for the file system -- waiting for the network -- waiting for some database transaction -- waiting for some time to occur -- etc. - -When a program is blocked it is doing nothing but waiting for a response to return to continue execution. If we need to work on anything else - we're kinda stuck. In this example the loop cannot run the next iteration / index until the request in the previous one has fully finished. While making and reading a single request is relatively fast, the code in the loop runs 100 times and makes 100 requests so the whole loop takes a while to run. - -**What if there was a way to start additional requests without having to wait for the previous to have finished its request?** - -This is where asynchronous programming comes in. Asynchronous programming is -about *not* blocking. 
Let's say you've ordered a mountain bike for a ride on -the weekend. You don't need to spend all your time on the doorstep waiting -for the delivery - you can continue living your life doing whatever. An async runtime allows you to continue whatever you are doing and acts as a doorbell, *awaking* you to the door when the delivery arrives. - -We will get more in to how to write async later but the essence is that we can change the loop to the following to start up 100 requests without having requiring the previous request to have finished: - -```rust -let mut handles = Vec::new(); -for index in 1..=100 { - let handle = tokio::spawn( - async_http_client.get(format!("www.example.com/items/{}", index)) - ); - handles.push(handle); -} -for handle in handles { - let result = handle.await; -} -``` - -### Parallelization and concurrency - -Before we go further we should note that async is *not* for processing -expensive operations. It's only beneficial for IO in which data comes from somewhere further away than the RAM and when there is a lot of it. For process expensive operations parallelization is beneficial. - -**Parallelization is running multiple things at the same time. Concurrency is handling multiple things at the same time.** - -Async is designed for concurrency. Tokio's default runtime utilises threads so we also benefit from parallelization. - -### Benchmarking - -Comparing an example written using async vs the same example written -synchronously - for a large numbers of concurrent web requests, the async -version is ~60% faster that synchronous requests and ~20% faster than spinning -up a thread for each request[^benchmarks]. - -| Command | Mean [s] | Min [s] | Max [s] | Relative | -|:---|---:|---:|---:|---:| -| `./sync` | 1.070 ± 0.013 | 1.060 | 1.085 | 1.65 ± 0.09 | -| `./threads` | 0.787 ± 0.007 | 0.782 | 0.795 | 1.22 ± 0.06 | -| `./async` | 0.732 ± 0.016 | 0.721 | 0.750 | 1.13 ± 0.06 | -| `./async_threads` | 0.646 ± 0.033 | 0.612 | 0.677 | 1.00 | - -### Getting started with asynchronous Rust - -Rust does not have a runtime[^rust-runtime] and so doesn't have a standard -executor (at least for now). There are several popular executor runtimes. These are crates like any other library so you can use them by adding them to the `Cargo.toml`. For this demo we will pick [Tokio](https://tokio.rs/) as it the most popular executor. Other runtimes exist and prioritize different things. For example [async-std](https://docs.rs/async-std/latest/async_std/index.html) is focused on an async version of Rust's standard library and [smol](https://docs.rs/smol/1.2.5/smol/) which is focused on being lightweight. Overall Rust is designed to stay out the way, so it lets you pick which executor you run. - -To start we will run `cargo new`. Then add `tokio = { version = "1.19", features = ["full"] }` to `Cargo.toml` (or if you have [cargo-edit](https://github.com/killercup/cargo-edit) installed: `cargo add tokio -F full`) - -```rust -#[tokio::main] -async fn main() { - println!("Hello from an async function"); -} -``` - -### Async functions - -Function which contain *async things* are marked as `async`, this is done by prefixing the function with `async`: - -```rust -async fn do_thing() { - let result = some_async_function().await; - println!("{}", result); -} -``` - -In an async function you can use `.await`. You add it on to the end of a -call of async function and it will now block and return the actual value. - -Async functions (and async blocks) return Futures. 
A [Future](https://doc.rust-lang.org/std/future/trait.Future.html) is a function which returns a [Poll](https://doc.rust-lang.org/std/task/enum.Poll.html). Poll is a bit like a `Result` or `Option`, it has two variants one is a final value and the other variant is that the value is still pending. Futures are lazy, there are two ways to run a future: `tokio::spawn` to spawn eagerly and get a [JoinHandle](https://docs.rs/tokio/latest/tokio/task/struct.JoinHandle.html) or `.await`. Rust warns against *unawaited* futures. - -### Writing async operations - -All IO and filesystem functions in Rust's standard library are synchronous (and so block). Tokio provides async versions of the synchronous io in Rusts's standard library. - -```rust -let contents = tokio::fs::read("Cargo.toml").await; -``` - -### Writing concurrency - -As discussed earlier the problem with blocking calls is only one thing can run. - -```rust -let weather = client.get("https://api.darksky.net/forecast").await; -let news = client.get("https://api.nytimes.com/svc/topstories").await; -``` - -With `tokio::join!` we can run the requests start both requests and *await* for their results concurrently. - -```rust -let weather = client.get("https://api.darksky.net/forecast"); -let news = client.get("https://api.nytimes.com/svc/topstories"); -let (weather, news) = tokio::join!(weather, news).await; -``` - -Rather than getting weather **then** getting the news. The above code starts both requests and then waits for the response from both, joining them in a resulting tuple. - -
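
For completeness, here is a runnable sketch of the same idea. It assumes the `reqwest` crate for the HTTP client (the post never names one), and note that `tokio::join!` awaits its arguments itself, so it already yields a tuple of results rather than another future to `.await`:

```rust
use std::error::Error;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // Assumption: using `reqwest` as the async HTTP client.
    let client = reqwest::Client::new();

    // Create both futures first; nothing runs until they are polled.
    let weather = client.get("https://api.darksky.net/forecast").send();
    let news = client.get("https://api.nytimes.com/svc/topstories").send();

    // `tokio::join!` drives both futures concurrently and returns their outputs.
    let (weather, news) = tokio::join!(weather, news);

    println!("weather status: {}", weather?.status());
    println!("news status: {}", news?.status());
    Ok(())
}
```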

To keep this post short and to the basics, we will stop here. If you want to read more about writing async Rust, there is the [official Rust async book](https://rust-lang.github.io/async-book/) and [Tokio has a brilliant tutorial](https://tokio.rs/tokio/tutorial).

### Conclusion

Rust async is very much usable. The async side of the Rust language is still in heavy development and can only get better from here. https://areweasyncyet.rs/ gives a good overview of the status of async language features and other things in the async ecosystem. This post is an introductory look into writing async Rust. Maybe *await* a future post digging deeper into async in Rust!

## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust

Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process.

If you want a batteries included and ops-free experience, [try out Shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/).
- -[^rust-runtime]: Technically there are panic handlers and things which is runtime https://doc.rust-lang.org/reference/runtime.html - -[^benchmarks]: We had a bit of difficult showing beneficial results for async and still unsure whether these results are a good reflection of the benefits of async. You can view the results [here](https://github.com/kaleidawave/sync-vs-threads-vs-async-rust-bench/actions/runs/2517638422) and the full benchmarking code [here](https://github.com/kaleidawave/sync-vs-threads-vs-async-rust-bench). diff --git a/www/_blog/2022-06-23-generative-metatag-images.mdx b/www/_blog/2022-06-23-generative-metatag-images.mdx deleted file mode 100644 index 81d5578f3..000000000 --- a/www/_blog/2022-06-23-generative-metatag-images.mdx +++ /dev/null @@ -1,297 +0,0 @@ ---- -title: Generative metatag images in Rust -description: Creating images in Rust using svgs -author: ben -tags: [rust, tutorial] -thumb: generative-meta-images.png -cover: generative-meta-images.png -date: "2022-06-23T15:00:00" ---- - -This blog post is powered by shuttle! The serverless platform built for Rust. - -### What open graph tags are - -Links are bare and unreadable. They can contain symbols to parse and cannot contain spaces. - -`https://www.shuttle.rs/blog/2022/06/16/a-short-introduction-to-async-rust` - -The above url isn't the most user friendly way of understanding what the post contains. With referrer parameters and such it only gets more unreadable. - -Meta tags are special HTML elements that you can add to HTML responses which show nicer previews: - -![](https://i.imgur.com/RzgMfUs.png) - -You can get this additional preview by setting the following HTML elements inside the `` tag. - -```html - - - - - - - - - - - - - - - - - -``` - -These tags are easily scrapable[^scrapable] by bots which allow them to be added to places where links can be shared, such as messages and tweets. Unfortuantly meta tags don't really have a specification which is why it is best to include both the [open graph protocol](https://ogp.me/) and the [twitter card](https://developer.twitter.com/en/docs/twitter-for-websites/cards/guides/getting-started) specific tags. - -When links are shared with these tags in the response, the platform can add the adornments to the message. - -These previews make it easier to see what the content is before following the link. https://metatags.io/ is a great site if you want to preview what the meta tags look like. - -### Creating open graph tags and images - -The specific tag of interest here is: - -```html - - -``` - -Here the `content` is a url to an image. In the case of a blog post, we create specific graphics for it, upload it as an asset then set the URL to the path of the uploaded asset. - -This is fine for static content. However for *dynamic* pages which may be user generated content, manual image creation isn't really possible. You can also use this method if you don't have individual custom meta images for each of your static posts. - -A while back GitHub added custom images for links on pull request, which includes information about the pull request. They even wrote a [blog post about how they did it]( -https://github.blog/2021-06-22-framework-building-open-graph-images/). - -I really like the result, however wondered if there was a alternative to the way they implemented it. GitHub uses a headless browser[^headless-browser] to do this which is less portable and includes spinning up a execessive process to generate a simple image. 
- -In this post we'll attempt to create similar graphics using lowerlevel libraries and Rust. - -### Image generation in Rust - -To easily create graphics we will be using SVG. It's the most used format for vector graphics and supports embedding text, images and shapes. Since it's a vector graphic, shapes and text remain crisp no matter the size of the output image. It's readable and easily modifiable. - -The problem is that open graph images don't support displaying SVGs in previews due to the fact they are more complex of a format to render. - -So we have to make a step to turn our SVGs into another image format. - -### The scalable vector graphic format - -We will start exploring the format with a simple SVG with three shapes in different colors and a rectangle used to give the graphic a white background. - -```svg - - - - - - -``` - -SVGs renders line by line, so the white box will be at the behind, rendering the shapes in front. - -![](/images/blog/metatag-shapes.png) - -#### Turning SVGS into WEBP images with Rust - -We can draw images in Rust using [resvg](https://docs.rs/resvg/latest/resvg) which handles rendering SVGs. It expects a parsed svg tree from [usvg](https://docs.rs/usvg/latest/usvg) so we'll also be needing that. Internally it uses [tiny_skia](https://docs.rs/tiny-skia/latest/tiny_skia/) which is a "tiny Skia subset ported to Rust". Resvg and tiny skia have all the building blocks we need to do basic image generation. We'll also use Pixmap which holds the pixels that we will be generating and then encode it in webp format[^png-output] to minimize output file size. - -`cargo add resvg tiny-skia usvg webp` - -```rust -use resvg::render; -use std::{error::Error, fs, time::Instant}; -use tiny_skia::{Pixmap, Transform}; -use usvg::{Options, Tree}; -use std::fs; - -const WIDTH: u32 = 1200; -const HEIGHT: u32 = 630; - -fn main() -> Result<(), Box> { - // Read in the svg template we have - let svg = include_str!("shapes.svg"); - - // Create a new pixmap buffer to render to - let mut pixmap = Pixmap::new(WIDTH, HEIGHT) - .ok_or("Pixmap allocation error")?; - - // Use default settings - let mut options = Options::default(); - - // Build our string into a svg tree - let tree = Tree::from_str(svg, &options.to_ref())?; - - // Render our tree to the pixmap buffer, using default fit and transformation settings - render( - &tree, - usvg::FitTo::Original, - Transform::default(), - pixmap.as_mut(), - ); - - // Encode our pixmap buffer into a webp image - let encoded_buffer = - webp::Encoder::new(pixmap.data(), webp::PixelLayout::Rgba, WIDTH, HEIGHT).encode_lossless(); - let result = encoded_buffer.deref(); - - // Write the result - fs::write("image.webp", result)?; - - Ok(()) -} -``` - -The above code generate a `image.webp` with the colorful shape image shown above. - -## Going further - -Lets add some text to the graphic. We could use the default Times New Roman font - but let's get a little more fancy. [Google Fonts](https://fonts.google.com/) is a great resource for free font files. You can download any of the families on there and extract the specific `.ttf` font you want in and include it in the binary using `include_bytes!()`. In this demo I am using [Inter](https://fonts.google.com/specimen/Inter). - -```rust -// ... -let mut options = Options::default(); - -options - .fontdb - .load_font_data(include_bytes!("Inter.ttf").to_vec()); - -// ... -``` - -### Templating - -As our page will be dynamic, we'd like to insert strings defined in our Rust code onto the SVG. 
- -To do this we'll use the templating engine [liquid](https://crates.io/crates/liquid). (`cargo add liquid`) - -```svg - - - - {{ text }} - - -``` -We can add a new `text` node that is positioned in the center of the graphic. Using `font-family="Inter"` we can specify the font to be Inter. Liquid uses double braces ``{{ ... }}`` for interpolation. - -In our Rust code we'll change the string we use to build the tree to be the output of our of liquid template. The `liquid::object!` macro sets the data we we'll be rendering. - -```rust -let template = liquid::ParserBuilder::with_stdlib() - .build() - .unwrap() - .parse(include_str!("template.svg")) - .unwrap(); - -let globals = liquid::object!({ - "text": "test" -}); - -let svg = template.render(&globals).unwrap(); -// Build our string into a svg tree -let tree = Tree::from_str(&svg, &options.to_ref())?; -``` - -Which will render the following: - -![](/images/blog/metatag-test-text.png) - -### Adding images - -So far we have seen shapes and text. We're gonna step it up a bit by adding an image to the SVG. There are many ways to add a images to an SVG but we will use `` - -```svg - - - - - - - - - - {{ text }} - - -``` - -The pattern includes a `` with a href that points to the one and only ferris. - -However `resvg` looks up images in the filesystem by default so we have to rewrire the handler which turns paths into binary image representation. We can do that using `reqwest` and its blocking client (add `reqwest` using `cargo add reqwest -F blocking`). - -We change the options to a custom function which gets the response, figures out the encoding and pulls out the images bytes: - -```rust -let mut options = Options { - image_href_resolver: ImageHrefResolver { - resolve_string: Box::new(move |path: &str, _| { - let response = reqwest::blocking::get(path).ok()?; - let content_type = response - .headers() - .get("content-type") - .and_then(|hv| hv.to_str().ok())? - .to_owned(); - let image_buffer = response.bytes().ok()?.into_iter().collect::>(); - match content_type.as_str() { - "image/png" => Some(ImageKind::PNG(Arc::new(image_buffer))), - // ... excluding other content types - _ => None, - } - }), - ..Default::default() - }, - ..Default::default() -}; -``` - -And now we have: - -![](/images/blog/metatag-ferris.png) - -## Benchmarking - -Compared to the headless browser technique this process is faster. While doing a lot of the same things that the headless browser process was doing, we have picked out the only part we wanted, the svg renderer. - -With some rough benchmarks the Rust loading, rendering and encoded was two times faster than the nodejs puppeteer equivalent (100ms vs 200ms). - -This time accounts for the startup time in the nodejs version. If you aren't retaining the browser window then the results are even more noticeable. Generating one of images (*cold start*) the Rust version is 7x faster. - -Aside from rendering performance the Rust version is self contained, only the compiled binary is needed to generate the image. No having to worry about whether chromium is in the environment. This is huge benefit if you are doing image generation in a serverless environment. - -Also the headless browser version was harder to work with, importing fonts and setting the output size was considerably more complicated. - -## Conclusion - -Hopefully this was a interesting post and taught some things about generating images in Rust. This technique can be used for other types of image generation not just for meta tag results. 

To complete the result, all you need to do is hook up a service behind the URL in the meta tag. Rather than saving the image to the file system, you would send the bytes back over the wire. For images that don't change, you should cache the generated images so that they aren't regenerated on every request.

[Full code for the demo is here](https://github.com/kaleidawave/image-generation-rust)

And if you are looking for a service to host your new procedurally generated meta tag images, why not try shuttle:

## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust

Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process.

If you want a batteries included and ops-free experience, [try out Shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/).
- -[^png-output]: If you just want png output then: `let encoded_buffer = pixels.encode_png().unwrap();` - -[^scrapable]: HTML elements in a response can easily parsed without having to run JavaScript - -[^headless-browser]: A browser which is controlled via code. [puppeteer](https://developer.chrome.com/docs/puppeteer/) and [selenium](https://www.selenium.dev/) are good examples. diff --git a/www/_blog/2022-06-30-error-handling.mdx b/www/_blog/2022-06-30-error-handling.mdx deleted file mode 100644 index 06d45104e..000000000 --- a/www/_blog/2022-06-30-error-handling.mdx +++ /dev/null @@ -1,523 +0,0 @@ ---- -title: More than you've ever wanted to know about errors in Rust -description: A (mostly) complete guide to error handling in Rust -author: ben -tags: [rust, tutorial] -thumb: ferris-error-handling.png -cover: ferris-error-handling.png -date: "2022-06-30T18:00:00" ---- - -This blog post is powered by shuttle! The serverless platform built for Rust. - ---- - -To quote the Rust Book, 'errors are a fact of life in software'. This post goes -over how to handle them. - -Before talking about recoverable errors and the `Result` type, let's first -touch on unrecoverable errors - a.k.a panics. - -## Panics - -[Panics](https://doc.rust-lang.org/std/macro.panic.html) are exceptions a program can throw. It stops all execution in the current thread. When a panic is thrown it returns a short description of what went wrong as well as information about the position of the the panic. - -```rust -fn main() { - panic!("error!"); - println!("Never reached :("); -} -``` - -Running the above causes: - -``` -thread 'main' panicked at 'error!', examples\panics.rs:2:5 -``` - -They are similar to `throw` in JavaScript and other languages, in that they -don't require an annotation on the function to run and they can pass through -function boundaries. However in Rust, panics cannot be recovered from, there is no way to incept a panic in the current thread. - -```rust -fn send_message(s: String) { - if s.is_empty() { - panic!("Cannot send empty message"); - } else { - // ... - } -} -``` - -The `send_message` function is fallible (can go wrong). If this is called -with an empty message then the program stops running. There is no way for the callee to track that an error has occurred. - -For recoverable errors, Rust has a type for error handling in the standard -library called a **`Result`**. It is a generic type, which means the result -and error variant can basically be whatever you want. - -```rust -pub enum Result { - Ok(T), - Err(E), -} -``` - -## Basic error creation and handling - -At the moment our `send_message` function doesn't return anything. This means no information can be received by the callee. We can change the definition to instead return a `Result` and rather than panicking we can early return a `Result::Err`. - -```rust -fn send_message(s: String) -> Result<(), &'static str> { - if s.is_empty() { - // Note the standard prelude includes `Err` so the `Result::Err` and `Err` are equivalent - return Result::Err("message is empty") - } else { - // ... - } - Ok(()) -} -``` - -Now our function actually returns information about what went wrong we can handle it when we call it: - -```rust -if let Err(send_error) = send_message(message) { - show_user_error(send_error); -} -``` - -### Rust knows when a Result is unused. - -In the above example we inspect the value of the item and branch on it. 
-However, if we didn't inspect and handle the returned Result then the Rust -compiler gives us a helpful warning about it so that you don't forget to -explicitly deal with errors in your program. - -``` -| send_message(); -| ^^^^^^^^^^^^^^^ -= note: `#[warn(unused_must_use)]` on by default -= note: this `Result` may be an `Err` variant, which should be handled -``` - -### Examples of `Result` in the standard library - -`Result` can be found in most libraries. One of my favorite examples is the -return type of the [FromStr::from_str](https://doc.rust-lang.org/std/str/trait.FromStr.html#tymethod.from_str) trait method. With [str::parse](https://doc.rust-lang.org/std/primitive.str.html#method.parse) (which uses the `FromStr` trait) we can do the following: - -```rust -fn main() { - let mut input = String::new(); - std::io::stdin().read_line(&mut input).unwrap(); - - match input.trim_end().parse::() { - Ok(number) => { - dbg!(number); - } - Err(err) => { - dbg!(err); - } - }; -} -``` - -(We'll ignore the `unwrap` for now 😉) - -```js -$ cargo r --example input -q -10 -[examples\input.rs:7] number = 10.0 - -$ cargo r --example input -q -100 -[examples\input.rs:7] number = 100.0 - -$ cargo r --example input -q -bad -[examples\input.rs:10] err = ParseFloatError { - kind: Invalid, -} -``` - -Here we can see when we type in a number we get a `Ok` variant with the number else we get a [ParseFloatError](https://doc.rust-lang.org/std/num/struct.ParseFloatError.html) - -## Files, networks and databases - -**All errors occur when you interact with the outside world or things -outside the Rust runtime**. One of the places where a lot of errors can -occur is interacting with the file system. The `File::open` function -attempts to open a file. This can fail for a variety of reasons. The -filename is invalid, the file doesn't exist or you simply don't have -permission to read the file. Notice the errors are well-defined and known -before-hand. You can even access the error variants with the [`kind`](https://doc.rust-lang.org/std/io/struct.Error.html#method.kind) function -and in order to implement your program logic or return an instructive error -message to the user. - -### Aliasing Results and errors - -When you're working on a project you'll often find yourself repeating -yourself when it comes to return types in function signatures: - -```rust -fn foo() -> Result { -... -} -``` - -To give a concrete example, all functions which operate on the file system have -the same -errors (file not exists, invalid permissions). [io::Result](https://doc.rust-lang.org/std/io/type.Result.html) is a alias over a result but means that every function does not have to specify the error type: - -```rust -pub type Result = Result; -``` - -If you have an API which has a common error type, you may want to -consider this pattern. - -### The question mark operator - -One of the best things about Results is the question mark operator, The -question mark operator can short circuit Result -error values. Let's look at a simple function which uploads text from a file. -This can error in a bunch of different ways: - -```rust -fn upload_file() -> Result<(), &'static str> { - let text = match std::fs::read_to_string("file.txt").map_err(|_| "read file error") { - Ok(value) => value, - Err(err) => { - return err; - } - }; - if let Err(err) = upload_text(text) { - return err - } - Ok(()) -} -``` - -Hang on, we're writing Rust not Go! 
- -If a `?` is postfixed on to a Result (or anything that implements [`try`](https://doc.rust-lang.org/std/ops/trait.Try.html) so also `Option`) we can -obtain a functionally equivalent outcome with a much more readable and -concise syntax. - -```rust -fn upload_file() -> Result<(), &'static str> { - let text = std::fs::read_to_string("file.txt").map_err(|_| "read file error")?; - upload_text(text)?; - Ok(()) -} -``` - -As long as the calling function also returns a `Result` with the -same `Error` type, `?` saves a ton of explicit code being written. Moreover, -the question-mark implicitly runs [Into::into](https://doc.rust-lang.org/std/convert/trait.Into.html#tymethod.into) -(which is automatically implemented for [From](https://doc.rust-lang.org/std/convert/trait.From.html) implementors) on the error value. So we don't have to worry about converting the error before we use the operator: - -```rust -// This derive an into implementation for `std::io::Error -> MyError` -#[derive(derive_enum_from_into::EnumFrom)] -enum MyError { - IoError(std::io::Error) - // ... -} - -fn do_stuff() -> Result<(), MyError> { - let file = File::open("data.csv")?; - // ... -} -``` - -We will look at more patterns for combining error types later! - -## [The Error trait](https://doc.rust-lang.org/std/error/trait.Error.html) - -The [Error](https://doc.rust-lang.org/std/error/trait.Error.html#) trait is -defined in the standard library. It basically represents the expectations of -error values - values of type `E` in `Result`. -[The Error trait is implemented for many errors](https://doc.rust-lang.org/std/error/trait.Error.html#implementors) -and provides a unified API for information on errors. The Error trait is a bit needy and requires that the error implements both [Debug](https://doc.rust-lang.org/std/fmt/trait.Debug.html) and [Display](https://doc.rust-lang.org/std/fmt/trait.Display.html). While it can be cumbersome to implement we will see some helper libraries for doing so later on. - -In the standard library [VarError](https://doc.rust-lang.org/std/env/enum.VarError.html) (for reading environment variables) and [ParseIntError](https://doc.rust-lang.org/std/num/struct.ParseIntError.html) (for parsing a string slice as a integer) are different errors. When we interact them we need to differentiate between the types because they have different properties and different stack sizes. To build a combination of them we could build a sum type using an enum. Alternatively we can use dynamically dispatched traits which handle varying stack sized items and other type information. - -Using the above mentioned try syntax (`?`) we can convert the above errors to be dynamically dispatched. This makes it easy to handle different errors without building enums to combine errors. - -```rust -fn main() -> Result<(), Box> { - let key = std::env::var("NUMBER_IN_ENV")?; - let number = key.parse::()?; - println!("\"NUMBER_IN_ENV\" is {}", number); - Ok(()) -} -``` - -While this is an easy way to handle errors, it isn't easy to differentiate -between the types and can make handling errors in libraries hard. More information on this later. - -### The Error trait vs Results and enums - -One thing when using an enum is we can use `match` to branch on the enum -error variants. 
On the other hand, with the `dyn` trait unless you go down -the down casting path it is very hard to get specific information about the -error: - -```rust -match my_enum_error { - FsError(err) => { - report_fs_error(err) - }, - DbError(DbError { err, database }) => { - report_db_error(database, err) - }, -} -``` - -For reusable libraries it is better to use enums to combine errors so that -users of your library can handle the specifics themselves. But for CLIs and -other applications using the trait can be a lot simpler. - -## Methods on Result - -Result and Option contains many useful functions. Here are some functions I -commonly use: - -### [Result::map](https://doc.rust-lang.org/std/result/enum.Result.html#method.map) - -This maps or converts the `Ok` value if it exists. This can be more concise -than using the `?` operator. - -```rust -fn string_to_plus_one(s: &str) -> Result { - s.parse::().map(|num| num + 1) -} -``` - -### [Result::ok](https://doc.rust-lang.org/std/result/enum.Result.html#method.ok) - -Useful for converting Results to Options - -```rust -assert_eq!(Ok(2).ok(), Some(2)); -assert_eq!(Err("err!").ok(), None); -``` - -### [Option::ok_or_else](https://doc.rust-lang.org/std/option/enum.Option.html#method.ok_or_else) - -Useful for going the other way in converting from Options to Results - -```rust -fn get_first(vec: &Vec) -> Result<&i32, NotInVec> { - vec.first().ok_or_else(|| NotInVec) -} -``` - -### Error handling for iteration - -Using results in iterator chains can be a little confusing. Luckily `Result` -implements [collect](https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.collect). -We can use this to short circuit an iterator if an error -occurs. In the following, if all the `parse`s succeed then we get collected vec of numbers result. If one fails then it instead returns a Result with the failing Err. - -```rust -fn main() { - let a = ["1", "2", "not a number"] - .into_iter() - .map(|a| a.parse::()) - .collect::, _>>(); - dbg!(a); -} -``` - -``` -[examples\iteration.rs:6] a = Err( ParseFloatError { kind: Invalid, }, ) -``` - -Removing the `"not a number"` entry - -``` -[examples\iteration.rs:3] a = Ok( [ 1.0, 2.0, ], ) -``` - -Because Rust iterators are *piecewise* and lazy the iterator can short circuit without evaluating parse on any of the later items. - -## More Panic - -### Special panics - -`todo!()`, `unimplemented!()`, `unreachable!()` are all wrappers for `panic! -()` which but are specialized to their situation. Panics have a special [`!`](https://doc.rust-lang.org/reference/types/never.html) -type, called the 'never type', which represents the result of computations -that never complete (also means it can be passed anywhere): - -```rust -fn func_i_havent_written_yet() -> u32 { - todo!() -} -``` - -Sometimes there is Rust code which the compiler cannot properly infer is -valid. For this type of situation, the `unreachable!` panic can be used: - -```rust -fn get_from_vec_else_zero(a: Vec) -> i32 { - if let Some(value) = a.get(2) { - if let Some(prev_value) = a.get(1) { - prev_value - } else { - unreachable!() - } - } else { - 0 - } -} -``` - -### Unwrapping - -`unwrap` is a method on `Result` and `Option`. They return the `Ok` or `Some` -variant or else panic... - -```rust -// result.unwrap() - -let value = if let Ok(value) = result { - value -} else { - panic!("Unwrapped!") -}; -``` - -The uses-cases for this are developer error and situations the compiler can't -quite figure out. 
-If you are just trying something and don't want to set up a full error handling system then they can be used to ignore compiler warnings. - -Even if the situation calls for `unwrap` you are better off using `expect` -which has an accompanying message - you'll be thanking your past self when the -`expect` error message helps you find the root cause of an issue 2 weeks -down the line. - -### Panics in the standard library - -It is important to note that some of the APIs in the standard library *can* -panic. You should look out for these annotations in the docs. One of them is -[Vec::remove](https://doc.rust-lang.org/std/vec/struct.Vec.html#panics-6). -If you use this you should ensure that the argument is in its indexable range. - -```rust -fn remove_at_idx(a: usize, vec: &mut Vec) -> Option { - if a < idx.len() { - Some(vec.remove(a)) - } else { - None - } -} -``` - -## Handling multiple errors and helper crates: - -Handling errors from multiple libraries and APIs can become challenging as -you have to deal with a bunch of different types. They are different sizes -and contain different information. To unify the types we have to build a sum -type using an enum, in order to ensure they have the same size at compile time. - -```rust -enum Errors { - FileSystemError(..), - StringParseError(..), - NetworkError(..), -} -``` - -Some crates for making creating these unifying enums easier: - -### [thiserror](https://crates.io/crates/thiserror) - -`thiserror` provides a derive implementation which adds the Error trait for us. -As previously mentioned, to implement Error we have to implement display and -thiserrors' `#[error]` attributes provide templating for the displayed errors. - -```rust -use thiserror::Error; - -#[derive(Error, Debug)] -pub enum DataStoreError { - #[error("data store disconnected")] - Disconnect(#[from] io::Error), - #[error("the data for key `{0}` is not available")] - Redaction(String), - #[error("invalid header (expected {expected:?}, found {found:?})")] - InvalidHeader { - expected: String, - found: String, - }, - #[error("unknown data store error")] - Unknown, -} -``` - -### [anyhow](https://crates.io/crates/anyhow) - -`anyhow` provides an ergonomic and idiomatic alternative to explicitly -handling errors. It is similar to the previously mentioned error trait but -has additional features such as adding context to thrown errors. - -This is really, really, useful when you want to convey errors to an -application's users in a context-aware fashion: - -```rust -use anyhow::{bail, Result, Context}; - -fn main() -> Result<()> { - println!("Hello World!"); - func1().context("while calling func1")?; - Ok(()) -} - -fn func1() -> Result<()> { - func2().context("while calling func2") -} - -fn func2() -> Result<()> { - bail!("Hmm something went wrong ") -} -``` - -``` -Error: while calling func1 - -Caused by: - 0: while calling func2 - 1: Hmm something went wrong -``` - - -Similar to the `Error` trait, `anyhow` suffers from the fact you can't match on -`anyhow`'s result error variant. This is why it is suggested in `anyhow`'s -docs to use `anyhow` for applications and `thiserror` for libraries. - -### [eyre](https://crates.io/crates/eyre) - -Finally, `eyre` is a fork of `anyhow` and adds more backtrace information. -It's highly customisable and using [color-eyre](https://lib.rs/crates/color-eyre) we get colors in our panic messages - a little color -always brightens up the dev experience. - -``` -The application panicked (crashed). 
-Message: test -Location: examples\color_eyre.rs:6 - - ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ BACKTRACE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - ⋮ 13 frames hidden ⋮ - 14: core::ops::function::FnOnce::call_once,eyre::Report>, 1, 18446744073709551615, Err> (*)(),tuple$<> > - at /rustc/7737e0b5c4103216d6fd8cf941b7ab9bdbaace7c\library\core\src\ops\function.rs:227 - ⋮ 17 frames hidden ⋮ -``` - -## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust - -Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process. - -If you want a batteries included and ops-free experience, [try out Shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/). diff --git a/www/_blog/2022-07-28-patterns-with-rust-types.mdx b/www/_blog/2022-07-28-patterns-with-rust-types.mdx deleted file mode 100644 index cdfdd10c7..000000000 --- a/www/_blog/2022-07-28-patterns-with-rust-types.mdx +++ /dev/null @@ -1,332 +0,0 @@ ---- -title: Patterns with Rust types -description: Patterns to use types for better safety and design -author: ben -tags: [rust, tutorial] -thumb: rust-type-patterns-banner.png -cover: rust-type-patterns-banner.png -date: "2022-07-28T18:00:00" ---- - -This post introduces some patterns and tricks to better utilise Rust's type system for clean and safe code. -This post is on the advanced side and in general there are no absolutes - these patterns usually need to be evaluated on a case-by-case basis to see if the cost / benefit trade-off is worth it. - -## The new type pattern - -The new type pattern provides encapsulation as well as a guarantee that the right type of value is supplied at compile time. There are several uses and benefits for the new type pattern - let's take a look at some examples. - -### Identifier Separation - -A common representation of an identifier is a number - in this case let's use the unsigned integer type `usize`. - -Let's say we have a function that receives an identifier for a **User** from a database by username. By using a unique username our API retrieves the identifier of the user: - -```rust -fn get_user_id_from_username(username: &str) -> usize -``` - -Let's say we have a similar mechanism for another entity, `Post`. - -If our application is performing operations involving posts **and** users, the logic can get in a mix: - -```rust -let user_id: usize = get_user_id_from_username(username); -let post_id: usize = get_last_post(); - -fn delete_post(post_id: usize) { - // ... -} - -delete_post(user_id); -``` - -Here `get_user_id_from_username` and `get_last_post` both return `usize`s while `delete)_post` also takes a usize. In this code we can accidentally call `delete_post` with a `user_id`, there's nothing in the type system that would stop us from doing that. - -To differentiate between these two identifiers we can use the new type pattern: - -The new type pattern boils down to creating **a new tuple struct with a single item**, in this case `usize` - -```rust -struct UserId(pub usize); -``` - -Now we can change our library definition to return a `UserId` instead of `usize` - -```rust -fn get_user_id_from_username(username: String) -> UserId { - let user_id: usize = ... - UserId(user_id) -} -``` - -Doing similar for the posts system with a `PostId`, when now compiling we get an error on when calling `get_post`. - -```rust - | -14 | get_post(x); - | ^ expected struct `PostId`, found struct `UserId` -``` - -The new-type pattern enforces type-safety at compile time without any performance overhead at runtime. 
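To see the whole pattern end to end, here is a minimal, self-contained sketch; the lookup bodies are placeholders rather than real database calls:

```rust
struct UserId(pub usize);
struct PostId(pub usize);

// Placeholder lookups standing in for real queries.
fn get_user_id_from_username(_username: &str) -> UserId {
    UserId(1)
}

fn get_last_post() -> PostId {
    PostId(7)
}

fn delete_post(_post_id: PostId) {
    // ...
}

fn main() {
    let user_id = get_user_id_from_username("ferris");
    let post_id = get_last_post();

    delete_post(post_id); // compiles: a `PostId` is expected and a `PostId` is given
    // delete_post(user_id); // error[E0308]: expected struct `PostId`, found struct `UserId`
    drop(user_id); // only here so the sketch compiles without an unused-variable warning
}
```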
- -### Re-adding functionality to our type - -After creating this new *wrapper* type, we may need to implement some of the behaviour of the type it is encapsulating to appease our compiler. For example consider a set of 'banned' users: - -```rust -let banned_users: HashSet = HashSet::new(); -``` - -The above doesn't compile because our new type `UserId` doesn't implement equality and hashing behaviour whereas `usize` did. To add these traits back we can use the inbuilt derive macro, which generates implementations for our struct based on the single and only field. - -```rust -#[derive(PartialEq, Eq, Hash)] -struct UserId(usize); -``` - -And we're good to go! - -### Contract based programming in Rust / sub-typing - -The new type pattern can also be used to constrain types to only take 'valid' values. - -In the above example we used a wrapper type to enforce *flow* of values, this method also enforces the *content* of the value. In our application we only want usernames to contain **lowercase** alphabetic characters. Wrapping over String we can do this: - -```rust -struct Username(String); -``` - -The only way to create a Username is using the `TryFrom` trait. - -```rust -impl TryFrom for Username { - type Error = String; - - fn try_from(value: String) -> Result { - if value.chars().all(|c| matches!(c, 'a'..='z')) { - Ok(Username(value)) - } else { - Err(value) - } - } -} -``` - -This implementation returns a new `Username` if *all* the characters are lowercase. Else the string is returned and can be reused in logic possibly displaying an error. - -As the string field is private a `Username` cannot be created with `Username(my_string)`. It also cannot be modified by outsiders and invalidate our contract. - -We can now use this structure as an argument to our API. - -```rust -fn create_user(db: &mut DB, username: Username) -> Result<(), CreationError> { - // ... -} -``` - -Since the username is validated to be lowercase ahead of time, the `create_user` function doesn't care about whether the username is valid inside in its own scope. - -This can lead to easier error handling. `CreationError` doesn't have to include a variant for the if the username has invalid characters. - -Although **the only safe way** to construct if through the validator `TryFrom` trait, the `Username` can be created through unsafe transmute (casting the bits of one value to the type of another without checks). This is normally fine though as with unsafe you are introducing undefined behaviour anyway. - -```rust -let string = String::new("muahahaha 👿"); -let bad_username = unsafe { std::mem::transmute::(string) }; -dbg!(bad_username); -``` - -### Wrapping vs canonical type - -Our wrapped type is great from the outside, however we are relying on logic internal to the type to validate our contract. - -If we want to we can be really drill down on the structure of our username. Here we also enforce that the username has to be between four and ten letters. - -```rust -#[rustfmt::skip] -enum Alphabet { - A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z -} - -enum Username { - FourLetters([Alphabet; 4]), - FiveLetters([Alphabet; 5]), - SixLetters([Alphabet; 6]), - SevenLetters([Alphabet; 7]), - EightLetters([Alphabet; 8]), - NineLetters([Alphabet; 9]), - TenLetters([Alphabet; 10]), -} -``` - -Even though there is no way to make an invalid username (except for unsafe) this is a little over the top 😂. In some edge cases it can be beneficial but in the example above this is clearly overkill. 
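Back with the plain wrapped `String`, using the `TryFrom` validator looks something like this (the usernames are made up, and the `Username` type is the one defined above):

```rust
use std::convert::TryFrom;

fn main() {
    // All lowercase alphabetic characters: accepted.
    let valid = Username::try_from(String::from("ferris"));
    assert!(valid.is_ok());

    // Contains an uppercase letter and a space: rejected, and the original
    // `String` is handed back in the `Err` so it can be reused, for example
    // to show an error message to the user.
    let invalid = Username::try_from(String::from("Ferris W"));
    assert!(invalid.is_err());
}
```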
- -### Working with foreign traits on foreign types - -Traits are great. They can be defined on structs and enums, but you may run into some issues when implementing a foreign trait on a foreign type. - -This is by design, and here's why: - -![](/images/blog/rust-trait-rules-diagram.png) - -In our crate the compiler doesn't know when calling `MyTrait` methods on `MyStruct` whether to use the implementation defined in `crate 3` or `crate 4`! [Rust has a set of orphan rules](https://github.com/Ixrec/rust-orphan-rules) to prevent this situation from happening. - -In the situation where you'd like to implement a foreign trait on a foreign type - the 'new type' pattern can come to the rescue yet again: - -```rust -// lives in crate X -trait ToTree { - // ... -} - -fn very_useful_function(something: impl ToTree) -> () { - // .. -} - -// Our crate -struct Wrapper(pub crate_y::MyType); - -impl ToTree for Wrapper { - // ... -} - -// Yay -very_useful_function(Wrapper(foreign_value)) -``` - -One of the gotchas with this is that you have to manually implement the trait. You can't use derive macros, e.g. `#[derive(PartialEq)]` and reach through to the declaration of the wrapped type and read its declaration. You also have to make sure that you can properly implement the trait on the item. `crate_y::MyType` might hide information needed for the implementation 😕. - -Ok - enough with the new type pattern. Let's leave it for a minute and look at some other tricks when working with types in Rust. - -## Using either to unify different types - -Sometimes we have a case where we have a complicated data type - -```rust -enum PostUser { - Single { - username: UserId - }, - Group { - usernames: HashSet - } -} -``` - -We'd like a method that returns an iterator, but we're stuck since we either return a single once iterable ([std::iter::Once](https://doc.rust-lang.org/std/iter/struct.Once.html)) or an iterator over a hashset. These iterators are different types and have different properties, so Rust doesn't like when we try to build a function returning both. - -A Rust function / method can only return one type: - -```rust -impl PostUser { - fn iter(&self) -> impl Iterator + '_ { - match self { - PostUser::User { username } => std::iter::once(username), - PostUser::Group { usernames } => usernames.into_iter(), - } - } -} -``` - -The following will fail because the match arms have different types. - -```rust - | -17 | / match self { -18 | | PostUser::User { username } => std::iter::once(username), - | | ------------------------- this is found to be of type `std::iter::Once<&UserId>` -19 | | PostUser::Group { usernames } => usernames.into_iter(), - | | ^^^^^^^^^^^^^^^^^^^^^ expected struct `std::iter::Once`, found struct `std::collections::hash_set::Iter` -20 | | } - | |_________- `match` arms have incompatible types - | - = note: expected struct `std::iter::Once<&UserId>` - found struct `std::collections::hash_set::Iter<'_, UserId>` -``` - -The [`either` crate](https://crates.io/crates/either) offers a general purpose sum type [that implements many traits](https://docs.rs/either/1.7.0/either/enum.Either.html#trait-implementations). Using `either::Left` for the once iterator and `either::Right` we can build two iterators into what Rust considers as a single type. 
- -```rust -impl PostUser { - fn iter(&self) -> impl Iterator + '_ { - match self { - PostUser::User { username } => either::Left(std::iter::once(username)), - PostUser::Group { usernames } => either::Right(usernames.into_iter()), - } - } -} -``` - -We could have instead boxed the results and returned `Box>`. The benefit of using either is that it uses static dispatch rather than dynamic dispatch. [enum_dispatch has good performance comparison for using static dispatch over dyn](https://docs.rs/enum_dispatch/latest/enum_dispatch/#the-benchmarks) so if you are on a critical hot path, and you know all the returned types it is faster to use enums to unify types rather than dynamic trait dispatching. - -## Extension traits - -When creating a library we may add some functions for working with existing types (whether in the standard library or a different crate). - -Let's say we are writing a library on top of *serenity* which has models for discord servers (discord refers to them as guilds). - -Let's write a helper function that gets the number of channels in a [Guild](https://docs.rs/serenity/0.11.4/serenity/model/guild/struct.Guild.html). - -```rust -async fn get_number_of_channels( - guild: &serenity::model::Guild, - http: impl AsRef -) -> serenity::Result -``` - -When calling the function we **have to** pass the guild as the first argument. - -```rust -let guild: serenity::model::Guild = // ... -get_number_of_channels(&guild, client); -``` - -But from a design perspective we might prefer to use member notation instead: `guild.get_number_of_channels(client)`. - -We can't use add a direct implementation for a type defined outside our current crate. - -```rust -/ impl serenity::model::Guild { -| fn number_of_channels>(&self, http: T) -> serenity::Result { -| todo!() -| } -| } -|_^ impl for type defined outside of crate. -``` - -To define an associated method on a type outside the crate we must instead make an intermediate 'Extension' trait: - -```rust -trait GuildExt { - fn number_of_channels>(&self, http: T) -> serenity::Result; -} - -impl GuildExt for serenity::model::Guild { - fn number_of_channels>(&self, http: T) -> serenity::Result { - // ... - } -} -``` - -Using the intermediate trait the compiler can reason about when the method exists. To use the method syntax and show the compiler that the extension exists we must import the trait into our scope: - -```rust -use crate::GuildExt; -let guild: serenity::model::Guild = // ... -let number_of_channels = guild.get_number_of_channels(client); -``` - -This pattern is used in the futures crate with the [FutureExt trait](https://docs.rs/futures/0.3.21/futures/future/trait.FutureExt.html). -Here using the trait `FutureExt` provides additional methods to the existing [`Future` trait in Rust's standard library](https://doc.rust-lang.org/std/future/trait.Future.html). Aside from syntax aesthetics, it becomes much easier to find object-specific functions when using an IDE. - -You can use the [easy_ext](https://docs.rs/easy-ext/1.0.0/easy_ext/) for doing this pattern on a single type without having to write the trait / trait definition is generated for you. - -## Conclusion - -We saw how we can use various patterns like the new-type pattern and extension pattern to make our Rust code more ergonomic and take advantage of the type system and compiler to write better code. There is a great book out on [Rust design patterns](https://rust-unofficial.github.io/patterns/intro.html) which covers some of these and many more patterns in Rust. 
What are your favourite design patterns in Rust? Let us know and we'll cover them next time! - -## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust - -Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process. - -If you want a batteries included and ops-free experience, [try out Shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/). diff --git a/www/_blog/2022-08-04-middleware.mdx b/www/_blog/2022-08-04-middleware.mdx deleted file mode 100644 index e764d700a..000000000 --- a/www/_blog/2022-08-04-middleware.mdx +++ /dev/null @@ -1,422 +0,0 @@ ---- -title: What is server middleware -description: A look at middleware and some implementations in Rust server applications -author: ben -tags: [rust, tutorial] -thumb: middleware-banner.png -cover: middleware-banner.png -date: "2022-08-04T15:00:00" ---- - -In this post we will take a general look into what middleware is, the benefits of the pattern and then how to use middleware in a Rust server application. - -## What is middleware? - -A web server generally provides responses to requests. Very often, the protocol of choice is HTTP. A handler (sometimes called a response callback) is a function which takes a request's data and returns a response. - -Most server frameworks have a system called a 'router' which routes requests based on various parameters - usually the URL path. In HTTP routing is typically a combination of the path and the request method (GET, POST, PUT etc.). The benefit of a router is that it allows splitting per path logic up, which makes building large systems with lots of endpoints easier to manage. - -Individual path handlers are great, but sometimes you want logic which applies to a group of paths or indeed all paths. This is where **middleware** comes in. Unlike a handler, middleware is called on **every request and path** that it's registered on. Like handlers, middleware are functions. - -Middleware is very much **implementor dependent**. We will have a look at some concrete examples, but different frameworks have opted for different tradeoffs in their middleware implementation. Some middleware implementations work on an immutable state and act as a transformer on request and responses. Other frameworks treat the inputs as mutable and can freely modify / mutate the request objects. Some frameworks implement middleware that can fail or short circuit. - -### Middleware as a stack - -Middleware tends to be well-ordered. That is, a request or response passes through middleware in a well-defined order, as each layer processes the request or response and passes it onto the next layer: - -``` - requests - | - v -+----- layer_three -----+ -| +---- layer_two ----+ | -| | +-- layer_one --+ | | -| | | | | | -| | | handler | | | -| | | | | | -| | +-- layer_one --+ | | -| +---- layer_two ----+ | -+----- layer_three -----+ - | - v - responses -``` - -### Applications of middleware - -#### Authentication - -Many routes may want user information. The incoming request contain user information via cookies or http authentication. Rather than every path handler having to deal with extracting the information we can abstract this logic to a request middleware and pass it down to subsequent handlers. - -#### Logging - -Information about which paths users are going to and when can be very useful. With logging middleware we can log and store request information for later analysis. 
- -Similar to logging is [*server response timings*](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Server-Timing). This is a field / http header, which is standardized for holding timing information about requests. Here our middleware can log the start time of an incoming *request* and the end time on the *response*. Then the middleware can modify the outgoing response to include the timing. This header is often highlighted in developer tools, which can be useful while debugging. It can also be used in chunked / streamed responses where the header of a request might have already been sent by using [Trailer](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Trailer)s. - -#### Compression and other response optimizations - -Middleware can also amend outgoing responses and compress the output via algorithms like gzip and brotli. This removes the responsibility out of handlers and provides a convenient default for all responses. - -And it doesn't have to just be compression of responses, another use case is image resizing. Identifying mobile viewports using information on the request, outgoing responses can instead return smaller images rather than huge 4k images, in the end reducing bandwidth. - -### Structuring applications - -As mentioned above the benefits of the middleware system is that while it is possible to do this stuff individually in each handler, abstracting it moves the responsibility away from the handlers. This can make management simpler and fewer lines of code! - -```rust -fn index() { - let index_page = "..."; - return compress(index_page); -} - -fn about() { - let about_page = "..."; - return compress(about_page); -} - -fn search() { - let search_page = "..."; - return compress(search_page); -} - -Application::build() - .routes([index, about, search]) -``` - -vs - -```rust -fn index() { return "..."; } -fn about() { return "..."; } -fn search() { return "..."; } - -Application::build() - .routes([index, about, search]) - .add_middleware(CompressionMiddleware::new()) -``` - -### Separating out code - -The benefit of middleware *just* being functions is that they can be separated out to different modules or even crates. Many 3rd party services may choose to expose their service as a middleware rather than a system of complicated functions, and having to deal with users passing the correct state into them. - -```rust - .add_middleware(hot_new_server_logging_framework_start_up::Middleware::new()) -``` - -## Comparing middleware implementations in libraries - -### [Rocket](https://rocket.rs/) - -Rocket is a server framework. Rocket's middleware implementation is known as [fairings](https://rocket.rs/v0.5-rc/guide/fairings/#fairings) (yes there are many rocket related puns in the crate). - -From Rocket's fairing documentation: - -> Rocket’s fairings are a lot like middleware from other frameworks, but they bear a few key distinctions: -> -> Fairings cannot terminate or respond to an incoming request directly. -> Fairings cannot inject arbitrary, non-request data into a request. -> Fairings can prevent an application from launching. -> Fairings can inspect and modify the application's configuration. 
- -To make a fairing in Rocket you have to implement the fairing trait: - -```rust -struct MyCounterFaring { - get_requests: AtomicUsize, -} - -#[rocket::async_trait] -impl Fairing for MyCounterFaring { - fn info(&self) -> Info { - Info { - name: "GET Counter", - kind: Kind::Request - } - } - - async fn on_request(&self, request: &mut Request<'_>, _: &mut Data<'_>) { - if let Method::Get = request.method() { - self.get.fetch_add(1, Ordering::Relaxed); - } - } -} -``` - -Using the `.attach` method it's really simple to add a fairing to a application. - -```rust -#[launch] -fn rocket() -> _ { - rocket::build() - .attach(MyCounterFaring { - get_requests: AtomicUsize::new(0), - }) - .attach(other_fairing) -} -``` - -Rocket's fairings have several hooks. Each of them has a default implementation so can be left out (you don't have to explicitly write a method for each hook). - -#### Requests using `on_request` - -This fires when a request is received. This hook has a mutable reference to the request and so **can modify the request**. "It cannot abort or respond directly to the request; these issues are better handled via request guards or via response callbacks.". - -As an aside, Rocket has a different non-middleware implementation that can be better suited for handlers that might short circuit an error rather than running a handler afterwards. We won't go into it here but if your middleware is fallible [request guards](https://rocket.rs/v0.5-rc/guide/requests/#request-guards) might be a better option - -#### Response using `on_response` - -Similar to `on_request` this has mutable access to the response object (it also has immutable access to the request). Using this hook you can **inject headers** or amend **partial responses (aka 404)**. - -#### General server hooks - -Rocket's fairings go beyond request and responses and can act as hooks into application startup and closing: - -- Ignite (`on_ignite`). Runs before starting the server. Can validate config values, set initial state or abort. -- Liftoff (`on_liftoff`). After server has launched (started) "A liftoff callback can be a convenient hook for launching services related to the Rocket application being launched." -- Shutdown (`on_shutdown`). This hook can be used to wind down services and save state before the application closes. Runs concurrently and no requests are returned before. - -All Rocket fairings have a [info field](https://api.rocket.rs/v0.5-rc/rocket/fairing/trait.Fairing.html#tymethod.info). The kind property decides which hooks the fairing can fire. - -#### Ad hoc fairings - -Simpler middleware using functions can be added using [ad-hoc fairings](https://rocket.rs/v0.5-rc/guide/fairings/#ad-hoc-fairings). If the fairing doesn't have state / data with it, you can bypass needing to create a structure and writing a trait implementation for it and instead write a function. - -Using `AdHoc` and any of the names of the above mentioned hooks we can instead creating a function using a function (+ a string info): - -```rust -.attach(AdHoc::on_liftoff("Liftoff Printer", |_| Box::pin(async move { - println!("...annnddd we have liftoff!"); -}))) -``` - -### [Axum](https://docs.rs/axum/latest/axum/index.html) - -Similar to Rocket, Axum is a HTTP framework for web applications. [Axum middleware](https://docs.rs/axum/latest/axum/index.html#middleware) is based of [tower](https://crates.io/crates/tower) which is a separate crate which deals with lower level base for networking in Rust. Axum and tower middleware is refereed to a 'layers'. 
- -There are several ways to write middleware for Axum. Similar to standard fairings you can create a type that implements the [Layer trait](https://docs.rs/tower/0.4.13/tower/trait.Layer.html). The layer trait decorates / acts apon the [Service trait](https://docs.rs/tower/0.4.13/tower/trait.Service.html). - -This demo was taken from the [Tower docs](https://docs.rs/tower/0.4.13/tower/trait.Layer.html#log) **and before you get scared off we will see a much simpler way to implement middleware shortly**. - -```rust -pub struct LogLayer { - target: &'static str, -} - -impl Layer for LogLayer { - type Service = LogService; - - fn layer(&self, service: S) -> Self::Service { - LogService { - target: self.target, - service - } - } -} - -// This service implements the Log behavior -pub struct LogService { - target: &'static str, - service: S, -} - -impl Service for LogService -where - S: Service, - Request: fmt::Debug, -{ - type Response = S::Response; - type Error = S::Error; - type Future = S::Future; - - fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { - self.service.poll_ready(cx) - } - - fn call(&mut self, request: Request) -> Self::Future { - // Insert log statement here or other functionality - println!("request = {:?}, target = {:?}", request, self.target); - self.service.call(request) - } -} -``` - -We can register our mew layer (middleware) on a to a Axum application using `.layer` (similar to `.attach` in Rocket). - -```rust -use axum::{routing::get, Router}; - -async fn handler() {} - -let app = Router::new() - .route("/", get(handler)) - .layer(LogLayer { target: "our site" }) - // `.route_layer` will only run the middleware if a route is matched - .route_layer(TimeOutLayer) -``` - -There is also [`ServiceBuilder`](https://docs.rs/tower/0.4.13/tower/struct.ServiceBuilder.html) which is the recommended way to chain layers. They are executed in the reverse order to which they are attached (`layer_one` runs first). - -```rust -Router::new() - .route("/", get(handler)) - .layer( - ServiceBuilder::new() - .layer(layer_three) - .layer(layer_two) - .layer(layer_one) - ) -``` - -#### A simpler way - -Similar to Rocket's trait fairings and ad hoc fairings there are two ways to write middleware for Axum using [middleware::from_fn](https://docs.rs/axum/latest/axum/middleware/fn.from_fn.html). - -Using a demo from the [Axum docs](https://docs.rs/axum/latest/axum/middleware/index.html#writing-middleware). - -```rust -async fn auth(req: Request, next: Next) -> Result { - let auth_header = req.headers() - .get(http::header::AUTHORIZATION) - .and_then(|header| header.to_str().ok()); - - match auth_header { - Some(auth_header) if token_is_valid(auth_header) => { - Ok(next.run(req).await) - } - _ => Err(StatusCode::UNAUTHORIZED), - } -} -``` - -```rust -let app = Router::new() - .route("/", get(|| async { /* ... */ })) - .route_layer(middleware::from_fn(auth)); -``` - -#### Existing ready to use layers: - -As Axum is built on `tower` there are some great readily importable middleware that can be added as layers. 
- -One of those is that [TraceLayer](https://docs.rs/tower-http/0.3.4/tower_http/trace/index.html) that logs requests coming in and responses going out: - -``` -Mar 05 20:50:28.523 DEBUG request{method=GET path="/foo"}: tower_http::trace::on_request: started processing request -Mar 05 20:50:28.524 DEBUG request{method=GET path="/foo"}: tower_http::trace::on_response: finished processing request latency=1 ms status=200 -``` - -There are a [bunch of layers in the tower_http crate](https://docs.rs/tower-http/0.3.4/tower_http/trace/index.html?search=struct%3ALayer) that can be used instead of writing your own. - -## Building authentication using our own middleware - -Let's play around with a realistic example and build a middleware layer for our own application that manages authentication. In our route handlers we might want to know detailed information about the user that made the request. Rather than having to deal with passing around request information we can encapsulate this logic in middleware. - -We'll be using Axum for this demo. The demo is not public at the moment, look out for a future post about authentication for when the full demo will be public! - -### Cookies as user state - -Cookies can be used for maintaining user state. When a user cookie is set on the frontend it's sent with every request. We'll skip over how the cookie got there 😅 and leave it for a future tutorial. - -Either way we want to add middleware which *injects* the following the struct into current request. - -```rust -#[derive(Clone)] -struct AuthState(Option<(SessionId, Arc>)>, Database); -``` - -We have got a bit fancy here. Rather than making a database request on every request we instead save the database pool a mutable store ([OnceCell](https://docs.rs/once_cell/latest/once_cell/sync/struct.OnceCell.html)) and the session id. With all this information it means that getting user state can be lazy or not done at all. - -We will build an `auth` function which builds up this lazy `AuthState` struct with the required information by parsing the headers of a request. - -```rust -async fn auth( - mut req: Request, - next: Next, - database: Database, -) -> axum::response::Response { - // Assuming we only have one cookie - let key_pair_opt = req - .headers() - .get("Cookie") - .and_then(|value| value.to_str().ok()) - .map(|value| - value - .split_once(';') - .map(|(left, _)| left) - .unwrap_or(value) - ) - .and_then(|kv| kv.split_once('=')); - - let auth_state = if let Some((key, value)) = key_pair_opt { - if key != USER_COOKIE_NAME { - None - } else if let Ok(value) = value.parse::() { - Some(value) - } else { - None - } - } else { - None - }; - - req.extensions_mut().insert(AuthState( - auth_state - .map(|v| ( - v, - Arc::new(OnceCell::new()), - database - )), - )); - next.run(req).await -} -``` - -*this is a bit ad hoc parsing, proper parsing should account for multiple cookies etc and could be neater 😆*. - -At the end we do two **important things**. First we *extend* the request with this lazy auth state: `req.extensions_mut().insert(...)`. Secondly we run the rest of the request stack: `next.run(req).await`. - -Unlike Rocket fairings, in Axum we could return our own Response from the middleware and not run the handler by skipping `next.run(req).await`. 
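As a quick illustration of that, here is a minimal sketch in the same `middleware::from_fn` style as the `auth` function above; the maintenance-mode switch is invented for the example, and the exact types assume the axum version used in this post:

```rust
use axum::{
    body::Body,
    http::{Request, StatusCode},
    middleware::Next,
    response::{IntoResponse, Response},
};

async fn maintenance(req: Request<Body>, next: Next<Body>) -> Response {
    // Hypothetical switch: set an environment variable to take the site down.
    if std::env::var("MAINTENANCE").is_ok() {
        // Short-circuit with our own response; the handler never runs.
        (StatusCode::SERVICE_UNAVAILABLE, "Down for maintenance").into_response()
    } else {
        next.run(req).await
    }
}
```

It would be attached with `.layer(middleware::from_fn(maintenance))`, just like `auth` is attached below, only without the extra database argument.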
- -### Attaching the middleware - -We first attach it to out Axum application using: - -```rust -let middleware_database = database_pool.clone(); - -Router::new() - .layer(middleware::from_fn(move |req, next| { - auth(req, next, middleware_database.clone()) - })) -``` - -Because our middleware also needs application state (in this case the database pool) we create a intermediate function which pulls that in. - -### Using the middleware - -We can now use the state injected by the middleware using a [Extension](https://docs.rs/axum/latest/axum/struct.Extension.html) parameter. - -```rust -async fn me( - Extension(current_user): Extension, -) -> Result { - if let Some(user) = current_user.get_user().await { - Ok(show_user(user)) - } else { - Err(error_page("Not logged in")); - } -} -``` - -I was actually surprised when this worked, Axum's handler parameter system is quite magic. - -## Conclusion - -In summary middleware helps you abstract common logic for paths into reusable stateful and stateless objects. Middleware might not be applicative for every scenario but when you need it, it is super useful! - -## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust - -Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process. - -If you want a batteries included and ops-free experience, [try out Shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/). diff --git a/www/_blog/2022-08-11-authentication-tutorial.mdx b/www/_blog/2022-08-11-authentication-tutorial.mdx deleted file mode 100644 index c07dc31a4..000000000 --- a/www/_blog/2022-08-11-authentication-tutorial.mdx +++ /dev/null @@ -1,701 +0,0 @@ ---- -title: Building an authentication system in Rust using session tokens -description: Building authentication into a website with Rust and SQL -author: Ben -tags: [rust, tutorial, axum, sql] -thumb: authentication-banner.png -cover: authentication-banner.png -date: "2022-08-17T15:00:00" ---- - -Most websites have some kind of user system. But implementing authentication can be a bit complex. It requires several things working together. - -Making sure the system is secure is daunting. How do we know others cannot easily log into accounts and make edits on other people's behalf? And building stateful systems is difficult. - -Today we will look at a minimal implementation in Rust. For this demo we won't be using a specific authentication library, instead writing from scratch using our own database and backend API. - -We will be walking through implementing the system including a frontend for interacting with it. We will be using Axum for routing and other handling logic. The [source code for this tutorial can be found here](https://github.com/kaleidawave/axum-shuttle-postgres-authentication-demo). We will then deploy the code on shuttle, which will handle running the server and giving us access to a Postgres server. - -To prevent this post from being an hour long, some things are skipped over (such as error handling) and so might not match up one-to-one with the tutorial. This post also assumes basic knowledge of HTML, web servers, databases and Rust. - -This isn't verified to be secure, use it at your own risk!! - -## Let's get started - -First, we will install shuttle for creating the project (and later for deployment). If you don't already have it you can install it with `cargo install cargo-shuttle`. We will first go to a new directory for our project and create a new Axum app with `cargo shuttle init --axum`. 
- -You should see the following in `src/lib.rs`: - -```rust -use axum::{routing::get, Router}; -use sync_wrapper::SyncWrapper; - -async fn hello_world() -> &'static str { - "Hello, world!" -} - -#[shuttle_service::main] -async fn axum() -> shuttle_service::ShuttleAxum { - let router = Router::new().route("/hello", get(hello_world)); - let sync_wrapper = SyncWrapper::new(router); - - Ok(sync_wrapper) -} -``` - -### Templates - -For generating HTML we will be using [Tera](https://tera.netlify.app/docs), so we can go ahead and add this with `cargo add tera`. We will store all our templates in a `template` directory in the project root. - -We want a general layout for our site, so we create a base layout. In our base layout, we can add specific tags that will apply to all pages such as a [Google font](https://fonts.google.com/). With this layout all the content will be injected in place of `{% block content %}{% endblock content %}`: - -```html - - - - - - - - Title - - - - - - - {% block content %}{% endblock content %} - - -``` - -And now we can create our first page that will be displayed under the `/` path - -```html - -{% extends "base.html" %} -{% block content %} -
Hello world
-{% endblock content %} -``` - -Now we have our template, we need to register it under a Tera instance. Tera has a nice [filesystem-based registration system](https://docs.rs/tera/1.16.0/tera/struct.Tera.html#method.new), but we will use the [`include_str!`](https://doc.rust-lang.org/std/macro.include_str.html) macro so that the content is in the binary. This way we don't have to deal with the complexities of a filesystem at runtime. We register both templates so that the `index` page knows about `base.html`. - -```rust -let mut tera = Tera::default(); -tera.add_raw_templates(vec![ - ("base.html", include_str!("../templates/base.html")), - ("index", include_str!("../templates/index.html")), -]) -.unwrap(); -``` - -We add it via an [Extension](https://docs.rs/axum/latest/axum/struct.Extension.html) (wrapped in `Arc` so that extension cloning does not deep clone all the templates) - -```rust -#[shuttle_service::main] -async fn axum() -> shuttle_service::ShuttleAxum { - let mut tera = Tera::default(); - tera.add_raw_templates(vec![ - ("base.html", include_str!("../templates/base.html")), - ("index", include_str!("../templates/index.html")), - ]) - .unwrap(); - - let router = Router::new() - .route("/hello", get(hello_world)) - .layer(Extension(Arc::new(tera))); - - let sync_wrapper = SyncWrapper::new(router); - Ok(sync_wrapper) -} -``` - -### Rendering views - -Now we have created our Tera instance we want it to be accessible to our get methods. To do this in Axum, we add the extension as a parameter to our function. In Axum, an [Extension](https://docs.rs/axum/latest/axum/struct.Extension.html) is a unit struct. Rather than dealing with `.0` to access fields, we use destructuring in the parameter (if you thought that syntax looks weird). - -```rust -async fn index( - Extension(templates): Extension, -) -> impl IntoResponse { - Html(templates.render("index", &Context::new()).unwrap()) -} -``` - -### Serving assets - -We can create a `public/styles.css` file - -```css -body { - font-family: 'Karla', sans-serif; - font-size: 12pt; -} -``` - -And easily create a new endpoint for it to be served from: - -```rust -async fn styles() -> impl IntoResponse { - Response::builder() - .status(http::StatusCode::OK) - .header("Content-Type", "text/css") - .body(include_str!("../public/styles.css").to_owned()) - .unwrap() -} -``` - -Here we again are using `include_str!` to not have to worry about the filesystem at runtime. [ServeDir](https://docs.rs/tower-http/latest/tower_http/services/struct.ServeDir.html) is an alternative if you have a filesystem at runtime. You can use this method for other static assets like JavaScript and favicons. - -## Running - -We will add our two new routes to the router (and remove the default "hello world" one) to get: - -```rust -let router = Router::new() - .route("/", get(index)) - .route("/styles.css", get(styles)) - .layer(Extension(Arc::new(tera))); -``` - -With our main service we can now test it locally with `cargo shuttle run`. - -![](/images/blog/authentication-demo-screenshot.png) - -Nice! - -## Adding users - -We will start with a user's table in SQL. ([this is defined in schema.sql](https://github.com/kaleidawave/axum-shuttle-postgres-authentication-demo/blob/main/schema.sql)). - -```sql -CREATE TABLE users ( - id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, - username text NOT NULL UNIQUE, - password text NOT NULL -); -``` - -The `id` is generated by the database using a sequence. The `id` is a primary key, which we will use to reference users. 
It is better to use a fixed value field for identification rather than using something like the `username` field because you may add the ability to change usernames, which can leave things pointing to the wrong places. - -### Registering our database - -Before our app can use the database we have to add sqlx with some features: `cargo add sqlx -F postgres runtime-tokio-native-tls`. We will also add the `shuttle-shared-db` crate with `cargo add shuttle-shared-db -F postgres`. - -Now back in the code we add a parameter with `#[shuttle_shared_db::Postgres] pool: Database`. The `#[shuttle_shared_db::Postgres]` annotation tells shuttle to provision a Postgres database using the [infrastructure from code design](https://www.shuttle.rs/blog/2022/05/09/ifc)! - -```rust -type Database = sqlx::PgPool; - -#[shuttle_service::main] -async fn axum( - #[shuttle_shared_db::Postgres] pool: Database -) -> ShuttleAxum { - // Build tera as before - - let router = Router::new() - .route("/", get(index)) - .route("/styles.css", get(styles)) - .layer(Extension(Arc::new(tera))) - .layer(pool); - - // Wrap and return router as before -} -``` - -### Signup - -For getting users into our database, we will create a post handler. In our handler, we will parse data using multipart. [I wrote a simple parser for multipart that we will use here](https://github.com/kaleidawave/axum-shuttle-postgres-authentication-demo/blob/main/src/utils.rs#L45-L64). The below example contains some error handling that we will ignore for now. - -```rust -async fn post_signup( - Extension(database): Extension, - multipart: Multipart, -) -> impl IntoResponse { - let data = parse_multipart(multipart) - .await - .map_err(|err| error_page(&err))?; - - if let (Some(username), Some(password), Some(confirm_password)) = ( - data.get("username"), - data.get("password"), - data.get("confirm_password"), - ) { - if password != confirm_password { - return Err(error_page(&SignupError::PasswordsDoNotMatch)); - } - - let user_id = create_user(username, password, database); - - Ok(todo!()) - } else { - Err(error_page(&SignupError::MissingDetails)) - } -} -``` - -#### Creating users and storing passwords safety - -When storing passwords in a database, for security reasons we don't want them to be in the exact format as plain text. To transform them away from the plain text format we will use a [cryptographic hash function](https://en.wikipedia.org/wiki/Cryptographic_hash_function) from [pbkdf2](https://crates.io/crates/pbkdf2) (`cargo add pbkdf2`): - -```rust -fn create_user(username: &str, password: &str, database: &Database) -> Result { - let salt = SaltString::generate(&mut OsRng); - // Hash password to PHC string ($pbkdf2-sha256$...) - let hashed_password = Pbkdf2.hash_password(password.as_bytes(), &salt).unwrap().to_string(); - - // ... -} -``` - -With hashing, if someone gets the value in the password field they cannot find out the actual password value. The only thing this value allows is whether a plain text password matches this value. And with [salting](https://en.wikipedia.org/wiki/Salt_(cryptography)) different names are encoded differently. Here all these passwords were registered as *"password"*, but they have different values in the database because of salting. 
- -```sql -postgres=> select * from users; - id | username | password -----+----------+------------------------------------------------------------------------------------------------ - 1 | user1 | $pbkdf2-sha256$i=10000,l=32$uC5/1ngPBs176UkRjDbrJg$mPZhv4FfC6HAfdCVHW/djgOT9xHVAlbuHJ8Lqu7R0eU - 2 | user2 | $pbkdf2-sha256$i=10000,l=32$4mHGcEhTCT7SD48EouZwhg$A/L3TuK/Osq6l41EumohoZsVCknb/wiaym57Og0Oigs - 3 | user3 | $pbkdf2-sha256$i=10000,l=32$lHJfNN7oJTabvSHfukjVgA$2rlvCjQKjs94ZvANlo9se+1ChzFVu+B22im6f2J0W9w -(3 rows) -``` - -With the following simple database query and our hashed password, we can insert users. - -```rust -fn create_user(username: &str, password: &str, database: &Database) -> Result { - // ... - - const INSERT_QUERY: &str = - "INSERT INTO users (username, password) VALUES ($1, $2) RETURNING id;"; - - let fetch_one = sqlx::query_as(INSERT_QUERY) - .bind(username) - .bind(hashed_password) - .fetch_one(database) - .await; - - // ... -} -``` - -And we can handle the response and get the new user id with the following: - -```rust -fn create_user(username: &str, password: &str, database: &Database) -> Result { - // ... - - match fetch_one { - Ok((user_id,)) => Ok(user_id), - Err(sqlx::Error::Database(database)) - if database.constraint() == Some("users_username_key") => - { - return Err(SignupError::UsernameExists); - } - Err(err) => { - return Err(SignupError::InternalError); - } - } -} -``` - -Great now we have the signup handler written, let's create a way to invoke it in the UI. - -### Using HTML forms - -To invoke the endpoint with multipart we will use an HTML form. - -```html - -{% extends "base.html" %} -{% block content %} -
<form action="/signup" method="post" enctype="multipart/form-data">
    <input type="text" name="username" placeholder="Username" required>
    <input type="password" name="password" placeholder="Password" required>
    <input type="password" name="confirm_password" placeholder="Confirm password" required>
    <input type="submit" value="Sign up">
</form>
-{% endblock content %} -``` - -Notice the action and method that correspond to the route we just added. Notice also the `enctype` being multipart, which matches what we are parsing in the handler. The above has a few attributes to do some client-side validation, but [in the full demo it is also handled on the server](https://github.com/kaleidawave/axum-shuttle-postgres-authentication-demo/blob/ba71a914055f312636581f5e82172b1078e7b9eb/src/authentication.rs#L124-L133). - -We create a handler for this markup in the same way as done for our index with: - -```rust -async fn get_signup( - Extension(templates): Extension, -) -> impl IntoResponse { - Html(templates.render("signup", &Context::new()).unwrap()) -} -``` - -We can add `signup` to the Tera instance and then add both the get and post handlers to the router by adding it to the chain: - -```rust -.route("/signup", get(get_signup).post(post_signup)) -``` - -### Sessions - -Once signed up, we want to save the logged-in state. We don't want the user to have to send their username and password for every request they make. - -### Cookies and session tokens - -Cookies help store the state between browser requests. When a response is sent down with [Set-Cookie](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie), then any subsequent requests the browser/client makes will send cookie information. We can then pull this information off of headers on requests on the server. - -Again, these need to be safe. We don't want collisions/duplicates. We want it to be hard to guess. For these reasons, we will represent it as a 128-bit unsigned integer. This has 2^128 options, so a very low chance of a collision. - -We want to generate a "session token". We want the tokens to be cryptographically secure. Given a session id, we don't want users to be able to find the next one. A simple globally incremented u128 wouldn't be secure because if I know I have *session 10* then I can send requests with *session 11* for the user who logged in after. With a cryptographically secure generator, there isn't a distinguishing pattern between subsequently generated tokens. We will use the [ChaCha](https://crates.io/crates/rand_chacha) algorithm/crate (we will add `cargo add rand_core rand_chacha`). [We can see that it does implement the crypto marker-trait confirming it is valid for cryptographic scenarios](https://docs.rs/rand_chacha/0.3.1/rand_chacha/struct.ChaCha8Rng.html#impl-CryptoRng). - -This is unlike [Pseudo-random number generators where you can predict the next *random* number given a start point and the algorithm](https://www.youtube.com/watch?v=-h_rj2-HP2E). This could be a problem if we have our token we can get the session token of the person who logged in after us really easy and thus impersonate them. - -To initialize the random generator we use [SeedableRng::from_seed](https://docs.rs/rand_core/latest/rand_core/trait.SeedableRng.html#tymethod.from_seed). The seed in this case is an initial *state* for the generator. Here we use [OsRng.next_u64()](https://docs.rs/rand_core/latest/rand_core/struct.OsRng.html) which *retrieves randomness from the operating system* rather a seed. We will be doing something similar to the creation of the Tera instance. We must wrap it in an arc and a mutex because generating new identifiers requires mutable access. 
-We now have the following main function:
-
-```rust
-#[shuttle_service::main]
-async fn axum(
-    #[shuttle_shared_db::Postgres] pool: Database
-) -> ShuttleAxum {
-    // Build tera as before
-
-    let random = ChaCha8Rng::seed_from_u64(OsRng.next_u64());
-
-    let router = Router::new()
-        .route("/", get(index))
-        .route("/styles.css", get(styles))
-        .route("/signup", get(get_signup).post(post_signup))
-        .layer(Extension(Arc::new(tera)))
-        .layer(Extension(pool))
-        .layer(Extension(Arc::new(Mutex::new(random))));
-
-    // Wrap and return router as before
-}
-```
-
-#### Adding sessions to signup
-
-As well as creating a user on signup, we will create a session token for the newly signed-up user. We insert it into the sessions table along with our `user_id`:
-
-```rust
-type Random = Arc<Mutex<ChaCha8Rng>>;
-
-pub(crate) async fn new_session(
-    database: &Database,
-    random: Random,
-    user_id: i32
-) -> String {
-    const QUERY: &str = "INSERT INTO sessions (session_token, user_id) VALUES ($1, $2);";
-
-    let mut u128_pool = [0u8; 16];
-    random.lock().unwrap().fill_bytes(&mut u128_pool);
-
-    // endian doesn't matter here
-    let session_token = u128::from_le_bytes(u128_pool);
-
-    let _result = sqlx::query(QUERY)
-        .bind(&session_token.to_le_bytes().to_vec())
-        .bind(user_id)
-        .execute(database)
-        .await
-        .unwrap();
-
-    session_token.to_string()
-}
-```
-
-In the full demo, we use the [new type pattern](https://www.shuttle.rs/blog/2022/07/28/patterns-with-rust-types#the-new-type-pattern) over a u128 to make this easier, but we will stick with a plain u128 here.
-
-Now that we have our token, we need to package it into a cookie value. We will do it in the simplest way possible, using `.to_string()`. We will send a response that does two things: it sets this new cookie value and redirects us back to the index page. We will create a utility function for doing this:
-
-```rust
-fn set_cookie(session_token: &str) -> impl IntoResponse {
-    http::Response::builder()
-        .status(http::StatusCode::SEE_OTHER)
-        .header("Location", "/")
-        .header("Set-Cookie", format!("session_token={}; Max-Age=999999", session_token))
-        .body(http_body::Empty::new())
-        .unwrap()
-}
-```
-
-Now we can complete our signup handler by adding `random` as a parameter and returning our set-cookie response.
-
-```rust
-async fn post_signup(
-    Extension(database): Extension<Database>,
-    Extension(random): Extension<Random>,
-    multipart: Multipart,
-) -> impl IntoResponse {
-    let data = parse_multipart(multipart)
-        .await
-        .map_err(|err| error_page(&err))?;
-
-    if let (Some(username), Some(password), Some(confirm_password)) = (
-        data.get("username"),
-        data.get("password"),
-        data.get("confirm_password"),
-    ) {
-        if password != confirm_password {
-            return Err(error_page(&SignupError::PasswordsDoNotMatch));
-        }
-
-        let user_id = create_user(username, password, &database)
-            .await
-            .map_err(|err| error_page(&err))?;
-
-        let session_token = new_session(&database, random, user_id).await;
-
-        Ok(set_cookie(&session_token))
-    } else {
-        Err(error_page(&SignupError::MissingDetails))
-    }
-}
-```
-
-### Using the session token
-
-Great, so now we have a token/identifier for a *session*. Now we can use this as a key to get information about users.
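-
-Since the cookie stores the token as a decimal string while the database stores the raw 16 little-endian bytes, it can help to keep that conversion in one place. Here is a minimal sketch of such a helper (the name `session_key_bytes` is ours, not something from the demo):
-
-```rust
-/// Turn the cookie's string value back into the 16-byte key that
-/// `new_session` stored in the sessions table (the little-endian bytes of the u128).
-fn session_key_bytes(cookie_value: &str) -> Option<Vec<u8>> {
-    let token: u128 = cookie_value.parse().ok()?;
-    Some(token.to_le_bytes().to_vec())
-}
-```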
- -We can pull the cookie value using the following spaghetti of iterators and options: - -```rust -let session_token = req - .headers() - .get_all("Cookie") - .iter() - .filter_map(|cookie| { - cookie - .to_str() - .ok() - .and_then(|cookie| cookie.parse::().ok()) - }) - .find_map(|cookie| { - (cookie.name() == USER_COOKIE_NAME).then(move || cookie.value().to_owned()) - }) - .and_then(|cookie_value| cookie_value.parse::().ok()); -``` - -#### Auth middleware - -[In the last post, we went into detail about middleware. You can read more about it in more detail there](https://www.shuttle.rs/blog/2022/08/04/middleware). - -In our middleware, we will get a little fancy and make the user pulling lazy. This is so that requests that don't need user data don't have to make a database trip. Rather than adding our user straight onto the request, we split things apart. We first create an `AuthState` which contains the session token, the database, and a placeholder for our user `(Option)` - -```rust -#[derive(Clone)] -pub(crate) struct AuthState(Option<(u128, Option, Database)>); - -pub(crate) async fn auth( - mut req: http::Request, - next: axum::middleware::Next, - database: Database, -) -> axum::response::Response { - let session_token = /* cookie logic from above */; - - req.extensions_mut() - .insert(AuthState(session_token.map(|v| (v, None, database)))); - - next.run(req).await -} -``` - -Then we create a method on `AuthState` which makes the database request. - -Now we have the user's token we need to get their information. We can do that using SQL joins - -```rust -impl AuthState { - pub async fn get_user(&mut self) -> Option<&User> { - let (session_token, store, database) = self.0.as_mut()?; - if store.is_none() { - const QUERY: &str = - "SELECT id, username FROM users JOIN sessions ON user_id = id WHERE session_token = $1;"; - - let user: Option<(i32, String)> = sqlx::query_as(QUERY) - .bind(&session_token.to_le_bytes().to_vec()) - .fetch_optional(&*database) - .await - .unwrap(); - - if let Some((_id, username)) = user { - *store = Some(User { username }); - } - } - store.as_ref() - } -} -``` - -Here we cache the user internally using an Option. With the caching in place if another middleware gets the user and then a different handler tries to get the user it results in one database request, not two! - -We can add the middleware to our chain using: - -```rust -#[shuttle_service::main] -async fn axum( - #[shuttle_shared_db::Postgres] pool: Database -) -> ShuttleAxum { - // tera and random creation as before - - let middleware_database = database.clone(); - - let router = Router::new() - .route("/", get(index)) - .route("/styles.css", get(styles)) - .route("/signup", get(get_signup).post(post_signup)) - .layer(axum::middleware::from_fn(move |req, next| { - auth(req, next, middleware_database.clone()) - })) - .layer(Extension(Arc::new(tera))) - .layer(pool) - .layer(Extension(Arc::new(Mutex::new(random)))); - - // Wrap and return router as before -} -``` - -#### Getting middleware and displaying our user info - -Modifying our index Tera template, we can add an "if block" to show a status if the user is logged in. - -```html - -{% extends "base.html" %} -{% block content %} -

-<h1>Hello world</h1>
-
-{% if username %}
-<p>Logged in: {{ username }}</p>
-{% endif %} -{% endblock content %} -``` - -Using our middleware in requests is easy in Axum by including a reference to it in the parameters. We then add the username to the context for it to be rendered on the page. - -```rust -async fn index( - Extension(current_user): Extension, - Extension(templates): Extension, -) -> impl IntoResponse { - let mut context = Context::new(); - if let Some(user) = current_user.get_user().await { - context.insert("username", &user.username); - } - Html(templates.render("index", &context).unwrap()) -} -``` - -### Logging in and logging out - -Great we can signup and that now puts us in a session. We may want to log out and drop the session. This is very simple to do by returning a response with the cookie `Max-Age` set to 0. - -```rust -pub(crate) async fn logout_response() -> impl axum::response::IntoResponse { - Response::builder() - .status(http::StatusCode::SEE_OTHER) - .header("Location", "/") - .header("Set-Cookie", "session_token=_; Max-Age=0") - .body(Empty::new()) - .unwrap() -} -``` - -For logging in we have a very similar logic for signup with pulling multipart information of a post request. Unlike signup, we don't want to create a new user. We want to check the row with that username has a password that matches. If the credentials match then we create a new session: - -```rust -async fn post_login( - Extension(database): Extension, - multipart: Multipart, -) -> impl IntoResponse { - let data = parse_multipart(multipart) - .await - .map_err(|err| error_page(&err))?; - - if let (Some(username), Some(password)) = (data.get("username"), data.get("password")) { - const LOGIN_QUERY: &str = "SELECT id, password FROM users WHERE users.username = $1;"; - - let row: Option<(i32, String)> = sqlx::query_as(LOGIN_QUERY) - .bind(username) - .fetch_optional(database) - .await - .unwrap(); - - let (user_id, hashed_password) = if let Some(row) = row { - row - } else { - return Err(LoginError::UserDoesNotExist); - }; - - // Verify password against PHC string - let parsed_hash = PasswordHash::new(&hashed_password).unwrap(); - if let Err(_err) = Pbkdf2.verify_password(password.as_bytes(), &parsed_hash) { - return Err(LoginError::WrongPassword); - } - - let session_token = new_session(database, random, user_id); - - - Ok(set_cookie(&session_token)) - } else { - Err(error_page(&LoginError::NoData)) - } -} -``` - -Then we refer back to the [signup section](#using-html-forms) and replicate the same HTML form and handler that renders the Tera template as seen before but for a login screen. At the end of that we can add two new routes with three handlers completing the demo: - -```rust -#[shuttle_service::main] -async fn axum( - #[shuttle_shared_db::Postgres] pool: Database -) -> ShuttleAxum { - // tera, middleware and random creation as before - - let router = Router::new() - // ... - .route("/logout", post(logout_response)) - .route("/login", get(get_login).post(post_login)) - // ... - - // Wrap and return router as before -} -``` - -## Deployment - -This is great, we now have a site with signup and login functionality. But we have no users, our friends can't log in on our localhost. We want it live on the interwebs. 
Luckily we are using shuttle, so it is as simple as: - -Create a project, this will start an isolated deployer container for you under the hood: -``` -cargo shuttle project new -``` - -Finally, to deploy your app, all you need to do is: -``` -cargo shuttle deploy -``` - -Because of our `#[shuttle_service::main]` annotation and out-the-box Axum support our deployment doesn't need any prior config, it is instantly live! - -Now you can go ahead with these concepts and add functionality for listing and deleting users. [The full demo implements these if you are looking for clues](https://github.com/kaleidawave/axum-shuttle-postgres-authentication-demo). - -## Thoughts building the tutorial and other ideas on where to take it - -This demo includes the minimum required for authentication. Hopefully, the concepts and snippets are useful for building it into an existing site or for starting a site that needs authentication. If you were to continue, it would be as simple as more fields onto the user object or building relations with the id field on the user's table. I will leave it out with some of my thoughts and opinions while building the site as well as things you could try extending it with. - -For templating Tera is great. I like how I separate the markup into external files rather than bundling it into `src/lib.rs`. Its API is easy to use and is well documented. However, it is quite a simple system. I had a few errors where I would rename or remove templates and because the template picker for rendering uses a map it can panic at runtime if the template does not exist. It would be nice if the system allowed checking that templates exist at compile time. The data sending works on serde serialization, which is a little bit more computation overhead than I would like. It also does not support streaming. With streaming, we could send a chunk of HTML that doesn't depend on database values first, and then we can add more content when the database transaction has gone through. If it supported streaming we could avoid the all-or-nothing pages with white page pauses and start connections to services like Google Fonts earlier. Let me know what your favorite templating engine is for Rust and whether it supports those features! - -For working with the database, sqlx has typed macros. I didn't use them here but for more complex queries you might prefer the type-checking behavior. Maybe 16 bytes for storing session tokens is a bit overkill. You also might want to try sharding that table if you have a lot of sessions or using a key-value store (such as Redis) might be simpler. We also didn't implement cleaning up the sessions table, if you were storing sessions using Redis you could use the [EXPIRE command](https://redis.io/commands/expire/) to automatically remove old keys. - -This blog post is powered by shuttle! The serverless platform built for Rust. - -## [Shuttle](https://www.shuttle.rs/): Stateful Serverless for Rust - -Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process. - -If you want a batteries included and ops-free experience, [try out shuttle](https://docs.rs/shuttle-service/latest/shuttle_service/). - -
diff --git a/www/_blog/2022-09-14-serentity-discord-bot.mdx b/www/_blog/2022-09-14-serentity-discord-bot.mdx deleted file mode 100644 index 636d3cce2..000000000 --- a/www/_blog/2022-09-14-serentity-discord-bot.mdx +++ /dev/null @@ -1,488 +0,0 @@ ---- -title: Building a Discord bot in Rust -description: A tutorial on building and deploying an interactive bot in Rust with Serenity & shuttle -author: ben -tags: [rust, tutorial] -thumb: discord-bot-thumbnail.png -cover: discord-bot-thumbnail.png -date: "2022-09-14T15:00:00" ---- - -In this post, we will look at a simple way to add custom functionality to a Discord server using a bot written in Rust. We will first register a bot with Discord, then go about how to create a Serenity application that will later run on shuttle. Finally, we will make the bot do something useful, writing some Rust code to get information from an external service. - -The full code can be found in [this repository](https://github.com/kaleidawave/discord-weather-bot). - -### Registering our bot - -Before we start making our bot, we need to register it for Discord. We do that by going to [https://discord.com/developers/applications](https://discord.com/developers/applications) and creating a new application. - -![](/images/blog/discord-bot-screenshots/application-registration.png) - -The application process is also used for adding functionality to Discord but we will be only using the bot offering. Fill in the basic details and you should get to the following screen: - -![](/images/blog/discord-bot-screenshots/application_id.png) - -You want to copy the Application ID and have it handy, because we will use it to add our bot to a test server. - -Next, we want to create a bot. You can set its public username here: - -![](/images/blog/discord-bot-screenshots/bot-name.png) - -You want to click the reset token and copy this value (we will use it in a later step). This value represents the username and password as a single value that Discord uses to authenticate that our server is the one controlling the bot. You want to keep this value secret. - -You also want to tick the `MESSAGE CONTENT INTENT` setting so it can read the commands input. - -To add the bot to the server we will test on, we can use the following URL (replace `*application_id*` in the URL with the ID you copied beforehand): - -```tsx -https://discord.com/oauth2/authorize?client_id=*application_id*&scope=bot&permissions=8 -``` - -Here, we create it with `permissions=8` so that it can do everything on the server. If you are adding to another server, select only the permissions it needs. - -We now have a bot on our server: - -![](/images/blog/discord-bot-screenshots/bot-is-offline.png) - -Oh, they’re offline 😢 - -## Getting a bot online - -At this moment, our bot is not running because there is no code. We will have to write it and run it before we can start interacting with it. - -### [Serenity](https://docs.rs/serenity/latest/serenity/index.html) - -Serenity is a library for writing Discord bots (and communicating with the Discord API). We can create a new Serenity project which is readily deployable on shuttle with: `cargo shuttle init --serenity` - -If you don’t have shuttle yet, you can install it with `cargo install cargo-shuttle`. 
Afterwards, run the following in an empty directory: - -``` -cargo shuttle init --serenity -``` - -After running it you, should see the following generated in `src/lib.rs`: - -```rust -use anyhow::anyhow; -use serenity::async_trait; -use serenity::model::channel::Message; -use serenity::model::gateway::Ready; -use serenity::prelude::*; -use shuttle_secrets::SecretStore; -use tracing::{error, info}; - -struct Bot; - -#[async_trait] -impl EventHandler for Bot { - async fn message(&self, ctx: Context, msg: Message) { - if msg.content == "!hello" { - if let Err(e) = msg.channel_id.say(&ctx.http, "world!").await { - error!("Error sending message: {:?}", e); - } - } - } - - async fn ready(&self, _: Context, ready: Ready) { - info!("{} is connected!", ready.user.name); - } -} - -#[shuttle_service::main] -async fn serenity( - #[shuttle_secrets::Secrets] secret_store: SecretStore, -) -> shuttle_service::ShuttleSerenity { - // Get the discord token set in `Secrets.toml` - let token = if let Some(token) = secret_store.get("DISCORD_TOKEN") { - token - } else { - return Err(anyhow!("'DISCORD_TOKEN' was not found").into()); - }; - - // Set gateway intents, which decides what events the bot will be notified about - let intents = GatewayIntents::GUILD_MESSAGES | GatewayIntents::MESSAGE_CONTENT; - - let client = Client::builder(&token, intents) - .event_handler(Bot) - .await - .expect("Err creating client"); - - Ok(client) -} -``` - -### Building an interaction for our bot - -We want to call our bot when chatting in a text channel. Discord enables this with [slash commands](https://discord.com/blog/slash-commands-are-here). - -Slash commands can be server-specific (servers are named as `guilds` in Discords API documentation) or application specific (across all servers the bot is in). For testing, we will only enable it on a single guild/server. This is because the application-wide commands can take an hour to fully register whereas the guild/server specific ones are instant, so we can test the new commands immediately. - -You can copy the guild ID by right-clicking here on the server name and click `copy ID` ([you will need developer mode enabled to do this](https://www.howtogeek.com/714348/how-to-enable-or-disable-developer-mode-on-discord/)): - -![](/images/blog/discord-bot-screenshots/guild-id.png) - -Now that we have the information for setup, we can start writing our bot and its commands. - -We will first get rid of the `async fn message` hook as we won’t be using it in this example. In the `ready` hook we will call `set_application_commands` with a `GuildId` to register a command with Discord. Here we register a `hello` command with a description and no parameters (Discord refers to these as options). - -```rust -#[async_trait] -impl EventHandler for Bot { - async fn ready(&self, ctx: Context, ready: Ready) { - info!("{} is connected!", ready.user.name); - - let guild_id = GuildId(*your guild id*); - - let commands = GuildId::set_application_commands(&guild_id, &ctx.http, |commands| { - commands.create_application_command(|command| { command.name("hello").description("Say hello") }) - }).await.unwrap(); - - info!("{:#?}", commands); - } -} -``` - -> Serenity has a bit of a different way of registering commands using a callback. If you are working on a larger command application, [poise](https://docs.rs/poise/latest/poise/) (which builds on Serenity) might be better suited. -> - -With our command registered, we will now add a hook for when these commands are called using `interaction_create`. 
- -```rust -#[async_trait] -impl EventHandler for Bot { - async fn ready(&self, ctx: Context, ready: Ready) { - // ... - } - - async fn interaction_create(&self, ctx: Context, interaction: Interaction) { - if let Interaction::ApplicationCommand(command) = interaction { - let response_content = match command.data.name.as_str() { - "hello" => "hello".to_owned(), - command => unreachable!("Unknown command: {}", command), - }; - - let create_interaction_response = - command.create_interaction_response(&ctx.http, |response| { - response - .kind(InteractionResponseType::ChannelMessageWithSource) - .interaction_response_data(|message| message.content(response_content)) - }); - - if let Err(why) = create_interaction_response.await { - eprintln!("Cannot respond to slash command: {}", why); - } - } - } -} -``` - -### Trying it out - -Now with the code written we can test it locally. Before we do that we have to authenticate the bot with Discord. We do this with the value we got from "Reset Token" on the bot screen in one of the previous steps. To register a secret with shuttle we create a `Secrets.toml` file with a key value pair. This pair is read by the `secret_store.get("DISCORD_TOKEN")` call in the `ready` hook: - -``` -# Secrets.toml -DISCORD_TOKEN="*your discord token*" -DISCORD_GUILD_ID="*the guild we are testing on*" -``` - -`cargo shuttle run` - -We should see that our bot now displays as online: - -![](/images/blog/discord-bot-screenshots/bot-is-online.png) - -When typing, we should see our command come up with its description: - -![](/images/blog/discord-bot-screenshots/command-description.png) - -Our bot should respond with "hello" to our command: - -![](/images/blog/discord-bot-screenshots/command-result.png) - -Wow! Let’s make our bot do something a little more useful. - -### Making the bot do something - -[There is plenty of free APIs](https://github.com/public-apis/public-apis) that can be used for getting information on a variety of topics. - -For this demo, we are going to build a bot that gives a forecast for a location. I used the [AccuWeather API](https://developer.accuweather.com/) for this demo. If you are following this tutorial 1:1 you can go and register an application to get an access key. If you are using a different API this is still the sort of process you would follow. - -To get a forecast using the API requires two requests: - -1. Get a location ID for a named location -2. Get the forecast at the location ID - -The API requires making network requests and it returns a JSON response. We can make the requests with `cargo add reqwest -F json` and deserialize the results to structures using serde, with `cargo add serde`. We will then have a function that chains the two requests together and deserializes the forecast to a readable result. - -> You can skip some of the boilerplate by using [direct access on untyped values](https://docs.rs/serde_json/latest/serde_json/#operating-on-untyped-json-values). But we will opt for the better strongly typed structured approach. - -Here we type some of the structures returned by the API and add `#[derive(Deserialize)]` so they can be decoded from JSON. All the keys are in *`PascalCase`* so we use the `#[serde(rename_all = "PascalCase")]` helper attribute to stay aligned with Rust standards. Some are completely different from the Rust field name so we use `#[serde(alias = ...)]` on the field to set its matching JSON representation. 
- -```rust -// In weather.rs -use serde::Deserialize; - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "PascalCase")] -pub struct Location { - key: String, - localized_name: String, - country: Country, -} - -impl Display for Location { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}, {}", self.localized_name, self.country.id) - } -} - -#[derive(Deserialize, Debug)] -pub struct Country { - #[serde(alias = "ID")] - pub id: String, -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "PascalCase")] -pub struct Forecast { - pub headline: Headline, -} - -#[derive(Deserialize, Debug)] -pub struct Headline { - #[serde(alias = "Text")] - pub overview: String, -} -``` - -> The above skips [a lot of the fields returned by the API](https://developer.accuweather.com/accuweather-forecast-api/apis/get/forecasts/v1/daily/1day/%7BlocationKey%7D), only opting for the ones we will use in this demo. If you wanted to type all the fields you could try the new [type from JSON feature in rust-analyzer](https://rust-analyzer.github.io/thisweek/2022/08/15/changelog-142.html#new-features) to avoid having to write as much. - -Our location request call also fails if the search we put in returns no places. We will create an intermediate type that represents this case and implements `std::error::Error`: - -```rust -// Again in weather.rs -use std::fmt::Display; - -#[derive(Debug)] -pub struct CouldNotFindLocation { - place: String, -} - -impl Display for CouldNotFindLocation { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "Could not find location '{}'", self.place) - } -} - -impl std::error::Error for CouldNotFindLocation {} -``` - -Now with all the types written, we create a new `async` function that, given a place and a client, will return the forecast along with the location: - -```rust -// Again in weather.rs -pub async fn get_forecast( - place: &str, - api_key: &str, - client: &Client, -) -> Result<(Location, Forecast), Box> { - // Endpoints we will use - const LOCATION_REQUEST: &str = "http://dataservice.accuweather.com/locations/v1/cities/search"; - const DAY_REQUEST: &str = "http://dataservice.accuweather.com/forecasts/v1/daily/1day/"; - - // The URL to call combined with our API_KEY and the place (via the q search parameter) - let url = format!("{}?apikey={}&q={}", LOCATION_REQUEST, api_key, place); - // Make the request we will call - let request = client.get(url).build().unwrap(); - // Execute the request and await a JSON result that will be converted to a - // vector of locations - let resp = client - .execute(request) - .await? - .json::>() - .await?; - - // Get the first location. If empty respond with the above declared - // `CouldNotFindLocation` error type - let first_location = resp - .into_iter() - .next() - .ok_or_else(|| CouldNotFindLocation { - place: place.to_owned(), - })?; - - // Now have the location combine the key/identifier with the URL - let url = format!("{}{}?apikey={}", DAY_REQUEST, first_location.key, api_key); - - let request = client.get(url).build().unwrap(); - let forecast = client - .execute(request) - .await? - .json::() - .await?; - - // Combine the location with the foreact - Ok((first_location, forecast)) -} -``` - -Now we have a function to get the weather, **given a `reqwest` client and a place**, we can wire that into the bots logic. - -### Setting up the reqwest client - -Our `get_forecast` requires a `reqwest` Client and the weather API key. 
We will add some fields to our bot for holding this data and initialize this in the `shuttle_service::main` function. Using the secrets feature we can get our weather API key: - -```rust -// In lib.rs -struct Bot { - weather_api_key: String, - client: reqwest::Client, - discord_guild_id: GuildId, -} - -#[shuttle_service::main] -async fn serenity(#[shuttle_secrets::Secrets] secret_store: SecretStore) -> shuttle_service::ShuttleSerenity { - // Get the discord token set in `Secrets.toml` - let token = secret_store - .get("DISCORD_TOKEN") - .context("'DISCORD_TOKEN' was not found")?; - - let weather_api_key = secret_store - .get("WEATHER_API_KEY") - .context("'WEATHER_API_KEY' was not found")?; - - let discord_guild_id = secret_store - .get("DISCORD_GUILD_ID") - .context("'DISCORD_GUILD_ID' was not found")?; - - // Set gateway intents, which decides what events the bot will be notified about - let intents = GatewayIntents::GUILD_MESSAGES | GatewayIntents::MESSAGE_CONTENT; - - let client = Client::builder(&token, intents) - .event_handler(Bot { - weather_api_key, - client: reqwest::Client::new(), - discord_guild_id: GuildId(discord_guild_id.parse().unwrap()) - }) - .await - .expect("Err creating client"); - - Ok(client) -} -``` - -### Registering a /weather command - -We will add our new command with a place option/parameter. Back in the `ready` hook, we can add an additional command alongside the existing `hello` command: - -```rust -let commands = GuildId::set_application_commands(&guild_id, &ctx.http, |commands| { - commands - .create_application_command(|command| { command.name("hello").description("Say hello") }) - .create_application_command(|command| { - command - .name("weather") - .description("Display the weather") - .create_option(|option| { - option - .name("place") - .description("City to lookup forecast") - .kind(CommandOptionType::String) - .required(true) - }) - }) -}).await.unwrap(); -``` - -Discord allows us to set the expected type and whether it is required. Here, the place needs to be a string and is required. - -Now in the interaction handler, we can add a new branch to the match tree. We pull out the option/argument corresponding to `place` and extract its value. Because of the restrictions made when setting the option we can assume that it is well-formed (unless Discord sends a bad request) and thus the unwraps here. After we have the arguments of the command we call the `get_forecast` function and format the results into a string to return. 
- -```rust -"weather" => { - let argument = command - .data - .options - .iter() - .find(|opt| opt.name == "place") - .cloned(); - - let value = argument.unwrap().value.unwrap(); - let place = value.as_str().unwrap(); - let result = weather::get_forecast(place).await; - - match result { - Ok((location, forecast)) => format!( - "Forecast: {} in {}", - forecast.headline.overview, location - ), - Err(err) => { - format!("Err: {}", err) - } - } -} -``` - -### Running - -Now, we have these additional secrets we are using and we will add them to the `Secrets.toml` file: - -```toml -# In Secrets.toml -# Existing secrets: -DISCORD_TOKEN="***" -DISCORD_GUILD_ID="***" -# New secret -WEATHER_API_KEY="***" -``` - -With the secrets added, we can run the server: - -`cargo shuttle run` - -While typing, we should see our command come up with the options/parameters: - -![](/images/blog/discord-bot-screenshots/weather-input.png) - -Entering “Paris” as the place we get a result with a forecast: - -![](/images/blog/discord-bot-screenshots/weather-forecast.png) - -And entering a location that isn’t registered returns an error, thanks to the error handling we added to the `get_forecast` function: - -![](/images/blog/discord-bot-screenshots/weather-error.png) - -### Deploying on shuttle - -With all of that setup, it is really easy to get your bot hosted and running without having to run your PC 24/7. - -Create a project, this will start an isolated deployer container for you under the hood: -```bash -cargo shuttle project new -``` -Finally, to deploy your app, all you need to do is: -```bash -cargo shuttle deploy -``` - -And you are good to go. Easy-pease, right? - -You could now take this idea even further: - -- Use a different API, to create a bot that can return [funny facts](https://asli-fun-fact-api.herokuapp.com/) or return [new spaceflights](https://spaceflightnewsapi.net/) -- Maybe you could use one of shuttle's provided databases to remember certain information about a user -- Expand on the weather forecast idea by adding more advanced options and follow-ups to command options -- Use the [localization information](https://discord.com/developers/docs/interactions/application-commands#localization) to return information in other languages - ---- - -This blog post is powered by shuttle! If you have any questions, or want to provide feedback, join our [Discord server](https://discord.gg/shuttle)! - -## [Shuttle](https://www.shuttle.rs/): The Rust-native, open source, cloud development platform. - -Deploying and managing your Rust web apps can be an expensive, anxious and time consuming process. - -If you want a batteries included and ops-free experience, [try out Shuttle](https://github.com/shuttle-hq/shuttle). - -
diff --git a/www/app-env.d.ts b/www/app-env.d.ts deleted file mode 100644 index e69de29bb..000000000 diff --git a/www/components/AnnouncementBar/index.tsx b/www/components/AnnouncementBar/index.tsx deleted file mode 100644 index 6a4912eb7..000000000 --- a/www/components/AnnouncementBar/index.tsx +++ /dev/null @@ -1,41 +0,0 @@ -import { faTimes } from "@fortawesome/free-solid-svg-icons"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import React, { useState } from "react"; -import { createStateContext } from "react-use"; -import ExternalLink from "../ExternalLink"; -import styles from "./styles.module.css"; - -export const [useAnnouncementBarIsClosed, AnnouncementBarIsClosedProvider] = - createStateContext(false); - -export default function AnnouncementBar() { - const [isClosed, setClosed] = useAnnouncementBarIsClosed(); - - if (isClosed) { - return null; - } - - return ( -
-

- ⭐️ If you like Shuttle,  - - give it a star on GitHub - - ! -

- - -
- ); -} diff --git a/www/components/AnnouncementBar/styles.module.css b/www/components/AnnouncementBar/styles.module.css deleted file mode 100644 index bad0ed0ba..000000000 --- a/www/components/AnnouncementBar/styles.module.css +++ /dev/null @@ -1,46 +0,0 @@ -.announcement { - position: relative; - width: 100%; - height: 2rem; - background-color: #7777dd; - color: white; -} - -.announcement__close { - position: absolute; - display: flex; - right: 0; - top: 0; - height: 100%; - width: 1rem; - margin: 0 0.5rem; - padding: 0; - justify-content: center; - align-items: center; - border: none; - outline: none; - background: none; - color: inherit; - font-size: 2rem; - cursor: pointer; -} - -.announcement__content { - display: flex; - height: 100%; - width: 100%; - margin: 0; - align-items: center; - justify-content: center; - font-size: 14px; - font-weight: bold; -} - -.announcement__link { - color: inherit; - text-decoration: underline; -} - -.announcement__link:hover { - color: inherit; -} diff --git a/www/components/ApiKeyModal.tsx b/www/components/ApiKeyModal.tsx deleted file mode 100644 index 9bd21a14e..000000000 --- a/www/components/ApiKeyModal.tsx +++ /dev/null @@ -1,139 +0,0 @@ -import { Fragment, useState, createContext, useContext } from "react"; -import { Dialog, Transition } from "@headlessui/react"; -import { XIcon } from "@heroicons/react/outline"; -import { createStateContext } from "react-use"; -import { useUser } from "@auth0/nextjs-auth0"; -import Code from "./Code"; -import { DISCORD_URL } from "../lib/constants"; -import ExternalLink from "./ExternalLink"; - -export const [useApiKeyModalState, ApiKeyModalStateProvider] = - createStateContext(false); - -export default function ApiKeyModal() { - const [open, setOpen] = useApiKeyModalState(); - const { user, error, isLoading } = useUser(); - - const api_key = user?.api_key as string | undefined; - - return ( - - -
- - - - - {/* This element is to trick the browser into centering the modal contents. */} - - -
-
- -
-
- {user && ( - <> - {api_key && ( -
-
- - Api key - -
-

- copy/paste the API key below to the "cargo shuttle - login" dialog: -

- - -

- alternatively, you can execute the command below: -

- -
-
-
- )} - {!api_key && ( -
-
- - Api key not found! - -
-

- {"This shouldn't happen. Please contact us on "} - - Discord - - {" to resolve the issue"} -

-
-
-
- )} - - )} -
- -
- -
-
-
-
-
-
- ); -} diff --git a/www/components/Code.tsx b/www/components/Code.tsx deleted file mode 100644 index bd7e587ce..000000000 --- a/www/components/Code.tsx +++ /dev/null @@ -1,56 +0,0 @@ -import { faClipboard } from "@fortawesome/free-regular-svg-icons"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import ReactTooltip from "react-tooltip"; -import NoSsr from "./NoSsr"; -import { gtagEvent } from "../lib/gtag"; -type CodeProps = { - readonly id: string; - readonly code: string; -}; - -const copyToClipboard = (code: string, id: string) => { - gtagEvent({ - action: "copy_install_script", - category: "Code", - label: "Copied Install Script", - value: id, - }); - - navigator.clipboard.writeText(code); -}; - -export default function Code({ code, id }: CodeProps) { - return ( -
-
-        
-          $ 
-          {code}
-        
-        
-      
- - copyToClipboard(code, id)} - > - Copied to clipboard! - - -
- ); -} diff --git a/www/components/CodeBlock.tsx b/www/components/CodeBlock.tsx deleted file mode 100644 index 569f44b06..000000000 --- a/www/components/CodeBlock.tsx +++ /dev/null @@ -1,58 +0,0 @@ -import { PrismLight as SyntaxHighlighter } from "react-syntax-highlighter"; -import rust from "react-syntax-highlighter/dist/cjs/languages/prism/rust"; -import oneDark from "react-syntax-highlighter/dist/cjs/styles/prism/one-dark"; -import { useWindowSize } from "react-use"; -import Copy from "./Copy"; -import HeightMagic from "./HeightMagic"; - -SyntaxHighlighter.registerLanguage("rust", rust); - -cargo.displayName = "cargo"; -cargo.aliases = []; - -function cargo(Prism: any) { - Prism.languages.cargo = { - builtin: /\b(?:Packaging|Archiving|Compiling|Finished)\b/, - }; -} - -SyntaxHighlighter.registerLanguage("cargo", cargo); - -interface Props { - readonly language: string; - readonly code: string; - readonly showLineNumbers?: boolean; - readonly name: string; -} - -export default function CodeBlock({ - code, - language, - showLineNumbers, - name, -}: Props) { - useWindowSize(); - - return ( -
- - - - - {code} - - -
- ); -} diff --git a/www/components/CodeSnippets.tsx b/www/components/CodeSnippets.tsx deleted file mode 100644 index 7f948388c..000000000 --- a/www/components/CodeSnippets.tsx +++ /dev/null @@ -1,210 +0,0 @@ -import classnames from "classnames"; -import { useState } from "react"; -import CodeBlock from "./CodeBlock"; - -export default function CodeSnippets() { - const [activeTab, setActiveTab] = useState(1); - - return ( -
-
-
-

- How it works -

-

- Shuttle is built for Rust. -

- -

- A simple cargo command packages up your application, ships it to the - shuttle build cluster where it's incrementally compiled and - automatically served on a unique subdomain. -

- -

- Shuttle uses simple but powerful annotations to understand your - dependencies. Infrastructure dependencies like databases or - key-value stores are spun up for you and everything is automatically - wired together from the get-go. -

-

- It feels a little magical. -

-
-
-
-
- - - -
-
- -
-
- -
-
-
- ); -} - -const CARGO_DEPLOYS = ` -$ cargo shuttle deploy - Packaging url-shortener v0.1.0 (/private/shuttle/examples/url-shortener) - Archiving Cargo.toml - Archiving Cargo.toml.orig - Archiving README.md - Archiving Shuttle.toml - Archiving migrations/20220324143837_urls.sql - Archiving src/lib.rs - Compiling tracing-attributes v0.1.20 - Compiling tokio-util v0.6.9 - Compiling multer v2.0.2 - Compiling hyper v0.14.18 - Compiling rocket_http v0.5.0-rc.1 - Compiling rocket_codegen v0.5.0-rc.1 - Compiling rocket v0.5.0-rc.1 - Compiling shuttle-service v0.2.5 - Compiling url-shortener v0.1.0 (/opt/unveil/crates/s-2) - Finished dev [unoptimized + debuginfo] target(s) in 1m 01s - - Project: url-shortener - Deployment Id: 3d08ac34-ad63-41c1-836b-99afdc90af9f - Deployment Status: DEPLOYED - Host: url-shortener.shuttleapp.rs - Created At: 2022-04-01 08:32:34.412602556 UTC - Database URI: postgres://***:***@pg.shuttle.rs/db-url-shortener - -❯ -`.trim(); - -const USING_SQLX = ` -use rocket::{get, routes, State}; -use sqlx::PgPool; - -struct MyState(PgPool); - -#[get("/hello")] -fn hello(state: &State) -> &'static str { - // Do things with \`state.0\`... - "Hello, Postgres!" -} - -#[shuttle_service::main] -async fn rocket( - #[shared::Postgres] pool: PgPool -) -> shuttle_service::ShuttleRocket { - let state = MyState(pool); - - Ok( - rocket::build() - .manage(state) - .mount("/", routes![hello]) - ) -} - -`.trim(); - -const HELLO_CLOUD = ` -use rocket::{get, routes}; - -#[get("/hello")] -fn hello() -> &'static str { - "Hello, world!" -} - -#[shuttle_service::main] -async fn init() -> shuttle_service::ShuttleRocket { - Ok( - rocket::build() - .mount("/", routes![hello]) - ) -} -`.trim(); - -const USING_AXUM = ` -use axum::{routing::get, Router}; -use sync_wrapper::SyncWrapper; - -async fn hello_world() -> &'static str { - "Hello, world!" 
-} - -#[shuttle_service::main] -async fn axum() -> shuttle_service::ShuttleAxum { - let router = Router::new() - .route("/hello", get(hello_world)); - let sync_wrapper = SyncWrapper::new(router); - - Ok(sync_wrapper) -} -`.trim(); - -const tabs = [ - { - name: "Cargo Deploys", - code: CARGO_DEPLOYS, - showLineNumbers: false, - language: "cargo", - }, - { - name: "Hello Cloud", - code: HELLO_CLOUD, - showLineNumbers: true, - language: "rust", - }, - { - name: "Using Sqlx", - code: USING_SQLX, - showLineNumbers: true, - language: "rust", - }, - { - name: "Using Axum", - code: USING_AXUM, - showLineNumbers: true, - language: "rust", - }, -]; diff --git a/www/components/Copy.tsx b/www/components/Copy.tsx deleted file mode 100644 index aca68c434..000000000 --- a/www/components/Copy.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import { ClipboardCheckIcon, ClipboardIcon } from "@heroicons/react/outline"; -import { useEffect, useState } from "react"; -import { useCopyToClipboard, useWindowSize } from "react-use"; -import { gtagEvent } from "../lib/gtag"; - -interface Props { - readonly code: string; - readonly name?: string; -} - -export default function Copy({ code, name }: Props) { - const [copyToClipboardState, copyToClipboard] = useCopyToClipboard(); - const [copied, setCopied] = useState(false); - - useEffect(() => { - let timeout = setTimeout(() => { - setCopied(false); - }, 1500); - - return () => void clearTimeout(timeout); - }, [copied]); - - return ( - - ); -} diff --git a/www/components/Examples.tsx b/www/components/Examples.tsx deleted file mode 100644 index 575ec570f..000000000 --- a/www/components/Examples.tsx +++ /dev/null @@ -1,82 +0,0 @@ -import ExternalLink from "./ExternalLink"; - -interface Card { - readonly title: string; - readonly description: string; - readonly link: string; - readonly icon: string; -} - -const cards: Card[] = [ - { - title: "Persist with Postgres", - description: - "Build any web service with a fully managed database using Rocket and sqlx", - link: "https://github.com/shuttle-hq/shuttle/tree/main/examples/rocket/postgres", - icon: "/images/icon1.svg", - }, - { - title: "Url Shortener", - description: - "A URL shortener that you can use from your terminal - built with shuttle, rocket and postgres/sqlx.", - link: "https://github.com/shuttle-hq/shuttle/tree/main/examples/rocket/url-shortener", - icon: "/images/icon2.svg", - }, - { - title: "JWT authentication", - description: - "Guard endpoints using self-issued JWT tokens while keeping public endpoint open", - link: "https://github.com/shuttle-hq/shuttle/tree/main/examples/rocket/authentication", - icon: "/images/icon3.svg", - }, -]; - -export default function Examples() { - return ( -
-
-
-

- From code to cloud in a minute -

-

- Take your code to full-featured cloud infrastructure in under a - minute. Don't take our word for it, see it for yourself. -

-
-
- {cards.map((card, index) => ( - -
- -
-
-
-
-

- {card.title} -

-

- {card.description} -

-
-
-
-
- ))} -
-
-
- ); -} diff --git a/www/components/ExternalLink.tsx b/www/components/ExternalLink.tsx deleted file mode 100644 index bc8c92203..000000000 --- a/www/components/ExternalLink.tsx +++ /dev/null @@ -1,16 +0,0 @@ -export default function ExternalLink({ - ref, - href, - target, - rel, - ...props -}: JSX.IntrinsicElements["a"]): JSX.Element { - return ( - - ); -} diff --git a/www/components/Features.tsx b/www/components/Features.tsx deleted file mode 100644 index 7a8b24d67..000000000 --- a/www/components/Features.tsx +++ /dev/null @@ -1,73 +0,0 @@ -import { CheckIcon } from "@heroicons/react/outline"; - -const features = [ - { - name: "Infrastructure from Rust", - description: - "Traits and annotations define all your infrastructure from your service code", - }, - { - name: "Serverless", - description: - "All the infrastructure and wiring you need is handled by the deployer runtime. No VMs or containers", - }, - { - name: "Databases", - description: - "Wiring up a service to a persistent database is as easy as deriving a trait", - }, - { - name: "Entirely open-source", - description: - "A completely free and open-source project, with no vendor lock-in or hidden code", - }, - { - name: "Self-hosting friendly", - description: - "Deploy the entire platform under your own AWS account for extra security and isolation", - }, - { - name: "Fast deploy times", - description: - "Deploy new versions as quickly as running an incremental build, all with zero downtime", - }, -]; - -export default function Features() { - return ( -
-
-
-

- Serverless for Rust -

-

- The only serverless platform that lets you control your - infrastructure from Rust code as easily as deriving a trait. -

-
-
- {features.map((feature) => ( -
-
-
-
- {feature.description} -
-
- ))} -
-
-
- ); -} diff --git a/www/components/Footer.tsx b/www/components/Footer.tsx deleted file mode 100644 index aed55c9fb..000000000 --- a/www/components/Footer.tsx +++ /dev/null @@ -1,232 +0,0 @@ -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { - faGithub, - faTwitter, - faDiscord, -} from "@fortawesome/free-brands-svg-icons"; -import { DISCORD_URL, GITHUB_URL, TWITTER_URL } from "../lib/constants"; -import ExternalLink from "./ExternalLink"; -import InternalLink from "./InternalLink"; - -// const navigation = [ -// { name: "Solutions", href: "#" }, -// { name: "Pricing", href: "#" }, -// { name: "Docs", href: "#" }, -// { name: "Company", href: "#" }, -// ]; - -const communities = [ - { - name: "Github", - href: GITHUB_URL, - icon: faGithub, - }, - { - name: "Discord", - href: DISCORD_URL, - icon: faDiscord, - }, - { - name: "Twitter", - href: TWITTER_URL, - icon: faTwitter, - }, -]; - -const navigation = [ - { - title: "Product", - links: [ - { name: "Features", href: "/#features" }, - { name: "Examples", href: "/#examples" }, - { name: "Code Snippets", href: "/#code-snippets" }, - { - name: "Roadmap", - href: "https://github.com/orgs/shuttle-hq/projects/4/views/2", - }, - ], - }, - { - title: "Company", - links: [ - { name: "Blog", href: "/blog" }, - { - name: "Careers", - href: "https://www.workatastartup.com/companies/shuttle", - }, - ], - }, - { - title: "Developers", - links: [ - { - name: "Documentation", - href: "https://docs.rs/shuttle-service/latest/shuttle_service/", - }, - { - name: "Guides", - href: "https://github.com/shuttle-hq/shuttle/tree/main/examples", - }, - { - name: "Contributors", - href: "https://github.com/shuttle-hq/shuttle/graphs/contributors", - }, - ], - }, - { - title: "Community", - links: [ - ...communities.map(({ name, href }) => ({ name, href })), - { - name: "Linkedin", - href: "https://www.linkedin.com/company/shuttle-yc/", - }, - ], - }, -]; - -export default function Footer() { - return ( - <> -
- {communities.map((community, index) => ( - - - - ))} -
- -
-

- Let's make Rust the next language of cloud-native -

-

- We love you Go, but Rust is just better. -

-
- {communities.map((community, index) => ( - - - {community.name} - - ))} -
-
- - {/*
-

- Ready to dive in? - - Start your free trial today. - -

-
- - Get Started - - - - Join Discord - -
-
*/} - -
-
- {navigation.map((col, index) => ( -
-

- {col.title} -

-
    - {col.links.map((link, index) => ( -
  • - {link.href.startsWith("/") ? ( - - {link.name} - - ) : ( - - {link.name} - - )} -
  • - ))} -
-
- ))} -
- -
-

© 2022 shuttle

- {/*

- {navigation.map((link, index) => ( - - {link.name} - - ))} -

*/} -

- Backed by - - Y - - - - - - Combinator -

-
-
- - ); -} diff --git a/www/components/Header.tsx b/www/components/Header.tsx deleted file mode 100644 index 8f9ff7ac4..000000000 --- a/www/components/Header.tsx +++ /dev/null @@ -1,91 +0,0 @@ -import { useRouter } from "next/router"; -import InternalLink from "./InternalLink"; -import { SHUTTLE_DOCS_URL } from "../lib/constants"; -import ExternalLink from "./ExternalLink"; -import ThemeSwitch from "./ThemeSwitch"; -import NoSsr from "./NoSsr"; -import LoginButton from "./LoginButton"; - -const navigation = [ - { name: "Features", href: "/#features", internal: true }, - { name: "Examples", href: "/#examples", internal: true }, - { name: "Docs", href: SHUTTLE_DOCS_URL, internal: false }, - { name: "Blog", href: "/blog", internal: true }, - { name: "Pricing", href: "/pricing", internal: true }, -]; - -export default function Header() { - const { basePath } = useRouter(); - - return ( -
- -
- ); -} diff --git a/www/components/HeightMagic.tsx b/www/components/HeightMagic.tsx deleted file mode 100644 index b4a15dfa5..000000000 --- a/www/components/HeightMagic.tsx +++ /dev/null @@ -1,24 +0,0 @@ -import { ReactNode, useState } from "react"; - -interface Props { - readonly children?: ReactNode | undefined; -} - -export default function HeightMagic({ children }: Props) { - const [height, setHeight] = useState(0); - - return ( -
-
{ - setHeight(el?.getBoundingClientRect().height ?? 0); - }} - > - {children} -
-
- ); -} diff --git a/www/components/Hero.tsx b/www/components/Hero.tsx deleted file mode 100644 index 1df39e4d3..000000000 --- a/www/components/Hero.tsx +++ /dev/null @@ -1,77 +0,0 @@ -import { useRouter } from "next/router"; -import Code from "./Code"; -import { - DISCORD_URL, - SHUTTLE_DOCS_URL, - SITE_DESCRIPTION, - SITE_TITLE, -} from "../lib/constants"; -import classnames from "classnames"; -import { useAnnouncementBarIsClosed } from "./AnnouncementBar"; -import ExternalLink from "./ExternalLink"; - -export default function Hero() { - const { basePath } = useRouter(); - const [announcementBarIsClosed] = useAnnouncementBarIsClosed(); - - return ( -
-
-
-
- {/*
- Shuttle - - ALPHA - -
*/} - -
-
- {SITE_TITLE} -
-
- {SITE_DESCRIPTION} -
-
-
- -
- -
- - Get Started - - - - Join Discord - -
-
-
-
-
- ); -} diff --git a/www/components/InternalLink.tsx b/www/components/InternalLink.tsx deleted file mode 100644 index 438aa26e3..000000000 --- a/www/components/InternalLink.tsx +++ /dev/null @@ -1,62 +0,0 @@ -import Link, { LinkProps } from "next/link"; -import { useRouter } from "next/router"; - -export default function InternalLink({ - href, - as, - replace, - scroll, - shallow, - passHref, - prefetch, - locale, - ...props -}: JSX.IntrinsicElements["a"] & LinkProps): JSX.Element { - const router = useRouter(); - - if (!href) { - return ; - } - - return ( - -
{ - if (router.pathname === href) { - e.preventDefault(); - - document.body.scrollIntoView({ - behavior: "smooth", - }); - - setTimeout(() => { - router.replace(href); - }, 350); - } else if (href.startsWith(router.pathname + "#")) { - e.preventDefault(); - - document - .querySelector(href.slice(router.pathname.length)) - .scrollIntoView({ - behavior: "smooth", - }); - - setTimeout(() => { - router.replace(href); - }, 350); - } - }} - /> - - ); -} diff --git a/www/components/LoginButton.tsx b/www/components/LoginButton.tsx deleted file mode 100644 index 251be04ea..000000000 --- a/www/components/LoginButton.tsx +++ /dev/null @@ -1,51 +0,0 @@ -import { useApiKeyModalState } from "./ApiKeyModal"; -import { useUser } from "@auth0/nextjs-auth0"; -import { gtagEvent } from "../lib/gtag"; - -export default function LoginButton() { - const { user, error, isLoading } = useUser(); - const [open, setOpen] = useApiKeyModalState(); - - const label = "Log In"; - const className = - "inline-block w-full rounded border border-slate-900 bg-transparent py-1 px-4 text-center text-base font-medium text-slate-900 transition-colors hover:bg-slate-800 hover:text-slate-100 dark:border-white dark:text-white hover:dark:bg-white hover:dark:text-dark-700"; - - if (user) { - return ( - - ); - } - - return ( - { - gtagEvent({ - action: "new_login_click", - category: "Login", - label: "New Session Login", - // todo: track api-key? - // value: api-key, - }); - }} - > - {label} - - ); -} diff --git a/www/components/NoSsr.tsx b/www/components/NoSsr.tsx deleted file mode 100644 index a22b80265..000000000 --- a/www/components/NoSsr.tsx +++ /dev/null @@ -1,11 +0,0 @@ -import React, { useEffect, useState } from "react"; - -export default function NoSsr({ children }): JSX.Element { - const [isMounted, setMount] = useState(false); - - useEffect(() => { - setMount(true); - }, []); - - return <>{isMounted ? children : null}; -} diff --git a/www/components/ThemeSwitch.tsx b/www/components/ThemeSwitch.tsx deleted file mode 100644 index 240f47d1a..000000000 --- a/www/components/ThemeSwitch.tsx +++ /dev/null @@ -1,43 +0,0 @@ -import { useLocalStorage, useMedia } from "react-use"; -import { useEffect } from "react"; -import { MoonIcon, SunIcon } from "@heroicons/react/solid"; - -type StorageTheme = "dark" | "light" | "system"; - -export default function ThemeSwitch() { - const osTheme = useMedia("(prefers-color-scheme: dark)") ? "dark" : "light"; - const [storageTheme, setStorageTheme] = useLocalStorage( - "app-theme", - "system" - ); - const theme = storageTheme === "system" ? osTheme : storageTheme; - const isDarkTheme = theme === "dark"; - - function updateTheme(theme: "dark" | "light") { - setStorageTheme(theme === osTheme ? "system" : theme); - } - - useEffect(() => { - if (isDarkTheme) { - document.body.classList.add("dark"); - } else { - document.body.classList.remove("dark"); - } - }, [isDarkTheme]); - - return ( - - ); -} diff --git a/www/components/blog/BlogListItem.tsx b/www/components/blog/BlogListItem.tsx deleted file mode 100644 index d7dcee418..000000000 --- a/www/components/blog/BlogListItem.tsx +++ /dev/null @@ -1,70 +0,0 @@ -import authors, { getAuthors } from "../../lib/authors"; -import Image from "next/image"; -import React from "react"; -import { Post } from "../../lib/posts"; -import InternalLink from "../InternalLink"; - -interface Props { - readonly post: Post; -} - -export default function BlogListItem({ post }: Props): JSX.Element { - const author = getAuthors(post.author?.split(",") ?? []); - - return ( -
- -
-
-
-
- -
- -

- {post.title} -

- {post.date && ( -

- {post.date} -

- )} -

- {post.description} -

-
-
- {author.map((author, index) => { - return ( - author.author_image_url && ( -
- -
- ) - ); - })} -
-
-
-
-
- ); -} diff --git a/www/lib/authors.ts b/www/lib/authors.ts deleted file mode 100644 index 2f3fa3296..000000000 --- a/www/lib/authors.ts +++ /dev/null @@ -1,57 +0,0 @@ -export interface Author { - readonly author_id: string; - readonly author: string; - readonly position: string; - readonly author_url: string; - readonly author_image_url: string; -} - -export function getAuthors(ids: readonly string[]): readonly Author[] { - return ids.flatMap((id) => { - const author = authors.find((author) => author.author_id === id); - - if (author == null) return []; - - return [author]; - }); -} - -const authors: readonly Author[] = [ - { - author_id: "christoshadjiaslanis", - author: "Christos Hadjiaslanis", - position: "Founder", - author_url: "https://github.com/christoshadjiaslanis", - author_image_url: "https://github.com/christoshadjiaslanis.png", - }, - { - author_id: "brokad", - author: "Damien Broka", - position: "Founder", - author_url: "https://github.com/brokad", - author_image_url: "https://github.com/brokad.png", - }, - { - author_id: "nodar", - author: "Nodar Daneliya", - position: "Founder", - author_url: "https://github.com/NodarD", - author_image_url: "https://github.com/nodard.png", - }, - { - author_id: "terrencewaters", - author: "Terrence Waters", - position: "Software Engineer", - author_url: "", - author_image_url: "", - }, - { - author_id: "ben", - author: "Ben", - position: "", - author_url: "https://github.com/kaleidawave", - author_image_url: "https://github.com/kaleidawave.png" - } -]; - -export default authors; diff --git a/www/lib/constants.ts b/www/lib/constants.ts deleted file mode 100644 index a8e071ac7..000000000 --- a/www/lib/constants.ts +++ /dev/null @@ -1,18 +0,0 @@ -export const APP_NAME = "shuttle"; - -export const SITE_TITLE = "Stateful Serverless for Rust"; - -export const SITE_DESCRIPTION = - "Shuttle is a web application platform that uses traits and annotations to configure your backend deployment - including databases."; - -export const SITE_URL = "https://shuttle.rs/"; - -export const TWITTER_HANDLE = "@shuttle_dev"; - -export const GA_MEASUREMENT_ID = process.env.NEXT_PUBLIC_GA_MEASUREMENT_ID; - -export const GITHUB_URL = "https://github.com/shuttle-hq/shuttle"; -export const DISCORD_URL = "https://discord.gg/shuttle"; -export const TWITTER_URL = "https://twitter.com/shuttle_dev"; -export const SHUTTLE_DOCS_URL = - "https://docs.shuttle.rs/"; diff --git a/www/lib/gtag.ts b/www/lib/gtag.ts deleted file mode 100644 index cdc5c230f..000000000 --- a/www/lib/gtag.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { NextRouter } from "next/router"; -import { GA_MEASUREMENT_ID } from "./constants"; - -// https://developers.google.com/analytics/devguides/collection/ga4/event-parameters?client_type=gtag#set-up-every-event -export const pageview = (url: string) => { - window.gtag("config", GA_MEASUREMENT_ID, { - page_path: url, - }); -}; - -interface GtagEvent { - action: string; - category: string; - label: string; - value?: string; -} -// https://developers.google.com/analytics/devguides/collection/ga4/events?client_type=gtag -export const gtagEvent = ({ action, category, label, value }: GtagEvent) => { - window.gtag("event", action, { - event_category: category, - event_label: label, - value: value, - }); -}; - -export const setupGoogleAnalytics = (router: NextRouter) => { - const handleRouteChange = (url: string) => { - pageview(url); - }; - router.events.on("routeChangeComplete", handleRouteChange); - 
router.events.on("hashChangeComplete", handleRouteChange); - return () => { - router.events.off("routeChangeComplete", handleRouteChange); - router.events.off("hashChangeComplete", handleRouteChange); - }; -}; diff --git a/www/lib/helpers.ts b/www/lib/helpers.ts deleted file mode 100644 index 6f5a0103f..000000000 --- a/www/lib/helpers.ts +++ /dev/null @@ -1,11 +0,0 @@ -export function generateReadingTime(text: string): string { - const wordsPerMinute = 200; - const noOfWords = text.split(/\s/g).length; - const minutes = noOfWords / wordsPerMinute; - const readTime = Math.ceil(minutes); - return `${readTime} minute read`; -} - -export function capitalize(word: string): string { - return word[0].toUpperCase() + word.substring(1).toLowerCase(); -} diff --git a/www/lib/make-rss.ts b/www/lib/make-rss.ts deleted file mode 100644 index 5cab34817..000000000 --- a/www/lib/make-rss.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { APP_NAME, SITE_URL } from "./constants"; -import { getSortedPosts, Post } from "./posts"; -import fs from "fs"; - -const postXML = (post: Post): string => ` - - ${SITE_URL}blog/${post.url} - ${post.title} - ${SITE_URL}blog/${post.url} - ${post.description} - ${new Date(post.date).toUTCString()} - -`; - -const rssXML = (posts: readonly Post[]): string => ` - - - Blog - ${APP_NAME} - ${SITE_URL} - Latest news from ${APP_NAME} - en - ${new Date(posts[0].date).toUTCString()} - - ${posts.map(postXML).join("")} - - -`; - -const posts = getSortedPosts(); - -fs.writeFile("./public/rss.xml", rssXML(posts), (err) => { - if (err != null) { - console.error(err); - process.exit(1); - } -}); diff --git a/www/lib/posts.ts b/www/lib/posts.ts deleted file mode 100644 index 92f1dfa9c..000000000 --- a/www/lib/posts.ts +++ /dev/null @@ -1,161 +0,0 @@ -import fs from "fs"; -import path from "path"; -import matter from "gray-matter"; -import { generateReadingTime } from "./helpers"; -import { MDXRemoteSerializeResult } from "next-mdx-remote"; - -// substring amount for file names -// based on YYYY-MM-DD format -const FILENAME_SUBSTRING = 11; - -const POST_DIRECTORY = path.join(process.cwd(), "_blog"); - -export interface Post { - readonly slug?: string; - readonly title: string; - readonly date: string; - readonly cover?: string; - readonly coverAspectRatio?: string; - readonly author?: string; - readonly author_url?: string; - readonly excerpt?: string; - readonly ogImage?: { - readonly url: string; - }; - readonly content?: MDXRemoteSerializeResult>; - readonly toc?: MDXRemoteSerializeResult>; - readonly thumb: string; - readonly image?: string; - readonly readingTime?: string; - readonly description: string; - readonly url: string; - readonly tags?: string[]; - readonly logo?: string; - readonly hideAuthor?: boolean; -} - -export function getSortedPosts( - limit?: number, - tags?: readonly string[] -): readonly Post[] { - //Reads all the files in the post directory - const fileNames = fs.readdirSync(POST_DIRECTORY); - - // categories stored in this array - - let allPostsData: Post[] = fileNames.map((filename) => { - const slug = filename.replace(".mdx", ""); - - const fullPath = path.join(POST_DIRECTORY, filename); - - //Extracts contents of the MDX file - const fileContents = fs.readFileSync(fullPath, "utf8"); - const { data, content } = matter(fileContents); - - const options: Intl.DateTimeFormatOptions = { - month: "long", - day: "numeric", - year: "numeric", - }; - const formattedDate = new Date(data.date).toLocaleDateString( - "en-IN", - options - ); - - const readingTime = 
generateReadingTime(content); - - // construct url to link to blog posts - // based on datestamp in file name - - const dates = getDatesFromFileName(filename); - let url = `${dates.year}/${dates.month}/${dates.day}/${slug.substring( - FILENAME_SUBSTRING - )}`; - - return { - ...data, - date: formattedDate, - readingTime, - url: url, - slug, - } as Post; - }); - - allPostsData = allPostsData.sort((a, b) => { - if (new Date(a.date) < new Date(b.date)) { - return 1; - } else { - return -1; - } - }); - - if (tags) { - allPostsData = allPostsData.filter((post) => { - const found = tags.some((tag) => post.tags.includes(tag)); - return found; - }); - } - - if (limit) allPostsData = allPostsData.slice(0, limit); - - return allPostsData; -} - -// Get Slugs -export const getAllPostSlugs = () => { - const fileNames = fs.readdirSync(POST_DIRECTORY); - - const files = fileNames.map((filename) => { - const dates = getDatesFromFileName(filename); - - return { - params: { - ...dates, - slug: filename.replace(".mdx", "").substring(FILENAME_SUBSTRING), - }, - }; - }); - - return files; -}; - -// Get Post based on Slug -export const getPostdata = async (slug: string) => { - const fullPath = path.join(POST_DIRECTORY, `${slug}.mdx`); - - const postContent = fs.readFileSync(fullPath, "utf8"); - - return postContent; -}; - -export function getAlltags(): readonly string[] { - const posts = getSortedPosts(); - let tags: string[] = []; - - posts.map((post) => { - post.tags.map((tag: string) => { - if (!tags.includes(tag)) return tags.push(tag); - }); - }); - - return tags; -} - -interface Dates { - readonly year: string; - readonly month: string; - readonly day: string; -} - -function getDatesFromFileName(filename: string): Dates { - // extract YYYY, MM, DD from post name - const year = filename.substring(0, 4); - const month = filename.substring(5, 7); - const day = filename.substring(8, 10); - - return { - year, - month, - day, - }; -} diff --git a/www/lib/shuttle.ts b/www/lib/shuttle.ts deleted file mode 100644 index bb1cf58ff..000000000 --- a/www/lib/shuttle.ts +++ /dev/null @@ -1,75 +0,0 @@ -import axios, {AxiosRequestConfig, AxiosResponse, HttpStatusCode, Method} from "axios"; - -export async function getApiKey(username: string): Promise { - const res = await fetch( - `${process.env.SHUTTLE_API_BASE_URL}/users/${username}`, - { - method: "POST", - headers: { - Authorization: `Bearer ${process.env.SHUTTLE_ADMIN_SECRET}`, - }, - } - ); - - if (res.ok) { - const body = await res.json(); - return body["key"] - } else { - console.log(res); - throw new Error("could not get api key."); - } -} - -export type User = { - name: string - key: string - projects: string[] -} - -export type Error = { - status: HttpStatusCode - error: string -} - -export class Shuttle { - private url(suffix: string): string { - return `${process.env.SHUTTLE_API_BASE_URL}${suffix}` - } - - private request(method: Method, path: string): Promise> { - let req = { - headers: { - Authorization: `Bearer ${process.env.SHUTTLE_ADMIN_SECRET}` - }, - method: method, - url: this.url(path) - }; - return axios.request(req).then((res) => { - return res.data; - }).catch((err) => { - if (err.response) { - return Promise.reject({ - status: err.response.status, - ...err.response.data - }) - } else { - return Promise.reject(err); - } - }) - } - - async get_user(user: string): Promise { - return this.request("GET", `/users/${user}`).then((body) => { - return body as User - }) - } - - async create_user(user: string): Promise { - return this.request("POST", 
`/users/${user}`).then((body) => { - return body as User - }) - } -} - -export default new Shuttle(); - diff --git a/www/next-env.d.ts b/www/next-env.d.ts deleted file mode 100644 index 4f11a03dc..000000000 --- a/www/next-env.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -/// -/// - -// NOTE: This file should not be edited -// see https://nextjs.org/docs/basic-features/typescript for more information. diff --git a/www/next-sitemap.js b/www/next-sitemap.js deleted file mode 100644 index deca1aa90..000000000 --- a/www/next-sitemap.js +++ /dev/null @@ -1,4 +0,0 @@ -module.exports = { - siteUrl: process.env.SITE_URL || "https://shuttle.rs", - generateRobotsTxt: true, -}; diff --git a/www/next.config.js b/www/next.config.js deleted file mode 100644 index 6cd46df7b..000000000 --- a/www/next.config.js +++ /dev/null @@ -1,17 +0,0 @@ -module.exports = { - i18n: { - locales: ["en"], - defaultLocale: "en", - }, - images: { - domains: ["github.com"], - }, - async rewrites() { - return [ - { - source: "/rss.xml", - destination: "/api/rss", - }, - ]; - }, -}; diff --git a/www/package.json b/www/package.json deleted file mode 100644 index 07a5d3433..000000000 --- a/www/package.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "private": true, - "scripts": { - "dev": "next dev", - "build": "next build", - "start": "next start", - "fmt": "prettier --write *.{yaml,js,json,md} {styles,pages,lib,components,_blog}/**", - "make": "yarn make:sitemap; yarn make:rss", - "make:rss": "ts-node --skipProject --transpileOnly --esm lib/make-rss.ts", - "make:sitemap": "next-sitemap", - "postbuild": "yarn make" - }, - "dependencies": { - "@auth0/nextjs-auth0": "^1.7.0", - "@fortawesome/fontawesome-svg-core": "^6.1.0", - "@fortawesome/free-brands-svg-icons": "^6.1.0", - "@fortawesome/free-regular-svg-icons": "^6.1.0", - "@fortawesome/free-solid-svg-icons": "^6.1.0", - "@fortawesome/react-fontawesome": "^0.1.14", - "@headlessui/react": "^1.5.0", - "@heroicons/react": "^1.0.6", - "@mapbox/rehype-prism": "^0.8.0", - "@tailwindcss/forms": "^0.5.0", - "@tailwindcss/typography": "^0.5.2", - "@types/react-syntax-highlighter": "^13.5.2", - "axios": "^1.0.0", - "classnames": "^2.3.1", - "gray-matter": "^4.0.3", - "markdown-toc": "^1.2.0", - "next": "^12.1.0", - "next-mdx-remote": "^4.0.2", - "next-seo": "^5.1.0", - "node-fetch": "^3.2.3", - "prism-themes": "^1.9.0", - "react": "^17.0.2", - "react-dom": "^17.0.2", - "react-markdown": "^8.0.2", - "react-syntax-highlighter": "^15.5.0", - "react-tooltip": "^4.2.21", - "react-use": "^17.3.2", - "rehype-slug": "^5.0.1", - "remark-gfm": "^3.0.1", - "sass": "^1.32.8", - "ts-node": "^10.7.0" - }, - "devDependencies": { - "@types/gtag.js": "^0.0.11", - "@types/node": "^17.0.21", - "@types/react": "^17.0.2", - "autoprefixer": "^10.4.4", - "next-sitemap": "^2.5.10", - "postcss": "^8.4.12", - "prettier": "^2.6.1", - "prettier-plugin-tailwindcss": "^0.1.8", - "tailwindcss": "^3.0.23", - "typescript": "^4.1.5" - } -} diff --git a/www/pages/404.tsx b/www/pages/404.tsx deleted file mode 100644 index 8acfa8c02..000000000 --- a/www/pages/404.tsx +++ /dev/null @@ -1,16 +0,0 @@ -export default function NotFound() { - return ( - <> -
-
-
- Oops! -
-
- This page does not seem to exist, sorry. -
-
-
- - ); -} diff --git a/www/pages/_app.tsx b/www/pages/_app.tsx deleted file mode 100644 index 2d5d7cc71..000000000 --- a/www/pages/_app.tsx +++ /dev/null @@ -1,119 +0,0 @@ -import "../styles/index.css"; -import type { AppProps } from "next/app"; -import React, { useEffect } from "react"; -import { useRouter } from "next/router"; -import Head from "next/head"; -import { DefaultSeo } from "next-seo"; -import { - APP_NAME, - SITE_TITLE, - SITE_DESCRIPTION, - SITE_URL, - TWITTER_HANDLE, - GA_MEASUREMENT_ID, -} from "../lib/constants"; -import AnnouncementBar, { - AnnouncementBarIsClosedProvider, -} from "../components/AnnouncementBar"; -import { UserProvider } from "@auth0/nextjs-auth0"; -import ApiKeyModal, { - ApiKeyModalStateProvider, -} from "../components/ApiKeyModal"; -import Footer from "../components/Footer"; -import Header from "../components/Header"; -import { config } from "@fortawesome/fontawesome-svg-core"; -import Script from "next/script"; -import { setupGoogleAnalytics } from "../lib/gtag"; - -config.autoAddCss = false; - -export default function App({ Component, pageProps }: AppProps) { - const router = useRouter(); - useEffect(() => setupGoogleAnalytics(router)); - const { user } = pageProps; - - return ( - - - - - {SITE_TITLE} - - */} - - -
- - - - ); - } -} diff --git a/www/pages/api/auth/[...auth0].ts b/www/pages/api/auth/[...auth0].ts deleted file mode 100644 index 32fa76a1f..000000000 --- a/www/pages/api/auth/[...auth0].ts +++ /dev/null @@ -1,40 +0,0 @@ -import {handleAuth, handleCallback, handleLogin} from "@auth0/nextjs-auth0"; -import shuttle, {Error} from "../../../lib/shuttle"; - -async function afterCallback(req, res, session, state) { - const shuttlified = session.user.sub.replace("|", "-"); - - const user = await shuttle.get_user(shuttlified).catch((err) => { - if ((err as Error).status === 404) { - console.log(`user ${shuttlified} does not exist, creating`); - return shuttle.create_user(shuttlified); - } else { - return Promise.reject(err); - } - }); - - session.user.api_key = user.key; - - return session; -} - -export default handleAuth({ - async callback(req, res) { - try { - await handleCallback(req, res, { afterCallback }); - } catch (error) { - res.status(error.status || 500).end(error.message); - } - }, - async login(req, res) { - try { - await handleLogin(req, res, { - authorizationParams: { - connection: "github", - }, - }); - } catch (error) { - res.status(error.status || 400).end(error.message); - } - }, -}); diff --git a/www/pages/blog.tsx b/www/pages/blog.tsx deleted file mode 100644 index 750fb1a0f..000000000 --- a/www/pages/blog.tsx +++ /dev/null @@ -1,214 +0,0 @@ -import { useMemo, useState } from "react"; -import { useRouter } from "next/router"; -import Image from "next/image"; -import { NextSeo } from "next-seo"; -import { getAlltags, getSortedPosts, Post } from "../lib/posts"; -import { getAuthors } from "../lib/authors"; -import BlogListItem from "../components/blog/BlogListItem"; -import { SITE_URL } from "../lib/constants"; -import { GetStaticPropsResult } from "next"; -import InternalLink from "../components/InternalLink"; -import classnames from "classnames"; - -export async function getStaticProps(): Promise> { - const allPostsData = getSortedPosts(); - const tags = getAlltags(); - - return { - props: { - blogs: allPostsData, - tags, - }, - }; -} - -interface Props { - readonly blogs: ReturnType; - readonly tags: readonly string[]; -} - -export default function Blog(props: Props): JSX.Element { - const tags = ["all", ...props.tags]; - const [activeTag, setActiveTag] = useState("all"); - const router = useRouter(); - - const [headPost, tailPosts] = useMemo(() => { - const [head, ...tail] = props.blogs; - - return [head, tail]; - }, [props.blogs]); - - const blogs = useMemo(() => { - if (activeTag === "all") return tailPosts; - - return tailPosts.filter((post) => post.tags.includes(activeTag)); - }, [tailPosts, activeTag]); - - const meta_title = "Shuttle Blog"; - const meta_description = "Get all your shuttle News on the shuttle blog."; - - return ( - <> - - -
-
-
- -
-
-
- -
-
-
-
-
-
-
- - - -
-
- -
-
-
-
-
- -
    - {blogs.map((blog, index) => ( -
    - -
    - ))} -
-
-
- - ); -} - -interface FeaturedThumbProps { - readonly post: Post; -} - -function FeaturedThumb({ post }: FeaturedThumbProps) { - const author = getAuthors(post.author?.split(",") ?? []); - - return ( -
- -
- -
-
-
-

{post.date}

-

-

{post.readingTime}

-
- -
-

{post.title}

-

- {post.description} -

-
- -
- {author.map((author, index) => { - return ( -
- {author.author_image_url && ( -
- {`${author.author} -
- )} -
- - {author.author} - - - {author.position} - -
-
- ); - })} -
-
-
-
- ); -} diff --git a/www/pages/blog/[year]/[month]/[day]/[slug].tsx b/www/pages/blog/[year]/[month]/[day]/[slug].tsx deleted file mode 100644 index 87fe7daf4..000000000 --- a/www/pages/blog/[year]/[month]/[day]/[slug].tsx +++ /dev/null @@ -1,416 +0,0 @@ -import matter from "gray-matter"; -import { getAuthors } from "../../../../../lib/authors"; -import { serialize } from "next-mdx-remote/serialize"; -import { NextSeo } from "next-seo"; -import Image from "next/image"; -import { useRouter } from "next/router"; -import React from "react"; -import { generateReadingTime } from "../../../../../lib/helpers"; -import { - getAllPostSlugs, - getPostdata, - getSortedPosts, - Post, -} from "../../../../../lib/posts"; -import { MDXRemote, MDXRemoteProps } from "next-mdx-remote"; -import gfm from "remark-gfm"; -import slug from "rehype-slug"; -import toc from "markdown-toc"; -import rehypePrism from "@mapbox/rehype-prism"; -import { SITE_URL } from "../../../../../lib/constants"; -import { GetStaticPropsContext, GetStaticPropsResult } from "next"; -import { ParsedUrlQuery } from "querystring"; -import InternalLink from "../../../../../components/InternalLink"; -import ExternalLink from "../../../../../components/ExternalLink"; -import classnames from "classnames"; -import { DocumentTextIcon } from "@heroicons/react/outline"; -import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; -import { faLinkedin, faTwitter } from "@fortawesome/free-brands-svg-icons"; -import Copy from "../../../../../components/Copy"; - -export async function getStaticPaths() { - const paths = getAllPostSlugs(); - return { - paths, - fallback: false, - }; -} - -interface Params extends ParsedUrlQuery { - readonly year: string; - readonly month: string; - readonly day: string; - readonly slug: string; -} - -export async function getStaticProps({ - params, -}: GetStaticPropsContext): Promise> { - const filePath = `${params.year}-${params.month}-${params.day}-${params.slug}`; - const postContent = await getPostdata(filePath); - const readingTime = generateReadingTime(postContent); - const { data, content } = matter(postContent); - - const mdxPost = await serialize(content, { - scope: data, - mdxOptions: { - remarkPlugins: [gfm], - rehypePlugins: [slug, rehypePrism], - }, - }); - - const contentTOC = toc(content, { - maxdepth: data.toc_depth ?? 3, - }); - - const mdxTOC = await serialize(contentTOC.content); - - const relatedPosts = getSortedPosts( - 6, - mdxPost.scope.tags as readonly string[] - ) - .filter((p) => p.slug != filePath) - .slice(0, 5); - - const allPosts = getSortedPosts(); - - const currentIndex = allPosts - .map(function (e) { - return e.slug; - }) - .indexOf(filePath); - - const nextPost = allPosts[currentIndex + 1] ?? null; - const prevPost = allPosts[currentIndex - 1] ?? null; - - return { - props: { - prevPost, - nextPost, - relatedPosts, - blog: { - slug: `${params.year}/${params.month}/${params.day}/${params.slug}`, - content: mdxPost, - ...data, - toc: mdxTOC, - readingTime, - } as Post, - }, - }; -} - -const Pre = ({ children, ...props }: any) => { - let line = 1; - - const code = React.useMemo(() => { - return [children.props.children] - .flat() - .flatMap((child) => { - if (typeof child !== "string") { - return child.props.children; - } else { - return child; - } - }) - .join(""); - }, [children]); - - return ( -
- - -
-        {{
-          ...children,
-          props: {
-            ...children.props,
-            className: children.props.className ?? "language-",
-            children: [
-              
-                {line}
-              ,
-              ...[children.props.children].flat().flatMap((child) => {
-                if (typeof child === "string") {
-                  const [head, ...tail] = child.split("\n");
-                  return [
-                    head,
-                    ...tail.flatMap((child) => {
-                      line++;
-
-                      return [
-                        "\n",
-                        
-                          {line}
-                        ,
-                        child,
-                      ];
-                    }),
-                  ];
-                } else {
-                  return child;
-                }
-              }),
-            ],
-          },
-        }}
-      
-
- ); -}; - -const mdxComponents: MDXRemoteProps["components"] = { - a(props) { - if (props.href.match(/^https?:\/\//)) { - return ; - } - - return ; - }, - pre: (props: any) => { - return
;
-  },
-};
-
-interface Props {
-  readonly prevPost?: Post;
-  readonly nextPost?: Post;
-  readonly relatedPosts: readonly Post[];
-  readonly blog: Post;
-}
-
-export default function BlogPostPage(props: Props) {
-  const author = getAuthors(props.blog.author?.split(",") ?? []);
-
-  const { basePath } = useRouter();
-
-  return (
-    <>
-       {
-              return cat;
-            }),
-          },
-          images: [
-            {
-              url: `${SITE_URL}${basePath}/images/blog/${props.blog.thumb}`,
-            },
-          ],
-        }}
-      />
-      
-
-
-

Blog post

-

{props.blog.title}

-
-

{props.blog.date}

-

-

{props.blog.readingTime}

-
-
- {author.map((author, index) => { - return ( -
- -
- {author.author_image_url && ( -
- -
- )} -
- - {author.author} - - - {author.position} - -
-
-
-
- ); - })} -
-
-
-
- {/* Content */} -
- {(props.blog.thumb ?? props.blog.cover) && ( -
- -
- )} -
- -
-
-
- Share this article -
-
- - - - - - - -
-
-
-
- {props.prevPost && ( - - )} -
-
- {props.nextPost && ( - - )} -
-
-
- {/* Sidebar */} -
-
-
-
-
- {props.blog.tags.map((tag: string) => { - return ( - - {tag} - - ); - })} -
- -
On this page
- -
- -
-
-
- {props.relatedPosts.length > 0 ? ( -
-
- Related articles -
- -
- {props.relatedPosts.map((post, index) => ( - - - - {post.title} - - ))} -
- - View all posts - -
-
-
- ) : null} -
-
-
-
- - ); -} - -interface NextCardProps { - readonly post: Post; - readonly label: string; - readonly className?: string; -} - -function NextCard({ post, label, className }: NextCardProps) { - return ( - -
{label}
- -
- {post.title} -
-
- {post.date} -
-
- ); -} diff --git a/www/pages/blog/tags/[tag].tsx b/www/pages/blog/tags/[tag].tsx deleted file mode 100644 index 4bee33e7f..000000000 --- a/www/pages/blog/tags/[tag].tsx +++ /dev/null @@ -1,68 +0,0 @@ -import { NextSeo } from "next-seo"; -import { getAlltags, getSortedPosts, Post } from "../../../lib/posts"; -import BlogListItem from "../../../components/blog/BlogListItem"; -import { ParsedUrlQuery } from "querystring"; -import { GetStaticPropsContext, GetStaticPropsResult } from "next"; -import InternalLink from "../../../components/InternalLink"; - -interface Params extends ParsedUrlQuery { - readonly tag: string; -} - -export async function getStaticProps({ - params, -}: GetStaticPropsContext): Promise> { - const posts = getSortedPosts(0, [params.tag]); - - return { - props: { - tag: params.tag, - blogs: posts, - }, - }; -} - -export async function getStaticPaths() { - const tags = getAlltags(); - return { - paths: tags.map((tag) => ({ params: { tag } })), - fallback: false, - }; -} - -interface Props { - readonly tag: string; - readonly blogs: readonly Post[]; -} - -export default function TagBlogsPage(props: Props) { - const { blogs, tag } = props; - return ( - <> - - -
-
-

- Blog -

-

/

-

{`${tag}`}

-
-
    - {blogs.map((blog, idx) => ( -
    - -
    - ))} -
-
- - ); -} diff --git a/www/pages/index.tsx b/www/pages/index.tsx deleted file mode 100644 index 3635a3e48..000000000 --- a/www/pages/index.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import React from "react"; -import Examples from "../components/Examples"; -import CodeSnippets from "../components/CodeSnippets"; -import Features from "../components/Features"; -import Hero from "../components/Hero"; - -export default function Home() { - return ( - <> - - - - - - ); -} diff --git a/www/pages/login.tsx b/www/pages/login.tsx deleted file mode 100644 index 489c99b02..000000000 --- a/www/pages/login.tsx +++ /dev/null @@ -1,17 +0,0 @@ -import { withPageAuthRequired } from "@auth0/nextjs-auth0"; -import React from "react"; -import { useMount } from "react-use"; -import Home from "."; -import { useApiKeyModalState } from "../components/ApiKeyModal"; - -export default function Login() { - const [open, setOpen] = useApiKeyModalState(); - - useMount(() => { - setOpen(true); - }); - - return ; -} - -export const getServerSideProps = withPageAuthRequired(); diff --git a/www/pages/pricing.tsx b/www/pages/pricing.tsx deleted file mode 100644 index 69a343f6e..000000000 --- a/www/pages/pricing.tsx +++ /dev/null @@ -1,280 +0,0 @@ -import React from "react"; -import { Fragment } from "react"; -import classnames from "classnames"; -import { DISCORD_URL } from "../lib/constants"; -import ExternalLink from "../components/ExternalLink"; -import LoginButton from "../components/LoginButton"; - -const tiers = [ - { - name: "Hobby", - BuyButton: LoginButton, - price: ( - <> - $0{" "} - /mo - - ), - description: - "The perfect confluence of features to run your hobby-projects for free - forever.", - }, - { - name: "Pro", - BuyButton() { - const label = "Contact Us"; - - return ( - - {label} - - ); - }, - price: ( - - Let's Talk - - ), - description: - "Build on production quality infrastructure which scales to your needs.", - }, -]; - -const sections = [ - { - name: "Features", - features: [ - { - name: "Team Size", - tiers: { Hobby: 1, Pro: "Get in touch" }, - }, - { - name: "Deployments", - tiers: { Hobby: "Unlimited", Pro: "Unlimited" }, - }, - { - name: "Number of Projects", - tiers: { Hobby: 5, Pro: "Get in touch" }, - }, - { - name: "Requests", - tiers: { Hobby: "150K/mo", Pro: "Get in touch" }, - }, - { - name: "Workers", - tiers: { Hobby: 1, Pro: "Get in touch" }, - }, - { - name: "Database Storage", - tiers: { Hobby: "500 MB", Pro: "Get in touch" }, - }, - { - name: "Subdomains", - tiers: { Hobby: "1 Per Project", Pro: "1 Per Project" }, - }, - { - name: "Custom Domains", - tiers: { Hobby: "N/A", Pro: "1 Per Project" }, - }, - ], - }, - { - name: "Support", - features: [ - { - name: "Community", - tiers: { - Hobby: ( - - Discord - - ), - Pro: ( - - Discord - - ), - }, - }, - { - name: "Request Turnaround", - tiers: { Hobby: "N/A", Pro: "24 hr" }, - }, - ], - }, -]; - -export default function Pricing() { - return ( -
-
- {/* xs to lg */} -
- {tiers.map((tier, tierIdx) => ( -
-
-

- {tier.name} -

-

{tier.price}

-

- {tier.description} -

- -
- - {sections.map((section) => ( - - - - - - - - - - {section.features.map((feature) => ( - - - - - ))} - -
- {section.name} -
- Feature - - Included -
- {feature.name} - - - {feature.tiers[tier.name]} - -
- ))} - -
- -
-
- ))} -
- - {/* lg+ */} -
- - - - - - {tiers.map((tier) => ( - - ))} - - - - - - {tiers.map((tier) => ( - - ))} - - {sections.map((section) => ( - - - - - {section.features.map((feature) => ( - - - {tiers.map((tier) => ( - - ))} - - ))} - - ))} - - - - - {tiers.map((tier) => ( - - ))} - - -
Pricing plan comparison
- Feature by - Plans - - {tier.name} -
- Pricing - -
-

{tier.price}

-

- {tier.description} -

- -
-
- {section.name} -
- {feature.name} - - - {feature.tiers[tier.name]} - -
- Choose your plan - - -
-
-
-
- ); -} diff --git a/www/postcss.config.js b/www/postcss.config.js deleted file mode 100644 index 12a703d90..000000000 --- a/www/postcss.config.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = { - plugins: { - tailwindcss: {}, - autoprefixer: {}, - }, -}; diff --git a/www/public/favicon.ico b/www/public/favicon.ico deleted file mode 100644 index 8305257ec..000000000 Binary files a/www/public/favicon.ico and /dev/null differ diff --git a/www/public/images/blog/0959.png b/www/public/images/blog/0959.png deleted file mode 100644 index 8d268fb85..000000000 Binary files a/www/public/images/blog/0959.png and /dev/null differ diff --git a/www/public/images/blog/authentication-banner.png b/www/public/images/blog/authentication-banner.png deleted file mode 100644 index 9f148e97e..000000000 Binary files a/www/public/images/blog/authentication-banner.png and /dev/null differ diff --git a/www/public/images/blog/authentication-demo-screenshot.png b/www/public/images/blog/authentication-demo-screenshot.png deleted file mode 100644 index f3a4981a4..000000000 Binary files a/www/public/images/blog/authentication-demo-screenshot.png and /dev/null differ diff --git a/www/public/images/blog/crab-builder.png b/www/public/images/blog/crab-builder.png deleted file mode 100644 index 7fa712d3d..000000000 Binary files a/www/public/images/blog/crab-builder.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/application-registration.png b/www/public/images/blog/discord-bot-screenshots/application-registration.png deleted file mode 100644 index 6207a0b8a..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/application-registration.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/application_id.png b/www/public/images/blog/discord-bot-screenshots/application_id.png deleted file mode 100644 index 10417f919..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/application_id.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/bot-is-offline.png b/www/public/images/blog/discord-bot-screenshots/bot-is-offline.png deleted file mode 100644 index 041308734..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/bot-is-offline.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/bot-is-online.png b/www/public/images/blog/discord-bot-screenshots/bot-is-online.png deleted file mode 100644 index 63afd950f..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/bot-is-online.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/bot-name.png b/www/public/images/blog/discord-bot-screenshots/bot-name.png deleted file mode 100644 index 879018ed3..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/bot-name.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/command-description.png b/www/public/images/blog/discord-bot-screenshots/command-description.png deleted file mode 100644 index 4a20b978e..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/command-description.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/command-result.png b/www/public/images/blog/discord-bot-screenshots/command-result.png deleted file mode 100644 index de305ebf1..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/command-result.png and /dev/null differ diff --git 
a/www/public/images/blog/discord-bot-screenshots/guild-id.png b/www/public/images/blog/discord-bot-screenshots/guild-id.png deleted file mode 100644 index 0ea1691a8..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/guild-id.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/weather-error.png b/www/public/images/blog/discord-bot-screenshots/weather-error.png deleted file mode 100644 index 96361b006..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/weather-error.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/weather-forecast.png b/www/public/images/blog/discord-bot-screenshots/weather-forecast.png deleted file mode 100644 index d5197e1a7..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/weather-forecast.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-screenshots/weather-input.png b/www/public/images/blog/discord-bot-screenshots/weather-input.png deleted file mode 100644 index a482a957a..000000000 Binary files a/www/public/images/blog/discord-bot-screenshots/weather-input.png and /dev/null differ diff --git a/www/public/images/blog/discord-bot-thumbnail.png b/www/public/images/blog/discord-bot-thumbnail.png deleted file mode 100644 index e55a68645..000000000 Binary files a/www/public/images/blog/discord-bot-thumbnail.png and /dev/null differ diff --git a/www/public/images/blog/ferris-error-handling.png b/www/public/images/blog/ferris-error-handling.png deleted file mode 100644 index 4a7316394..000000000 Binary files a/www/public/images/blog/ferris-error-handling.png and /dev/null differ diff --git a/www/public/images/blog/ferris.png b/www/public/images/blog/ferris.png deleted file mode 100644 index 88fdb6c21..000000000 Binary files a/www/public/images/blog/ferris.png and /dev/null differ diff --git a/www/public/images/blog/generative-meta-images.png b/www/public/images/blog/generative-meta-images.png deleted file mode 100644 index e4b9e8d6f..000000000 Binary files a/www/public/images/blog/generative-meta-images.png and /dev/null differ diff --git a/www/public/images/blog/hyper-vs-rocket.png b/www/public/images/blog/hyper-vs-rocket.png deleted file mode 100644 index 583641f79..000000000 Binary files a/www/public/images/blog/hyper-vs-rocket.png and /dev/null differ diff --git a/www/public/images/blog/infrastructure-from-code-trans.png b/www/public/images/blog/infrastructure-from-code-trans.png deleted file mode 100644 index 325dca383..000000000 Binary files a/www/public/images/blog/infrastructure-from-code-trans.png and /dev/null differ diff --git a/www/public/images/blog/infrastructure-from-code.png b/www/public/images/blog/infrastructure-from-code.png deleted file mode 100644 index e03b23259..000000000 Binary files a/www/public/images/blog/infrastructure-from-code.png and /dev/null differ diff --git a/www/public/images/blog/introduction-to-async-snippet.png b/www/public/images/blog/introduction-to-async-snippet.png deleted file mode 100644 index 6f55eae6b..000000000 Binary files a/www/public/images/blog/introduction-to-async-snippet.png and /dev/null differ diff --git a/www/public/images/blog/metatag-discord.png b/www/public/images/blog/metatag-discord.png deleted file mode 100644 index 6f26d6002..000000000 Binary files a/www/public/images/blog/metatag-discord.png and /dev/null differ diff --git a/www/public/images/blog/metatag-ferris.png b/www/public/images/blog/metatag-ferris.png deleted file mode 100644 index 
ebdacc6a8..000000000 Binary files a/www/public/images/blog/metatag-ferris.png and /dev/null differ diff --git a/www/public/images/blog/metatag-shapes.png b/www/public/images/blog/metatag-shapes.png deleted file mode 100644 index 615f7c5e2..000000000 Binary files a/www/public/images/blog/metatag-shapes.png and /dev/null differ diff --git a/www/public/images/blog/metatag-test-text.png b/www/public/images/blog/metatag-test-text.png deleted file mode 100644 index 3420fc502..000000000 Binary files a/www/public/images/blog/metatag-test-text.png and /dev/null differ diff --git a/www/public/images/blog/middleware-banner.png b/www/public/images/blog/middleware-banner.png deleted file mode 100644 index e9e276ae3..000000000 Binary files a/www/public/images/blog/middleware-banner.png and /dev/null differ diff --git a/www/public/images/blog/rocket-logs.png b/www/public/images/blog/rocket-logs.png deleted file mode 100644 index 99c08c028..000000000 Binary files a/www/public/images/blog/rocket-logs.png and /dev/null differ diff --git a/www/public/images/blog/rust-programming-language.jpg b/www/public/images/blog/rust-programming-language.jpg deleted file mode 100644 index 7ec26b159..000000000 Binary files a/www/public/images/blog/rust-programming-language.jpg and /dev/null differ diff --git a/www/public/images/blog/rust-trait-rules-diagram.png b/www/public/images/blog/rust-trait-rules-diagram.png deleted file mode 100644 index 35373f65a..000000000 Binary files a/www/public/images/blog/rust-trait-rules-diagram.png and /dev/null differ diff --git a/www/public/images/blog/rust-type-patterns-banner.png b/www/public/images/blog/rust-type-patterns-banner.png deleted file mode 100644 index 2f418b7a5..000000000 Binary files a/www/public/images/blog/rust-type-patterns-banner.png and /dev/null differ diff --git a/www/public/images/blog/rust.jpeg b/www/public/images/blog/rust.jpeg deleted file mode 100644 index f3f840110..000000000 Binary files a/www/public/images/blog/rust.jpeg and /dev/null differ diff --git a/www/public/images/blog/shuttle-logo-rectangle.png b/www/public/images/blog/shuttle-logo-rectangle.png deleted file mode 100644 index 55f0b334b..000000000 Binary files a/www/public/images/blog/shuttle-logo-rectangle.png and /dev/null differ diff --git a/www/public/images/blog/shuttle-logo-square-rocket.png b/www/public/images/blog/shuttle-logo-square-rocket.png deleted file mode 100644 index 91021bcd5..000000000 Binary files a/www/public/images/blog/shuttle-logo-square-rocket.png and /dev/null differ diff --git a/www/public/images/blog/shuttle-logo-square.png b/www/public/images/blog/shuttle-logo-square.png deleted file mode 100644 index c73c2ec0e..000000000 Binary files a/www/public/images/blog/shuttle-logo-square.png and /dev/null differ diff --git a/www/public/images/blog/state-machine.jpeg b/www/public/images/blog/state-machine.jpeg deleted file mode 100644 index 4c9a07adf..000000000 Binary files a/www/public/images/blog/state-machine.jpeg and /dev/null differ diff --git a/www/public/images/icon1.svg b/www/public/images/icon1.svg deleted file mode 100644 index 472d75709..000000000 --- a/www/public/images/icon1.svg +++ /dev/null @@ -1,31 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/www/public/images/icon2.svg b/www/public/images/icon2.svg deleted file mode 100644 index 2cae1d25d..000000000 --- a/www/public/images/icon2.svg +++ /dev/null @@ -1,29 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/www/public/images/icon3.svg 
b/www/public/images/icon3.svg deleted file mode 100644 index 3c3e826c6..000000000 --- a/www/public/images/icon3.svg +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/www/public/images/icon4.svg b/www/public/images/icon4.svg deleted file mode 100644 index 47f9a72fa..000000000 --- a/www/public/images/icon4.svg +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - - - - - - - - - - - - - diff --git a/www/public/images/logo.png b/www/public/images/logo.png deleted file mode 100644 index 62509e332..000000000 Binary files a/www/public/images/logo.png and /dev/null differ diff --git a/www/public/robots.txt b/www/public/robots.txt deleted file mode 100644 index 23a1bc1c4..000000000 --- a/www/public/robots.txt +++ /dev/null @@ -1,9 +0,0 @@ -# * -User-agent: * -Allow: / - -# Host -Host: https://shuttle.rs - -# Sitemaps -Sitemap: https://shuttle.rs/sitemap.xml diff --git a/www/styles/index.css b/www/styles/index.css deleted file mode 100644 index 841b19e7e..000000000 --- a/www/styles/index.css +++ /dev/null @@ -1,10 +0,0 @@ -@tailwind base; -@tailwind components; -@tailwind utilities; - -@import "prism-themes/themes/prism-one-dark.css"; -@import "@fortawesome/fontawesome-svg-core/styles.css"; - -pre[class*="language-"] span.token { - display: inline !important; -} diff --git a/www/tailwind.config.js b/www/tailwind.config.js deleted file mode 100644 index 392d545ba..000000000 --- a/www/tailwind.config.js +++ /dev/null @@ -1,131 +0,0 @@ -const defaultTheme = require("tailwindcss/defaultTheme"); - -module.exports = { - content: ["./{components,pages}/**/*.tsx"], - darkMode: "class", - theme: { - extend: { - colors: { - "brand-yellow1": "#fae15c", - "brand-orange1": "#ff8a3f", - "brand-orange2": "#f25100", - "brand-purple1": "#7777DD", - brand: { - 100: "#fae15c", - 200: "#fad149", - 300: "#fac138", - 400: "#f9b127", - 500: "#f9a016", - 600: "#f88e05", - 700: "#f67c00", - 800: "#f56800", - 900: "#f25100", - }, - dark: { - 100: "#eeeeee", - 200: "#e0e0e0", - 300: "#bbbbbb", - 400: "#666666", - 500: "#444444", - 600: "#2a2a2a", - 700: "#1f1f1f", - 800: "#181818", - 900: "#0f0f0f", - }, - gray: { - 100: "#eeeeee", - 200: "#e0e0e0", - 300: "#bbbbbb", - 400: "#7d7d7d", - 500: "#343434", - 600: "#2a2a2a", - 700: "#1f1f1f", - 800: "#181818", - 900: "#0f0f0f", - }, - }, - fontFamily: { - sans: ["Ubuntu", ...defaultTheme.fontFamily.sans], - mono: ["Source Code Pro", "Menlo", "monospace"], - }, - typography: ({ theme }) => { - const tocCSS = { - ul: { - "list-style-type": "none", - "padding-left": 0, - margin: 0, - li: { - "padding-left": 0, - }, - a: { - display: "block", - "text-decoration": "none", - fontSize: "0.8rem", - fontWeight: "200", - color: theme("colors.slate[500]"), - "&:hover": { - color: theme("colors.slate[900]"), - }, - "font-weight": "400", - }, - ul: { - "list-style-type": "none", - li: { - marginTop: "0.2rem", - marginBottom: "0.2rem", - "padding-left": "0 !important", - "margin-left": "0.5rem", - }, - a: { - fontWeight: "200", - color: theme("colors.slate[600]"), - "&:hover": { - color: theme("colors.slate[900]"), - }, - }, - }, - }, - }; - - return { - toc: { - css: tocCSS, - }, - "dark-toc": { - css: { - ...tocCSS, - ul: { - ...tocCSS.ul, - a: { - ...tocCSS.ul.a, - color: theme("colors.gray[300]"), - "&:hover": { - ...tocCSS.ul.a["&:hover"], - color: theme("colors.gray[200]"), - }, - }, - ul: { - ...tocCSS.ul.ul, - a: { - ...tocCSS.ul.ul.a, - color: theme("colors.scale[400]"), - "&:hover": { - ...tocCSS.ul.ul.a["&:hover"], - color: theme("colors.gray[200]"), - }, - }, - }, 
- }, - }, - }, - }; - }, - }, - }, - plugins: [ - require("@tailwindcss/typography"), - require("@tailwindcss/forms"), - // require('@tailwindcss/line-clamp'), - // require('@tailwindcss/aspect-ratio'), - ], -}; diff --git a/www/tsconfig.json b/www/tsconfig.json deleted file mode 100644 index b8d597880..000000000 --- a/www/tsconfig.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "compilerOptions": { - "target": "es5", - "lib": ["dom", "dom.iterable", "esnext"], - "allowJs": true, - "skipLibCheck": true, - "strict": false, - "forceConsistentCasingInFileNames": true, - "noEmit": true, - "esModuleInterop": true, - "module": "esnext", - "moduleResolution": "node", - "resolveJsonModule": true, - "isolatedModules": true, - "jsx": "preserve", - "incremental": true - }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"], - "exclude": ["node_modules"] -} diff --git a/www/vercel.json b/www/vercel.json deleted file mode 100644 index 0967ef424..000000000 --- a/www/vercel.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/www/yarn.lock b/www/yarn.lock deleted file mode 100644 index b1691c6b4..000000000 --- a/www/yarn.lock +++ /dev/null @@ -1,5120 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. -# yarn lockfile v1 - - -"@auth0/nextjs-auth0@^1.7.0": - "integrity" "sha512-twz4f1A94i2ShJ6jB3UsFuVhzE0vJ1M/Ukq5bNBm/gpvCK6E/P8Ew2wQloak73+mXcV7eg5S+HOlmiKpcSe6mw==" - "resolved" "https://registry.npmjs.org/@auth0/nextjs-auth0/-/nextjs-auth0-1.7.0.tgz" - "version" "1.7.0" - dependencies: - "base64url" "^3.0.1" - "cookie" "^0.4.1" - "debug" "^4.3.3" - "futoin-hkdf" "^1.4.2" - "http-errors" "^1.8.1" - "joi" "^17.5.0" - "jose" "^2.0.5" - "on-headers" "^1.0.2" - "openid-client" "^4.9.1" - "tslib" "^2.3.1" - "url-join" "^4.0.1" - -"@babel/code-frame@^7.0.0": - "integrity" "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==" - "resolved" "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz" - "version" "7.16.7" - dependencies: - "@babel/highlight" "^7.16.7" - -"@babel/helper-validator-identifier@^7.16.7": - "integrity" "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==" - "resolved" "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz" - "version" "7.16.7" - -"@babel/highlight@^7.16.7": - "integrity" "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==" - "resolved" "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz" - "version" "7.16.10" - dependencies: - "@babel/helper-validator-identifier" "^7.16.7" - "chalk" "^2.0.0" - "js-tokens" "^4.0.0" - -"@babel/runtime@^7.1.2", "@babel/runtime@^7.3.1": - "integrity" "sha512-dQpEpK0O9o6lj6oPu0gRDbbnk+4LeHlNcBpspf6Olzt3GIX4P1lWF1gS+pHLDFlaJvbR6q7jCfQ08zA4QJBnmA==" - "resolved" "https://registry.npmjs.org/@babel/runtime/-/runtime-7.17.8.tgz" - "version" "7.17.8" - dependencies: - "regenerator-runtime" "^0.13.4" - -"@corex/deepmerge@^2.6.148": - "integrity" "sha512-6QMz0/2h5C3ua51iAnXMPWFbb1QOU1UvSM4bKBw5mzdT+WtLgjbETBBIQZ+Sh9WvEcGwlAt/DEdRpIC3XlDBMA==" - "resolved" "https://registry.npmjs.org/@corex/deepmerge/-/deepmerge-2.6.148.tgz" - "version" "2.6.148" - -"@cspotcode/source-map-consumer@0.8.0": - "integrity" "sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg==" - "resolved" "https://registry.npmjs.org/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz" - "version" "0.8.0" - 
-"@cspotcode/source-map-support@0.7.0": - "integrity" "sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA==" - "resolved" "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz" - "version" "0.7.0" - dependencies: - "@cspotcode/source-map-consumer" "0.8.0" - -"@fortawesome/fontawesome-common-types@6.1.0": - "integrity" "sha512-lFIJ5opxOKG9q88xOsuJJAdRZ+2WRldsZwUR/7MJoOMUMhF/LkHUjwWACIEPTa5Wo6uTDHvGRIX+XutdN7zYxA==" - "resolved" "https://registry.npmjs.org/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-6.1.0.tgz" - "version" "6.1.0" - -"@fortawesome/fontawesome-svg-core@^6.1.0", "@fortawesome/fontawesome-svg-core@~1 || >=1.3.0-beta1": - "integrity" "sha512-racj+/EDnMZN0jcuHePOa+9kdHHOCpCAbBvVRnEi4G4DA5SWQiT/uXJ8WcfVEbLN51vPJjhukP4o+zH0cfYplg==" - "resolved" "https://registry.npmjs.org/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-6.1.0.tgz" - "version" "6.1.0" - dependencies: - "@fortawesome/fontawesome-common-types" "6.1.0" - -"@fortawesome/free-brands-svg-icons@^6.1.0": - "integrity" "sha512-9utHuoCL12LCpZqTphg+tPLZeK+qcOX6tjky1DWtRXFmRgm67BA692STRJ2CXGlDkXSqfRGId8WkvsKqFAgmAQ==" - "resolved" "https://registry.npmjs.org/@fortawesome/free-brands-svg-icons/-/free-brands-svg-icons-6.1.0.tgz" - "version" "6.1.0" - dependencies: - "@fortawesome/fontawesome-common-types" "6.1.0" - -"@fortawesome/free-regular-svg-icons@^6.1.0": - "integrity" "sha512-MLSBZ3AprHZXVG6zVg9Jd/mGZrlX/1jxqQsHhvZuhdRdL9bZQyXHe5RmP0n4IOth/fdnoj3fs+EnbW/O+0K4dw==" - "resolved" "https://registry.npmjs.org/@fortawesome/free-regular-svg-icons/-/free-regular-svg-icons-6.1.0.tgz" - "version" "6.1.0" - dependencies: - "@fortawesome/fontawesome-common-types" "6.1.0" - -"@fortawesome/free-solid-svg-icons@^6.1.0": - "integrity" "sha512-OOr0jRHl5d41RzBS3sZh5Z3HmdPjMr43PxxKlYeLtQxFSixPf4sJFVM12/rTepB2m0rVShI0vtjHQmzOTlBaXg==" - "resolved" "https://registry.npmjs.org/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-6.1.0.tgz" - "version" "6.1.0" - dependencies: - "@fortawesome/fontawesome-common-types" "6.1.0" - -"@fortawesome/react-fontawesome@^0.1.14": - "integrity" "sha512-dX43Z5IvMaW7fwzU8farosYjKNGfRb2HB/DgjVBHeJZ/NSnuuaujPPx0YOdcAq+n3mqn70tyCde2HM1mqbhiuw==" - "resolved" "https://registry.npmjs.org/@fortawesome/react-fontawesome/-/react-fontawesome-0.1.17.tgz" - "version" "0.1.17" - dependencies: - "prop-types" "^15.8.1" - -"@hapi/hoek@^9.0.0": - "integrity" "sha512-gfta+H8aziZsm8pZa0vj04KO6biEiisppNgA1kbJvFrrWu9Vm7eaUEy76DIxsuTaWvti5fkJVhllWc6ZTE+Mdw==" - "resolved" "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.1.tgz" - "version" "9.2.1" - -"@hapi/topo@^5.0.0": - "integrity" "sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==" - "resolved" "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz" - "version" "5.1.0" - dependencies: - "@hapi/hoek" "^9.0.0" - -"@headlessui/react@^1.5.0": - "integrity" "sha512-aaRnYxBb3MU2FNJf3Ut9RMTUqqU3as0aI1lQhgo2n9Fa67wRu14iOGqx93xB+uMNVfNwZ5B3y/Ndm7qZGuFeMQ==" - "resolved" "https://registry.npmjs.org/@headlessui/react/-/react-1.5.0.tgz" - "version" "1.5.0" - -"@heroicons/react@^1.0.6": - "integrity" "sha512-JJCXydOFWMDpCP4q13iEplA503MQO3xLoZiKum+955ZCtHINWnx26CUxVxxFQu/uLb4LW3ge15ZpzIkXKkJ8oQ==" - "resolved" "https://registry.npmjs.org/@heroicons/react/-/react-1.0.6.tgz" - "version" "1.0.6" - -"@mapbox/rehype-prism@^0.8.0": - "integrity" 
"sha512-bIz4a3oZ8g+pQBHSMMIxNpYOMX4yq4aZdpezoiCl9yJudh1Z9SRmHQqH6f+WtTBNOORzXMEGkvBze4PiH17wtA==" - "resolved" "https://registry.npmjs.org/@mapbox/rehype-prism/-/rehype-prism-0.8.0.tgz" - "version" "0.8.0" - dependencies: - "hast-util-to-string" "^1.0.4" - "mrm" "^3.0.9" - "refractor" "^3.4.0" - "unist-util-visit" "^2.0.3" - -"@mdx-js/mdx@^2.0.0": - "integrity" "sha512-SXC18cChut3F2zkVXwsb2no0fzTQ1z6swjK13XwFbF5QU/SFQM0orAItPypSdL3GvqYyzVJtz8UofzJhPEQtMw==" - "resolved" "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-2.1.1.tgz" - "version" "2.1.1" - dependencies: - "@types/estree-jsx" "^0.0.1" - "@types/mdx" "^2.0.0" - "astring" "^1.6.0" - "estree-util-build-jsx" "^2.0.0" - "estree-util-is-identifier-name" "^2.0.0" - "estree-walker" "^3.0.0" - "hast-util-to-estree" "^2.0.0" - "markdown-extensions" "^1.0.0" - "periscopic" "^3.0.0" - "remark-mdx" "^2.0.0" - "remark-parse" "^10.0.0" - "remark-rehype" "^10.0.0" - "unified" "^10.0.0" - "unist-util-position-from-estree" "^1.0.0" - "unist-util-stringify-position" "^3.0.0" - "unist-util-visit" "^4.0.0" - "vfile" "^5.0.0" - -"@mdx-js/react@^2.0.0": - "integrity" "sha512-7zlZDf5xmWH8I0kFE4DG91COOkxjaW9DX5f1HWztZpFcVua2gJgMYfIkFaDpO/DH/tWi6Mz+OheW4194r15igg==" - "resolved" "https://registry.npmjs.org/@mdx-js/react/-/react-2.1.1.tgz" - "version" "2.1.1" - dependencies: - "@types/mdx" "^2.0.0" - "@types/react" ">=16" - -"@next/env@12.1.0": - "integrity" "sha512-nrIgY6t17FQ9xxwH3jj0a6EOiQ/WDHUos35Hghtr+SWN/ntHIQ7UpuvSi0vaLzZVHQWaDupKI+liO5vANcDeTQ==" - "resolved" "https://registry.npmjs.org/@next/env/-/env-12.1.0.tgz" - "version" "12.1.0" - -"@next/swc-darwin-arm64@12.1.0": - "integrity" "sha512-R8vcXE2/iONJ1Unf5Ptqjk6LRW3bggH+8drNkkzH4FLEQkHtELhvcmJwkXcuipyQCsIakldAXhRbZmm3YN1vXg==" - "resolved" "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.1.0.tgz" - "version" "12.1.0" - -"@nodelib/fs.scandir@2.1.5": - "integrity" "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==" - "resolved" "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" - "version" "2.1.5" - dependencies: - "@nodelib/fs.stat" "2.0.5" - "run-parallel" "^1.1.9" - -"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5": - "integrity" "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" - "resolved" "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" - "version" "2.0.5" - -"@nodelib/fs.walk@^1.2.3": - "integrity" "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==" - "resolved" "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" - "version" "1.2.8" - dependencies: - "@nodelib/fs.scandir" "2.1.5" - "fastq" "^1.6.0" - -"@panva/asn1.js@^1.0.0": - "integrity" "sha512-UdkG3mLEqXgnlKsWanWcgb6dOjUzJ+XC5f+aWw30qrtjxeNUSfKX1cd5FBzOaXQumoe9nIqeZUvrRJS03HCCtw==" - "resolved" "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz" - "version" "1.0.0" - -"@sideway/address@^4.1.3": - "integrity" "sha512-8ncEUtmnTsMmL7z1YPB47kPUq7LpKWJNFPsRzHiIajGC5uXlWGn+AmkYPcHNl8S4tcEGx+cnORnNYaw2wvL+LQ==" - "resolved" "https://registry.npmjs.org/@sideway/address/-/address-4.1.3.tgz" - "version" "4.1.3" - dependencies: - "@hapi/hoek" "^9.0.0" - -"@sideway/formula@^3.0.0": - "integrity" "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==" - "resolved" "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz" - "version" "3.0.0" - -"@sideway/pinpoint@^2.0.0": - 
"integrity" "sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==" - "resolved" "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz" - "version" "2.0.0" - -"@sindresorhus/is@^0.14.0": - "integrity" "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==" - "resolved" "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz" - "version" "0.14.0" - -"@sindresorhus/is@^4.0.0": - "integrity" "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==" - "resolved" "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz" - "version" "4.6.0" - -"@szmarczak/http-timer@^1.1.2": - "integrity" "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==" - "resolved" "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz" - "version" "1.1.2" - dependencies: - "defer-to-connect" "^1.0.1" - -"@szmarczak/http-timer@^4.0.5": - "integrity" "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==" - "resolved" "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz" - "version" "4.0.6" - dependencies: - "defer-to-connect" "^2.0.0" - -"@tailwindcss/forms@^0.5.0": - "integrity" "sha512-KzWugryEBFkmoaYcBE18rs6gthWCFHHO7cAZm2/hv3hwD67AzwP7udSCa22E7R1+CEJL/FfhYsJWrc0b1aeSzw==" - "resolved" "https://registry.npmjs.org/@tailwindcss/forms/-/forms-0.5.0.tgz" - "version" "0.5.0" - dependencies: - "mini-svg-data-uri" "^1.2.3" - -"@tailwindcss/typography@^0.5.2": - "integrity" "sha512-coq8DBABRPFcVhVIk6IbKyyHUt7YTEC/C992tatFB+yEx5WGBQrCgsSFjxHUr8AWXphWckadVJbominEduYBqw==" - "resolved" "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.2.tgz" - "version" "0.5.2" - dependencies: - "lodash.castarray" "^4.4.0" - "lodash.isplainobject" "^4.0.6" - "lodash.merge" "^4.6.2" - -"@tsconfig/node10@^1.0.7": - "integrity" "sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==" - "resolved" "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz" - "version" "1.0.8" - -"@tsconfig/node12@^1.0.7": - "integrity" "sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==" - "resolved" "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.9.tgz" - "version" "1.0.9" - -"@tsconfig/node14@^1.0.0": - "integrity" "sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==" - "resolved" "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz" - "version" "1.0.1" - -"@tsconfig/node16@^1.0.2": - "integrity" "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==" - "resolved" "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.2.tgz" - "version" "1.0.2" - -"@types/acorn@^4.0.0": - "integrity" "sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==" - "resolved" "https://registry.npmjs.org/@types/acorn/-/acorn-4.0.6.tgz" - "version" "4.0.6" - dependencies: - "@types/estree" "*" - -"@types/cacheable-request@^6.0.1": - "integrity" "sha512-B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA==" - "resolved" "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz" - "version" "6.0.2" - dependencies: - "@types/http-cache-semantics" "*" - "@types/keyv" "*" - "@types/node" "*" - "@types/responselike" 
"*" - -"@types/debug@^4.0.0": - "integrity" "sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==" - "resolved" "https://registry.npmjs.org/@types/debug/-/debug-4.1.7.tgz" - "version" "4.1.7" - dependencies: - "@types/ms" "*" - -"@types/estree-jsx@^0.0.1": - "integrity" "sha512-gcLAYiMfQklDCPjQegGn0TBAn9it05ISEsEhlKQUddIk7o2XDokOcTN7HBO8tznM0D9dGezvHEfRZBfZf6me0A==" - "resolved" "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-0.0.1.tgz" - "version" "0.0.1" - dependencies: - "@types/estree" "*" - -"@types/estree@*": - "integrity" "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==" - "resolved" "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz" - "version" "0.0.51" - -"@types/estree@^0.0.46": - "integrity" "sha512-laIjwTQaD+5DukBZaygQ79K1Z0jb1bPEMRrkXSLjtCcZm+abyp5YbrqpSLzD42FwWW6gK/aS4NYpJ804nG2brg==" - "resolved" "https://registry.npmjs.org/@types/estree/-/estree-0.0.46.tgz" - "version" "0.0.46" - -"@types/estree@^0.0.50": - "integrity" "sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw==" - "resolved" "https://registry.npmjs.org/@types/estree/-/estree-0.0.50.tgz" - "version" "0.0.50" - -"@types/gtag.js@^0.0.11": - version "0.0.11" - resolved "https://registry.yarnpkg.com/@types/gtag.js/-/gtag.js-0.0.11.tgz#0c384ea32e4e40043a2dca82db79b5e48d681ad5" - integrity sha512-rUuSDedDjcuUpoc2zf6eX6zRrxqALNgwrmMBfVFopkLH7YGM52C7tt6j9GsYIvaxn+ioVRpOKoHnN1DXzHEqIg== - -"@types/hast@^2.0.0": - "integrity" "sha512-wLEm0QvaoawEDoTRwzTXp4b4jpwiJDvR5KMnFnVodm3scufTlBOWRD6N1OBf9TZMhjlNsSfcO5V+7AF4+Vy+9g==" - "resolved" "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz" - "version" "2.3.4" - dependencies: - "@types/unist" "*" - -"@types/http-cache-semantics@*": - "integrity" "sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==" - "resolved" "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz" - "version" "4.0.1" - -"@types/js-cookie@^2.2.6": - "integrity" "sha512-aLkWa0C0vO5b4Sr798E26QgOkss68Un0bLjs7u9qxzPT5CG+8DuNTffWES58YzJs3hrVAOs1wonycqEBqNJubA==" - "resolved" "https://registry.npmjs.org/@types/js-cookie/-/js-cookie-2.2.7.tgz" - "version" "2.2.7" - -"@types/js-yaml@^4.0.0": - "integrity" "sha512-FhpRzf927MNQdRZP0J5DLIdTXhjLYzeUTmLAu69mnVksLH9CJY3IuSeEgbKUki7GQZm0WqDkGzyxju2EZGD2wA==" - "resolved" "https://registry.npmjs.org/@types/js-yaml/-/js-yaml-4.0.5.tgz" - "version" "4.0.5" - -"@types/keyv@*": - "integrity" "sha512-FXCJgyyN3ivVgRoml4h94G/p3kY+u/B86La+QptcqJaWtBWtmc6TtkNfS40n9bIvyLteHh7zXOtgbobORKPbDg==" - "resolved" "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.3.tgz" - "version" "3.1.3" - dependencies: - "@types/node" "*" - -"@types/mdast@^3.0.0": - "integrity" "sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA==" - "resolved" "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz" - "version" "3.0.10" - dependencies: - "@types/unist" "*" - -"@types/mdurl@^1.0.0": - "integrity" "sha512-eC4U9MlIcu2q0KQmXszyn5Akca/0jrQmwDRgpAMJai7qBWq4amIQhZyNau4VYGtCeALvW1/NtjzJJ567aZxfKA==" - "resolved" "https://registry.npmjs.org/@types/mdurl/-/mdurl-1.0.2.tgz" - "version" "1.0.2" - -"@types/mdx@^2.0.0": - "integrity" "sha512-JPEv4iAl0I+o7g8yVWDwk30es8mfVrjkvh5UeVR2sYPpZCK44vrAPsbJpIS+rJAUxLgaSAMKTEH5Vn5qd9XsrQ==" - "resolved" "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.1.tgz" - "version" "2.0.1" - -"@types/ms@*": - 
"integrity" "sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==" - "resolved" "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz" - "version" "0.7.31" - -"@types/node@*", "@types/node@^17.0.21": - "integrity" "sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ==" - "resolved" "https://registry.npmjs.org/@types/node/-/node-17.0.21.tgz" - "version" "17.0.21" - -"@types/parse-json@^4.0.0": - "integrity" "sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==" - "resolved" "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" - "version" "4.0.0" - -"@types/prop-types@*", "@types/prop-types@^15.0.0": - "integrity" "sha512-rZ5drC/jWjrArrS8BR6SIr4cWpW09RNTYt9AMZo3Jwwif+iacXAqgVjm0B0Bv/S1jhDXKHqRVNCbACkJ89RAnQ==" - "resolved" "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.4.tgz" - "version" "15.7.4" - -"@types/react-syntax-highlighter@^13.5.2": - "integrity" "sha512-sRZoKZBGKaE7CzMvTTgz+0x/aVR58ZYUTfB7HN76vC+yQnvo1FWtzNARBt0fGqcLGEVakEzMu/CtPzssmanu8Q==" - "resolved" "https://registry.npmjs.org/@types/react-syntax-highlighter/-/react-syntax-highlighter-13.5.2.tgz" - "version" "13.5.2" - dependencies: - "@types/react" "*" - -"@types/react@*", "@types/react@^17.0.2", "@types/react@>=16": - "integrity" "sha512-UrXhD/JyLH+W70nNSufXqMZNuUD2cXHu6UjCllC6pmOQgBX4SGXOH8fjRka0O0Ee0HrFxapDD8Bwn81Kmiz6jQ==" - "resolved" "https://registry.npmjs.org/@types/react/-/react-17.0.40.tgz" - "version" "17.0.40" - dependencies: - "@types/prop-types" "*" - "@types/scheduler" "*" - "csstype" "^3.0.2" - -"@types/responselike@*", "@types/responselike@^1.0.0": - "integrity" "sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==" - "resolved" "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "@types/node" "*" - -"@types/scheduler@*": - "integrity" "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==" - "resolved" "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz" - "version" "0.16.2" - -"@types/unist@*", "@types/unist@^2.0.0": - "integrity" "sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ==" - "resolved" "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz" - "version" "2.0.6" - -"@xobotyi/scrollbar-width@^1.9.5": - "integrity" "sha512-N8tkAACJx2ww8vFMneJmaAgmjAG1tnVBZJRLRcx061tmsLRZHSEZSLuGWnwPtunsSLvSqXQ2wfp7Mgqg1I+2dQ==" - "resolved" "https://registry.npmjs.org/@xobotyi/scrollbar-width/-/scrollbar-width-1.9.5.tgz" - "version" "1.9.5" - -"acorn-jsx@^5.0.0": - "integrity" "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==" - "resolved" "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" - "version" "5.3.2" - -"acorn-node@^1.6.1": - "integrity" "sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A==" - "resolved" "https://registry.npmjs.org/acorn-node/-/acorn-node-1.8.2.tgz" - "version" "1.8.2" - dependencies: - "acorn" "^7.0.0" - "acorn-walk" "^7.0.0" - "xtend" "^4.0.2" - -"acorn-walk@^7.0.0": - "integrity" "sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==" - "resolved" "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz" - "version" "7.2.0" - -"acorn-walk@^8.1.1": - "integrity" 
"sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==" - "resolved" "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz" - "version" "8.2.0" - -"acorn@^6.0.0 || ^7.0.0 || ^8.0.0", "acorn@^8.0.0", "acorn@^8.4.1": - "integrity" "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==" - "resolved" "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz" - "version" "8.7.0" - -"acorn@^7.0.0": - "integrity" "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" - "resolved" "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz" - "version" "7.4.1" - -"aggregate-error@^3.1.0": - "integrity" "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==" - "resolved" "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "clean-stack" "^2.0.0" - "indent-string" "^4.0.0" - -"ansi-align@^2.0.0": - "integrity" "sha1-w2rsy6VjuJzrVW82kPCx2eNUf38= sha512-TdlOggdA/zURfMYa7ABC66j+oqfMew58KpJMbUlH3bcZP1b+cBHIHDDn5uH9INsxrHBPjsqM0tDB4jPTF/vgJA==" - "resolved" "https://registry.npmjs.org/ansi-align/-/ansi-align-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "string-width" "^2.0.0" - -"ansi-align@^3.0.0": - "integrity" "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==" - "resolved" "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz" - "version" "3.0.1" - dependencies: - "string-width" "^4.1.0" - -"ansi-escapes@^4.2.1": - "integrity" "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==" - "resolved" "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz" - "version" "4.3.2" - dependencies: - "type-fest" "^0.21.3" - -"ansi-red@^0.1.1": - "integrity" "sha1-jGOPnRCAgAo1PJwoyKgcpHBdlGw= sha512-ewaIr5y+9CUTGFwZfpECUbFlGcC0GCw1oqR9RI6h1gQCd9Aj2GxSckCnPsVJnmfMZbwFYE+leZGASgkWl06Jow==" - "resolved" "https://registry.npmjs.org/ansi-red/-/ansi-red-0.1.1.tgz" - "version" "0.1.1" - dependencies: - "ansi-wrap" "0.1.0" - -"ansi-regex@^2.0.0": - "integrity" "sha1-w7M6te42DYbg5ijwRorn7yfWVN8= sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==" - "resolved" "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz" - "version" "2.1.1" - -"ansi-regex@^3.0.0": - "integrity" "sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==" - "resolved" "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz" - "version" "3.0.1" - -"ansi-regex@^4.1.0": - "integrity" "sha512-ILlv4k/3f6vfQ4OoP2AGvirOktlQ98ZEL1k9FaQjxa3L1abBgbuTDAdPOpvbGncC0BTVQrl+OM8xZGK6tWXt7g==" - "resolved" "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.1.tgz" - "version" "4.1.1" - -"ansi-regex@^5.0.1": - "integrity" "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - "resolved" "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" - "version" "5.0.1" - -"ansi-styles@^2.2.1": - "integrity" "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= sha512-kmCevFghRiWM7HB5zTPULl4r9bVFSWjz62MhqizDGUrq2NWuNMQyuv4tHHoKJHs69M/MF64lEcHdYIocrdWQYA==" - "resolved" "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz" - "version" "2.2.1" - -"ansi-styles@^3.2.0": - "integrity" "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==" - "resolved" 
"https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" - "version" "3.2.1" - dependencies: - "color-convert" "^1.9.0" - -"ansi-styles@^3.2.1": - "integrity" "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==" - "resolved" "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" - "version" "3.2.1" - dependencies: - "color-convert" "^1.9.0" - -"ansi-styles@^4.1.0": - "integrity" "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==" - "resolved" "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" - "version" "4.3.0" - dependencies: - "color-convert" "^2.0.1" - -"ansi-wrap@0.1.0": - "integrity" "sha1-qCJQ3bABXponyoLoLqYDu/pF768= sha512-ZyznvL8k/FZeQHr2T6LzcJ/+vBApDnMNZvfVFy3At0knswWd6rJ3/0Hhmpu8oqa6C92npmozs890sX9Dl6q+Qw==" - "resolved" "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz" - "version" "0.1.0" - -"anymatch@~3.1.2": - "integrity" "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==" - "resolved" "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz" - "version" "3.1.2" - dependencies: - "normalize-path" "^3.0.0" - "picomatch" "^2.0.4" - -"arg@^4.1.0": - "integrity" "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==" - "resolved" "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz" - "version" "4.1.3" - -"arg@^5.0.1": - "integrity" "sha512-e0hDa9H2Z9AwFkk2qDlwhoMYE4eToKarchkQHovNdLTCYMHZHeRjI71crOh+dio4K6u1IcwubQqo79Ga4CyAQA==" - "resolved" "https://registry.npmjs.org/arg/-/arg-5.0.1.tgz" - "version" "5.0.1" - -"argparse@^1.0.10", "argparse@^1.0.7": - "integrity" "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==" - "resolved" "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" - "version" "1.0.10" - dependencies: - "sprintf-js" "~1.0.2" - -"argparse@^2.0.1": - "integrity" "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" - "resolved" "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" - "version" "2.0.1" - -"astring@^1.6.0": - "integrity" "sha512-Aj3mbwVzj7Vve4I/v2JYOPFkCGM2YS7OqQTNSxmUR+LECRpokuPgAYghePgr6SALDo5bD5DlfbSaYjOzGJZOLQ==" - "resolved" "https://registry.npmjs.org/astring/-/astring-1.8.1.tgz" - "version" "1.8.1" - -"asynckit@^0.4.0": - "integrity" "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" - "resolved" "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" - "version" "0.4.0" - -"autolinker@~0.28.0": - "integrity" "sha1-BlK0kYgYefB3XazgzcoyM5QqTkc= sha512-zQAFO1Dlsn69eXaO6+7YZc+v84aquQKbwpzCE3L0stj56ERn9hutFxPopViLjo9G+rWwjozRhgS5KJ25Xy19cQ==" - "resolved" "https://registry.npmjs.org/autolinker/-/autolinker-0.28.1.tgz" - "version" "0.28.1" - dependencies: - "gulp-header" "^1.7.1" - -"autoprefixer@^10.0.2", "autoprefixer@^10.4.4": - "integrity" "sha512-Tm8JxsB286VweiZ5F0anmbyGiNI3v3wGv3mz9W+cxEDYB/6jbnj6GM9H9mK3wIL8ftgl+C07Lcwb8PG5PCCPzA==" - "resolved" "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.4.tgz" - "version" "10.4.4" - dependencies: - "browserslist" "^4.20.2" - "caniuse-lite" "^1.0.30001317" - "fraction.js" "^4.2.0" - "normalize-range" "^0.1.2" - "picocolors" "^1.0.0" - "postcss-value-parser" "^4.2.0" - -"axios@^1.0.0": - "integrity" "sha512-SsHsGFN1qNPFT5QhSoSD37SHDfGyLSW5AESmyLk2JeCMHv5g0I9g0Hz/zQHx2KNe0jGXh2q2hAm7OdkXm360CA==" - "resolved" 
"https://registry.npmjs.org/axios/-/axios-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "follow-redirects" "^1.15.0" - "form-data" "^4.0.0" - "proxy-from-env" "^1.1.0" - -"babel-code-frame@^6.26.0": - "integrity" "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s= sha512-XqYMR2dfdGMW+hd0IUZ2PwK+fGeFkOxZJ0wY+JaQAHzt1Zx8LcvpiZD2NiGkEG8qx0CfkAOr5xt76d1e8vG90g==" - "resolved" "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz" - "version" "6.26.0" - dependencies: - "chalk" "^1.1.3" - "esutils" "^2.0.2" - "js-tokens" "^3.0.2" - -"bail@^2.0.0": - "integrity" "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==" - "resolved" "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz" - "version" "2.0.2" - -"balanced-match@^1.0.0": - "integrity" "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - "resolved" "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" - "version" "1.0.2" - -"base64url@^3.0.1": - "integrity" "sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A==" - "resolved" "https://registry.npmjs.org/base64url/-/base64url-3.0.1.tgz" - "version" "3.0.1" - -"binary-extensions@^2.0.0": - "integrity" "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" - "resolved" "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" - "version" "2.2.0" - -"boxen@^1.2.1": - "integrity" "sha512-TNPjfTr432qx7yOjQyaXm3dSR0MH9vXp7eT1BFSl/C51g+EFnOR9hTg1IreahGBmDNCehscshe45f+C1TBZbLw==" - "resolved" "https://registry.npmjs.org/boxen/-/boxen-1.3.0.tgz" - "version" "1.3.0" - dependencies: - "ansi-align" "^2.0.0" - "camelcase" "^4.0.0" - "chalk" "^2.0.1" - "cli-boxes" "^1.0.0" - "string-width" "^2.0.0" - "term-size" "^1.2.0" - "widest-line" "^2.0.0" - -"boxen@^4.2.0": - "integrity" "sha512-eB4uT9RGzg2odpER62bBwSLvUeGC+WbRjjyyFhGsKnc8wp/m0+hQsMUvUe3H2V0D5vw0nBdO1hCJoZo5mKeuIQ==" - "resolved" "https://registry.npmjs.org/boxen/-/boxen-4.2.0.tgz" - "version" "4.2.0" - dependencies: - "ansi-align" "^3.0.0" - "camelcase" "^5.3.1" - "chalk" "^3.0.0" - "cli-boxes" "^2.2.0" - "string-width" "^4.1.0" - "term-size" "^2.1.0" - "type-fest" "^0.8.1" - "widest-line" "^3.1.0" - -"brace-expansion@^1.1.7": - "integrity" "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==" - "resolved" "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" - "version" "1.1.11" - dependencies: - "balanced-match" "^1.0.0" - "concat-map" "0.0.1" - -"braces@^3.0.1", "braces@~3.0.2": - "integrity" "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==" - "resolved" "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz" - "version" "3.0.2" - dependencies: - "fill-range" "^7.0.1" - -"browserslist@^4.20.2": - "integrity" "sha512-CQOBCqp/9pDvDbx3xfMi+86pr4KXIf2FDkTTdeuYw8OxS9t898LA1Khq57gtufFILXpfgsSx5woNgsBgvGjpsA==" - "resolved" "https://registry.npmjs.org/browserslist/-/browserslist-4.20.2.tgz" - "version" "4.20.2" - dependencies: - "caniuse-lite" "^1.0.30001317" - "electron-to-chromium" "^1.4.84" - "escalade" "^3.1.1" - "node-releases" "^2.0.2" - "picocolors" "^1.0.0" - -"buffer-from@^1.0.0": - "integrity" "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" - "resolved" "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" - "version" "1.1.2" - -"builtins@^1.0.3": - "integrity" 
"sha1-y5T662HIaWRR2zZTThQi+U8K7og= sha512-uYBjakWipfaO/bXI7E8rq6kpwHRZK5cNYrUv2OzZSI/FvmdMyXJ2tG9dKcjEC5YHmHpUAwsargWIZNWdxb/bnQ==" - "resolved" "https://registry.npmjs.org/builtins/-/builtins-1.0.3.tgz" - "version" "1.0.3" - -"cacheable-lookup@^5.0.3": - "integrity" "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==" - "resolved" "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz" - "version" "5.0.4" - -"cacheable-request@^6.0.0": - "integrity" "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==" - "resolved" "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz" - "version" "6.1.0" - dependencies: - "clone-response" "^1.0.2" - "get-stream" "^5.1.0" - "http-cache-semantics" "^4.0.0" - "keyv" "^3.0.0" - "lowercase-keys" "^2.0.0" - "normalize-url" "^4.1.0" - "responselike" "^1.0.2" - -"cacheable-request@^7.0.2": - "integrity" "sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew==" - "resolved" "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz" - "version" "7.0.2" - dependencies: - "clone-response" "^1.0.2" - "get-stream" "^5.1.0" - "http-cache-semantics" "^4.0.0" - "keyv" "^4.0.0" - "lowercase-keys" "^2.0.0" - "normalize-url" "^6.0.1" - "responselike" "^2.0.0" - -"callsites@^3.0.0": - "integrity" "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" - "resolved" "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" - "version" "3.1.0" - -"camelcase-css@^2.0.1": - "integrity" "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" - "resolved" "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz" - "version" "2.0.1" - -"camelcase@^4.0.0": - "integrity" "sha1-1UVjW+HjPFQmScaRc+Xeas+uNN0= sha512-FxAv7HpHrXbh3aPo4o2qxHay2lkLY3x5Mw3KeE4KQE8ysVfziWeRZDwcjauvwBSGEC/nXUPzZy8zeh4HokqOnw==" - "resolved" "https://registry.npmjs.org/camelcase/-/camelcase-4.1.0.tgz" - "version" "4.1.0" - -"camelcase@^5.0.0", "camelcase@^5.3.1": - "integrity" "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==" - "resolved" "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" - "version" "5.3.1" - -"caniuse-lite@^1.0.30001283", "caniuse-lite@^1.0.30001317": - "integrity" "sha512-xIZLh8gBm4dqNX0gkzrBeyI86J2eCjWzYAs40q88smG844YIrN4tVQl/RhquHvKEKImWWFIVh1Lxe5n1G/N+GQ==" - "resolved" "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001317.tgz" - "version" "1.0.30001317" - -"capture-stack-trace@^1.0.0": - "integrity" "sha512-mYQLZnx5Qt1JgB1WEiMCf2647plpGeQ2NMR/5L0HNZzGQo4fuSPnK+wjfPnKZV0aiJDgzmWqqkV/g7JD+DW0qw==" - "resolved" "https://registry.npmjs.org/capture-stack-trace/-/capture-stack-trace-1.0.1.tgz" - "version" "1.0.1" - -"ccount@^2.0.0": - "integrity" "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==" - "resolved" "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz" - "version" "2.0.1" - -"chalk@^1.1.3": - "integrity" "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= sha512-U3lRVLMSlsCfjqYPbLyVv11M9CPW4I728d6TCKMAOJueEeB9/8o+eSsMnxPJD+Q+K909sdESg7C+tIkoH6on1A==" - "resolved" "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz" - "version" "1.1.3" - dependencies: - "ansi-styles" "^2.2.1" - "escape-string-regexp" "^1.0.2" - "has-ansi" "^2.0.0" - "strip-ansi" "^3.0.0" - "supports-color" "^2.0.0" - -"chalk@^2.0.0": - 
"integrity" "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==" - "resolved" "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" - "version" "2.4.2" - dependencies: - "ansi-styles" "^3.2.1" - "escape-string-regexp" "^1.0.5" - "supports-color" "^5.3.0" - -"chalk@^2.0.1": - "integrity" "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==" - "resolved" "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" - "version" "2.4.2" - dependencies: - "ansi-styles" "^3.2.1" - "escape-string-regexp" "^1.0.5" - "supports-color" "^5.3.0" - -"chalk@^3.0.0": - "integrity" "sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==" - "resolved" "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "ansi-styles" "^4.1.0" - "supports-color" "^7.1.0" - -"chalk@^4.1.0", "chalk@^4.1.2": - "integrity" "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==" - "resolved" "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" - "version" "4.1.2" - dependencies: - "ansi-styles" "^4.1.0" - "supports-color" "^7.1.0" - -"character-entities-html4@^2.0.0": - "integrity" "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==" - "resolved" "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz" - "version" "2.1.0" - -"character-entities-legacy@^1.0.0": - "integrity" "sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA==" - "resolved" "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz" - "version" "1.1.4" - -"character-entities-legacy@^3.0.0": - "integrity" "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==" - "resolved" "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz" - "version" "3.0.0" - -"character-entities@^1.0.0": - "integrity" "sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw==" - "resolved" "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz" - "version" "1.2.4" - -"character-entities@^2.0.0": - "integrity" "sha512-OzmutCf2Kmc+6DrFrrPS8/tDh2+DpnrfzdICHWhcVC9eOd0N1PXmQEE1a8iM4IziIAG+8tmTq3K+oo0ubH6RRQ==" - "resolved" "https://registry.npmjs.org/character-entities/-/character-entities-2.0.1.tgz" - "version" "2.0.1" - -"character-reference-invalid@^1.0.0": - "integrity" "sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg==" - "resolved" "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz" - "version" "1.1.4" - -"character-reference-invalid@^2.0.0": - "integrity" "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==" - "resolved" "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz" - "version" "2.0.1" - -"chardet@^0.7.0": - "integrity" "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==" - "resolved" "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz" - "version" "0.7.0" - -"chokidar@^3.5.3", "chokidar@>=3.0.0 <4.0.0": - "integrity" "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==" - "resolved" 
"https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" - "version" "3.5.3" - dependencies: - "anymatch" "~3.1.2" - "braces" "~3.0.2" - "glob-parent" "~5.1.2" - "is-binary-path" "~2.1.0" - "is-glob" "~4.0.1" - "normalize-path" "~3.0.0" - "readdirp" "~3.6.0" - optionalDependencies: - "fsevents" "~2.3.2" - -"ci-info@^1.5.0": - "integrity" "sha512-vsGdkwSCDpWmP80ncATX7iea5DWQemg1UgCW5J8tqjU3lYw4FBYuj89J0CTVomA7BEfvSZd84GmHko+MxFQU2A==" - "resolved" "https://registry.npmjs.org/ci-info/-/ci-info-1.6.0.tgz" - "version" "1.6.0" - -"ci-info@^2.0.0": - "integrity" "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==" - "resolved" "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz" - "version" "2.0.0" - -"classnames@^2.3.1": - "integrity" "sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA==" - "resolved" "https://registry.npmjs.org/classnames/-/classnames-2.3.1.tgz" - "version" "2.3.1" - -"clean-stack@^2.0.0": - "integrity" "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" - "resolved" "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" - "version" "2.2.0" - -"cli-boxes@^1.0.0": - "integrity" "sha1-T6kXw+WclKAEzWH47lCdplFocUM= sha512-3Fo5wu8Ytle8q9iCzS4D2MWVL2X7JVWRiS1BnXbTFDhS9c/REkM9vd1AmabsoZoY5/dGi5TT9iKL8Kb6DeBRQg==" - "resolved" "https://registry.npmjs.org/cli-boxes/-/cli-boxes-1.0.0.tgz" - "version" "1.0.0" - -"cli-boxes@^2.2.0": - "integrity" "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==" - "resolved" "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz" - "version" "2.2.1" - -"cli-cursor@^3.1.0": - "integrity" "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==" - "resolved" "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "restore-cursor" "^3.1.0" - -"cli-width@^3.0.0": - "integrity" "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==" - "resolved" "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz" - "version" "3.0.0" - -"cliui@^5.0.0": - "integrity" "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==" - "resolved" "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz" - "version" "5.0.0" - dependencies: - "string-width" "^3.1.0" - "strip-ansi" "^5.2.0" - "wrap-ansi" "^5.1.0" - -"clone-response@^1.0.2": - "integrity" "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws= sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q==" - "resolved" "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "mimic-response" "^1.0.0" - -"coffee-script@^1.12.4": - "integrity" "sha512-fLeEhqwymYat/MpTPUjSKHVYYl0ec2mOyALEMLmzr5i1isuG+6jfI2j2d5oBO3VIzgUXgBVIcOT9uH1TFxBckw==" - "resolved" "https://registry.npmjs.org/coffee-script/-/coffee-script-1.12.7.tgz" - "version" "1.12.7" - -"color-convert@^1.9.0": - "integrity" "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==" - "resolved" "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" - "version" "1.9.3" - dependencies: - "color-name" "1.1.3" - -"color-convert@^2.0.1": - "integrity" "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==" - "resolved" 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "color-name" "~1.1.4" - -"color-name@^1.1.4", "color-name@~1.1.4": - "integrity" "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - "resolved" "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" - "version" "1.1.4" - -"color-name@1.1.3": - "integrity" "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" - "resolved" "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" - "version" "1.1.3" - -"combined-stream@^1.0.8": - "integrity" "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==" - "resolved" "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" - "version" "1.0.8" - dependencies: - "delayed-stream" "~1.0.0" - -"comma-separated-tokens@^1.0.0": - "integrity" "sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw==" - "resolved" "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz" - "version" "1.0.8" - -"comma-separated-tokens@^2.0.0": - "integrity" "sha512-G5yTt3KQN4Yn7Yk4ed73hlZ1evrFKXeUW3086p3PRFNp7m2vIjI6Pg+Kgb+oyzhd9F2qdcoj67+y3SdxL5XWsg==" - "resolved" "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.2.tgz" - "version" "2.0.2" - -"commander@^2.19.0": - "integrity" "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - "resolved" "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" - "version" "2.20.3" - -"comment-json@^2.2.0": - "integrity" "sha512-T+iXox779qsqneMYx/x5BZyz4xjCeQRmuNVzz8tko7qZUs3MlzpA3RAs+O1XsgcKToNBMIvfVzafGOeiU7RggA==" - "resolved" "https://registry.npmjs.org/comment-json/-/comment-json-2.4.2.tgz" - "version" "2.4.2" - dependencies: - "core-util-is" "^1.0.2" - "esprima" "^4.0.1" - "has-own-prop" "^2.0.0" - "repeat-string" "^1.6.1" - -"concat-map@0.0.1": - "integrity" "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" - "resolved" "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" - "version" "0.0.1" - -"concat-stream@^1.5.2": - "integrity" "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==" - "resolved" "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz" - "version" "1.6.2" - dependencies: - "buffer-from" "^1.0.0" - "inherits" "^2.0.3" - "readable-stream" "^2.2.2" - "typedarray" "^0.0.6" - -"concat-with-sourcemaps@*": - "integrity" "sha512-4gEjHJFT9e+2W/77h/DS5SGUgwDaOwprX8L/gl5+3ixnzkVJJsZWDSelmN3Oilw3LNDZjZV0yqH1hLG3k6nghg==" - "resolved" "https://registry.npmjs.org/concat-with-sourcemaps/-/concat-with-sourcemaps-1.1.0.tgz" - "version" "1.1.0" - dependencies: - "source-map" "^0.6.1" - -"configstore@^3.0.0": - "integrity" "sha512-nlOhI4+fdzoK5xmJ+NY+1gZK56bwEaWZr8fYuXohZ9Vkc1o3a4T/R3M+yE/w7x/ZVJ1zF8c+oaOvF0dztdUgmA==" - "resolved" "https://registry.npmjs.org/configstore/-/configstore-3.1.5.tgz" - "version" "3.1.5" - dependencies: - "dot-prop" "^4.2.1" - "graceful-fs" "^4.1.2" - "make-dir" "^1.0.0" - "unique-string" "^1.0.0" - "write-file-atomic" "^2.0.0" - "xdg-basedir" "^3.0.0" - -"configstore@^5.0.1": - "integrity" "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==" - "resolved" 
"https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz" - "version" "5.0.1" - dependencies: - "dot-prop" "^5.2.0" - "graceful-fs" "^4.1.2" - "make-dir" "^3.0.0" - "unique-string" "^2.0.0" - "write-file-atomic" "^3.0.0" - "xdg-basedir" "^4.0.0" - -"cookie@^0.4.1": - "integrity" "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==" - "resolved" "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz" - "version" "0.4.2" - -"copy-to-clipboard@^3.3.1": - "integrity" "sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==" - "resolved" "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz" - "version" "3.3.1" - dependencies: - "toggle-selection" "^1.0.6" - -"core-util-is@^1.0.2", "core-util-is@~1.0.0": - "integrity" "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" - "resolved" "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" - "version" "1.0.3" - -"cosmiconfig@^7.0.1": - "integrity" "sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ==" - "resolved" "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz" - "version" "7.0.1" - dependencies: - "@types/parse-json" "^4.0.0" - "import-fresh" "^3.2.1" - "parse-json" "^5.0.0" - "path-type" "^4.0.0" - "yaml" "^1.10.0" - -"create-error-class@^3.0.0": - "integrity" "sha1-Br56vvlHo/FKMP1hBnHUAbyot7Y= sha512-gYTKKexFO3kh200H1Nit76sRwRtOY32vQd3jpAQKpLtZqyNsSQNfI4N7o3eP2wUjV35pTWKRYqFUDBvUha/Pkw==" - "resolved" "https://registry.npmjs.org/create-error-class/-/create-error-class-3.0.2.tgz" - "version" "3.0.2" - dependencies: - "capture-stack-trace" "^1.0.0" - -"create-require@^1.1.0": - "integrity" "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==" - "resolved" "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz" - "version" "1.1.1" - -"cross-spawn@^5.0.1": - "integrity" "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk= sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==" - "resolved" "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz" - "version" "5.1.0" - dependencies: - "lru-cache" "^4.0.1" - "shebang-command" "^1.2.0" - "which" "^1.2.9" - -"crypto-random-string@^1.0.0": - "integrity" "sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4= sha512-GsVpkFPlycH7/fRR7Dhcmnoii54gV1nz7y4CWyeFS14N+JVBBhY+r8amRHE4BwSYal7BPTDp8isvAlCxyFt3Hg==" - "resolved" "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-1.0.0.tgz" - "version" "1.0.0" - -"crypto-random-string@^2.0.0": - "integrity" "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==" - "resolved" "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz" - "version" "2.0.0" - -"css-in-js-utils@^2.0.0": - "integrity" "sha512-PJF0SpJT+WdbVVt0AOYp9C8GnuruRlL/UFW7932nLWmFLQTaWEzTBQEx7/hn4BuV+WON75iAViSUJLiU3PKbpA==" - "resolved" "https://registry.npmjs.org/css-in-js-utils/-/css-in-js-utils-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "hyphenate-style-name" "^1.0.2" - "isobject" "^3.0.1" - -"css-tree@^1.1.2": - "integrity" "sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q==" - "resolved" "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz" - "version" "1.1.3" - dependencies: - "mdn-data" "2.0.14" - "source-map" "^0.6.1" - -"cssesc@^3.0.0": - "integrity" 
"sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" - "resolved" "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz" - "version" "3.0.0" - -"csstype@^3.0.2", "csstype@^3.0.6": - "integrity" "sha512-sa6P2wJ+CAbgyy4KFssIb/JNMLxFvKF1pCYCSXS8ZMuqZnMsrxqI2E5sPyoTpxoPU/gVZMzr2zjOfg8GIZOMsw==" - "resolved" "https://registry.npmjs.org/csstype/-/csstype-3.0.11.tgz" - "version" "3.0.11" - -"data-uri-to-buffer@^4.0.0": - "integrity" "sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA==" - "resolved" "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz" - "version" "4.0.0" - -"debug@^4.0.0", "debug@^4.3.3": - "integrity" "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==" - "resolved" "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz" - "version" "4.3.3" - dependencies: - "ms" "2.1.2" - -"decamelize@^1.2.0": - "integrity" "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==" - "resolved" "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" - "version" "1.2.0" - -"decode-named-character-reference@^1.0.0": - "integrity" "sha512-YV/0HQHreRwKb7uBopyIkLG17jG6Sv2qUchk9qSoVJ2f+flwRsPNBO0hAnjt6mTNYUT+vw9Gy2ihXg4sUWPi2w==" - "resolved" "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "character-entities" "^2.0.0" - -"decompress-response@^3.3.0": - "integrity" "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M= sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==" - "resolved" "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz" - "version" "3.3.0" - dependencies: - "mimic-response" "^1.0.0" - -"decompress-response@^6.0.0": - "integrity" "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==" - "resolved" "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz" - "version" "6.0.0" - dependencies: - "mimic-response" "^3.1.0" - -"deep-extend@^0.6.0": - "integrity" "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==" - "resolved" "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz" - "version" "0.6.0" - -"defer-to-connect@^1.0.1": - "integrity" "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==" - "resolved" "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz" - "version" "1.1.3" - -"defer-to-connect@^2.0.0": - "integrity" "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==" - "resolved" "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz" - "version" "2.0.1" - -"defined@^1.0.0": - "integrity" "sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM= sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ==" - "resolved" "https://registry.npmjs.org/defined/-/defined-1.0.0.tgz" - "version" "1.0.0" - -"delayed-stream@~1.0.0": - "integrity" "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" - "resolved" "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" - "version" "1.0.0" - -"depd@~1.1.2": - "integrity" "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= 
sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==" - "resolved" "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" - "version" "1.1.2" - -"dequal@^2.0.0": - "integrity" "sha512-q9K8BlJVxK7hQYqa6XISGmBZbtQQWVXSrRrWreHC94rMt1QL/Impruc+7p2CYSYuVIUr+YCt6hjrs1kkdJRTug==" - "resolved" "https://registry.npmjs.org/dequal/-/dequal-2.0.2.tgz" - "version" "2.0.2" - -"detect-indent@^6.0.0": - "integrity" "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==" - "resolved" "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz" - "version" "6.1.0" - -"detective@^5.2.0": - "integrity" "sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg==" - "resolved" "https://registry.npmjs.org/detective/-/detective-5.2.0.tgz" - "version" "5.2.0" - dependencies: - "acorn-node" "^1.6.1" - "defined" "^1.0.0" - "minimist" "^1.1.1" - -"diacritics-map@^0.1.0": - "integrity" "sha1-bfwP+dAQAKLt8oZTccrDFulJd68= sha512-3omnDTYrGigU0i4cJjvaKwD52B8aoqyX/NEIkukFFkogBemsIbhSa1O414fpTp5nuszJG6lvQ5vBvDVNCbSsaQ==" - "resolved" "https://registry.npmjs.org/diacritics-map/-/diacritics-map-0.1.0.tgz" - "version" "0.1.0" - -"didyoumean@^1.2.2": - "integrity" "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==" - "resolved" "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz" - "version" "1.2.2" - -"diff@^4.0.1": - "integrity" "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==" - "resolved" "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz" - "version" "4.0.2" - -"diff@^5.0.0": - "integrity" "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==" - "resolved" "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz" - "version" "5.0.0" - -"dlv@^1.1.3": - "integrity" "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==" - "resolved" "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz" - "version" "1.1.3" - -"dot-prop@^4.2.1": - "integrity" "sha512-l0p4+mIuJIua0mhxGoh4a+iNL9bmeK5DvnSVQa6T0OhrVmaEa1XScX5Etc673FePCJOArq/4Pa2cLGODUWTPOQ==" - "resolved" "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.1.tgz" - "version" "4.2.1" - dependencies: - "is-obj" "^1.0.0" - -"dot-prop@^5.2.0": - "integrity" "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==" - "resolved" "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz" - "version" "5.3.0" - dependencies: - "is-obj" "^2.0.0" - -"dotenv@^5.0.1": - "integrity" "sha512-4As8uPrjfwb7VXC+WnLCbXK7y+Ueb2B3zgNCePYfhxS1PYeaO1YTeplffTEcbfLhvFNGLAz90VvJs9yomG7bow==" - "resolved" "https://registry.npmjs.org/dotenv/-/dotenv-5.0.1.tgz" - "version" "5.0.1" - -"duplexer3@^0.1.4": - "integrity" "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI= sha512-CEj8FwwNA4cVH2uFCoHUrmojhYh1vmCdOaneKJXwkeY1i9jnlslVo9dx+hQ5Hl9GnH/Bwy/IjxAyOePyPKYnzA==" - "resolved" "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz" - "version" "0.1.4" - -"editorconfig@^0.15.3": - "integrity" "sha512-M9wIMFx96vq0R4F+gRpY3o2exzb8hEj/n9S8unZtHSvYjibBp/iMufSzvmOcV/laG0ZtuTVGtiJggPOSW2r93g==" - "resolved" "https://registry.npmjs.org/editorconfig/-/editorconfig-0.15.3.tgz" - "version" "0.15.3" - dependencies: - "commander" "^2.19.0" - "lru-cache" "^4.1.5" - "semver" "^5.6.0" - "sigmund" "^1.0.1" - -"electron-to-chromium@^1.4.84": - "integrity" 
"sha512-K9AsQ41WS2bjZUFpRWfvaS4RjEcRCamEkBJN1Z1TQILBfP1H8QnJ9ti0wiLiMv0sRjX3EHKzgs9jDnmGFx2jXg==" - "resolved" "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.85.tgz" - "version" "1.4.85" - -"emoji-regex@^7.0.1": - "integrity" "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==" - "resolved" "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz" - "version" "7.0.3" - -"emoji-regex@^8.0.0": - "integrity" "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - "resolved" "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" - "version" "8.0.0" - -"end-of-stream@^1.1.0": - "integrity" "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==" - "resolved" "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz" - "version" "1.4.4" - dependencies: - "once" "^1.4.0" - -"error-ex@^1.3.1": - "integrity" "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==" - "resolved" "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" - "version" "1.3.2" - dependencies: - "is-arrayish" "^0.2.1" - -"error-stack-parser@^2.0.6": - "integrity" "sha512-chLOW0ZGRf4s8raLrDxa5sdkvPec5YdvwbFnqJme4rk0rFajP8mPtrDL1+I+CwrQDCjswDA5sREX7jYQDQs9vA==" - "resolved" "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.0.7.tgz" - "version" "2.0.7" - dependencies: - "stackframe" "^1.1.1" - -"escalade@^3.1.1": - "integrity" "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" - "resolved" "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" - "version" "3.1.1" - -"escape-goat@^2.0.0": - "integrity" "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==" - "resolved" "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz" - "version" "2.1.1" - -"escape-string-regexp@^1.0.2", "escape-string-regexp@^1.0.5": - "integrity" "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" - "resolved" "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" - "version" "1.0.5" - -"escape-string-regexp@^5.0.0": - "integrity" "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==" - "resolved" "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz" - "version" "5.0.0" - -"esprima@^4.0.0", "esprima@^4.0.1": - "integrity" "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" - "resolved" "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" - "version" "4.0.1" - -"estree-util-attach-comments@^2.0.0": - "integrity" "sha512-kT9YVRvlt2ewPp9BazfIIgXMGsXOEpOm57bK8aa4F3eOEndMml2JAETjWaG3SZYHmC6axSNIzHGY718dYwIuVg==" - "resolved" "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "@types/estree" "^0.0.46" - -"estree-util-build-jsx@^2.0.0": - "integrity" "sha512-d49hPGqBCJF/bF06g1Ywg7zjH1mrrUdPPrixBlKBxcX4WvMYlUUJ8BkrwlzWc8/fm6XqGgk5jilhgeZBDEGwOQ==" - "resolved" "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "@types/estree-jsx" "^0.0.1" - "estree-util-is-identifier-name" "^2.0.0" - "estree-walker" "^3.0.0" - 
-"estree-util-is-identifier-name@^2.0.0": - "integrity" "sha512-aXXZFVMnBBDRP81vS4YtAYJ0hUkgEsXea7lNKWCOeaAquGb1Jm2rcONPB5fpzwgbNxulTvrWuKnp9UElUGAKeQ==" - "resolved" "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-2.0.0.tgz" - "version" "2.0.0" - -"estree-util-visit@^1.0.0": - "integrity" "sha512-3lXJ4Us9j8TUif9cWcQy81t9p5OLasnDuuhrFiqb+XstmKC1d1LmrQWYsY49/9URcfHE64mPypDBaNK9NwWDPQ==" - "resolved" "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-1.1.0.tgz" - "version" "1.1.0" - dependencies: - "@types/estree-jsx" "^0.0.1" - "@types/unist" "^2.0.0" - -"estree-walker@^3.0.0": - "integrity" "sha512-woY0RUD87WzMBUiZLx8NsYr23N5BKsOMZHhu2hoNRVh6NXGfoiT1KOL8G3UHlJAnEDGmfa5ubNA/AacfG+Kb0g==" - "resolved" "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.1.tgz" - "version" "3.0.1" - -"esutils@^2.0.2": - "integrity" "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" - "resolved" "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" - "version" "2.0.3" - -"execa@^0.7.0": - "integrity" "sha1-lEvs00zEHuMqY6n68nrVpl/Fl3c= sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==" - "resolved" "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz" - "version" "0.7.0" - dependencies: - "cross-spawn" "^5.0.1" - "get-stream" "^3.0.0" - "is-stream" "^1.1.0" - "npm-run-path" "^2.0.0" - "p-finally" "^1.0.0" - "signal-exit" "^3.0.0" - "strip-eof" "^1.0.0" - -"expand-range@^1.8.1": - "integrity" "sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc= sha512-AFASGfIlnIbkKPQwX1yHaDjFvh/1gyKJODme52V6IORh69uEYgZp0o9C+qsIGNVEiuuhQU0CSSl++Rlegg1qvA==" - "resolved" "https://registry.npmjs.org/expand-range/-/expand-range-1.8.2.tgz" - "version" "1.8.2" - dependencies: - "fill-range" "^2.1.0" - -"extend-shallow@^2.0.1": - "integrity" "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==" - "resolved" "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "is-extendable" "^0.1.0" - -"extend@^3.0.0": - "integrity" "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - "resolved" "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" - "version" "3.0.2" - -"external-editor@^3.0.3": - "integrity" "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==" - "resolved" "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "chardet" "^0.7.0" - "iconv-lite" "^0.4.24" - "tmp" "^0.0.33" - -"fast-deep-equal@^3.1.3": - "integrity" "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" - "resolved" "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" - "version" "3.1.3" - -"fast-glob@^3.2.11": - "integrity" "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==" - "resolved" "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz" - "version" "3.2.11" - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - "glob-parent" "^5.1.2" - "merge2" "^1.3.0" - "micromatch" "^4.0.4" - -"fast-shallow-equal@^1.0.0": - "integrity" "sha512-HPtaa38cPgWvaCFmRNhlc6NG7pv6NUHqjPgVAkWGoB9mQMwYB27/K0CvOM5Czy+qpT3e8XJ6Q4aPAnzpNpzNaw==" - "resolved" 
"https://registry.npmjs.org/fast-shallow-equal/-/fast-shallow-equal-1.0.0.tgz" - "version" "1.0.0" - -"fastest-stable-stringify@^2.0.2": - "integrity" "sha512-bijHueCGd0LqqNK9b5oCMHc0MluJAx0cwqASgbWMvkO01lCYgIhacVRLcaDz3QnyYIRNJRDwMb41VuT6pHJ91Q==" - "resolved" "https://registry.npmjs.org/fastest-stable-stringify/-/fastest-stable-stringify-2.0.2.tgz" - "version" "2.0.2" - -"fastq@^1.6.0": - "integrity" "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==" - "resolved" "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz" - "version" "1.13.0" - dependencies: - "reusify" "^1.0.4" - -"fault@^1.0.0": - "integrity" "sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==" - "resolved" "https://registry.npmjs.org/fault/-/fault-1.0.4.tgz" - "version" "1.0.4" - dependencies: - "format" "^0.2.0" - -"fetch-blob@^3.1.2", "fetch-blob@^3.1.4": - "integrity" "sha512-N64ZpKqoLejlrwkIAnb9iLSA3Vx/kjgzpcDhygcqJ2KKjky8nCgUQ+dzXtbrLaWZGZNmNfQTsiQ0weZ1svglHg==" - "resolved" "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.1.5.tgz" - "version" "3.1.5" - dependencies: - "node-domexception" "^1.0.0" - "web-streams-polyfill" "^3.0.3" - -"figures@^3.0.0": - "integrity" "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==" - "resolved" "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz" - "version" "3.2.0" - dependencies: - "escape-string-regexp" "^1.0.5" - -"fill-range@^2.1.0": - "integrity" "sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q==" - "resolved" "https://registry.npmjs.org/fill-range/-/fill-range-2.2.4.tgz" - "version" "2.2.4" - dependencies: - "is-number" "^2.1.0" - "isobject" "^2.0.0" - "randomatic" "^3.0.0" - "repeat-element" "^1.1.2" - "repeat-string" "^1.5.2" - -"fill-range@^7.0.1": - "integrity" "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==" - "resolved" "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz" - "version" "7.0.1" - dependencies: - "to-regex-range" "^5.0.1" - -"find-up@^3.0.0": - "integrity" "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==" - "resolved" "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "locate-path" "^3.0.0" - -"find-up@^4.1.0": - "integrity" "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==" - "resolved" "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" - "version" "4.1.0" - dependencies: - "locate-path" "^5.0.0" - "path-exists" "^4.0.0" - -"follow-redirects@^1.15.0": - "integrity" "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==" - "resolved" "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz" - "version" "1.15.2" - -"for-in@^1.0.2": - "integrity" "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ==" - "resolved" "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz" - "version" "1.0.2" - -"form-data@^4.0.0": - "integrity" "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==" - "resolved" "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz" - "version" "4.0.0" - dependencies: - "asynckit" "^0.4.0" - "combined-stream" "^1.0.8" - "mime-types" "^2.1.12" - -"format@^0.2.0": - "integrity" 
"sha1-1hcBB+nv3E7TDJ3DkBbflCtctYs= sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==" - "resolved" "https://registry.npmjs.org/format/-/format-0.2.2.tgz" - "version" "0.2.2" - -"formdata-polyfill@^4.0.10": - "integrity" "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==" - "resolved" "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz" - "version" "4.0.10" - dependencies: - "fetch-blob" "^3.1.2" - -"fraction.js@^4.2.0": - "integrity" "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==" - "resolved" "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz" - "version" "4.2.0" - -"fs-exists-sync@^0.1.0": - "integrity" "sha1-mC1ok6+RjnLQjeyehnP/K1qNat0= sha512-cR/vflFyPZtrN6b38ZyWxpWdhlXrzZEBawlpBQMq7033xVY7/kg0GDMBK5jg8lDYQckdJ5x/YC88lM3C7VMsLg==" - "resolved" "https://registry.npmjs.org/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz" - "version" "0.1.0" - -"fs-extra@^8.1.0": - "integrity" "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==" - "resolved" "https://registry.npmjs.org/fs-extra/-/fs-extra-8.1.0.tgz" - "version" "8.1.0" - dependencies: - "graceful-fs" "^4.2.0" - "jsonfile" "^4.0.0" - "universalify" "^0.1.0" - -"fs.realpath@^1.0.0": - "integrity" "sha1-FQStJSMVjKpA20onh8sBQRmU6k8= sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" - "resolved" "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" - "version" "1.0.0" - -"fsevents@~2.3.2": - "integrity" "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==" - "resolved" "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz" - "version" "2.3.2" - -"function-bind@^1.1.1": - "integrity" "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - "resolved" "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" - "version" "1.1.1" - -"futoin-hkdf@^1.4.2": - "integrity" "sha512-4CerDhtTgx4i5PKccQIpEp4T9wqmosPIP9Kep35SdCpYkQeriD3zddUVhrO1Fc4QvGhsAnd2rXyoOr5047mJEg==" - "resolved" "https://registry.npmjs.org/futoin-hkdf/-/futoin-hkdf-1.5.0.tgz" - "version" "1.5.0" - -"get-caller-file@^2.0.1": - "integrity" "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" - "resolved" "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" - "version" "2.0.5" - -"get-stream@^3.0.0": - "integrity" "sha1-jpQ9E1jcN1VQVOy+LtsFqhdO3hQ= sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==" - "resolved" "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz" - "version" "3.0.0" - -"get-stream@^4.1.0": - "integrity" "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==" - "resolved" "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz" - "version" "4.1.0" - dependencies: - "pump" "^3.0.0" - -"get-stream@^5.1.0": - "integrity" "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==" - "resolved" "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz" - "version" "5.2.0" - dependencies: - "pump" "^3.0.0" - -"git-config-path@^1.0.1": - "integrity" "sha1-bTP37WPbDQ4RgTFQO6s6ykfVRmQ= sha512-KcJ2dlrrP5DbBnYIZ2nlikALfRhKzNSX0stvv3ImJ+fvC4hXKoV+U+74SV0upg+jlQZbrtQzc0bu6/Zh+7aQbg==" - "resolved" 
"https://registry.npmjs.org/git-config-path/-/git-config-path-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "extend-shallow" "^2.0.1" - "fs-exists-sync" "^0.1.0" - "homedir-polyfill" "^1.0.0" - -"git-username@^1.0.0": - "integrity" "sha512-xm45KwBR6Eu1jO4umx/o2M84v9TC7tdOBuzLx8ayhdR9H1FBiiG9azz31uC0esDvaWVBTDINpJ5USomk+ja8OQ==" - "resolved" "https://registry.npmjs.org/git-username/-/git-username-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "parse-github-url" "^1.0.2" - "remote-origin-url" "^1.0.0" - -"github-slugger@^1.1.1": - "integrity" "sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ==" - "resolved" "https://registry.npmjs.org/github-slugger/-/github-slugger-1.4.0.tgz" - "version" "1.4.0" - -"glob-parent@^5.1.2", "glob-parent@~5.1.2": - "integrity" "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==" - "resolved" "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" - "version" "5.1.2" - dependencies: - "is-glob" "^4.0.1" - -"glob-parent@^6.0.2": - "integrity" "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==" - "resolved" "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" - "version" "6.0.2" - dependencies: - "is-glob" "^4.0.3" - -"glob@^7.1.3", "glob@^7.1.6": - "integrity" "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==" - "resolved" "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz" - "version" "7.2.0" - dependencies: - "fs.realpath" "^1.0.0" - "inflight" "^1.0.4" - "inherits" "2" - "minimatch" "^3.0.4" - "once" "^1.3.0" - "path-is-absolute" "^1.0.0" - -"global-dirs@^0.1.0": - "integrity" "sha1-sxnA3UYH81PzvpzKTHL8FIxJ9EU= sha512-NknMLn7F2J7aflwFOlGdNIuCDpN3VGoSoB+aap3KABFWbHVn1TCgFC+np23J8W2BiZbjfEw3BFBycSMv1AFblg==" - "resolved" "https://registry.npmjs.org/global-dirs/-/global-dirs-0.1.1.tgz" - "version" "0.1.1" - dependencies: - "ini" "^1.3.4" - -"global-dirs@^2.0.1": - "integrity" "sha512-MG6kdOUh/xBnyo9cJFeIKkLEc1AyFq42QTU4XiX51i2NEdxLxLWXIjEjmqKeSuKR7pAZjTqUVoT2b2huxVLgYQ==" - "resolved" "https://registry.npmjs.org/global-dirs/-/global-dirs-2.1.0.tgz" - "version" "2.1.0" - dependencies: - "ini" "1.3.7" - -"got@^11.8.0": - "integrity" "sha512-7gtQ5KiPh1RtGS9/Jbv1ofDpBFuq42gyfEib+ejaRBJuj/3tQFeR5+gw57e4ipaU8c/rCjvX6fkQz2lyDlGAOg==" - "resolved" "https://registry.npmjs.org/got/-/got-11.8.3.tgz" - "version" "11.8.3" - dependencies: - "@sindresorhus/is" "^4.0.0" - "@szmarczak/http-timer" "^4.0.5" - "@types/cacheable-request" "^6.0.1" - "@types/responselike" "^1.0.0" - "cacheable-lookup" "^5.0.3" - "cacheable-request" "^7.0.2" - "decompress-response" "^6.0.0" - "http2-wrapper" "^1.0.0-beta.5.2" - "lowercase-keys" "^2.0.0" - "p-cancelable" "^2.0.0" - "responselike" "^2.0.0" - -"got@^6.7.1": - "integrity" "sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA= sha512-Y/K3EDuiQN9rTZhBvPRWMLXIKdeD1Rj0nzunfoi0Yyn5WBEbzxXKU9Ub2X41oZBagVWOBU3MuDonFMgPWQFnwg==" - "resolved" "https://registry.npmjs.org/got/-/got-6.7.1.tgz" - "version" "6.7.1" - dependencies: - "create-error-class" "^3.0.0" - "duplexer3" "^0.1.4" - "get-stream" "^3.0.0" - "is-redirect" "^1.0.0" - "is-retry-allowed" "^1.0.0" - "is-stream" "^1.0.0" - "lowercase-keys" "^1.0.0" - "safe-buffer" "^5.0.1" - "timed-out" "^4.0.0" - "unzip-response" "^2.0.1" - "url-parse-lax" "^1.0.0" - -"got@^9.6.0": - "integrity" "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==" - "resolved" 
"https://registry.npmjs.org/got/-/got-9.6.0.tgz" - "version" "9.6.0" - dependencies: - "@sindresorhus/is" "^0.14.0" - "@szmarczak/http-timer" "^1.1.2" - "cacheable-request" "^6.0.0" - "decompress-response" "^3.3.0" - "duplexer3" "^0.1.4" - "get-stream" "^4.1.0" - "lowercase-keys" "^1.0.1" - "mimic-response" "^1.0.1" - "p-cancelable" "^1.0.0" - "to-readable-stream" "^1.0.0" - "url-parse-lax" "^3.0.0" - -"graceful-fs@^4.1.11", "graceful-fs@^4.1.2", "graceful-fs@^4.1.6", "graceful-fs@^4.2.0": - "integrity" "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" - "resolved" "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz" - "version" "4.2.10" - -"gray-matter@^2.1.0": - "integrity" "sha1-MELZrewqHe1qdwep7SOA+KF6Qw4= sha512-vbmvP1Fe/fxuT2QuLVcqb2BfK7upGhhbLIt9/owWEvPYrZZEkelLcq2HqzxosV+PQ67dUFLaAeNpH7C4hhICAA==" - "resolved" "https://registry.npmjs.org/gray-matter/-/gray-matter-2.1.1.tgz" - "version" "2.1.1" - dependencies: - "ansi-red" "^0.1.1" - "coffee-script" "^1.12.4" - "extend-shallow" "^2.0.1" - "js-yaml" "^3.8.1" - "toml" "^2.3.2" - -"gray-matter@^4.0.3": - "integrity" "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==" - "resolved" "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz" - "version" "4.0.3" - dependencies: - "js-yaml" "^3.13.1" - "kind-of" "^6.0.2" - "section-matter" "^1.0.0" - "strip-bom-string" "^1.0.0" - -"gulp-header@^1.7.1": - "integrity" "sha512-lh9HLdb53sC7XIZOYzTXM4lFuXElv3EVkSDhsd7DoJBj7hm+Ni7D3qYbb+Rr8DuM8nRanBvkVO9d7askreXGnQ==" - "resolved" "https://registry.npmjs.org/gulp-header/-/gulp-header-1.8.12.tgz" - "version" "1.8.12" - dependencies: - "concat-with-sourcemaps" "*" - "lodash.template" "^4.4.0" - "through2" "^2.0.0" - -"has-ansi@^2.0.0": - "integrity" "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= sha512-C8vBJ8DwUCx19vhm7urhTuUsr4/IyP6l4VzNQDv+ryHQObW3TTTp9yB68WpYgRe2bbaGuZ/se74IqFeVnMnLZg==" - "resolved" "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "ansi-regex" "^2.0.0" - -"has-flag@^3.0.0": - "integrity" "sha1-tdRU3CGZriJWmfNGfloH87lVuv0= sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" - "resolved" "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" - "version" "3.0.0" - -"has-flag@^4.0.0": - "integrity" "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" - "resolved" "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" - "version" "4.0.0" - -"has-own-prop@^2.0.0": - "integrity" "sha512-Pq0h+hvsVm6dDEa8x82GnLSYHOzNDt7f0ddFa3FqcQlgzEiptPqL+XrOJNavjOzSYiYWIrgeVYYgGlLmnxwilQ==" - "resolved" "https://registry.npmjs.org/has-own-prop/-/has-own-prop-2.0.0.tgz" - "version" "2.0.0" - -"has-yarn@^2.1.0": - "integrity" "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==" - "resolved" "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz" - "version" "2.1.0" - -"has@^1.0.3": - "integrity" "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==" - "resolved" "https://registry.npmjs.org/has/-/has-1.0.3.tgz" - "version" "1.0.3" - dependencies: - "function-bind" "^1.1.1" - -"hast-util-has-property@^2.0.0": - "integrity" "sha512-4Qf++8o5v14us4Muv3HRj+Er6wTNGA/N9uCaZMty4JWvyFKLdhULrv4KE1b65AthsSO9TXSZnjuxS8ecIyhb0w==" - "resolved" "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-2.0.0.tgz" - 
"version" "2.0.0" - -"hast-util-heading-rank@^2.0.0": - "integrity" "sha512-w+Rw20Q/iWp2Bcnr6uTrYU6/ftZLbHKhvc8nM26VIWpDqDMlku2iXUVTeOlsdoih/UKQhY7PHQ+vZ0Aqq8bxtQ==" - "resolved" "https://registry.npmjs.org/hast-util-heading-rank/-/hast-util-heading-rank-2.1.0.tgz" - "version" "2.1.0" - dependencies: - "@types/hast" "^2.0.0" - -"hast-util-parse-selector@^2.0.0": - "integrity" "sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==" - "resolved" "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz" - "version" "2.2.5" - -"hast-util-to-estree@^2.0.0": - "integrity" "sha512-UQrZVeBj6A9od0lpFvqHKNSH9zvDrNoyWKbveu1a2oSCXEDUI+3bnd6BoiQLPnLrcXXn/jzJ6y9hmJTTlvf8lQ==" - "resolved" "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-2.0.2.tgz" - "version" "2.0.2" - dependencies: - "@types/estree-jsx" "^0.0.1" - "@types/hast" "^2.0.0" - "@types/unist" "^2.0.0" - "comma-separated-tokens" "^2.0.0" - "estree-util-attach-comments" "^2.0.0" - "estree-util-is-identifier-name" "^2.0.0" - "hast-util-whitespace" "^2.0.0" - "mdast-util-mdx-expression" "^1.0.0" - "mdast-util-mdxjs-esm" "^1.0.0" - "property-information" "^6.0.0" - "space-separated-tokens" "^2.0.0" - "style-to-object" "^0.3.0" - "unist-util-position" "^4.0.0" - "zwitch" "^2.0.0" - -"hast-util-to-string@^1.0.4": - "integrity" "sha512-eK0MxRX47AV2eZ+Lyr18DCpQgodvaS3fAQO2+b9Two9F5HEoRPhiUMNzoXArMJfZi2yieFzUBMRl3HNJ3Jus3w==" - "resolved" "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-1.0.4.tgz" - "version" "1.0.4" - -"hast-util-to-string@^2.0.0": - "integrity" "sha512-02AQ3vLhuH3FisaMM+i/9sm4OXGSq1UhOOCpTLLQtHdL3tZt7qil69r8M8iDkZYyC0HCFylcYoP+8IO7ddta1A==" - "resolved" "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "@types/hast" "^2.0.0" - -"hast-util-whitespace@^2.0.0": - "integrity" "sha512-Pkw+xBHuV6xFeJprJe2BBEoDV+AvQySaz3pPDRUs5PNZEMQjpXJJueqrpcHIXxnWTcAGi/UOCgVShlkY6kLoqg==" - "resolved" "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.0.tgz" - "version" "2.0.0" - -"hastscript@^6.0.0": - "integrity" "sha512-nDM6bvd7lIqDUiYEiu5Sl/+6ReP0BMk/2f4U/Rooccxkj0P5nm+acM5PrGJ/t5I8qPGiqZSE6hVAwZEdZIvP4w==" - "resolved" "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz" - "version" "6.0.0" - dependencies: - "@types/hast" "^2.0.0" - "comma-separated-tokens" "^1.0.0" - "hast-util-parse-selector" "^2.0.0" - "property-information" "^5.0.0" - "space-separated-tokens" "^1.0.0" - -"highlight.js@^10.4.1", "highlight.js@~10.7.0": - "integrity" "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==" - "resolved" "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz" - "version" "10.7.3" - -"homedir-polyfill@^1.0.0": - "integrity" "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==" - "resolved" "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz" - "version" "1.0.3" - dependencies: - "parse-passwd" "^1.0.0" - -"hosted-git-info@^2.7.1": - "integrity" "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==" - "resolved" "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz" - "version" "2.8.9" - -"http-cache-semantics@^4.0.0": - "integrity" "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==" - "resolved" 
"https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz" - "version" "4.1.0" - -"http-errors@^1.8.1": - "integrity" "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==" - "resolved" "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz" - "version" "1.8.1" - dependencies: - "depd" "~1.1.2" - "inherits" "2.0.4" - "setprototypeof" "1.2.0" - "statuses" ">= 1.5.0 < 2" - "toidentifier" "1.0.1" - -"http2-wrapper@^1.0.0-beta.5.2": - "integrity" "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==" - "resolved" "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz" - "version" "1.0.3" - dependencies: - "quick-lru" "^5.1.1" - "resolve-alpn" "^1.0.0" - -"hyphenate-style-name@^1.0.2": - "integrity" "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==" - "resolved" "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz" - "version" "1.0.4" - -"iconv-lite@^0.4.24": - "integrity" "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==" - "resolved" "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" - "version" "0.4.24" - dependencies: - "safer-buffer" ">= 2.1.2 < 3" - -"immutable@^4.0.0": - "integrity" "sha512-zIE9hX70qew5qTUjSS7wi1iwj/l7+m54KWU247nhM3v806UdGj1yDndXj+IOYxxtW9zyLI+xqFNZjTuDaLUqFw==" - "resolved" "https://registry.npmjs.org/immutable/-/immutable-4.0.0.tgz" - "version" "4.0.0" - -"import-fresh@^3.2.1": - "integrity" "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==" - "resolved" "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" - "version" "3.3.0" - dependencies: - "parent-module" "^1.0.0" - "resolve-from" "^4.0.0" - -"import-lazy@^2.1.0": - "integrity" "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM= sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A==" - "resolved" "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz" - "version" "2.1.0" - -"imurmurhash@^0.1.4": - "integrity" "sha1-khi5srkoojixPcT7a21XbyMUU+o= sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==" - "resolved" "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" - "version" "0.1.4" - -"indent-string@^4.0.0": - "integrity" "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==" - "resolved" "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" - "version" "4.0.0" - -"inflight@^1.0.4": - "integrity" "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==" - "resolved" "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" - "version" "1.0.6" - dependencies: - "once" "^1.3.0" - "wrappy" "1" - -"inherits@^2.0.3", "inherits@~2.0.3", "inherits@2", "inherits@2.0.4": - "integrity" "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - "resolved" "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" - "version" "2.0.4" - -"ini@^1.3.4", "ini@~1.3.0": - "integrity" "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" - "resolved" "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" - "version" "1.3.8" - -"ini@1.3.7": - "integrity" 
"sha512-iKpRpXP+CrP2jyrxvg1kMUpXDyRUFDWurxbnVT1vQPx+Wz9uCYsMIqYuSBLV+PAaZG/d7kRLKRFc9oDMsH+mFQ==" - "resolved" "https://registry.npmjs.org/ini/-/ini-1.3.7.tgz" - "version" "1.3.7" - -"inline-style-parser@0.1.1": - "integrity" "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" - "resolved" "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz" - "version" "0.1.1" - -"inline-style-prefixer@^6.0.0": - "integrity" "sha512-AsqazZ8KcRzJ9YPN1wMH2aNM7lkWQ8tSPrW5uDk1ziYwiAPWSZnUsC7lfZq+BDqLqz0B4Pho5wscWcJzVvRzDQ==" - "resolved" "https://registry.npmjs.org/inline-style-prefixer/-/inline-style-prefixer-6.0.1.tgz" - "version" "6.0.1" - dependencies: - "css-in-js-utils" "^2.0.0" - -"inquirer@^7.0.4": - "integrity" "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==" - "resolved" "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz" - "version" "7.3.3" - dependencies: - "ansi-escapes" "^4.2.1" - "chalk" "^4.1.0" - "cli-cursor" "^3.1.0" - "cli-width" "^3.0.0" - "external-editor" "^3.0.3" - "figures" "^3.0.0" - "lodash" "^4.17.19" - "mute-stream" "0.0.8" - "run-async" "^2.4.0" - "rxjs" "^6.6.0" - "string-width" "^4.1.0" - "strip-ansi" "^6.0.0" - "through" "^2.3.6" - -"is-alphabetical@^1.0.0": - "integrity" "sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg==" - "resolved" "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz" - "version" "1.0.4" - -"is-alphabetical@^2.0.0": - "integrity" "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==" - "resolved" "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz" - "version" "2.0.1" - -"is-alphanumerical@^1.0.0": - "integrity" "sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A==" - "resolved" "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz" - "version" "1.0.4" - dependencies: - "is-alphabetical" "^1.0.0" - "is-decimal" "^1.0.0" - -"is-alphanumerical@^2.0.0": - "integrity" "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==" - "resolved" "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "is-alphabetical" "^2.0.0" - "is-decimal" "^2.0.0" - -"is-arrayish@^0.2.1": - "integrity" "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==" - "resolved" "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" - "version" "0.2.1" - -"is-binary-path@~2.1.0": - "integrity" "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==" - "resolved" "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" - "version" "2.1.0" - dependencies: - "binary-extensions" "^2.0.0" - -"is-buffer@^1.1.5": - "integrity" "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" - "resolved" "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz" - "version" "1.1.6" - -"is-buffer@^2.0.0": - "integrity" "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==" - "resolved" "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz" - "version" "2.0.5" - -"is-ci@^1.0.10": - "integrity" 
"sha512-s6tfsaQaQi3JNciBH6shVqEDvhGut0SUXr31ag8Pd8BBbVVlcGfWhpPmEOoM6RJ5TFhbypvf5yyRw/VXW1IiWg==" - "resolved" "https://registry.npmjs.org/is-ci/-/is-ci-1.2.1.tgz" - "version" "1.2.1" - dependencies: - "ci-info" "^1.5.0" - -"is-ci@^2.0.0": - "integrity" "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==" - "resolved" "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "ci-info" "^2.0.0" - -"is-core-module@^2.8.1": - "integrity" "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==" - "resolved" "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz" - "version" "2.8.1" - dependencies: - "has" "^1.0.3" - -"is-decimal@^1.0.0": - "integrity" "sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw==" - "resolved" "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz" - "version" "1.0.4" - -"is-decimal@^2.0.0": - "integrity" "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==" - "resolved" "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz" - "version" "2.0.1" - -"is-directory@^0.3.1": - "integrity" "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= sha512-yVChGzahRFvbkscn2MlwGismPO12i9+znNruC5gVEntG3qu0xQMzsGg/JFbrsqDOHtHFPci+V5aP5T9I+yeKqw==" - "resolved" "https://registry.npmjs.org/is-directory/-/is-directory-0.3.1.tgz" - "version" "0.3.1" - -"is-extendable@^0.1.0": - "integrity" "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==" - "resolved" "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz" - "version" "0.1.1" - -"is-extendable@^1.0.1": - "integrity" "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==" - "resolved" "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "is-plain-object" "^2.0.4" - -"is-extglob@^2.1.1": - "integrity" "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==" - "resolved" "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" - "version" "2.1.1" - -"is-fullwidth-code-point@^2.0.0": - "integrity" "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==" - "resolved" "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz" - "version" "2.0.0" - -"is-fullwidth-code-point@^3.0.0": - "integrity" "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - "resolved" "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" - "version" "3.0.0" - -"is-glob@^4.0.1", "is-glob@^4.0.3", "is-glob@~4.0.1": - "integrity" "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==" - "resolved" "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" - "version" "4.0.3" - dependencies: - "is-extglob" "^2.1.1" - -"is-hexadecimal@^1.0.0": - "integrity" "sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==" - "resolved" "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz" - "version" "1.0.4" - -"is-hexadecimal@^2.0.0": - "integrity" 
"sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==" - "resolved" "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz" - "version" "2.0.1" - -"is-installed-globally@^0.1.0": - "integrity" "sha1-Df2Y9akRFxbdU13aZJL2e/PSWoA= sha512-ERNhMg+i/XgDwPIPF3u24qpajVreaiSuvpb1Uu0jugw7KKcxGyCX8cgp8P5fwTmAuXku6beDHHECdKArjlg7tw==" - "resolved" "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.1.0.tgz" - "version" "0.1.0" - dependencies: - "global-dirs" "^0.1.0" - "is-path-inside" "^1.0.0" - -"is-installed-globally@^0.3.1": - "integrity" "sha512-wZ8x1js7Ia0kecP/CHM/3ABkAmujX7WPvQk6uu3Fly/Mk44pySulQpnHG46OMjHGXApINnV4QhY3SWnECO2z5g==" - "resolved" "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.3.2.tgz" - "version" "0.3.2" - dependencies: - "global-dirs" "^2.0.1" - "is-path-inside" "^3.0.1" - -"is-npm@^1.0.0": - "integrity" "sha1-8vtjpl5JBbQGyGBydloaTceTufQ= sha512-9r39FIr3d+KD9SbX0sfMsHzb5PP3uimOiwr3YupUaUFG4W0l1U57Rx3utpttV7qz5U3jmrO5auUa04LU9pyHsg==" - "resolved" "https://registry.npmjs.org/is-npm/-/is-npm-1.0.0.tgz" - "version" "1.0.0" - -"is-npm@^4.0.0": - "integrity" "sha512-96ECIfh9xtDDlPylNPXhzjsykHsMJZ18ASpaWzQyBr4YRTcVjUvzaHayDAES2oU/3KpljhHUjtSRNiDwi0F0ig==" - "resolved" "https://registry.npmjs.org/is-npm/-/is-npm-4.0.0.tgz" - "version" "4.0.0" - -"is-number@^2.1.0": - "integrity" "sha1-Afy7s5NGOlSPL0ZszhbezknbkI8= sha512-QUzH43Gfb9+5yckcrSA0VBDwEtDUchrk4F6tfJZQuNzDJbEDB9cZNzSfXGQ1jqmdDY/kl41lUOWM9syA8z8jlg==" - "resolved" "https://registry.npmjs.org/is-number/-/is-number-2.1.0.tgz" - "version" "2.1.0" - dependencies: - "kind-of" "^3.0.2" - -"is-number@^4.0.0": - "integrity" "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==" - "resolved" "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz" - "version" "4.0.0" - -"is-number@^7.0.0": - "integrity" "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" - "resolved" "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" - "version" "7.0.0" - -"is-obj@^1.0.0": - "integrity" "sha1-PkcprB9f3gJc19g6iW2rn09n2w8= sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==" - "resolved" "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz" - "version" "1.0.1" - -"is-obj@^2.0.0": - "integrity" "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==" - "resolved" "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz" - "version" "2.0.0" - -"is-path-inside@^1.0.0": - "integrity" "sha1-jvW33lBDej/cprToZe96pVy0gDY= sha512-qhsCR/Esx4U4hg/9I19OVUAJkGWtjRYHMRgUMZE2TDdj+Ag+kttZanLupfddNyglzz50cUlmWzUaI37GDfNx/g==" - "resolved" "https://registry.npmjs.org/is-path-inside/-/is-path-inside-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "path-is-inside" "^1.0.1" - -"is-path-inside@^3.0.1": - "integrity" "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==" - "resolved" "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz" - "version" "3.0.3" - -"is-plain-obj@^4.0.0": - "integrity" "sha512-NXRbBtUdBioI73y/HmOhogw/U5msYPC9DAtGkJXeFcFWSFZw0mCUsPxk/snTuJHzNKA8kLBK4rH97RMB1BfCXw==" - "resolved" "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.0.0.tgz" - "version" "4.0.0" - -"is-plain-object@^2.0.4": - "integrity" "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==" 
- "resolved" "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz" - "version" "2.0.4" - dependencies: - "isobject" "^3.0.1" - -"is-redirect@^1.0.0": - "integrity" "sha1-HQPd7VO9jbDzDCbk+V02/HyH3CQ= sha512-cr/SlUEe5zOGmzvj9bUyC4LVvkNVAXu4GytXLNMr1pny+a65MpQ9IJzFHD5vi7FyJgb4qt27+eS3TuQnqB+RQw==" - "resolved" "https://registry.npmjs.org/is-redirect/-/is-redirect-1.0.0.tgz" - "version" "1.0.0" - -"is-reference@^3.0.0": - "integrity" "sha512-Eo1W3wUoHWoCoVM4GVl/a+K0IgiqE5aIo4kJABFyMum1ZORlPkC+UC357sSQUL5w5QCE5kCC9upl75b7+7CY/Q==" - "resolved" "https://registry.npmjs.org/is-reference/-/is-reference-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "@types/estree" "*" - -"is-retry-allowed@^1.0.0": - "integrity" "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==" - "resolved" "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz" - "version" "1.2.0" - -"is-stream@^1.0.0", "is-stream@^1.1.0": - "integrity" "sha1-EtSj3U5o4Lec6428hBc66A2RykQ= sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==" - "resolved" "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz" - "version" "1.1.0" - -"is-typedarray@^1.0.0": - "integrity" "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==" - "resolved" "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" - "version" "1.0.0" - -"is-yarn-global@^0.3.0": - "integrity" "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==" - "resolved" "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz" - "version" "0.3.0" - -"isarray@~1.0.0", "isarray@1.0.0": - "integrity" "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" - "resolved" "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - "version" "1.0.0" - -"isexe@^2.0.0": - "integrity" "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - "resolved" "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" - "version" "2.0.0" - -"isobject@^2.0.0": - "integrity" "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==" - "resolved" "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz" - "version" "2.1.0" - dependencies: - "isarray" "1.0.0" - -"isobject@^3.0.1": - "integrity" "sha1-TkMekrEalzFjaqH5yNHMvP2reN8= sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==" - "resolved" "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz" - "version" "3.0.1" - -"joi@^17.5.0": - "integrity" "sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw==" - "resolved" "https://registry.npmjs.org/joi/-/joi-17.6.0.tgz" - "version" "17.6.0" - dependencies: - "@hapi/hoek" "^9.0.0" - "@hapi/topo" "^5.0.0" - "@sideway/address" "^4.1.3" - "@sideway/formula" "^3.0.0" - "@sideway/pinpoint" "^2.0.0" - -"jose@^2.0.5": - "integrity" "sha512-BAiDNeDKTMgk4tvD0BbxJ8xHEHBZgpeRZ1zGPPsitSyMgjoMWiLGYAE7H7NpP5h0lPppQajQs871E8NHUrzVPA==" - "resolved" "https://registry.npmjs.org/jose/-/jose-2.0.5.tgz" - "version" "2.0.5" - dependencies: - "@panva/asn1.js" "^1.0.0" - -"js-cookie@^2.2.1": - "integrity" 
"sha512-HvdH2LzI/EAZcUwA8+0nKNtWHqS+ZmijLA30RwZA0bo7ToCckjK5MkGhjED9KoRcXO6BaGI3I9UIzSA1FKFPOQ==" - "resolved" "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz" - "version" "2.2.1" - -"js-tokens@^3.0.0 || ^4.0.0", "js-tokens@^3.0.2": - "integrity" "sha1-mGbfOVECEw449/mWvOtlRDIJwls= sha512-RjTcuD4xjtthQkaWH7dFlH85L+QaVtSoOyGdZ3g6HFhS9dFNDfLyqgm2NFe2X6cQpeFmt0452FJjFG5UameExg==" - "resolved" "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz" - "version" "3.0.2" - -"js-tokens@^4.0.0": - "integrity" "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" - "resolved" "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" - "version" "4.0.0" - -"js-yaml@^3.13.1", "js-yaml@^3.8.1": - "integrity" "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==" - "resolved" "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" - "version" "3.14.1" - dependencies: - "argparse" "^1.0.7" - "esprima" "^4.0.0" - -"js-yaml@^4.0.0": - "integrity" "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==" - "resolved" "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" - "version" "4.1.0" - dependencies: - "argparse" "^2.0.1" - -"json-buffer@3.0.0": - "integrity" "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg= sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==" - "resolved" "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz" - "version" "3.0.0" - -"json-buffer@3.0.1": - "integrity" "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==" - "resolved" "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz" - "version" "3.0.1" - -"json-parse-even-better-errors@^2.3.0": - "integrity" "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==" - "resolved" "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz" - "version" "2.3.1" - -"jsonfile@^4.0.0": - "integrity" "sha1-h3Gq4HmbZAdrdmQPygWPnBDjPss= sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==" - "resolved" "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz" - "version" "4.0.0" - optionalDependencies: - "graceful-fs" "^4.1.6" - -"keyv@^3.0.0": - "integrity" "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==" - "resolved" "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "json-buffer" "3.0.0" - -"keyv@^4.0.0": - "integrity" "sha512-tGv1yP6snQVDSM4X6yxrv2zzq/EvpW+oYiUz6aueW1u9CtS8RzUQYxxmFwgZlO2jSgCxQbchhxaqXXp2hnKGpQ==" - "resolved" "https://registry.npmjs.org/keyv/-/keyv-4.1.1.tgz" - "version" "4.1.1" - dependencies: - "json-buffer" "3.0.1" - -"kind-of@^3.0.2": - "integrity" "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==" - "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" - "version" "3.2.2" - dependencies: - "is-buffer" "^1.1.5" - -"kind-of@^6.0.0", "kind-of@^6.0.2": - "integrity" "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==" - "resolved" "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz" - "version" "6.0.3" - -"kleur@^3.0.3": - "integrity" "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==" 
- "resolved" "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" - "version" "3.0.3" - -"kleur@^4.0.3": - "integrity" "sha512-8QADVssbrFjivHWQU7KkMgptGTl6WAcSdlbBPY4uNF+mWr6DGcKrvY2w4FQJoXch7+fKMjj0dRrL75vk3k23OA==" - "resolved" "https://registry.npmjs.org/kleur/-/kleur-4.1.4.tgz" - "version" "4.1.4" - -"latest-version@^3.0.0": - "integrity" "sha1-ogU4P+oyKzO1rjsYq+4NwvNW7hU= sha512-Be1YRHWWlZaSsrz2U+VInk+tO0EwLIyV+23RhWLINJYwg/UIikxjlj3MhH37/6/EDCAusjajvMkMMUXRaMWl/w==" - "resolved" "https://registry.npmjs.org/latest-version/-/latest-version-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "package-json" "^4.0.0" - -"latest-version@^5.0.0": - "integrity" "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==" - "resolved" "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz" - "version" "5.1.0" - dependencies: - "package-json" "^6.3.0" - -"lazy-cache@^2.0.2": - "integrity" "sha1-uRkKT5EzVGlIQIWfio9whNiCImQ= sha512-7vp2Acd2+Kz4XkzxGxaB1FWOi8KjWIWsgdfD5MCb86DWvlLqhRPM+d6Pro3iNEL5VT9mstz5hKAlcd+QR6H3aA==" - "resolved" "https://registry.npmjs.org/lazy-cache/-/lazy-cache-2.0.2.tgz" - "version" "2.0.2" - dependencies: - "set-getter" "^0.1.0" - -"libnpx@^10.2.4": - "integrity" "sha512-BPc0D1cOjBeS8VIBKUu5F80s6njm0wbVt7CsGMrIcJ+SI7pi7V0uVPGpEMH9H5L8csOcclTxAXFE2VAsJXUhfA==" - "resolved" "https://registry.npmjs.org/libnpx/-/libnpx-10.2.4.tgz" - "version" "10.2.4" - dependencies: - "dotenv" "^5.0.1" - "npm-package-arg" "^6.0.0" - "rimraf" "^2.6.2" - "safe-buffer" "^5.1.0" - "update-notifier" "^2.3.0" - "which" "^1.3.0" - "y18n" "^4.0.0" - "yargs" "^14.2.3" - -"lilconfig@^2.0.4": - "integrity" "sha512-bfTIN7lEsiooCocSISTWXkiWJkRqtL9wYtYy+8EK3Y41qh3mpwPU0ycTOgjdY9ErwXCc8QyrQp82bdL0Xkm9yA==" - "resolved" "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.4.tgz" - "version" "2.0.4" - -"lines-and-columns@^1.1.6": - "integrity" "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" - "resolved" "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" - "version" "1.2.4" - -"list-item@^1.1.1": - "integrity" "sha1-DGXQDih8tmPMs8s4Sad+iewmilY= sha512-S3D0WZ4J6hyM8o5SNKWaMYB1ALSacPZ2nHGEuCjmHZ+dc03gFeNZoNDcqfcnO4vDhTZmNrqrpYZCdXsRh22bzw==" - "resolved" "https://registry.npmjs.org/list-item/-/list-item-1.1.1.tgz" - "version" "1.1.1" - dependencies: - "expand-range" "^1.8.1" - "extend-shallow" "^2.0.1" - "is-number" "^2.1.0" - "repeat-string" "^1.5.2" - -"listify@^1.0.0": - "integrity" "sha512-083swF7iH7bx8666zdzBColpgEuy46HjN3r1isD4zV6Ix7FuHfb/2/WVnl4CH8hjuoWeFF7P5KkKNXUnJCFEJg==" - "resolved" "https://registry.npmjs.org/listify/-/listify-1.0.3.tgz" - "version" "1.0.3" - -"locate-path@^3.0.0": - "integrity" "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==" - "resolved" "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "p-locate" "^3.0.0" - "path-exists" "^3.0.0" - -"locate-path@^5.0.0": - "integrity" "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==" - "resolved" "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" - "version" "5.0.0" - dependencies: - "p-locate" "^4.1.0" - -"lodash._reinterpolate@^3.0.0": - "integrity" "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0= sha512-xYHt68QRoYGjeeM/XOE1uJtvXQAgvszfBhjV4yvsQH0u2i9I6cI6c6/eG4Hh3UAOVn0y/xAXwmTzEay49Q//HA==" - "resolved" 
"https://registry.npmjs.org/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz" - "version" "3.0.0" - -"lodash.castarray@^4.4.0": - "integrity" "sha1-wCUTUV4wna3dTCTGDP3c9ZdtkRU= sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q==" - "resolved" "https://registry.npmjs.org/lodash.castarray/-/lodash.castarray-4.4.0.tgz" - "version" "4.4.0" - -"lodash.isplainobject@^4.0.6": - "integrity" "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs= sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" - "resolved" "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz" - "version" "4.0.6" - -"lodash.merge@^4.6.2": - "integrity" "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" - "resolved" "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz" - "version" "4.6.2" - -"lodash.template@^4.4.0": - "integrity" "sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==" - "resolved" "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz" - "version" "4.5.0" - dependencies: - "lodash._reinterpolate" "^3.0.0" - "lodash.templatesettings" "^4.0.0" - -"lodash.templatesettings@^4.0.0": - "integrity" "sha512-stgLz+i3Aa9mZgnjr/O+v9ruKZsPsndy7qPZOchbqk2cnTU1ZaldKK+v7m54WoKIyxiuMZTKT2H81F8BeAc3ZQ==" - "resolved" "https://registry.npmjs.org/lodash.templatesettings/-/lodash.templatesettings-4.2.0.tgz" - "version" "4.2.0" - dependencies: - "lodash._reinterpolate" "^3.0.0" - -"lodash@^4.17.15", "lodash@^4.17.19": - "integrity" "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - "resolved" "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" - "version" "4.17.21" - -"longest-streak@^3.0.0": - "integrity" "sha512-cHlYSUpL2s7Fb3394mYxwTYj8niTaNHUCLr0qdiCXQfSjfuA7CKofpX2uSwEfFDQ0EB7JcnMnm+GjbqqoinYYg==" - "resolved" "https://registry.npmjs.org/longest-streak/-/longest-streak-3.0.1.tgz" - "version" "3.0.1" - -"longest@^2.0.1": - "integrity" "sha1-eB4YMpaqlPbU2RbcM10NF676I/g= sha512-Ajzxb8CM6WAnFjgiloPsI3bF+WCxcvhdIG3KNA2KN962+tdBsHcuQ4k4qX/EcS/2CRkcc0iAkR956Nib6aXU/Q==" - "resolved" "https://registry.npmjs.org/longest/-/longest-2.0.1.tgz" - "version" "2.0.1" - -"loose-envify@^1.1.0", "loose-envify@^1.4.0": - "integrity" "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==" - "resolved" "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" - "version" "1.4.0" - dependencies: - "js-tokens" "^3.0.0 || ^4.0.0" - -"lowercase-keys@^1.0.0": - "integrity" "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" - "resolved" "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz" - "version" "1.0.1" - -"lowercase-keys@^1.0.1": - "integrity" "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" - "resolved" "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz" - "version" "1.0.1" - -"lowercase-keys@^2.0.0": - "integrity" "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==" - "resolved" "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz" - "version" "2.0.0" - -"lowlight@^1.17.0": - "integrity" "sha512-8Ktj+prEb1RoCPkEOrPMYUN/nCggB7qAWe3a7OpMjWQkh3l2RD5wKRQ+o8Q8YuI9RG/xs95waaI/E6ym/7NsTw==" - "resolved" 
"https://registry.npmjs.org/lowlight/-/lowlight-1.20.0.tgz" - "version" "1.20.0" - dependencies: - "fault" "^1.0.0" - "highlight.js" "~10.7.0" - -"lru-cache@^4.0.1", "lru-cache@^4.1.5": - "integrity" "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==" - "resolved" "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz" - "version" "4.1.5" - dependencies: - "pseudomap" "^1.0.2" - "yallist" "^2.1.2" - -"lru-cache@^6.0.0": - "integrity" "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==" - "resolved" "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" - "version" "6.0.0" - dependencies: - "yallist" "^4.0.0" - -"make-dir@^1.0.0": - "integrity" "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==" - "resolved" "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz" - "version" "1.3.0" - dependencies: - "pify" "^3.0.0" - -"make-dir@^3.0.0": - "integrity" "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==" - "resolved" "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "semver" "^6.0.0" - -"make-error@^1.1.1", "make-error@^1.3.6": - "integrity" "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==" - "resolved" "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz" - "version" "1.3.6" - -"markdown-extensions@^1.0.0": - "integrity" "sha512-WWC0ZuMzCyDHYCasEGs4IPvLyTGftYwh6wIEOULOF0HXcqZlhwRzrK0w2VUlxWA98xnvb/jszw4ZSkJ6ADpM6Q==" - "resolved" "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-1.1.1.tgz" - "version" "1.1.1" - -"markdown-link@^0.1.1": - "integrity" "sha1-MsXGUZmmRXMWMi0eQinRNAfIx88= sha512-TurLymbyLyo+kAUUAV9ggR9EPcDjP/ctlv9QAFiqUH7c+t6FlsbivPo9OKTU8xdOx9oNd2drW/Fi5RRElQbUqA==" - "resolved" "https://registry.npmjs.org/markdown-link/-/markdown-link-0.1.1.tgz" - "version" "0.1.1" - -"markdown-table@^3.0.0": - "integrity" "sha512-y8j3a5/DkJCmS5x4dMCQL+OR0+2EAq3DOtio1COSHsmW2BGXnNCK3v12hJt1LrUz5iZH5g0LmuYOjDdI+czghA==" - "resolved" "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.2.tgz" - "version" "3.0.2" - -"markdown-toc@^1.2.0": - "integrity" "sha512-eOsq7EGd3asV0oBfmyqngeEIhrbkc7XVP63OwcJBIhH2EpG2PzFcbZdhy1jutXSlRBBVMNXHvMtSr5LAxSUvUg==" - "resolved" "https://registry.npmjs.org/markdown-toc/-/markdown-toc-1.2.0.tgz" - "version" "1.2.0" - dependencies: - "concat-stream" "^1.5.2" - "diacritics-map" "^0.1.0" - "gray-matter" "^2.1.0" - "lazy-cache" "^2.0.2" - "list-item" "^1.1.1" - "markdown-link" "^0.1.1" - "minimist" "^1.2.0" - "mixin-deep" "^1.1.3" - "object.pick" "^1.2.0" - "remarkable" "^1.7.1" - "repeat-string" "^1.6.1" - "strip-color" "^0.1.0" - -"math-random@^1.0.1": - "integrity" "sha512-rUxjysqif/BZQH2yhd5Aaq7vXMSx9NdEsQcyA07uEzIvxgI7zIr33gGsh+RU0/XjmQpCW7RsVof1vlkvQVCK5A==" - "resolved" "https://registry.npmjs.org/math-random/-/math-random-1.0.4.tgz" - "version" "1.0.4" - -"mdast-util-definitions@^5.0.0": - "integrity" "sha512-5hcR7FL2EuZ4q6lLMUK5w4lHT2H3vqL9quPvYZ/Ku5iifrirfMHiGdhxdXMUbUkDmz5I+TYMd7nbaxUhbQkfpQ==" - "resolved" "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.0.tgz" - "version" "5.1.0" - dependencies: - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" - "unist-util-visit" "^3.0.0" - -"mdast-util-find-and-replace@^2.0.0": - "integrity" 
"sha512-1w1jbqAd13oU78QPBf5223+xB+37ecNtQ1JElq2feWols5oEYAl+SgNDnOZipe7NfLemoEt362yUS15/wip4mw==" - "resolved" "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.1.0.tgz" - "version" "2.1.0" - dependencies: - "escape-string-regexp" "^5.0.0" - "unist-util-is" "^5.0.0" - "unist-util-visit-parents" "^4.0.0" - -"mdast-util-from-markdown@^1.0.0": - "integrity" "sha512-iZJyyvKD1+K7QX1b5jXdE7Sc5dtoTry1vzV28UZZe8Z1xVnB/czKntJ7ZAkG0tANqRnBF6p3p7GpU1y19DTf2Q==" - "resolved" "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.2.0.tgz" - "version" "1.2.0" - dependencies: - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" - "decode-named-character-reference" "^1.0.0" - "mdast-util-to-string" "^3.1.0" - "micromark" "^3.0.0" - "micromark-util-decode-numeric-character-reference" "^1.0.0" - "micromark-util-decode-string" "^1.0.0" - "micromark-util-normalize-identifier" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "unist-util-stringify-position" "^3.0.0" - "uvu" "^0.5.0" - -"mdast-util-gfm-autolink-literal@^1.0.0": - "integrity" "sha512-FzopkOd4xTTBeGXhXSBU0OCDDh5lUj2rd+HQqG92Ld+jL4lpUfgX2AT2OHAVP9aEeDKp7G92fuooSZcYJA3cRg==" - "resolved" "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "@types/mdast" "^3.0.0" - "ccount" "^2.0.0" - "mdast-util-find-and-replace" "^2.0.0" - "micromark-util-character" "^1.0.0" - -"mdast-util-gfm-footnote@^1.0.0": - "integrity" "sha512-p+PrYlkw9DeCRkTVw1duWqPRHX6Ywh2BNKJQcZbCwAuP/59B0Lk9kakuAd7KbQprVO4GzdW8eS5++A9PUSqIyw==" - "resolved" "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "@types/mdast" "^3.0.0" - "mdast-util-to-markdown" "^1.3.0" - "micromark-util-normalize-identifier" "^1.0.0" - -"mdast-util-gfm-strikethrough@^1.0.0": - "integrity" "sha512-zKJbEPe+JP6EUv0mZ0tQUyLQOC+FADt0bARldONot/nefuISkaZFlmVK4tU6JgfyZGrky02m/I6PmehgAgZgqg==" - "resolved" "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "@types/mdast" "^3.0.0" - "mdast-util-to-markdown" "^1.3.0" - -"mdast-util-gfm-table@^1.0.0": - "integrity" "sha512-aEuoPwZyP4iIMkf2cLWXxx3EQ6Bmh2yKy9MVCg4i6Sd3cX80dcLEfXO/V4ul3pGH9czBK4kp+FAl+ZHmSUt9/w==" - "resolved" "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.4.tgz" - "version" "1.0.4" - dependencies: - "markdown-table" "^3.0.0" - "mdast-util-from-markdown" "^1.0.0" - "mdast-util-to-markdown" "^1.3.0" - -"mdast-util-gfm-task-list-item@^1.0.0": - "integrity" "sha512-KZ4KLmPdABXOsfnM6JHUIjxEvcx2ulk656Z/4Balw071/5qgnhz+H1uGtf2zIGnrnvDC8xR4Fj9uKbjAFGNIeA==" - "resolved" "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "@types/mdast" "^3.0.0" - "mdast-util-to-markdown" "^1.3.0" - -"mdast-util-gfm@^2.0.0": - "integrity" "sha512-42yHBbfWIFisaAfV1eixlabbsa6q7vHeSPY+cg+BBjX51M8xhgMacqH9g6TftB/9+YkcI0ooV4ncfrJslzm/RQ==" - "resolved" "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "mdast-util-from-markdown" "^1.0.0" - "mdast-util-gfm-autolink-literal" "^1.0.0" - "mdast-util-gfm-footnote" "^1.0.0" - "mdast-util-gfm-strikethrough" "^1.0.0" - "mdast-util-gfm-table" "^1.0.0" - "mdast-util-gfm-task-list-item" "^1.0.0" - "mdast-util-to-markdown" "^1.0.0" - 
-"mdast-util-mdx-expression@^1.0.0": - "integrity" "sha512-wb36oi09XxqO9RVqgfD+xo8a7xaNgS+01+k3v0GKW0X0bYbeBmUZz22Z/IJ8SuphVlG+DNgNo9VoEaUJ3PKfJQ==" - "resolved" "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-1.2.0.tgz" - "version" "1.2.0" - dependencies: - "@types/estree-jsx" "^0.0.1" - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - "mdast-util-from-markdown" "^1.0.0" - "mdast-util-to-markdown" "^1.0.0" - -"mdast-util-mdx-jsx@^2.0.0": - "integrity" "sha512-oPC7/smPBf7vxnvIYH5y3fPo2lw1rdrswFfSb4i0GTAXRUQv7JUU/t/hbp07dgGdUFTSDOHm5DNamhNg/s2Hrg==" - "resolved" "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "@types/estree-jsx" "^0.0.1" - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - "ccount" "^2.0.0" - "mdast-util-to-markdown" "^1.3.0" - "parse-entities" "^4.0.0" - "stringify-entities" "^4.0.0" - "unist-util-remove-position" "^4.0.0" - "unist-util-stringify-position" "^3.0.0" - "vfile-message" "^3.0.0" - -"mdast-util-mdx@^2.0.0": - "integrity" "sha512-M09lW0CcBT1VrJUaF/PYxemxxHa7SLDHdSn94Q9FhxjCQfuW7nMAWKWimTmA3OyDMSTH981NN1csW1X+HPSluw==" - "resolved" "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "mdast-util-mdx-expression" "^1.0.0" - "mdast-util-mdx-jsx" "^2.0.0" - "mdast-util-mdxjs-esm" "^1.0.0" - -"mdast-util-mdxjs-esm@^1.0.0": - "integrity" "sha512-IPpX9GBzAIbIRCjbyeLDpMhACFb0wxTIujuR3YElB8LWbducUdMgRJuqs/Vg8xQ1bIAMm7lw8L+YNtua0xKXRw==" - "resolved" "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-1.2.0.tgz" - "version" "1.2.0" - dependencies: - "@types/estree-jsx" "^0.0.1" - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - "mdast-util-from-markdown" "^1.0.0" - "mdast-util-to-markdown" "^1.0.0" - -"mdast-util-to-hast@^12.1.0": - "integrity" "sha512-qE09zD6ylVP14jV4mjLIhDBOrpFdShHZcEsYvvKGABlr9mGbV7mTlRWdoFxL/EYSTNDiC9GZXy7y8Shgb9Dtzw==" - "resolved" "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.1.1.tgz" - "version" "12.1.1" - dependencies: - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - "@types/mdurl" "^1.0.0" - "mdast-util-definitions" "^5.0.0" - "mdurl" "^1.0.0" - "micromark-util-sanitize-uri" "^1.0.0" - "unist-builder" "^3.0.0" - "unist-util-generated" "^2.0.0" - "unist-util-position" "^4.0.0" - "unist-util-visit" "^4.0.0" - -"mdast-util-to-markdown@^1.0.0", "mdast-util-to-markdown@^1.3.0": - "integrity" "sha512-6tUSs4r+KK4JGTTiQ7FfHmVOaDrLQJPmpjD6wPMlHGUVXoG9Vjc3jIeP+uyBWRf8clwB2blM+W7+KrlMYQnftA==" - "resolved" "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.3.0.tgz" - "version" "1.3.0" - dependencies: - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" - "longest-streak" "^3.0.0" - "mdast-util-to-string" "^3.0.0" - "micromark-util-decode-string" "^1.0.0" - "unist-util-visit" "^4.0.0" - "zwitch" "^2.0.0" - -"mdast-util-to-string@^3.0.0", "mdast-util-to-string@^3.1.0": - "integrity" "sha512-n4Vypz/DZgwo0iMHLQL49dJzlp7YtAJP+N07MZHpjPf/5XJuHUWstviF4Mn2jEiR/GNmtnRRqnwsXExk3igfFA==" - "resolved" "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.1.0.tgz" - "version" "3.1.0" - -"mdn-data@2.0.14": - "integrity" "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" - "resolved" "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz" - "version" "2.0.14" - -"mdurl@^1.0.0": - "integrity" "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4= 
sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" - "resolved" "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz" - "version" "1.0.1" - -"merge2@^1.3.0": - "integrity" "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" - "resolved" "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" - "version" "1.4.1" - -"micromark-core-commonmark@^1.0.0", "micromark-core-commonmark@^1.0.1": - "integrity" "sha512-K+PkJTxqjFfSNkfAhp4GB+cZPfQd6dxtTXnf+RjZOV7T4EEXnvgzOcnp+eSTmpGk9d1S9sL6/lqrgSNn/s0HZA==" - "resolved" "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.0.6.tgz" - "version" "1.0.6" - dependencies: - "decode-named-character-reference" "^1.0.0" - "micromark-factory-destination" "^1.0.0" - "micromark-factory-label" "^1.0.0" - "micromark-factory-space" "^1.0.0" - "micromark-factory-title" "^1.0.0" - "micromark-factory-whitespace" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-chunked" "^1.0.0" - "micromark-util-classify-character" "^1.0.0" - "micromark-util-html-tag-name" "^1.0.0" - "micromark-util-normalize-identifier" "^1.0.0" - "micromark-util-resolve-all" "^1.0.0" - "micromark-util-subtokenize" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.1" - "uvu" "^0.5.0" - -"micromark-extension-gfm-autolink-literal@^1.0.0": - "integrity" "sha512-i3dmvU0htawfWED8aHMMAzAVp/F0Z+0bPh3YrbTPPL1v4YAlCZpy5rBO5p0LPYiZo0zFVkoYh7vDU7yQSiCMjg==" - "resolved" "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.3.tgz" - "version" "1.0.3" - dependencies: - "micromark-util-character" "^1.0.0" - "micromark-util-sanitize-uri" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-extension-gfm-footnote@^1.0.0": - "integrity" "sha512-E/fmPmDqLiMUP8mLJ8NbJWJ4bTw6tS+FEQS8CcuDtZpILuOb2kjLqPEeAePF1djXROHXChM/wPJw0iS4kHCcIg==" - "resolved" "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.0.4.tgz" - "version" "1.0.4" - dependencies: - "micromark-core-commonmark" "^1.0.0" - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-normalize-identifier" "^1.0.0" - "micromark-util-sanitize-uri" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-extension-gfm-strikethrough@^1.0.0": - "integrity" "sha512-/vjHU/lalmjZCT5xt7CcHVJGq8sYRm80z24qAKXzaHzem/xsDYb2yLL+NNVbYvmpLx3O7SYPuGL5pzusL9CLIQ==" - "resolved" "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.4.tgz" - "version" "1.0.4" - dependencies: - "micromark-util-chunked" "^1.0.0" - "micromark-util-classify-character" "^1.0.0" - "micromark-util-resolve-all" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-extension-gfm-table@^1.0.0": - "integrity" "sha512-xAZ8J1X9W9K3JTJTUL7G6wSKhp2ZYHrFk5qJgY/4B33scJzE2kpfRL6oiw/veJTbt7jiM/1rngLlOKPWr1G+vg==" - "resolved" "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.5.tgz" - "version" "1.0.5" - dependencies: - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-extension-gfm-tagfilter@^1.0.0": - "integrity" 
"sha512-Ty6psLAcAjboRa/UKUbbUcwjVAv5plxmpUTy2XC/3nJFL37eHej8jrHrRzkqcpipJliuBH30DTs7+3wqNcQUVA==" - "resolved" "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "micromark-util-types" "^1.0.0" - -"micromark-extension-gfm-task-list-item@^1.0.0": - "integrity" "sha512-PpysK2S1Q/5VXi72IIapbi/jliaiOFzv7THH4amwXeYXLq3l1uo8/2Be0Ac1rEwK20MQEsGH2ltAZLNY2KI/0Q==" - "resolved" "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.3.tgz" - "version" "1.0.3" - dependencies: - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-extension-gfm@^2.0.0": - "integrity" "sha512-p2sGjajLa0iYiGQdT0oelahRYtMWvLjy8J9LOCxzIQsllMCGLbsLW+Nc+N4vi02jcRJvedVJ68cjelKIO6bpDA==" - "resolved" "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "micromark-extension-gfm-autolink-literal" "^1.0.0" - "micromark-extension-gfm-footnote" "^1.0.0" - "micromark-extension-gfm-strikethrough" "^1.0.0" - "micromark-extension-gfm-table" "^1.0.0" - "micromark-extension-gfm-tagfilter" "^1.0.0" - "micromark-extension-gfm-task-list-item" "^1.0.0" - "micromark-util-combine-extensions" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-extension-mdx-expression@^1.0.0": - "integrity" "sha512-TjYtjEMszWze51NJCZmhv7MEBcgYRgb3tJeMAJ+HQCAaZHHRBaDCccqQzGizR/H4ODefP44wRTgOn2vE5I6nZA==" - "resolved" "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-1.0.3.tgz" - "version" "1.0.3" - dependencies: - "micromark-factory-mdx-expression" "^1.0.0" - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-events-to-acorn" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-extension-mdx-jsx@^1.0.0": - "integrity" "sha512-VfA369RdqUISF0qGgv2FfV7gGjHDfn9+Qfiv5hEwpyr1xscRj/CiVRkU7rywGFCO7JwJ5L0e7CJz60lY52+qOA==" - "resolved" "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-1.0.3.tgz" - "version" "1.0.3" - dependencies: - "@types/acorn" "^4.0.0" - "estree-util-is-identifier-name" "^2.0.0" - "micromark-factory-mdx-expression" "^1.0.0" - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - "vfile-message" "^3.0.0" - -"micromark-extension-mdx-md@^1.0.0": - "integrity" "sha512-xaRAMoSkKdqZXDAoSgp20Azm0aRQKGOl0RrS81yGu8Hr/JhMsBmfs4wR7m9kgVUIO36cMUQjNyiyDKPrsv8gOw==" - "resolved" "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-types" "^1.0.0" - -"micromark-extension-mdxjs-esm@^1.0.0": - "integrity" "sha512-bIaxblNIM+CCaJvp3L/V+168l79iuNmxEiTU6i3vB0YuDW+rumV64BFMxvhfRDxaJxQE1zD5vTPdyLBbW4efGA==" - "resolved" "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "micromark-core-commonmark" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-events-to-acorn" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "unist-util-position-from-estree" "^1.1.0" - "uvu" "^0.5.0" - "vfile-message" "^3.0.0" - 
-"micromark-extension-mdxjs@^1.0.0": - "integrity" "sha512-TZZRZgeHvtgm+IhtgC2+uDMR7h8eTKF0QUX9YsgoL9+bADBpBY6SiLvWqnBlLbCEevITmTqmEuY3FoxMKVs1rQ==" - "resolved" "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "acorn" "^8.0.0" - "acorn-jsx" "^5.0.0" - "micromark-extension-mdx-expression" "^1.0.0" - "micromark-extension-mdx-jsx" "^1.0.0" - "micromark-extension-mdx-md" "^1.0.0" - "micromark-extension-mdxjs-esm" "^1.0.0" - "micromark-util-combine-extensions" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-factory-destination@^1.0.0": - "integrity" "sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw==" - "resolved" "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-factory-label@^1.0.0": - "integrity" "sha512-CTIwxlOnU7dEshXDQ+dsr2n+yxpP0+fn271pu0bwDIS8uqfFcumXpj5mLn3hSC8iw2MUr6Gx8EcKng1dD7i6hg==" - "resolved" "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-factory-mdx-expression@^1.0.0": - "integrity" "sha512-WRQIc78FV7KrCfjsEf/sETopbYjElh3xAmNpLkd1ODPqxEngP42eVRGbiPEQWpRV27LzqW+XVTvQAMIIRLPnNA==" - "resolved" "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-1.0.6.tgz" - "version" "1.0.6" - dependencies: - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-events-to-acorn" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "unist-util-position-from-estree" "^1.0.0" - "uvu" "^0.5.0" - "vfile-message" "^3.0.0" - -"micromark-factory-space@^1.0.0": - "integrity" "sha512-qUmqs4kj9a5yBnk3JMLyjtWYN6Mzfcx8uJfi5XAveBniDevmZasdGBba5b4QsvRcAkmvGo5ACmSUmyGiKTLZew==" - "resolved" "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-character" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-factory-title@^1.0.0": - "integrity" "sha512-zily+Nr4yFqgMGRKLpTVsNl5L4PMu485fGFDOQJQBl2NFpjGte1e86zC0da93wf97jrc4+2G2GQudFMHn3IX+A==" - "resolved" "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-factory-whitespace@^1.0.0": - "integrity" "sha512-Qx7uEyahU1lt1RnsECBiuEbfr9INjQTGa6Err+gF3g0Tx4YEviPbqqGKNv/NrBaE7dVHdn1bVZKM/n5I/Bak7A==" - "resolved" "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-util-character@^1.0.0": - "integrity" "sha512-agJ5B3unGNJ9rJvADMJ5ZiYjBRyDpzKAOk01Kpi1TKhlT1APx3XZk6eN7RtSz1erbWHC2L8T3xLZ81wdtGRZzg==" - "resolved" "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.1.0.tgz" - "version" "1.1.0" - dependencies: - 
"micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-util-chunked@^1.0.0": - "integrity" "sha512-5e8xTis5tEZKgesfbQMKRCyzvffRRUX+lK/y+DvsMFdabAicPkkZV6gO+FEWi9RfuKKoxxPwNL+dFF0SMImc1g==" - "resolved" "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-symbol" "^1.0.0" - -"micromark-util-classify-character@^1.0.0": - "integrity" "sha512-F8oW2KKrQRb3vS5ud5HIqBVkCqQi224Nm55o5wYLzY/9PwHGXC01tr3d7+TqHHz6zrKQ72Okwtvm/xQm6OVNZA==" - "resolved" "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-character" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-util-combine-extensions@^1.0.0": - "integrity" "sha512-J8H058vFBdo/6+AsjHp2NF7AJ02SZtWaVUjsayNFeAiydTxUwViQPxN0Hf8dp4FmCQi0UUFovFsEyRSUmFH3MA==" - "resolved" "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-chunked" "^1.0.0" - "micromark-util-types" "^1.0.0" - -"micromark-util-decode-numeric-character-reference@^1.0.0": - "integrity" "sha512-OzO9AI5VUtrTD7KSdagf4MWgHMtET17Ua1fIpXTpuhclCqD8egFWo85GxSGvxgkGS74bEahvtM0WP0HjvV0e4w==" - "resolved" "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-symbol" "^1.0.0" - -"micromark-util-decode-string@^1.0.0": - "integrity" "sha512-DLT5Ho02qr6QWVNYbRZ3RYOSSWWFuH3tJexd3dgN1odEuPNxCngTCXJum7+ViRAd9BbdxCvMToPOD/IvVhzG6Q==" - "resolved" "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "decode-named-character-reference" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-decode-numeric-character-reference" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - -"micromark-util-encode@^1.0.0": - "integrity" "sha512-U2s5YdnAYexjKDel31SVMPbfi+eF8y1U4pfiRW/Y8EFVCy/vgxk/2wWTxzcqE71LHtCuCzlBDRU2a5CQ5j+mQA==" - "resolved" "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.0.1.tgz" - "version" "1.0.1" - -"micromark-util-events-to-acorn@^1.0.0": - "integrity" "sha512-dpo8ecREK5s/KMph7jJ46RLM6g7N21CMc9LAJQbDLdbQnTpijigkSJPTIfLXZ+h5wdXlcsQ+b6ufAE9v76AdgA==" - "resolved" "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-1.0.4.tgz" - "version" "1.0.4" - dependencies: - "@types/acorn" "^4.0.0" - "@types/estree" "^0.0.50" - "estree-util-visit" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - "vfile-message" "^3.0.0" - -"micromark-util-html-tag-name@^1.0.0": - "integrity" "sha512-NenEKIshW2ZI/ERv9HtFNsrn3llSPZtY337LID/24WeLqMzeZhBEE6BQ0vS2ZBjshm5n40chKtJ3qjAbVV8S0g==" - "resolved" "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.0.0.tgz" - "version" "1.0.0" - -"micromark-util-normalize-identifier@^1.0.0": - "integrity" "sha512-yg+zrL14bBTFrQ7n35CmByWUTFsgst5JhA4gJYoty4Dqzj4Z4Fr/DHekSS5aLfH9bdlfnSvKAWsAgJhIbogyBg==" - "resolved" "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-symbol" "^1.0.0" - -"micromark-util-resolve-all@^1.0.0": - "integrity" 
"sha512-CB/AGk98u50k42kvgaMM94wzBqozSzDDaonKU7P7jwQIuH2RU0TeBqGYJz2WY1UdihhjweivStrJ2JdkdEmcfw==" - "resolved" "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-types" "^1.0.0" - -"micromark-util-sanitize-uri@^1.0.0": - "integrity" "sha512-cCxvBKlmac4rxCGx6ejlIviRaMKZc0fWm5HdCHEeDWRSkn44l6NdYVRyU+0nT1XC72EQJMZV8IPHF+jTr56lAg==" - "resolved" "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "micromark-util-character" "^1.0.0" - "micromark-util-encode" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - -"micromark-util-subtokenize@^1.0.0": - "integrity" "sha512-d90uqCnXp/cy4G881Ub4psE57Sf8YD0pim9QdjCRNjfas2M1u6Lbt+XZK9gnHL2XFhnozZiEdCa9CNfXSfQ6xA==" - "resolved" "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "micromark-util-chunked" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.0" - "uvu" "^0.5.0" - -"micromark-util-symbol@^1.0.0": - "integrity" "sha512-oKDEMK2u5qqAptasDAwWDXq0tG9AssVwAx3E9bBF3t/shRIGsWIRG+cGafs2p/SnDSOecnt6hZPCE2o6lHfFmQ==" - "resolved" "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.0.1.tgz" - "version" "1.0.1" - -"micromark-util-types@^1.0.0", "micromark-util-types@^1.0.1": - "integrity" "sha512-DCfg/T8fcrhrRKTPjRrw/5LLvdGV7BHySf/1LOZx7TzWZdYRjogNtyNq885z3nNallwr3QUKARjqvHqX1/7t+w==" - "resolved" "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.0.2.tgz" - "version" "1.0.2" - -"micromark@^3.0.0": - "integrity" "sha512-ryTDy6UUunOXy2HPjelppgJ2sNfcPz1pLlMdA6Rz9jPzhLikWXv/irpWV/I2jd68Uhmny7hHxAlAhk4+vWggpg==" - "resolved" "https://registry.npmjs.org/micromark/-/micromark-3.0.10.tgz" - "version" "3.0.10" - dependencies: - "@types/debug" "^4.0.0" - "debug" "^4.0.0" - "decode-named-character-reference" "^1.0.0" - "micromark-core-commonmark" "^1.0.1" - "micromark-factory-space" "^1.0.0" - "micromark-util-character" "^1.0.0" - "micromark-util-chunked" "^1.0.0" - "micromark-util-combine-extensions" "^1.0.0" - "micromark-util-decode-numeric-character-reference" "^1.0.0" - "micromark-util-encode" "^1.0.0" - "micromark-util-normalize-identifier" "^1.0.0" - "micromark-util-resolve-all" "^1.0.0" - "micromark-util-sanitize-uri" "^1.0.0" - "micromark-util-subtokenize" "^1.0.0" - "micromark-util-symbol" "^1.0.0" - "micromark-util-types" "^1.0.1" - "uvu" "^0.5.0" - -"micromatch@^4.0.4": - "integrity" "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==" - "resolved" "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz" - "version" "4.0.4" - dependencies: - "braces" "^3.0.1" - "picomatch" "^2.2.3" - -"middleearth-names@^1.1.0": - "integrity" "sha1-wdXuSN77NoEo+66/686IR80Y3f8= sha512-Oo1mbq9odpn6KHsDs8/UA5xFfX/gcrY+jWZpvd5MDaX0tjkxA7S7NTREQuqD7DWfluDgygjhKvETMWbwd3A9sA==" - "resolved" "https://registry.npmjs.org/middleearth-names/-/middleearth-names-1.1.0.tgz" - "version" "1.1.0" - dependencies: - "unique-random-array" "1.0.0" - -"mime-db@1.52.0": - "integrity" "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" - "resolved" "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" - "version" "1.52.0" - -"mime-types@^2.1.12": - "integrity" "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==" - "resolved" 
"https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" - "version" "2.1.35" - dependencies: - "mime-db" "1.52.0" - -"mimic-fn@^2.1.0": - "integrity" "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==" - "resolved" "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" - "version" "2.1.0" - -"mimic-response@^1.0.0", "mimic-response@^1.0.1": - "integrity" "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==" - "resolved" "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz" - "version" "1.0.1" - -"mimic-response@^3.1.0": - "integrity" "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==" - "resolved" "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz" - "version" "3.1.0" - -"mini-svg-data-uri@^1.2.3": - "integrity" "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==" - "resolved" "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz" - "version" "1.4.4" - -"minimatch@^3.0.4": - "integrity" "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==" - "resolved" "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" - "version" "3.1.2" - dependencies: - "brace-expansion" "^1.1.7" - -"minimist@^1.1.1", "minimist@^1.2.0", "minimist@^1.2.5": - "integrity" "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" - "resolved" "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz" - "version" "1.2.6" - -"mixin-deep@^1.1.3": - "integrity" "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==" - "resolved" "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz" - "version" "1.3.2" - dependencies: - "for-in" "^1.0.2" - "is-extendable" "^1.0.1" - -"mixpanel-browser@^2.45.0": - "integrity" "sha512-PQ1DaTk68yyYtLA0iejmzPA9iNDhT4uIZpqZjRTw7HWpYfl123fydHb2laKanaKjm8YDmrGGz3+xZ4Q6joogyg==" - "resolved" "https://registry.npmjs.org/mixpanel-browser/-/mixpanel-browser-2.45.0.tgz" - "version" "2.45.0" - -"mri@^1.1.0": - "integrity" "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==" - "resolved" "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz" - "version" "1.2.0" - -"mrm-core@^6.1.7": - "integrity" "sha512-jLGWrkupcgGIsLerrI/xmM/dFHbaoehRsuVbgYBrxYKXNMRBHN3Mgkd8cw+/ZCCoiZEXF8/SaZol0GCp6oBQ9g==" - "resolved" "https://registry.npmjs.org/mrm-core/-/mrm-core-6.1.7.tgz" - "version" "6.1.7" - dependencies: - "babel-code-frame" "^6.26.0" - "comment-json" "^2.2.0" - "detect-indent" "^6.0.0" - "editorconfig" "^0.15.3" - "find-up" "^4.1.0" - "fs-extra" "^8.1.0" - "kleur" "^3.0.3" - "listify" "^1.0.0" - "lodash" "^4.17.15" - "minimist" "^1.2.0" - "prop-ini" "^0.0.2" - "rc" "^1.2.8" - "readme-badger" "^0.3.0" - "semver" "^6.3.0" - "smpltmpl" "^1.0.2" - "split-lines" "^2.0.0" - "strip-bom" "^4.0.0" - "validate-npm-package-name" "^3.0.0" - "webpack-merge" "^4.2.2" - "yaml" "^2.0.0-1" - -"mrm@^3.0.9": - "integrity" "sha512-aRByZsPXMM8W0NHNH9afkKyk5OW4bB5pYNRIN+8iSVfpMAzqeMejmj/yIYcdFNJTksXmdPMfTaucm2NYdh4xIw==" - "resolved" "https://registry.npmjs.org/mrm/-/mrm-3.0.10.tgz" - "version" "3.0.10" - dependencies: - "git-username" "^1.0.0" - "glob" "^7.1.6" - "inquirer" "^7.0.4" - "is-directory" "^0.3.1" - "kleur" "^3.0.3" - "libnpx" "^10.2.4" - "listify" "^1.0.0" - "lodash" "^4.17.15" - "longest" 
"^2.0.1" - "middleearth-names" "^1.1.0" - "minimist" "^1.2.0" - "mrm-core" "^6.1.7" - "semver-utils" "^1.1.4" - "update-notifier" "^4.1.0" - "user-home" "^2.0.0" - "user-meta" "^1.0.0" - "which" "^2.0.2" - -"ms@2.1.2": - "integrity" "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" - "resolved" "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" - "version" "2.1.2" - -"mute-stream@0.0.8": - "integrity" "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==" - "resolved" "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz" - "version" "0.0.8" - -"nano-css@^5.3.1": - "integrity" "sha512-wfcviJB6NOxDIDfr7RFn/GlaN7I/Bhe4d39ZRCJ3xvZX60LVe2qZ+rDqM49nm4YT81gAjzS+ZklhKP/Gnfnubg==" - "resolved" "https://registry.npmjs.org/nano-css/-/nano-css-5.3.4.tgz" - "version" "5.3.4" - dependencies: - "css-tree" "^1.1.2" - "csstype" "^3.0.6" - "fastest-stable-stringify" "^2.0.2" - "inline-style-prefixer" "^6.0.0" - "rtl-css-js" "^1.14.0" - "sourcemap-codec" "^1.4.8" - "stacktrace-js" "^2.0.2" - "stylis" "^4.0.6" - -"nanoid@^3.1.30", "nanoid@^3.3.1": - "integrity" "sha512-n6Vs/3KGyxPQd6uO0eH4Bv0ojGSUvuLlIHtC3Y0kEO23YRge8H9x1GCzLn28YX0H66pMkxuaeESFq4tKISKwdw==" - "resolved" "https://registry.npmjs.org/nanoid/-/nanoid-3.3.1.tgz" - "version" "3.3.1" - -"next-mdx-remote@^4.0.2": - "integrity" "sha512-1cZM2xm+G1FyYodGt92lCXisP0owPeppVHeH5TIaXUGdt6ENBZYOxLNFaVl9fkS9wP/s2sLcC9m2c1iLH2H4rA==" - "resolved" "https://registry.npmjs.org/next-mdx-remote/-/next-mdx-remote-4.0.2.tgz" - "version" "4.0.2" - dependencies: - "@mdx-js/mdx" "^2.0.0" - "@mdx-js/react" "^2.0.0" - "vfile" "^5.3.0" - "vfile-matter" "^3.0.1" - -"next-seo@^5.1.0": - "integrity" "sha512-ampuQfNTOi1x+xtRIb6CZGunIo6rQXtMo2Tyu861d5GjJFIwfOXsA4lzCa4+e2rLkyXDyVpavNNUZWa3US9ELw==" - "resolved" "https://registry.npmjs.org/next-seo/-/next-seo-5.1.0.tgz" - "version" "5.1.0" - -"next-sitemap@^2.5.10": - "integrity" "sha512-9jFvViLL2US0obp6NAGlEpE4gWU5NACupHvVFtLhOhgJ33M/vBPqEgCiZQsG9dKDhZXchjrjQrtDvhk/KZBekg==" - "resolved" "https://registry.npmjs.org/next-sitemap/-/next-sitemap-2.5.10.tgz" - "version" "2.5.10" - dependencies: - "@corex/deepmerge" "^2.6.148" - "minimist" "^1.2.5" - -"next@*", "next@^12.1.0", "next@^8.1.1-canary.54 || >=9.0.0", "next@>=10": - "integrity" "sha512-s885kWvnIlxsUFHq9UGyIyLiuD0G3BUC/xrH0CEnH5lHEWkwQcHOORgbDF0hbrW9vr/7am4ETfX4A7M6DjrE7Q==" - "resolved" "https://registry.npmjs.org/next/-/next-12.1.0.tgz" - "version" "12.1.0" - dependencies: - "@next/env" "12.1.0" - "caniuse-lite" "^1.0.30001283" - "postcss" "8.4.5" - "styled-jsx" "5.0.0" - "use-subscription" "1.5.1" - optionalDependencies: - "@next/swc-android-arm64" "12.1.0" - "@next/swc-darwin-arm64" "12.1.0" - "@next/swc-darwin-x64" "12.1.0" - "@next/swc-linux-arm-gnueabihf" "12.1.0" - "@next/swc-linux-arm64-gnu" "12.1.0" - "@next/swc-linux-arm64-musl" "12.1.0" - "@next/swc-linux-x64-gnu" "12.1.0" - "@next/swc-linux-x64-musl" "12.1.0" - "@next/swc-win32-arm64-msvc" "12.1.0" - "@next/swc-win32-ia32-msvc" "12.1.0" - "@next/swc-win32-x64-msvc" "12.1.0" - -"node-domexception@^1.0.0": - "integrity" "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==" - "resolved" "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz" - "version" "1.0.0" - -"node-fetch@^3.2.3": - "integrity" "sha512-AXP18u4pidSZ1xYXRDPY/8jdv3RAozIt/WLNR/MBGZAz+xjtlr90RvCnsvHQRiXyWliZF/CpytExp32UU67/SA==" - "resolved" 
"https://registry.npmjs.org/node-fetch/-/node-fetch-3.2.3.tgz" - "version" "3.2.3" - dependencies: - "data-uri-to-buffer" "^4.0.0" - "fetch-blob" "^3.1.4" - "formdata-polyfill" "^4.0.10" - -"node-releases@^2.0.2": - "integrity" "sha512-XxYDdcQ6eKqp/YjI+tb2C5WM2LgjnZrfYg4vgQt49EK268b6gYCHsBLrK2qvJo4FmCtqmKezb0WZFK4fkrZNsg==" - "resolved" "https://registry.npmjs.org/node-releases/-/node-releases-2.0.2.tgz" - "version" "2.0.2" - -"normalize-path@^3.0.0", "normalize-path@~3.0.0": - "integrity" "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" - "resolved" "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" - "version" "3.0.0" - -"normalize-range@^0.1.2": - "integrity" "sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==" - "resolved" "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" - "version" "0.1.2" - -"normalize-url@^4.1.0": - "integrity" "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==" - "resolved" "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz" - "version" "4.5.1" - -"normalize-url@^6.0.1": - "integrity" "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==" - "resolved" "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz" - "version" "6.1.0" - -"npm-package-arg@^6.0.0": - "integrity" "sha512-qBpssaL3IOZWi5vEKUKW0cO7kzLeT+EQO9W8RsLOZf76KF9E/K9+wH0C7t06HXPpaH8WH5xF1MExLuCwbTqRUg==" - "resolved" "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-6.1.1.tgz" - "version" "6.1.1" - dependencies: - "hosted-git-info" "^2.7.1" - "osenv" "^0.1.5" - "semver" "^5.6.0" - "validate-npm-package-name" "^3.0.0" - -"npm-run-path@^2.0.0": - "integrity" "sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==" - "resolved" "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz" - "version" "2.0.2" - dependencies: - "path-key" "^2.0.0" - -"object-assign@^4.1.1": - "integrity" "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" - "resolved" "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" - "version" "4.1.1" - -"object-hash@^2.0.1", "object-hash@^2.2.0": - "integrity" "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw==" - "resolved" "https://registry.npmjs.org/object-hash/-/object-hash-2.2.0.tgz" - "version" "2.2.0" - -"object.pick@^1.2.0": - "integrity" "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= sha512-tqa/UMy/CCoYmj+H5qc07qvSL9dqcs/WZENZ1JbtWBlATP+iVOe778gE6MSijnyCnORzDuX6hU+LA4SZ09YjFQ==" - "resolved" "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz" - "version" "1.3.0" - dependencies: - "isobject" "^3.0.1" - -"oidc-token-hash@^5.0.1": - "integrity" "sha512-EvoOtz6FIEBzE+9q253HsLCVRiK/0doEJ2HCvvqMQb3dHZrP3WlJKYtJ55CRTw4jmYomzH4wkPuCj/I3ZvpKxQ==" - "resolved" "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.0.1.tgz" - "version" "5.0.1" - -"on-headers@^1.0.2": - "integrity" "sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==" - "resolved" "https://registry.npmjs.org/on-headers/-/on-headers-1.0.2.tgz" - "version" "1.0.2" - -"once@^1.3.0", "once@^1.3.1", "once@^1.4.0": - "integrity" "sha1-WDsap3WWHUsROsF9nFC6753Xa9E= 
sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==" - "resolved" "https://registry.npmjs.org/once/-/once-1.4.0.tgz" - "version" "1.4.0" - dependencies: - "wrappy" "1" - -"onetime@^5.1.0": - "integrity" "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==" - "resolved" "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz" - "version" "5.1.2" - dependencies: - "mimic-fn" "^2.1.0" - -"openid-client@^4.9.1": - "integrity" "sha512-DYUF07AHjI3QDKqKbn2F7RqozT4hyi4JvmpodLrq0HHoNP7t/AjeG/uqiBK1/N2PZSAQEThVjDLHSmJN4iqu/w==" - "resolved" "https://registry.npmjs.org/openid-client/-/openid-client-4.9.1.tgz" - "version" "4.9.1" - dependencies: - "aggregate-error" "^3.1.0" - "got" "^11.8.0" - "jose" "^2.0.5" - "lru-cache" "^6.0.0" - "make-error" "^1.3.6" - "object-hash" "^2.0.1" - "oidc-token-hash" "^5.0.1" - -"os-homedir@^1.0.0": - "integrity" "sha1-/7xJiDNuDoM94MFox+8VISGqf7M= sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ==" - "resolved" "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz" - "version" "1.0.2" - -"os-tmpdir@^1.0.0", "os-tmpdir@~1.0.2": - "integrity" "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==" - "resolved" "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz" - "version" "1.0.2" - -"osenv@^0.1.5": - "integrity" "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==" - "resolved" "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz" - "version" "0.1.5" - dependencies: - "os-homedir" "^1.0.0" - "os-tmpdir" "^1.0.0" - -"p-cancelable@^1.0.0": - "integrity" "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==" - "resolved" "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz" - "version" "1.1.0" - -"p-cancelable@^2.0.0": - "integrity" "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==" - "resolved" "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz" - "version" "2.1.1" - -"p-finally@^1.0.0": - "integrity" "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==" - "resolved" "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz" - "version" "1.0.0" - -"p-limit@^2.0.0", "p-limit@^2.2.0": - "integrity" "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==" - "resolved" "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" - "version" "2.3.0" - dependencies: - "p-try" "^2.0.0" - -"p-locate@^3.0.0": - "integrity" "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==" - "resolved" "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "p-limit" "^2.0.0" - -"p-locate@^4.1.0": - "integrity" "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==" - "resolved" "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" - "version" "4.1.0" - dependencies: - "p-limit" "^2.2.0" - -"p-try@^2.0.0": - "integrity" "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==" - "resolved" "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" - "version" "2.2.0" - -"package-json@^4.0.0": - "integrity" 
"sha1-iGmgQBJTZhxMTKPabCEh7VVfXu0= sha512-q/R5GrMek0vzgoomq6rm9OX+3PQve8sLwTirmK30YB3Cu0Bbt9OX9M/SIUnroN5BGJkzwGsFwDaRGD9EwBOlCA==" - "resolved" "https://registry.npmjs.org/package-json/-/package-json-4.0.1.tgz" - "version" "4.0.1" - dependencies: - "got" "^6.7.1" - "registry-auth-token" "^3.0.1" - "registry-url" "^3.0.3" - "semver" "^5.1.0" - -"package-json@^6.3.0": - "integrity" "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==" - "resolved" "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz" - "version" "6.5.0" - dependencies: - "got" "^9.6.0" - "registry-auth-token" "^4.0.0" - "registry-url" "^5.0.0" - "semver" "^6.2.0" - -"parent-module@^1.0.0": - "integrity" "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==" - "resolved" "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" - "version" "1.0.1" - dependencies: - "callsites" "^3.0.0" - -"parse-entities@^2.0.0": - "integrity" "sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ==" - "resolved" "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "character-entities" "^1.0.0" - "character-entities-legacy" "^1.0.0" - "character-reference-invalid" "^1.0.0" - "is-alphanumerical" "^1.0.0" - "is-decimal" "^1.0.0" - "is-hexadecimal" "^1.0.0" - -"parse-entities@^4.0.0": - "integrity" "sha512-5nk9Fn03x3rEhGaX1FU6IDwG/k+GxLXlFAkgrbM1asuAFl3BhdQWvASaIsmwWypRNcZKHPYnIuOSfIWEyEQnPQ==" - "resolved" "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.0.tgz" - "version" "4.0.0" - dependencies: - "@types/unist" "^2.0.0" - "character-entities" "^2.0.0" - "character-entities-legacy" "^3.0.0" - "character-reference-invalid" "^2.0.0" - "decode-named-character-reference" "^1.0.0" - "is-alphanumerical" "^2.0.0" - "is-decimal" "^2.0.0" - "is-hexadecimal" "^2.0.0" - -"parse-git-config@^1.1.1": - "integrity" "sha1-06mYQxcTL1c5hxK7pDjhKVkN34w= sha512-S3LGXJZVSy/hswvbSkfdbKBRVsnqKrVu6j8fcvdtJ4TxosSELyQDsJPuGPXuZ+EyuYuJd3O4uAF8gcISR0OFrQ==" - "resolved" "https://registry.npmjs.org/parse-git-config/-/parse-git-config-1.1.1.tgz" - "version" "1.1.1" - dependencies: - "extend-shallow" "^2.0.1" - "fs-exists-sync" "^0.1.0" - "git-config-path" "^1.0.1" - "ini" "^1.3.4" - -"parse-github-url@^1.0.2": - "integrity" "sha512-kgBf6avCbO3Cn6+RnzRGLkUsv4ZVqv/VfAYkRsyBcgkshNvVBkRn1FEZcW0Jb+npXQWm2vHPnnOqFteZxRRGNw==" - "resolved" "https://registry.npmjs.org/parse-github-url/-/parse-github-url-1.0.2.tgz" - "version" "1.0.2" - -"parse-json@^5.0.0": - "integrity" "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==" - "resolved" "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" - "version" "5.2.0" - dependencies: - "@babel/code-frame" "^7.0.0" - "error-ex" "^1.3.1" - "json-parse-even-better-errors" "^2.3.0" - "lines-and-columns" "^1.1.6" - -"parse-passwd@^1.0.0": - "integrity" "sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q==" - "resolved" "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz" - "version" "1.0.0" - -"path-exists@^3.0.0": - "integrity" "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==" - "resolved" "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz" - "version" "3.0.0" - -"path-exists@^4.0.0": - 
"integrity" "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" - "resolved" "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" - "version" "4.0.0" - -"path-is-absolute@^1.0.0": - "integrity" "sha1-F0uSaHNVNP+8es5r9TpanhtcX18= sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" - "resolved" "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" - "version" "1.0.1" - -"path-is-inside@^1.0.1": - "integrity" "sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==" - "resolved" "https://registry.npmjs.org/path-is-inside/-/path-is-inside-1.0.2.tgz" - "version" "1.0.2" - -"path-key@^2.0.0": - "integrity" "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==" - "resolved" "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz" - "version" "2.0.1" - -"path-parse@^1.0.7": - "integrity" "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" - "resolved" "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" - "version" "1.0.7" - -"path-type@^4.0.0": - "integrity" "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" - "resolved" "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" - "version" "4.0.0" - -"periscopic@^3.0.0": - "integrity" "sha512-SFx68DxCv0Iyo6APZuw/AKewkkThGwssmU0QWtTlvov3VAtPX+QJ4CadwSaz8nrT5jPIuxdvJWB4PnD2KNDxQg==" - "resolved" "https://registry.npmjs.org/periscopic/-/periscopic-3.0.4.tgz" - "version" "3.0.4" - dependencies: - "estree-walker" "^3.0.0" - "is-reference" "^3.0.0" - -"picocolors@^1.0.0": - "integrity" "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" - "resolved" "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz" - "version" "1.0.0" - -"picomatch@^2.0.4", "picomatch@^2.2.1", "picomatch@^2.2.3": - "integrity" "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" - "resolved" "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" - "version" "2.3.1" - -"pify@^3.0.0": - "integrity" "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY= sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" - "resolved" "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz" - "version" "3.0.0" - -"postcss-js@^4.0.0": - "integrity" "sha512-77QESFBwgX4irogGVPgQ5s07vLvFqWr228qZY+w6lW599cRlK/HmnlivnnVUxkjHnCu4J16PDMHcH+e+2HbvTQ==" - "resolved" "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.0.tgz" - "version" "4.0.0" - dependencies: - "camelcase-css" "^2.0.1" - -"postcss-load-config@^3.1.0": - "integrity" "sha512-5EYgaM9auHGtO//ljHH+v/aC/TQ5LHXtL7bQajNAUBKUVKiYE8rYpFms7+V26D9FncaGe2zwCoPQsFKb5zF/Hw==" - "resolved" "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.3.tgz" - "version" "3.1.3" - dependencies: - "lilconfig" "^2.0.4" - "yaml" "^1.10.2" - -"postcss-nested@5.0.6": - "integrity" "sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA==" - "resolved" "https://registry.npmjs.org/postcss-nested/-/postcss-nested-5.0.6.tgz" - "version" "5.0.6" - dependencies: - "postcss-selector-parser" "^6.0.6" - -"postcss-selector-parser@^6.0.6", "postcss-selector-parser@^6.0.9": - "integrity" 
"sha512-UO3SgnZOVTwu4kyLR22UQ1xZh086RyNZppb7lLAKBFK8a32ttG5i87Y/P3+2bRSjZNyJ1B7hfFNo273tKe9YxQ==" - "resolved" "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.9.tgz" - "version" "6.0.9" - dependencies: - "cssesc" "^3.0.0" - "util-deprecate" "^1.0.2" - -"postcss-value-parser@^4.2.0": - "integrity" "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" - "resolved" "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" - "version" "4.2.0" - -"postcss@^8.1.0", "postcss@^8.2.14", "postcss@^8.3.3", "postcss@^8.4.12", "postcss@^8.4.6": - "integrity" "sha512-lg6eITwYe9v6Hr5CncVbK70SoioNQIq81nsaG86ev5hAidQvmOeETBqs7jm43K2F5/Ley3ytDtriImV6TpNiSg==" - "resolved" "https://registry.npmjs.org/postcss/-/postcss-8.4.12.tgz" - "version" "8.4.12" - dependencies: - "nanoid" "^3.3.1" - "picocolors" "^1.0.0" - "source-map-js" "^1.0.2" - -"postcss@8.4.5": - "integrity" "sha512-jBDboWM8qpaqwkMwItqTQTiFikhs/67OYVvblFFTM7MrZjt6yMKd6r2kgXizEbTTljacm4NldIlZnhbjr84QYg==" - "resolved" "https://registry.npmjs.org/postcss/-/postcss-8.4.5.tgz" - "version" "8.4.5" - dependencies: - "nanoid" "^3.1.30" - "picocolors" "^1.0.0" - "source-map-js" "^1.0.1" - -"prepend-http@^1.0.1": - "integrity" "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg==" - "resolved" "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz" - "version" "1.0.4" - -"prepend-http@^2.0.0": - "integrity" "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc= sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==" - "resolved" "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz" - "version" "2.0.0" - -"prettier-plugin-tailwindcss@^0.1.8": - "integrity" "sha512-hwarSBCswAXa+kqYtaAkFr3Vop9o04WOyZs0qo3NyvW8L7f1rif61wRyq0+ArmVThOuRBcJF5hjGXYk86cwemg==" - "resolved" "https://registry.npmjs.org/prettier-plugin-tailwindcss/-/prettier-plugin-tailwindcss-0.1.8.tgz" - "version" "0.1.8" - -"prettier@^2.6.1", "prettier@>=2.2.0": - "integrity" "sha512-8UVbTBYGwN37Bs9LERmxCPjdvPxlEowx2urIL6urHzdb3SDq4B/Z6xLFCblrSnE4iKWcS6ziJ3aOYrc1kz/E2A==" - "resolved" "https://registry.npmjs.org/prettier/-/prettier-2.6.1.tgz" - "version" "2.6.1" - -"prism-themes@^1.9.0": - "integrity" "sha512-tX2AYsehKDw1EORwBps+WhBFKc2kxfoFpQAjxBndbZKr4fRmMkv47XN0BghC/K1qwodB1otbe4oF23vUTFDokw==" - "resolved" "https://registry.npmjs.org/prism-themes/-/prism-themes-1.9.0.tgz" - "version" "1.9.0" - -"prismjs@^1.27.0", "prismjs@~1.27.0": - "integrity" "sha512-t13BGPUlFDR7wRB5kQDG4jjl7XeuH6jbJGt11JHPL96qwsEHNX2+68tFXqc1/k+/jALsbSWJKUOT/hcYAZ5LkA==" - "resolved" "https://registry.npmjs.org/prismjs/-/prismjs-1.27.0.tgz" - "version" "1.27.0" - -"process-nextick-args@~2.0.0": - "integrity" "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" - "resolved" "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" - "version" "2.0.1" - -"prop-ini@^0.0.2": - "integrity" "sha1-ZzOny1JCrKsr5C5gdYPYEksXKls= sha512-qyU57WvAvZDbzmRy9xDbJGVwrGJhmA+rYnVjy4xtX4Ny9c7gzvpmf/j7A3oq9ChbPh15MZQKjPep2mNdnAhtig==" - "resolved" "https://registry.npmjs.org/prop-ini/-/prop-ini-0.0.2.tgz" - "version" "0.0.2" - dependencies: - "extend" "^3.0.0" - -"prop-types@^15.0.0", "prop-types@^15.7.2", "prop-types@^15.8.1": - "integrity" "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==" - "resolved" 
"https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz" - "version" "15.8.1" - dependencies: - "loose-envify" "^1.4.0" - "object-assign" "^4.1.1" - "react-is" "^16.13.1" - -"property-information@^5.0.0": - "integrity" "sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==" - "resolved" "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz" - "version" "5.6.0" - dependencies: - "xtend" "^4.0.0" - -"property-information@^6.0.0": - "integrity" "sha512-hrzC564QIl0r0vy4l6MvRLhafmUowhO/O3KgVSoXIbbA2Sz4j8HGpJc6T2cubRVwMwpdiG/vKGfhT4IixmKN9w==" - "resolved" "https://registry.npmjs.org/property-information/-/property-information-6.1.1.tgz" - "version" "6.1.1" - -"proxy-from-env@^1.1.0": - "integrity" "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" - "resolved" "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz" - "version" "1.1.0" - -"pseudomap@^1.0.2": - "integrity" "sha1-8FKijacOYYkX7wqKw0wa5aaChrM= sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==" - "resolved" "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz" - "version" "1.0.2" - -"pump@^3.0.0": - "integrity" "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==" - "resolved" "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "end-of-stream" "^1.1.0" - "once" "^1.3.1" - -"pupa@^2.0.1": - "integrity" "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==" - "resolved" "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz" - "version" "2.1.1" - dependencies: - "escape-goat" "^2.0.0" - -"queue-microtask@^1.2.2": - "integrity" "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" - "resolved" "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" - "version" "1.2.3" - -"quick-lru@^5.1.1": - "integrity" "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==" - "resolved" "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz" - "version" "5.1.1" - -"randomatic@^3.0.0": - "integrity" "sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw==" - "resolved" "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz" - "version" "3.1.1" - dependencies: - "is-number" "^4.0.0" - "kind-of" "^6.0.0" - "math-random" "^1.0.1" - -"rc@^1.0.1", "rc@^1.1.6", "rc@^1.2.1", "rc@^1.2.8": - "integrity" "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==" - "resolved" "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz" - "version" "1.2.8" - dependencies: - "deep-extend" "^0.6.0" - "ini" "~1.3.0" - "minimist" "^1.2.0" - "strip-json-comments" "~2.0.1" - -"react-dom@*", "react-dom@^16 || ^17 || ^18", "react-dom@^16.8.0 || ^17.0.0", "react-dom@^17.0.2", "react-dom@^17.0.2 || ^18.0.0-0", "react-dom@>=16.0.0", "react-dom@>=16.x <=17.x": - "integrity" "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==" - "resolved" "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz" - "version" "17.0.2" - dependencies: - "loose-envify" "^1.1.0" - "object-assign" "^4.1.1" - "scheduler" "^0.20.2" - -"react-is@^16.13.1": - "integrity" "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" - 
"resolved" "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz" - "version" "16.13.1" - -"react-is@^18.0.0": - "integrity" "sha512-yUcBYdBBbo3QiPsgYDcfQcIkGZHfxOaoE6HLSnr1sPzMhdyxusbfKOSUbSd/ocGi32dxcj366PsTj+5oggeKKw==" - "resolved" "https://registry.npmjs.org/react-is/-/react-is-18.0.0.tgz" - "version" "18.0.0" - -"react-markdown@^8.0.2": - "integrity" "sha512-WeXeDlCPFZBbN75AiLVEmN4gC6pNWadsZVWWxWpvrYQnUTHsB3l1PH60I1sbxTJr0oWOQc3zhxTrRQMTceNifw==" - "resolved" "https://registry.npmjs.org/react-markdown/-/react-markdown-8.0.2.tgz" - "version" "8.0.2" - dependencies: - "@types/hast" "^2.0.0" - "@types/prop-types" "^15.0.0" - "@types/unist" "^2.0.0" - "comma-separated-tokens" "^2.0.0" - "hast-util-whitespace" "^2.0.0" - "prop-types" "^15.0.0" - "property-information" "^6.0.0" - "react-is" "^18.0.0" - "remark-parse" "^10.0.0" - "remark-rehype" "^10.0.0" - "space-separated-tokens" "^2.0.0" - "style-to-object" "^0.3.0" - "unified" "^10.0.0" - "unist-util-visit" "^4.0.0" - "vfile" "^5.0.0" - -"react-syntax-highlighter@^15.5.0": - "integrity" "sha512-+zq2myprEnQmH5yw6Gqc8lD55QHnpKaU8TOcFeC/Lg/MQSs8UknEA0JC4nTZGFAXC2J2Hyj/ijJ7NlabyPi2gg==" - "resolved" "https://registry.npmjs.org/react-syntax-highlighter/-/react-syntax-highlighter-15.5.0.tgz" - "version" "15.5.0" - dependencies: - "@babel/runtime" "^7.3.1" - "highlight.js" "^10.4.1" - "lowlight" "^1.17.0" - "prismjs" "^1.27.0" - "refractor" "^3.6.0" - -"react-tooltip@^4.2.21": - "integrity" "sha512-zSLprMymBDowknr0KVDiJ05IjZn9mQhhg4PRsqln0OZtURAJ1snt1xi5daZfagsh6vfsziZrc9pErPTDY1ACig==" - "resolved" "https://registry.npmjs.org/react-tooltip/-/react-tooltip-4.2.21.tgz" - "version" "4.2.21" - dependencies: - "prop-types" "^15.7.2" - "uuid" "^7.0.3" - -"react-universal-interface@^0.6.2": - "integrity" "sha512-dg8yXdcQmvgR13RIlZbTRQOoUrDciFVoSBZILwjE2LFISxZZ8loVJKAkuzswl5js8BHda79bIb2b84ehU8IjXw==" - "resolved" "https://registry.npmjs.org/react-universal-interface/-/react-universal-interface-0.6.2.tgz" - "version" "0.6.2" - -"react-use@^17.3.2": - "integrity" "sha512-bj7OD0/1wL03KyWmzFXAFe425zziuTf7q8olwCYBfOeFHY1qfO1FAMjROQLsLZYwG4Rx63xAfb7XAbBrJsZmEw==" - "resolved" "https://registry.npmjs.org/react-use/-/react-use-17.3.2.tgz" - "version" "17.3.2" - dependencies: - "@types/js-cookie" "^2.2.6" - "@xobotyi/scrollbar-width" "^1.9.5" - "copy-to-clipboard" "^3.3.1" - "fast-deep-equal" "^3.1.3" - "fast-shallow-equal" "^1.0.0" - "js-cookie" "^2.2.1" - "nano-css" "^5.3.1" - "react-universal-interface" "^0.6.2" - "resize-observer-polyfill" "^1.5.1" - "screenfull" "^5.1.0" - "set-harmonic-interval" "^1.0.1" - "throttle-debounce" "^3.0.1" - "ts-easing" "^0.2.0" - "tslib" "^2.1.0" - -"react@*", "react@^16 || ^17 || ^18", "react@^16.8.0 || ^17.0.0", "react@^16.8.0 || ^17.0.0", "react@^17.0.2", "react@^17.0.2 || ^18.0.0-0", "react@>= 0.14.0", "react@>= 16", "react@>= 16.8.0 || 17.x.x || 18.x.x", "react@>=16", "react@>=16.0.0", "react@>=16.x", "react@>=16.x <=17.x", "react@17.0.2": - "integrity" "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==" - "resolved" "https://registry.npmjs.org/react/-/react-17.0.2.tgz" - "version" "17.0.2" - dependencies: - "loose-envify" "^1.1.0" - "object-assign" "^4.1.1" - -"readable-stream@^2.2.2", "readable-stream@~2.3.6": - "integrity" "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==" - "resolved" "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz" - "version" "2.3.7" - dependencies: - "core-util-is" "~1.0.0" 
- "inherits" "~2.0.3" - "isarray" "~1.0.0" - "process-nextick-args" "~2.0.0" - "safe-buffer" "~5.1.1" - "string_decoder" "~1.1.1" - "util-deprecate" "~1.0.1" - -"readdirp@~3.6.0": - "integrity" "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==" - "resolved" "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" - "version" "3.6.0" - dependencies: - "picomatch" "^2.2.1" - -"readme-badger@^0.3.0": - "integrity" "sha512-+sMOLSs1imZUISZ2Rhz7qqVd77QtpcAPbGeIraFdgJmijb04YtdlPjGNBvDChTNtLbeQ6JNGQy3pOgslWfaP3g==" - "resolved" "https://registry.npmjs.org/readme-badger/-/readme-badger-0.3.0.tgz" - "version" "0.3.0" - dependencies: - "balanced-match" "^1.0.0" - -"refractor@^3.4.0", "refractor@^3.6.0": - "integrity" "sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA==" - "resolved" "https://registry.npmjs.org/refractor/-/refractor-3.6.0.tgz" - "version" "3.6.0" - dependencies: - "hastscript" "^6.0.0" - "parse-entities" "^2.0.0" - "prismjs" "~1.27.0" - -"regenerator-runtime@^0.13.4": - "integrity" "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" - "resolved" "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz" - "version" "0.13.9" - -"registry-auth-token@^3.0.1": - "integrity" "sha512-4LM6Fw8eBQdwMYcES4yTnn2TqIasbXuwDx3um+QRs7S55aMKCBKBxvPXl2RiUjHwuJLTyYfxSpmfSAjQpcuP+A==" - "resolved" "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-3.4.0.tgz" - "version" "3.4.0" - dependencies: - "rc" "^1.1.6" - "safe-buffer" "^5.0.1" - -"registry-auth-token@^4.0.0": - "integrity" "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==" - "resolved" "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz" - "version" "4.2.1" - dependencies: - "rc" "^1.2.8" - -"registry-url@^3.0.3": - "integrity" "sha1-PU74cPc93h138M+aOBQyRE4XSUI= sha512-ZbgR5aZEdf4UKZVBPYIgaglBmSF2Hi94s2PcIHhRGFjKYu+chjJdYfHn4rt3hB6eCKLJ8giVIIfgMa1ehDfZKA==" - "resolved" "https://registry.npmjs.org/registry-url/-/registry-url-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "rc" "^1.0.1" - -"registry-url@^5.0.0": - "integrity" "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==" - "resolved" "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz" - "version" "5.1.0" - dependencies: - "rc" "^1.2.8" - -"rehype-slug@^5.0.1": - "integrity" "sha512-X5v3wV/meuOX9NFcGhJvUpEjIvQl2gDvjg3z40RVprYFt7q3th4qMmYLULiu3gXvbNX1ppx+oaa6JyY1W67pTA==" - "resolved" "https://registry.npmjs.org/rehype-slug/-/rehype-slug-5.0.1.tgz" - "version" "5.0.1" - dependencies: - "@types/hast" "^2.0.0" - "github-slugger" "^1.1.1" - "hast-util-has-property" "^2.0.0" - "hast-util-heading-rank" "^2.0.0" - "hast-util-to-string" "^2.0.0" - "unified" "^10.0.0" - "unist-util-visit" "^4.0.0" - -"remark-gfm@^3.0.1": - "integrity" "sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==" - "resolved" "https://registry.npmjs.org/remark-gfm/-/remark-gfm-3.0.1.tgz" - "version" "3.0.1" - dependencies: - "@types/mdast" "^3.0.0" - "mdast-util-gfm" "^2.0.0" - "micromark-extension-gfm" "^2.0.0" - "unified" "^10.0.0" - -"remark-mdx@^2.0.0": - "integrity" "sha512-0wXdEITnFyjLquN3VvACNLzbGzWM5ujzTvfgOkONBZgSFJ7ezLLDaTWqf6H9eUgVITEP8asp6LJ0W/X090dXBg==" - "resolved" "https://registry.npmjs.org/remark-mdx/-/remark-mdx-2.1.1.tgz" - "version" "2.1.1" - 
dependencies: - "mdast-util-mdx" "^2.0.0" - "micromark-extension-mdxjs" "^1.0.0" - -"remark-parse@^10.0.0": - "integrity" "sha512-1fUyHr2jLsVOkhbvPRBJ5zTKZZyD6yZzYaWCS6BPBdQ8vEMBCH+9zNCDA6tET/zHCi/jLqjCWtlJZUPk+DbnFw==" - "resolved" "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.1.tgz" - "version" "10.0.1" - dependencies: - "@types/mdast" "^3.0.0" - "mdast-util-from-markdown" "^1.0.0" - "unified" "^10.0.0" - -"remark-rehype@^10.0.0": - "integrity" "sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw==" - "resolved" "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz" - "version" "10.1.0" - dependencies: - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - "mdast-util-to-hast" "^12.1.0" - "unified" "^10.0.0" - -"remarkable@^1.7.1": - "integrity" "sha512-e6NKUXgX95whv7IgddywbeN/ItCkWbISmc2DiqHJb0wTrqZIexqdco5b8Z3XZoo/48IdNVKM9ZCvTPJ4F5uvhg==" - "resolved" "https://registry.npmjs.org/remarkable/-/remarkable-1.7.4.tgz" - "version" "1.7.4" - dependencies: - "argparse" "^1.0.10" - "autolinker" "~0.28.0" - -"remote-origin-url@^1.0.0": - "integrity" "sha512-xHDM6IBqivpiQ1e4WOuFpM/T6rbzA/WBsu+3WLtgPOhHyjA0nYlijV3NprlTb4FcXlQ5+Q+z174sQ1NnUF5FwA==" - "resolved" "https://registry.npmjs.org/remote-origin-url/-/remote-origin-url-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "parse-git-config" "^1.1.1" - -"repeat-element@^1.1.2": - "integrity" "sha512-LFiNfRcSu7KK3evMyYOuCzv3L10TW7yC1G2/+StMjK8Y6Vqd2MG7r/Qjw4ghtuCOjFvlnms/iMmLqpvW/ES/WQ==" - "resolved" "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.4.tgz" - "version" "1.1.4" - -"repeat-string@^1.5.2", "repeat-string@^1.6.1": - "integrity" "sha1-jcrkcOHIirwtYA//Sndihtp15jc= sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==" - "resolved" "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz" - "version" "1.6.1" - -"require-directory@^2.1.1": - "integrity" "sha1-jGStX9MNqxyXbiNE/+f3kqam30I= sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" - "resolved" "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" - "version" "2.1.1" - -"require-main-filename@^2.0.0": - "integrity" "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" - "resolved" "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz" - "version" "2.0.0" - -"resize-observer-polyfill@^1.5.1": - "integrity" "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg==" - "resolved" "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz" - "version" "1.5.1" - -"resolve-alpn@^1.0.0": - "integrity" "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==" - "resolved" "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz" - "version" "1.2.1" - -"resolve-from@^4.0.0": - "integrity" "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" - "resolved" "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" - "version" "4.0.0" - -"resolve@^1.22.0": - "integrity" "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==" - "resolved" "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz" - "version" "1.22.0" - dependencies: - "is-core-module" "^2.8.1" - "path-parse" "^1.0.7" - 
"supports-preserve-symlinks-flag" "^1.0.0" - -"responselike@^1.0.2": - "integrity" "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec= sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==" - "resolved" "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "lowercase-keys" "^1.0.0" - -"responselike@^2.0.0": - "integrity" "sha512-xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==" - "resolved" "https://registry.npmjs.org/responselike/-/responselike-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "lowercase-keys" "^2.0.0" - -"restore-cursor@^3.1.0": - "integrity" "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==" - "resolved" "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "onetime" "^5.1.0" - "signal-exit" "^3.0.2" - -"reusify@^1.0.4": - "integrity" "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" - "resolved" "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" - "version" "1.0.4" - -"rimraf@^2.6.2": - "integrity" "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==" - "resolved" "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" - "version" "2.7.1" - dependencies: - "glob" "^7.1.3" - -"rtl-css-js@^1.14.0": - "integrity" "sha512-99Cu4wNNIhrI10xxUaABHsdDqzalrSRTie4GeCmbGVuehm4oj+fIy8fTzB+16pmKe8Bv9rl+hxIBez6KxExTew==" - "resolved" "https://registry.npmjs.org/rtl-css-js/-/rtl-css-js-1.15.0.tgz" - "version" "1.15.0" - dependencies: - "@babel/runtime" "^7.1.2" - -"run-async@^2.4.0": - "integrity" "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==" - "resolved" "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz" - "version" "2.4.1" - -"run-parallel@^1.1.9": - "integrity" "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==" - "resolved" "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" - "version" "1.2.0" - dependencies: - "queue-microtask" "^1.2.2" - -"rxjs@^6.6.0": - "integrity" "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==" - "resolved" "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz" - "version" "6.6.7" - dependencies: - "tslib" "^1.9.0" - -"sade@^1.7.3": - "integrity" "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==" - "resolved" "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz" - "version" "1.8.1" - dependencies: - "mri" "^1.1.0" - -"safe-buffer@^5.0.1", "safe-buffer@^5.1.0": - "integrity" "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - "resolved" "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" - "version" "5.2.1" - -"safe-buffer@~5.1.0": - "integrity" "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - "resolved" "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" - "version" "5.1.2" - -"safe-buffer@~5.1.1": - "integrity" "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" - "resolved" "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" - "version" "5.1.2" - -"safer-buffer@>= 2.1.2 < 3": - "integrity" 
"sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - "resolved" "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" - "version" "2.1.2" - -"sass@^1.3.0", "sass@^1.32.8": - "integrity" "sha512-YlYWkkHP9fbwaFRZQRXgDi3mXZShslVmmo+FVK3kHLUELHHEYrCmL1x6IUjC7wLS6VuJSAFXRQS/DxdsC4xL1A==" - "resolved" "https://registry.npmjs.org/sass/-/sass-1.49.9.tgz" - "version" "1.49.9" - dependencies: - "chokidar" ">=3.0.0 <4.0.0" - "immutable" "^4.0.0" - "source-map-js" ">=0.6.2 <2.0.0" - -"scheduler@^0.20.2": - "integrity" "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==" - "resolved" "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz" - "version" "0.20.2" - dependencies: - "loose-envify" "^1.1.0" - "object-assign" "^4.1.1" - -"screenfull@^5.1.0": - "integrity" "sha512-9BakfsO2aUQN2K9Fdbj87RJIEZ82Q9IGim7FqM5OsebfoFC6ZHXgDq/KvniuLTPdeM8wY2o6Dj3WQ7KeQCj3cA==" - "resolved" "https://registry.npmjs.org/screenfull/-/screenfull-5.2.0.tgz" - "version" "5.2.0" - -"section-matter@^1.0.0": - "integrity" "sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==" - "resolved" "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "extend-shallow" "^2.0.1" - "kind-of" "^6.0.0" - -"semver-diff@^2.0.0": - "integrity" "sha1-S7uEN8jTfksM8aaP1ybsbWRdbTY= sha512-gL8F8L4ORwsS0+iQ34yCYv///jsOq0ZL7WP55d1HnJ32o7tyFYEFQZQA22mrLIacZdU6xecaBBZ+uEiffGNyXw==" - "resolved" "https://registry.npmjs.org/semver-diff/-/semver-diff-2.1.0.tgz" - "version" "2.1.0" - dependencies: - "semver" "^5.0.3" - -"semver-diff@^3.1.1": - "integrity" "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==" - "resolved" "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz" - "version" "3.1.1" - dependencies: - "semver" "^6.3.0" - -"semver-utils@^1.1.4": - "integrity" "sha512-EjnoLE5OGmDAVV/8YDoN5KiajNadjzIp9BAHOhYeQHt7j0UWxjmgsx4YD48wp4Ue1Qogq38F1GNUJNqF1kKKxA==" - "resolved" "https://registry.npmjs.org/semver-utils/-/semver-utils-1.1.4.tgz" - "version" "1.1.4" - -"semver@^5.0.3", "semver@^5.1.0": - "integrity" "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - "resolved" "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz" - "version" "5.7.1" - -"semver@^5.6.0": - "integrity" "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" - "resolved" "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz" - "version" "5.7.1" - -"semver@^6.0.0", "semver@^6.2.0", "semver@^6.3.0": - "integrity" "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" - "resolved" "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz" - "version" "6.3.0" - -"set-blocking@^2.0.0": - "integrity" "sha1-BF+XgtARrppoA93TgrJDkrPYkPc= sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" - "resolved" "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" - "version" "2.0.0" - -"set-getter@^0.1.0": - "integrity" "sha512-9sVWOy+gthr+0G9DzqqLaYNA7+5OKkSmcqjL9cBpDEaZrr3ShQlyX2cZ/O/ozE41oxn/Tt0LGEM/w4Rub3A3gw==" - "resolved" "https://registry.npmjs.org/set-getter/-/set-getter-0.1.1.tgz" - "version" "0.1.1" - dependencies: - "to-object-path" "^0.3.0" - -"set-harmonic-interval@^1.0.1": - "integrity" 
"sha512-AhICkFV84tBP1aWqPwLZqFvAwqEoVA9kxNMniGEUvzOlm4vLmOFLiTT3UZ6bziJTy4bOVpzWGTfSCbmaayGx8g==" - "resolved" "https://registry.npmjs.org/set-harmonic-interval/-/set-harmonic-interval-1.0.1.tgz" - "version" "1.0.1" - -"setprototypeof@1.2.0": - "integrity" "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" - "resolved" "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz" - "version" "1.2.0" - -"shebang-command@^1.2.0": - "integrity" "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==" - "resolved" "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz" - "version" "1.2.0" - dependencies: - "shebang-regex" "^1.0.0" - -"shebang-regex@^1.0.0": - "integrity" "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==" - "resolved" "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz" - "version" "1.0.0" - -"sigmund@^1.0.1": - "integrity" "sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA= sha512-fCvEXfh6NWpm+YSuY2bpXb/VIihqWA6hLsgboC+0nl71Q7N7o2eaCW8mJa/NLvQhs6jpd3VZV4UiUQlV6+lc8g==" - "resolved" "https://registry.npmjs.org/sigmund/-/sigmund-1.0.1.tgz" - "version" "1.0.1" - -"signal-exit@^3.0.0", "signal-exit@^3.0.2": - "integrity" "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" - "resolved" "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" - "version" "3.0.7" - -"smpltmpl@^1.0.2": - "integrity" "sha512-Hq23NNgeZigOzIiX1dkb6W3gFn2/XQj43KhPxu65IMieG/gIwf/lQb1IudjYv0c/5LwJeS/mPayYzyo+8WJMxQ==" - "resolved" "https://registry.npmjs.org/smpltmpl/-/smpltmpl-1.0.2.tgz" - "version" "1.0.2" - dependencies: - "babel-code-frame" "^6.26.0" - -"source-map-js@^1.0.1", "source-map-js@^1.0.2", "source-map-js@>=0.6.2 <2.0.0": - "integrity" "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" - "resolved" "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz" - "version" "1.0.2" - -"source-map@^0.6.1": - "integrity" "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" - "resolved" "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" - "version" "0.6.1" - -"source-map@0.5.6": - "integrity" "sha1-dc449SvwczxafwwRjYEzSiu19BI= sha512-MjZkVp0NHr5+TPihLcadqnlVoGIoWo4IBHptutGh9wI3ttUYvCG26HkSuDi+K6lsZ25syXJXcctwgyVCt//xqA==" - "resolved" "https://registry.npmjs.org/source-map/-/source-map-0.5.6.tgz" - "version" "0.5.6" - -"sourcemap-codec@^1.4.8": - "integrity" "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==" - "resolved" "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz" - "version" "1.4.8" - -"space-separated-tokens@^1.0.0": - "integrity" "sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA==" - "resolved" "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz" - "version" "1.1.5" - -"space-separated-tokens@^2.0.0": - "integrity" "sha512-ekwEbFp5aqSPKaqeY1PGrlGQxPNaq+Cnx4+bE2D8sciBQrHpbwoBbawqTN2+6jPs9IdWxxiUcN0K2pkczD3zmw==" - "resolved" "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.1.tgz" - "version" "2.0.1" - -"split-lines@^2.0.0": - "integrity" 
"sha512-8dv+1zKgTpfTkOy8XZLFyWrfxO0NV/bj/3EaQ+hBrBxGv2DwiroljPjU8NlCr+59nLnsVm9WYT7lXKwe4TC6bw==" - "resolved" "https://registry.npmjs.org/split-lines/-/split-lines-2.1.0.tgz" - "version" "2.1.0" - -"sprintf-js@~1.0.2": - "integrity" "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" - "resolved" "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" - "version" "1.0.3" - -"stack-generator@^2.0.5": - "integrity" "sha512-/t1ebrbHkrLrDuNMdeAcsvynWgoH/i4o8EGGfX7dEYDoTXOYVAkEpFdtshlvabzc6JlJ8Kf9YdFEoz7JkzGN9Q==" - "resolved" "https://registry.npmjs.org/stack-generator/-/stack-generator-2.0.5.tgz" - "version" "2.0.5" - dependencies: - "stackframe" "^1.1.1" - -"stackframe@^1.1.1": - "integrity" "sha512-h88QkzREN/hy8eRdyNhhsO7RSJ5oyTqxxmmn0dzBIMUclZsjpfmrsg81vp8mjjAs2vAZ72nyWxRUwSwmh0e4xg==" - "resolved" "https://registry.npmjs.org/stackframe/-/stackframe-1.2.1.tgz" - "version" "1.2.1" - -"stacktrace-gps@^3.0.4": - "integrity" "sha512-qIr8x41yZVSldqdqe6jciXEaSCKw1U8XTXpjDuy0ki/apyTn/r3w9hDAAQOhZdxvsC93H+WwwEu5cq5VemzYeg==" - "resolved" "https://registry.npmjs.org/stacktrace-gps/-/stacktrace-gps-3.0.4.tgz" - "version" "3.0.4" - dependencies: - "source-map" "0.5.6" - "stackframe" "^1.1.1" - -"stacktrace-js@^2.0.2": - "integrity" "sha512-Je5vBeY4S1r/RnLydLl0TBTi3F2qdfWmYsGvtfZgEI+SCprPppaIhQf5nGcal4gI4cGpCV/duLcAzT1np6sQqg==" - "resolved" "https://registry.npmjs.org/stacktrace-js/-/stacktrace-js-2.0.2.tgz" - "version" "2.0.2" - dependencies: - "error-stack-parser" "^2.0.6" - "stack-generator" "^2.0.5" - "stacktrace-gps" "^3.0.4" - -"statuses@>= 1.5.0 < 2": - "integrity" "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" - "resolved" "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" - "version" "1.5.0" - -"string_decoder@~1.1.1": - "integrity" "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==" - "resolved" "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - "version" "1.1.1" - dependencies: - "safe-buffer" "~5.1.0" - -"string-width@^2.0.0", "string-width@^2.1.1": - "integrity" "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==" - "resolved" "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz" - "version" "2.1.1" - dependencies: - "is-fullwidth-code-point" "^2.0.0" - "strip-ansi" "^4.0.0" - -"string-width@^3.0.0": - "integrity" "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==" - "resolved" "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "emoji-regex" "^7.0.1" - "is-fullwidth-code-point" "^2.0.0" - "strip-ansi" "^5.1.0" - -"string-width@^3.1.0": - "integrity" "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==" - "resolved" "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "emoji-regex" "^7.0.1" - "is-fullwidth-code-point" "^2.0.0" - "strip-ansi" "^5.1.0" - -"string-width@^4.0.0", "string-width@^4.1.0": - "integrity" "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==" - "resolved" "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" - "version" "4.2.3" - dependencies: - "emoji-regex" "^8.0.0" - "is-fullwidth-code-point" "^3.0.0" - 
"strip-ansi" "^6.0.1" - -"stringify-entities@^4.0.0": - "integrity" "sha512-MTxTVcEkorNtBbNpoFJPEh0kKdM6+QbMjLbaxmvaPMmayOXdr/AIVIIJX7FReUVweRBFJfZepK4A4AKgwuFpMQ==" - "resolved" "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.2.tgz" - "version" "4.0.2" - dependencies: - "character-entities-html4" "^2.0.0" - "character-entities-legacy" "^3.0.0" - -"strip-ansi@^3.0.0": - "integrity" "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= sha512-VhumSSbBqDTP8p2ZLKj40UjBCV4+v8bUSEpUb4KjRgWk9pbqGF4REFj6KEagidb2f/M6AzC0EmFyDNGaw9OCzg==" - "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz" - "version" "3.0.1" - dependencies: - "ansi-regex" "^2.0.0" - -"strip-ansi@^4.0.0": - "integrity" "sha1-qEeQIusaw2iocTibY1JixQXuNo8= sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow==" - "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz" - "version" "4.0.0" - dependencies: - "ansi-regex" "^3.0.0" - -"strip-ansi@^5.0.0", "strip-ansi@^5.1.0", "strip-ansi@^5.2.0": - "integrity" "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==" - "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz" - "version" "5.2.0" - dependencies: - "ansi-regex" "^4.1.0" - -"strip-ansi@^6.0.0", "strip-ansi@^6.0.1": - "integrity" "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==" - "resolved" "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" - "version" "6.0.1" - dependencies: - "ansi-regex" "^5.0.1" - -"strip-bom-string@^1.0.0": - "integrity" "sha1-5SEekiQ2n7uB1jOi8ABE3IztrZI= sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==" - "resolved" "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz" - "version" "1.0.0" - -"strip-bom@^4.0.0": - "integrity" "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==" - "resolved" "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz" - "version" "4.0.0" - -"strip-color@^0.1.0": - "integrity" "sha1-EG9l09PmotlAHKwOsM6LinArT3s= sha512-p9LsUieSjWNNAxVCXLeilaDlmuUOrDS5/dF9znM1nZc7EGX5+zEFC0bEevsNIaldjlks+2jns5Siz6F9iK6jwA==" - "resolved" "https://registry.npmjs.org/strip-color/-/strip-color-0.1.0.tgz" - "version" "0.1.0" - -"strip-eof@^1.0.0": - "integrity" "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==" - "resolved" "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz" - "version" "1.0.0" - -"strip-json-comments@~2.0.1": - "integrity" "sha1-PFMZQukIwml8DsNEhYwobHygpgo= sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==" - "resolved" "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz" - "version" "2.0.1" - -"style-to-object@^0.3.0": - "integrity" "sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA==" - "resolved" "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz" - "version" "0.3.0" - dependencies: - "inline-style-parser" "0.1.1" - -"styled-jsx@5.0.0": - "integrity" "sha512-qUqsWoBquEdERe10EW8vLp3jT25s/ssG1/qX5gZ4wu15OZpmSMFI2v+fWlRhLfykA5rFtlJ1ME8A8pm/peV4WA==" - "resolved" "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.0.0.tgz" - "version" "5.0.0" - -"stylis@^4.0.6": - "integrity" 
"sha512-xGPXiFVl4YED9Jh7Euv2V220mriG9u4B2TA6Ybjc1catrstKD2PpIdU3U0RKpkVBC2EhmL/F0sPCr9vrFTNRag==" - "resolved" "https://registry.npmjs.org/stylis/-/stylis-4.0.13.tgz" - "version" "4.0.13" - -"supports-color@^2.0.0": - "integrity" "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g==" - "resolved" "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz" - "version" "2.0.0" - -"supports-color@^5.3.0": - "integrity" "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==" - "resolved" "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" - "version" "5.5.0" - dependencies: - "has-flag" "^3.0.0" - -"supports-color@^7.1.0": - "integrity" "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==" - "resolved" "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" - "version" "7.2.0" - dependencies: - "has-flag" "^4.0.0" - -"supports-preserve-symlinks-flag@^1.0.0": - "integrity" "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" - "resolved" "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" - "version" "1.0.0" - -"tailwindcss@^3.0.23", "tailwindcss@>=3.0.0 || >= 3.0.0-alpha.1", "tailwindcss@>=3.0.0 || >= 3.0.0-alpha.1 || insiders": - "integrity" "sha512-+OZOV9ubyQ6oI2BXEhzw4HrqvgcARY38xv3zKcjnWtMIZstEsXdI9xftd1iB7+RbOnj2HOEzkA0OyB5BaSxPQA==" - "resolved" "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.0.23.tgz" - "version" "3.0.23" - dependencies: - "arg" "^5.0.1" - "chalk" "^4.1.2" - "chokidar" "^3.5.3" - "color-name" "^1.1.4" - "cosmiconfig" "^7.0.1" - "detective" "^5.2.0" - "didyoumean" "^1.2.2" - "dlv" "^1.1.3" - "fast-glob" "^3.2.11" - "glob-parent" "^6.0.2" - "is-glob" "^4.0.3" - "normalize-path" "^3.0.0" - "object-hash" "^2.2.0" - "postcss" "^8.4.6" - "postcss-js" "^4.0.0" - "postcss-load-config" "^3.1.0" - "postcss-nested" "5.0.6" - "postcss-selector-parser" "^6.0.9" - "postcss-value-parser" "^4.2.0" - "quick-lru" "^5.1.1" - "resolve" "^1.22.0" - -"term-size@^1.2.0": - "integrity" "sha1-RYuDiH8oj8Vtb/+/rSYuJmOO+mk= sha512-7dPUZQGy/+m3/wjVz3ZW5dobSoD/02NxJpoXUX0WIyjfVS3l0c+b/+9phIDFA7FHzkYtwtMFgeGZ/Y8jVTeqQQ==" - "resolved" "https://registry.npmjs.org/term-size/-/term-size-1.2.0.tgz" - "version" "1.2.0" - dependencies: - "execa" "^0.7.0" - -"term-size@^2.1.0": - "integrity" "sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg==" - "resolved" "https://registry.npmjs.org/term-size/-/term-size-2.2.1.tgz" - "version" "2.2.1" - -"throttle-debounce@^3.0.1": - "integrity" "sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg==" - "resolved" "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-3.0.1.tgz" - "version" "3.0.1" - -"through@^2.3.6": - "integrity" "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" - "resolved" "https://registry.npmjs.org/through/-/through-2.3.8.tgz" - "version" "2.3.8" - -"through2@^2.0.0": - "integrity" "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==" - "resolved" "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" - "version" "2.0.5" - dependencies: - "readable-stream" "~2.3.6" - "xtend" "~4.0.1" - -"timed-out@^4.0.0": - 
"integrity" "sha1-8y6srFoXW+ol1/q1Zas+2HQe9W8= sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA==" - "resolved" "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz" - "version" "4.0.1" - -"tmp@^0.0.33": - "integrity" "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==" - "resolved" "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz" - "version" "0.0.33" - dependencies: - "os-tmpdir" "~1.0.2" - -"to-object-path@^0.3.0": - "integrity" "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= sha512-9mWHdnGRuh3onocaHzukyvCZhzvr6tiflAy/JRFXcJX0TjgfWA9pk9t8CMbzmBE4Jfw58pXbkngtBtqYxzNEyg==" - "resolved" "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz" - "version" "0.3.0" - dependencies: - "kind-of" "^3.0.2" - -"to-readable-stream@^1.0.0": - "integrity" "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==" - "resolved" "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz" - "version" "1.0.0" - -"to-regex-range@^5.0.1": - "integrity" "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==" - "resolved" "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" - "version" "5.0.1" - dependencies: - "is-number" "^7.0.0" - -"toggle-selection@^1.0.6": - "integrity" "sha1-bkWxJj8gF/oKzH2J14sVuL932jI= sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" - "resolved" "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz" - "version" "1.0.6" - -"toidentifier@1.0.1": - "integrity" "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" - "resolved" "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" - "version" "1.0.1" - -"toml@^2.3.2": - "integrity" "sha512-gVweAectJU3ebq//Ferr2JUY4WKSDe5N+z0FvjDncLGyHmIDoxgY/2Ie4qfEIDm4IS7OA6Rmdm7pdEEdMcV/xQ==" - "resolved" "https://registry.npmjs.org/toml/-/toml-2.3.6.tgz" - "version" "2.3.6" - -"trough@^2.0.0": - "integrity" "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==" - "resolved" "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz" - "version" "2.1.0" - -"ts-easing@^0.2.0": - "integrity" "sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ==" - "resolved" "https://registry.npmjs.org/ts-easing/-/ts-easing-0.2.0.tgz" - "version" "0.2.0" - -"ts-node@^10.7.0", "ts-node@>=9.0.0": - "integrity" "sha512-TbIGS4xgJoX2i3do417KSaep1uRAW/Lu+WAL2doDHC0D6ummjirVOXU5/7aiZotbQ5p1Zp9tP7U6cYhA0O7M8A==" - "resolved" "https://registry.npmjs.org/ts-node/-/ts-node-10.7.0.tgz" - "version" "10.7.0" - dependencies: - "@cspotcode/source-map-support" "0.7.0" - "@tsconfig/node10" "^1.0.7" - "@tsconfig/node12" "^1.0.7" - "@tsconfig/node14" "^1.0.0" - "@tsconfig/node16" "^1.0.2" - "acorn" "^8.4.1" - "acorn-walk" "^8.1.1" - "arg" "^4.1.0" - "create-require" "^1.1.0" - "diff" "^4.0.1" - "make-error" "^1.1.1" - "v8-compile-cache-lib" "^3.0.0" - "yn" "3.1.1" - -"tslib@*", "tslib@^2.1.0", "tslib@^2.3.1": - "integrity" "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" - "resolved" "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz" - "version" "2.3.1" - -"tslib@^1.9.0": - "integrity" "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - "resolved" 
"https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz" - "version" "1.14.1" - -"type-fest@^0.21.3": - "integrity" "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==" - "resolved" "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz" - "version" "0.21.3" - -"type-fest@^0.8.1": - "integrity" "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==" - "resolved" "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz" - "version" "0.8.1" - -"typedarray-to-buffer@^3.1.5": - "integrity" "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==" - "resolved" "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz" - "version" "3.1.5" - dependencies: - "is-typedarray" "^1.0.0" - -"typedarray@^0.0.6": - "integrity" "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" - "resolved" "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz" - "version" "0.0.6" - -"typescript@^4.1.5", "typescript@>=2.7": - "integrity" "sha512-HM/hFigTBHZhLXshn9sN37H085+hQGeJHJ/X7LpBWLID/fbc2acUMfU+lGD98X81sKP+pFa9f0DZmCwB9GnbAg==" - "resolved" "https://registry.npmjs.org/typescript/-/typescript-4.6.2.tgz" - "version" "4.6.2" - -"unified@^10.0.0": - "integrity" "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==" - "resolved" "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz" - "version" "10.1.2" - dependencies: - "@types/unist" "^2.0.0" - "bail" "^2.0.0" - "extend" "^3.0.0" - "is-buffer" "^2.0.0" - "is-plain-obj" "^4.0.0" - "trough" "^2.0.0" - "vfile" "^5.0.0" - -"unique-random-array@1.0.0": - "integrity" "sha1-QrNyHFeTiNi2Z8k8Lb3j1dgakTY= sha512-vtj2yltjcHPa69nFjNJ3xnhsEwE8pMyjqUQDw2myz/iSezqf4YCAcygwFQEsOgMid5VNW/dCPbnb2BcmaDCCKg==" - "resolved" "https://registry.npmjs.org/unique-random-array/-/unique-random-array-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "unique-random" "^1.0.0" - -"unique-random@^1.0.0": - "integrity" "sha1-zj4iTIJCzTOg53sNcYDXfmti0MQ= sha512-K1sUkPf9EXCZFNIlMCoX4icAqcvkR4FMPH4Z61HbyiWhQl1ZGo0zYeV2bJmocK8Cp6tnKYrCnpkeKGebXZoRTQ==" - "resolved" "https://registry.npmjs.org/unique-random/-/unique-random-1.0.0.tgz" - "version" "1.0.0" - -"unique-string@^1.0.0": - "integrity" "sha1-nhBXzKhRq7kzmPizOuGHuZyuwRo= sha512-ODgiYu03y5g76A1I9Gt0/chLCzQjvzDy7DsZGsLOE/1MrF6wriEskSncj1+/C58Xk/kPZDppSctDybCwOSaGAg==" - "resolved" "https://registry.npmjs.org/unique-string/-/unique-string-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "crypto-random-string" "^1.0.0" - -"unique-string@^2.0.0": - "integrity" "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==" - "resolved" "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "crypto-random-string" "^2.0.0" - -"unist-builder@^3.0.0": - "integrity" "sha512-GFxmfEAa0vi9i5sd0R2kcrI9ks0r82NasRq5QHh2ysGngrc6GiqD5CDf1FjPenY4vApmFASBIIlk/jj5J5YbmQ==" - "resolved" "https://registry.npmjs.org/unist-builder/-/unist-builder-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "@types/unist" "^2.0.0" - -"unist-util-generated@^2.0.0": - "integrity" "sha512-TiWE6DVtVe7Ye2QxOVW9kqybs6cZexNwTwSMVgkfjEReqy/xwGpAXb99OxktoWwmL+Z+Epb0Dn8/GNDYP1wnUw==" - "resolved" "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.0.tgz" - "version" "2.0.0" - -"unist-util-is@^4.0.0": - 
"integrity" "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==" - "resolved" "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz" - "version" "4.1.0" - -"unist-util-is@^5.0.0": - "integrity" "sha512-F5CZ68eYzuSvJjGhCLPL3cYx45IxkqXSetCcRgUXtbcm50X2L9oOWQlfUfDdAf+6Pd27YDblBfdtmsThXmwpbQ==" - "resolved" "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.1.1.tgz" - "version" "5.1.1" - -"unist-util-position-from-estree@^1.0.0", "unist-util-position-from-estree@^1.1.0": - "integrity" "sha512-xtoY50b5+7IH8tFbkw64gisG9tMSpxDjhX9TmaJJae/XuxQ9R/Kc8Nv1eOsf43Gt4KV/LkriMy9mptDr7XLcaw==" - "resolved" "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-1.1.1.tgz" - "version" "1.1.1" - dependencies: - "@types/unist" "^2.0.0" - -"unist-util-position@^4.0.0": - "integrity" "sha512-p/5EMGIa1qwbXjA+QgcBXaPWjSnZfQ2Sc3yBEEfgPwsEmJd8Qh+DSk3LGnmOM4S1bY2C0AjmMnB8RuEYxpPwXQ==" - "resolved" "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.3.tgz" - "version" "4.0.3" - dependencies: - "@types/unist" "^2.0.0" - -"unist-util-remove-position@^4.0.0": - "integrity" "sha512-0yDkppiIhDlPrfHELgB+NLQD5mfjup3a8UYclHruTJWmY74je8g+CIFr79x5f6AkmzSwlvKLbs63hC0meOMowQ==" - "resolved" "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-4.0.1.tgz" - "version" "4.0.1" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-visit" "^4.0.0" - -"unist-util-stringify-position@^3.0.0": - "integrity" "sha512-7A6eiDCs9UtjcwZOcCpM4aPII3bAAGv13E96IkawkOAW0OhH+yRxtY0lzo8KiHpzEMfH7Q+FizUmwp8Iqy5EWg==" - "resolved" "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.2.tgz" - "version" "3.0.2" - dependencies: - "@types/unist" "^2.0.0" - -"unist-util-visit-parents@^3.0.0": - "integrity" "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==" - "resolved" "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz" - "version" "3.1.1" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-is" "^4.0.0" - -"unist-util-visit-parents@^4.0.0": - "integrity" "sha512-1xAFJXAKpnnJl8G7K5KgU7FY55y3GcLIXqkzUj5QF/QVP7biUm0K0O2oqVkYsdjzJKifYeWn9+o6piAK2hGSHw==" - "resolved" "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-4.1.1.tgz" - "version" "4.1.1" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-is" "^5.0.0" - -"unist-util-visit-parents@^5.0.0": - "integrity" "sha512-y+QVLcY5eR/YVpqDsLf/xh9R3Q2Y4HxkZTp7ViLDU6WtJCEcPmRzW1gpdWDCDIqIlhuPDXOgttqPlykrHYDekg==" - "resolved" "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.0.tgz" - "version" "5.1.0" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-is" "^5.0.0" - -"unist-util-visit@^2.0.3": - "integrity" "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==" - "resolved" "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz" - "version" "2.0.3" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-is" "^4.0.0" - "unist-util-visit-parents" "^3.0.0" - -"unist-util-visit@^3.0.0": - "integrity" "sha512-Szoh+R/Ll68QWAyQyZZpQzZQm2UPbxibDvaY8Xc9SUtYgPsDzx5AWSk++UUt2hJuow8mvwR+rG+LQLw+KsuAKA==" - "resolved" "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-is" "^5.0.0" - "unist-util-visit-parents" 
"^4.0.0" - -"unist-util-visit@^4.0.0": - "integrity" "sha512-n7lyhFKJfVZ9MnKtqbsqkQEk5P1KShj0+//V7mAcoI6bpbUjh3C/OG8HVD+pBihfh6Ovl01m8dkcv9HNqYajmQ==" - "resolved" "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.0.tgz" - "version" "4.1.0" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-is" "^5.0.0" - "unist-util-visit-parents" "^5.0.0" - -"universalify@^0.1.0": - "integrity" "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==" - "resolved" "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz" - "version" "0.1.2" - -"unzip-response@^2.0.1": - "integrity" "sha1-0vD3N9FrBhXnKmk17QQhRXLVb5c= sha512-N0XH6lqDtFH84JxptQoZYmloF4nzrQqqrAymNj+/gW60AO2AZgOcf4O/nUXJcYfyQkqvMo9lSupBZmmgvuVXlw==" - "resolved" "https://registry.npmjs.org/unzip-response/-/unzip-response-2.0.1.tgz" - "version" "2.0.1" - -"update-notifier@^2.3.0": - "integrity" "sha512-gwMdhgJHGuj/+wHJJs9e6PcCszpxR1b236igrOkUofGhqJuG+amlIKwApH1IW1WWl7ovZxsX49lMBWLxSdm5Dw==" - "resolved" "https://registry.npmjs.org/update-notifier/-/update-notifier-2.5.0.tgz" - "version" "2.5.0" - dependencies: - "boxen" "^1.2.1" - "chalk" "^2.0.1" - "configstore" "^3.0.0" - "import-lazy" "^2.1.0" - "is-ci" "^1.0.10" - "is-installed-globally" "^0.1.0" - "is-npm" "^1.0.0" - "latest-version" "^3.0.0" - "semver-diff" "^2.0.0" - "xdg-basedir" "^3.0.0" - -"update-notifier@^4.1.0": - "integrity" "sha512-Yld6Z0RyCYGB6ckIjffGOSOmHXj1gMeE7aROz4MG+XMkmixBX4jUngrGXNYz7wPKBmtoD4MnBa2Anu7RSKht/A==" - "resolved" "https://registry.npmjs.org/update-notifier/-/update-notifier-4.1.3.tgz" - "version" "4.1.3" - dependencies: - "boxen" "^4.2.0" - "chalk" "^3.0.0" - "configstore" "^5.0.1" - "has-yarn" "^2.1.0" - "import-lazy" "^2.1.0" - "is-ci" "^2.0.0" - "is-installed-globally" "^0.3.1" - "is-npm" "^4.0.0" - "is-yarn-global" "^0.3.0" - "latest-version" "^5.0.0" - "pupa" "^2.0.1" - "semver-diff" "^3.1.1" - "xdg-basedir" "^4.0.0" - -"url-join@^4.0.1": - "integrity" "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==" - "resolved" "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz" - "version" "4.0.1" - -"url-parse-lax@^1.0.0": - "integrity" "sha1-evjzA2Rem9eaJy56FKxovAYJ2nM= sha512-BVA4lR5PIviy2PMseNd2jbFQ+jwSwQGdJejf5ctd1rEXt0Ypd7yanUK9+lYechVlN5VaTJGsu2U/3MDDu6KgBA==" - "resolved" "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "prepend-http" "^1.0.1" - -"url-parse-lax@^3.0.0": - "integrity" "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww= sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==" - "resolved" "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "prepend-http" "^2.0.0" - -"use-subscription@1.5.1": - "integrity" "sha512-Xv2a1P/yReAjAbhylMfFplFKj9GssgTwN7RlcTxBujFQcloStWNDQdc4g4NRWH9xS4i/FDk04vQBptAXoF3VcA==" - "resolved" "https://registry.npmjs.org/use-subscription/-/use-subscription-1.5.1.tgz" - "version" "1.5.1" - dependencies: - "object-assign" "^4.1.1" - -"user-home@^2.0.0": - "integrity" "sha1-nHC/2Babwdy/SGBODwS4tJzenp8= sha512-KMWqdlOcjCYdtIJpicDSFBQ8nFwS2i9sslAd6f4+CBGcU4gist2REnr2fxj2YocvJFxSF3ZOHLYLVZnUxv4BZQ==" - "resolved" "https://registry.npmjs.org/user-home/-/user-home-2.0.0.tgz" - "version" "2.0.0" - dependencies: - "os-homedir" "^1.0.0" - -"user-meta@^1.0.0": - "integrity" "sha512-Q/opMgFhVbBkdlTs44UKzV7L5Uj2zrJ4MVPXTTzJmrU1bHb2cX6wJzBIqEf1gROTzZIH8u39WmHsa5EvfnMPrw==" - 
"resolved" "https://registry.npmjs.org/user-meta/-/user-meta-1.0.0.tgz" - "version" "1.0.0" - dependencies: - "rc" "^1.2.1" - -"util-deprecate@^1.0.2", "util-deprecate@~1.0.1": - "integrity" "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" - "resolved" "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" - "version" "1.0.2" - -"uuid@^7.0.3": - "integrity" "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==" - "resolved" "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz" - "version" "7.0.3" - -"uvu@^0.5.0": - "integrity" "sha512-brFwqA3FXzilmtnIyJ+CxdkInkY/i4ErvP7uV0DnUVxQcQ55reuHphorpF+tZoVHK2MniZ/VJzI7zJQoc9T9Yw==" - "resolved" "https://registry.npmjs.org/uvu/-/uvu-0.5.3.tgz" - "version" "0.5.3" - dependencies: - "dequal" "^2.0.0" - "diff" "^5.0.0" - "kleur" "^4.0.3" - "sade" "^1.7.3" - -"v8-compile-cache-lib@^3.0.0": - "integrity" "sha512-mpSYqfsFvASnSn5qMiwrr4VKfumbPyONLCOPmsR3A6pTY/r0+tSaVbgPWSAIuzbk3lCTa+FForeTiO+wBQGkjA==" - "resolved" "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.0.tgz" - "version" "3.0.0" - -"validate-npm-package-name@^3.0.0": - "integrity" "sha1-X6kS2B630MdK/BQN5zF/DKffQ34= sha512-M6w37eVCMMouJ9V/sdPGnC5H4uDr73/+xdq0FBLO3TFFX1+7wiUY6Es328NN+y43tmY+doUdN9g9J21vqB7iLw==" - "resolved" "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-3.0.0.tgz" - "version" "3.0.0" - dependencies: - "builtins" "^1.0.3" - -"vfile-matter@^3.0.1": - "integrity" "sha512-CAAIDwnh6ZdtrqAuxdElUqQRQDQgbbIrYtDYI8gCjXS1qQ+1XdLoK8FIZWxJwn0/I+BkSSZpar3SOgjemQz4fg==" - "resolved" "https://registry.npmjs.org/vfile-matter/-/vfile-matter-3.0.1.tgz" - "version" "3.0.1" - dependencies: - "@types/js-yaml" "^4.0.0" - "is-buffer" "^2.0.0" - "js-yaml" "^4.0.0" - -"vfile-message@^3.0.0": - "integrity" "sha512-QjSNP6Yxzyycd4SVOtmKKyTsSvClqBPJcd00Z0zuPj3hOIjg0rUPG6DbFGPvUKRgYyaIWLPKpuEclcuvb3H8qA==" - "resolved" "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.2.tgz" - "version" "3.1.2" - dependencies: - "@types/unist" "^2.0.0" - "unist-util-stringify-position" "^3.0.0" - -"vfile@^5.0.0", "vfile@^5.3.0": - "integrity" "sha512-w0PLIugRY3Crkgw89TeMvHCzqCs/zpreR31hl4D92y6SOE07+bfJe+dK5Q2akwS+i/c801kzjoOr9gMcTe6IAA==" - "resolved" "https://registry.npmjs.org/vfile/-/vfile-5.3.2.tgz" - "version" "5.3.2" - dependencies: - "@types/unist" "^2.0.0" - "is-buffer" "^2.0.0" - "unist-util-stringify-position" "^3.0.0" - "vfile-message" "^3.0.0" - -"web-streams-polyfill@^3.0.3": - "integrity" "sha512-EqPmREeOzttaLRm5HS7io98goBgZ7IVz79aDvqjD0kYXLtFZTc0T/U6wHTPKyIjb+MdN7DFIIX6hgdBEpWmfPA==" - "resolved" "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.0.tgz" - "version" "3.2.0" - -"webpack-merge@^4.2.2": - "integrity" "sha512-TUE1UGoTX2Cd42j3krGYqObZbOD+xF7u28WB7tfUordytSjbWTIjK/8V0amkBfTYN4/pB/GIDlJZZ657BGG19g==" - "resolved" "https://registry.npmjs.org/webpack-merge/-/webpack-merge-4.2.2.tgz" - "version" "4.2.2" - dependencies: - "lodash" "^4.17.15" - -"which-module@^2.0.0": - "integrity" "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==" - "resolved" "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz" - "version" "2.0.0" - -"which@^1.2.9": - "integrity" "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==" - "resolved" 
"https://registry.npmjs.org/which/-/which-1.3.1.tgz" - "version" "1.3.1" - dependencies: - "isexe" "^2.0.0" - -"which@^1.3.0": - "integrity" "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==" - "resolved" "https://registry.npmjs.org/which/-/which-1.3.1.tgz" - "version" "1.3.1" - dependencies: - "isexe" "^2.0.0" - -"which@^2.0.2": - "integrity" "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==" - "resolved" "https://registry.npmjs.org/which/-/which-2.0.2.tgz" - "version" "2.0.2" - dependencies: - "isexe" "^2.0.0" - -"widest-line@^2.0.0": - "integrity" "sha512-Ba5m9/Fa4Xt9eb2ELXt77JxVDV8w7qQrH0zS/TWSJdLyAwQjWoOzpzj5lwVftDz6n/EOu3tNACS84v509qwnJA==" - "resolved" "https://registry.npmjs.org/widest-line/-/widest-line-2.0.1.tgz" - "version" "2.0.1" - dependencies: - "string-width" "^2.1.1" - -"widest-line@^3.1.0": - "integrity" "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==" - "resolved" "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz" - "version" "3.1.0" - dependencies: - "string-width" "^4.0.0" - -"wrap-ansi@^5.1.0": - "integrity" "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==" - "resolved" "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz" - "version" "5.1.0" - dependencies: - "ansi-styles" "^3.2.0" - "string-width" "^3.0.0" - "strip-ansi" "^5.0.0" - -"wrappy@1": - "integrity" "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - "resolved" "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" - "version" "1.0.2" - -"write-file-atomic@^2.0.0": - "integrity" "sha512-GaETH5wwsX+GcnzhPgKcKjJ6M2Cq3/iZp1WyY/X1CSqrW+jVNM9Y7D8EC2sM4ZG/V8wZlSniJnCKWPmBYAucRQ==" - "resolved" "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.3.tgz" - "version" "2.4.3" - dependencies: - "graceful-fs" "^4.1.11" - "imurmurhash" "^0.1.4" - "signal-exit" "^3.0.2" - -"write-file-atomic@^3.0.0": - "integrity" "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==" - "resolved" "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz" - "version" "3.0.3" - dependencies: - "imurmurhash" "^0.1.4" - "is-typedarray" "^1.0.0" - "signal-exit" "^3.0.2" - "typedarray-to-buffer" "^3.1.5" - -"xdg-basedir@^3.0.0": - "integrity" "sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ= sha512-1Dly4xqlulvPD3fZUQJLY+FUIeqN3N2MM3uqe4rCJftAvOjFa3jFGfctOgluGx4ahPbUCsZkmJILiP0Vi4T6lQ==" - "resolved" "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-3.0.0.tgz" - "version" "3.0.0" - -"xdg-basedir@^4.0.0": - "integrity" "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==" - "resolved" "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz" - "version" "4.0.0" - -"xtend@^4.0.0", "xtend@^4.0.2", "xtend@~4.0.1": - "integrity" "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" - "resolved" "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" - "version" "4.0.2" - -"y18n@^4.0.0": - "integrity" "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==" - "resolved" "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz" - "version" "4.0.3" - -"yallist@^2.1.2": - "integrity" "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= 
-  "integrity" "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A=="
-  "resolved" "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz"
-  "version" "2.1.2"
-
-"yallist@^4.0.0":
-  "integrity" "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
-  "resolved" "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz"
-  "version" "4.0.0"
-
-"yaml@^1.10.0":
-  "integrity" "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg=="
-  "resolved" "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz"
-  "version" "1.10.2"
-
-"yaml@^1.10.2":
-  "integrity" "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg=="
-  "resolved" "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz"
-  "version" "1.10.2"
-
-"yaml@^2.0.0-1":
-  "integrity" "sha512-JbfdlHKGP2Ik9IHylzWlGd4pPK++EU46/IxMykphS2ZKw7a7h+dHNmcXObLgpRDriBY+rpWslldikckX8oruWQ=="
-  "resolved" "https://registry.npmjs.org/yaml/-/yaml-2.0.0.tgz"
-  "version" "2.0.0"
-
-"yargs-parser@^15.0.1":
-  "integrity" "sha512-/MVEVjTXy/cGAjdtQf8dW3V9b97bPN7rNn8ETj6BmAQL7ibC7O1Q9SPJbGjgh3SlwoBNXMzj/ZGIj8mBgl12YA=="
-  "resolved" "https://registry.npmjs.org/yargs-parser/-/yargs-parser-15.0.3.tgz"
-  "version" "15.0.3"
-  dependencies:
-    "camelcase" "^5.0.0"
-    "decamelize" "^1.2.0"
-
-"yargs@^14.2.3":
-  "integrity" "sha512-ZbotRWhF+lkjijC/VhmOT9wSgyBQ7+zr13+YLkhfsSiTriYsMzkTUFP18pFhWwBeMa5gUc1MzbhrO6/VB7c9Xg=="
-  "resolved" "https://registry.npmjs.org/yargs/-/yargs-14.2.3.tgz"
-  "version" "14.2.3"
-  dependencies:
-    "cliui" "^5.0.0"
-    "decamelize" "^1.2.0"
-    "find-up" "^3.0.0"
-    "get-caller-file" "^2.0.1"
-    "require-directory" "^2.1.1"
-    "require-main-filename" "^2.0.0"
-    "set-blocking" "^2.0.0"
-    "string-width" "^3.0.0"
-    "which-module" "^2.0.0"
-    "y18n" "^4.0.0"
-    "yargs-parser" "^15.0.1"
-
-"yn@3.1.1":
-  "integrity" "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q=="
-  "resolved" "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz"
-  "version" "3.1.1"
-
-"zwitch@^2.0.0":
-  "integrity" "sha512-JZxotl7SxAJH0j7dN4pxsTV6ZLXoLdGME+PsjkL/DaBrVryK9kTGq06GfKrwcSOqypP+fdXGoCHE36b99fWVoA=="
-  "resolved" "https://registry.npmjs.org/zwitch/-/zwitch-2.0.2.tgz"
-  "version" "2.0.2"