From da3fe1bed414f94d8de7608d5d9d1640975a9141 Mon Sep 17 00:00:00 2001 From: Aaron Leopold <36278431+aaronleopold@users.noreply.github.com> Date: Sun, 30 Oct 2022 10:05:17 -0700 Subject: [PATCH] misc dx + bring in ci (#80) * Update GHA CI build (#72) * Update GHA CI build * Update job for current directory structure * reorg: prelude * dx, factory pattern, ci * bump prisma * wip: dao * oop * oop? Co-authored-by: Matt Boulanger --- .github/actions/pnpm-setup/action.yaml | 26 ++ .github/actions/prisma-setup/action.yaml | 18 ++ .github/actions/rust-setup/action.yaml | 22 ++ .github/workflows/ci.yaml | 88 +++++++ .gitignore | 3 +- Cargo.lock | 66 ++--- Cargo.toml | 8 +- apps/desktop/dist/.placeholder | 0 apps/server/src/config/state.rs | 2 +- apps/server/src/errors.rs | 2 +- apps/server/src/middleware/auth.rs | 2 +- apps/server/src/routers/api/auth.rs | 3 +- apps/server/src/routers/api/epub.rs | 2 +- apps/server/src/routers/api/filesystem.rs | 2 +- apps/server/src/routers/api/library.rs | 24 +- apps/server/src/routers/api/log.rs | 2 +- apps/server/src/routers/api/media.rs | 40 ++- apps/server/src/routers/api/mod.rs | 4 +- apps/server/src/routers/api/reading_list.rs | 155 ++++++----- apps/server/src/routers/api/series.rs | 32 ++- apps/server/src/routers/api/tag.rs | 2 +- apps/server/src/routers/api/user.rs | 18 +- apps/server/src/routers/opds.rs | 4 +- apps/server/src/routers/ws.rs | 2 +- apps/server/src/utils/auth.rs | 2 +- apps/server/src/utils/http.rs | 2 +- common/interface/package.json | 1 + core/integration-tests/tests/epub.rs | 4 +- core/integration-tests/tests/rar.rs | 10 +- core/integration-tests/tests/scanner.rs | 4 +- core/integration-tests/tests/utils.rs | 8 +- core/integration-tests/tests/zip.rs | 8 +- core/prisma/Cargo.toml | 8 +- core/src/config/env.rs | 2 +- core/src/config/mod.rs | 3 - core/src/db/dao/library_dao.rs | 91 +++++++ core/src/db/dao/library_options_dao.rs | 99 ++++++++ core/src/db/dao/media_dao.rs | 213 ++++++++++++++++ core/src/db/dao/mod.rs | 107 ++++++++ core/src/db/dao/read_progress_dao.rs | 132 ++++++++++ core/src/db/dao/reading_list_dao.rs | 109 ++++++++ core/src/db/dao/series_dao.rs | 88 +++++++ core/src/db/migration.rs | 2 +- core/src/db/mod.rs | 4 + core/src/{types => db}/models/epub.rs | 2 +- core/src/{types => db}/models/library.rs | 0 core/src/{types => db}/models/log.rs | 24 +- core/src/{types => db}/models/media.rs | 90 ++----- core/src/{types => db}/models/mod.rs | 9 +- .../src/{types => db}/models/read_progress.rs | 0 core/src/db/models/reading_list.rs | 25 ++ core/src/{types => db}/models/series.rs | 2 +- core/src/{types => db}/models/tag.rs | 0 core/src/{types => db}/models/user.rs | 0 core/src/db/utils.rs | 2 +- core/src/event/event_manager.rs | 2 +- core/src/event/mod.rs | 2 +- core/src/fs/image.rs | 6 +- core/src/fs/media_file/epub.rs | 10 +- core/src/fs/media_file/mod.rs | 45 ++-- core/src/fs/media_file/pdf.rs | 4 +- core/src/fs/media_file/rar.rs | 33 +-- core/src/fs/media_file/zip.rs | 22 +- core/src/fs/scanner/library_scanner.rs | 25 +- core/src/fs/scanner/mod.rs | 2 +- core/src/fs/scanner/utils.rs | 240 +++++++++++------- core/src/job/jobs.rs | 5 +- core/src/job/mod.rs | 3 +- core/src/job/pool.rs | 3 +- core/src/job/runner.rs | 3 +- core/src/lib.rs | 9 +- core/src/opds/author.rs | 2 +- core/src/opds/entry.rs | 2 +- core/src/opds/feed.rs | 2 +- core/src/opds/link.rs | 2 +- core/src/opds/opensearch.rs | 2 +- core/src/opds/util.rs | 2 +- core/src/{config => prelude}/context.rs | 13 +- core/src/{types => prelude}/enums.rs | 6 + core/src/{types => 
prelude}/errors.rs | 0 .../models => prelude/fs}/list_directory.rs | 0 core/src/prelude/fs/media_file.rs | 45 ++++ core/src/prelude/fs/mod.rs | 5 + core/src/{types => prelude}/mod.rs | 28 +- core/src/{types => prelude}/server/http.rs | 0 core/src/{types => prelude}/server/inputs.rs | 11 +- core/src/{types => prelude}/server/mod.rs | 0 .../src/{types => prelude}/server/pageable.rs | 18 +- core/src/{types => prelude}/server/query.rs | 55 ++-- core/src/types/models/readinglist.rs | 29 --- package.json | 1 + scripts/prisma-sed.sh | 4 + scripts/system-setup.sh | 116 +++++++++ 93 files changed, 1767 insertions(+), 568 deletions(-) create mode 100644 .github/actions/pnpm-setup/action.yaml create mode 100644 .github/actions/prisma-setup/action.yaml create mode 100644 .github/actions/rust-setup/action.yaml create mode 100644 .github/workflows/ci.yaml create mode 100644 apps/desktop/dist/.placeholder create mode 100644 core/src/db/dao/library_dao.rs create mode 100644 core/src/db/dao/library_options_dao.rs create mode 100644 core/src/db/dao/media_dao.rs create mode 100644 core/src/db/dao/mod.rs create mode 100644 core/src/db/dao/read_progress_dao.rs create mode 100644 core/src/db/dao/reading_list_dao.rs create mode 100644 core/src/db/dao/series_dao.rs rename core/src/{types => db}/models/epub.rs (97%) rename core/src/{types => db}/models/library.rs (100%) rename core/src/{types => db}/models/log.rs (79%) rename core/src/{types => db}/models/media.rs (60%) rename core/src/{types => db}/models/mod.rs (61%) rename core/src/{types => db}/models/read_progress.rs (100%) create mode 100644 core/src/db/models/reading_list.rs rename core/src/{types => db}/models/series.rs (98%) rename core/src/{types => db}/models/tag.rs (100%) rename core/src/{types => db}/models/user.rs (100%) rename core/src/{config => prelude}/context.rs (95%) rename core/src/{types => prelude}/enums.rs (96%) rename core/src/{types => prelude}/errors.rs (100%) rename core/src/{types/models => prelude/fs}/list_directory.rs (100%) create mode 100644 core/src/prelude/fs/media_file.rs create mode 100644 core/src/prelude/fs/mod.rs rename core/src/{types => prelude}/mod.rs (92%) rename core/src/{types => prelude}/server/http.rs (100%) rename core/src/{types => prelude}/server/inputs.rs (90%) rename core/src/{types => prelude}/server/mod.rs (100%) rename core/src/{types => prelude}/server/pageable.rs (94%) rename core/src/{types => prelude}/server/query.rs (75%) delete mode 100644 core/src/types/models/readinglist.rs create mode 100755 scripts/prisma-sed.sh create mode 100755 scripts/system-setup.sh diff --git a/.github/actions/pnpm-setup/action.yaml b/.github/actions/pnpm-setup/action.yaml new file mode 100644 index 000000000..2bd2c870d --- /dev/null +++ b/.github/actions/pnpm-setup/action.yaml @@ -0,0 +1,26 @@ +name: PNPM Setup +description: Setup PNPM and cache PNPM dependencies +runs: + using: 'composite' + steps: + - uses: pnpm/action-setup@v2.0.1 + name: Install pnpm + id: pnpm-install + with: + version: 7 + run_install: false + + - name: Get pnpm store directory + id: pnpm-cache + shell: bash + run: | + echo "::set-output name=pnpm_cache_dir::$(pnpm store path)" + - uses: actions/cache@v3 + name: Setup pnpm cache + with: + path: ${{ steps.pnpm-cache.outputs.pnpm_cache_dir }} + key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + + - name: Install dependencies + shell: bash + run: pnpm install diff --git a/.github/actions/prisma-setup/action.yaml b/.github/actions/prisma-setup/action.yaml new file mode 100644 index 
000000000..b0b6d680f --- /dev/null +++ b/.github/actions/prisma-setup/action.yaml @@ -0,0 +1,18 @@ +name: Prisma Setup +description: Generate/cache Prisma client +runs: + using: 'composite' + steps: + - name: Cache Prisma client + id: cache-prisma + uses: actions/cache@v3 + with: + path: core/src/prisma.rs + # FIXME: maybe it is just `act`, but this keeps failing: An internal error has occurred in cache backend. + key: ${{ runner.os }}-prisma-${{ hashFiles('**/schema.prisma') }} + + - name: Generate Prisma client + working-directory: core + if: steps.cache-prisma.outputs.cache-hit != 'true' + shell: bash + run: cargo run -p prisma --release -- generate diff --git a/.github/actions/rust-setup/action.yaml b/.github/actions/rust-setup/action.yaml new file mode 100644 index 000000000..e581945c0 --- /dev/null +++ b/.github/actions/rust-setup/action.yaml @@ -0,0 +1,22 @@ +name: Rust Setup +description: Generate/cache Rust dependencies and prisma client +runs: + using: 'composite' + steps: + - name: Cache cargo registry + uses: actions/cache@v3 + with: + path: ~/.cargo/registry + key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo index + uses: actions/cache@v3 + with: + path: ~/.cargo/git + key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} + + - name: Cache cargo build + uses: actions/cache@v3 + with: + path: target + key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 000000000..696fdf4f2 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,88 @@ +name: Stump-CI + +on: + pull_request: + push: + branches: + - main + +jobs: + check-rust: + name: Rust checks + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Install rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + profile: minimal + override: true + components: rustfmt, clippy + + - name: System setup + run: CHECK_NODE=0 CHECK_CARGO=0 DEV_SETUP=0 ./scripts/system-setup.sh + + - name: Setup rust + uses: ./.github/actions/rust-setup + + - name: Setup prisma + uses: ./.github/actions/prisma-setup + + - name: Run cargo checks + run: | + cargo fmt --all -- --check + cargo clippy -- -D warnings + # - name: Run tests + # run: | + # cargo integration-tests + + check-typescript: + name: TypeScript checks + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Install Node.js + uses: actions/setup-node@v3 + with: + node-version: 14 + + - name: Setup pnpm and typescript + uses: ./.github/actions/pnpm-setup + + - name: Run type checks + run: pnpm tsc:checks + + # release: + # name: Release (${{ matrix.platform }}) + # runs-on: ${{ matrix.platform }} + # # Release only runs on push to main. TODO: can I make this trigger on tag? + # if: github.event_name != 'pull_request' + # strategy: + # fail-fast: true + # matrix: + # platform: [ubuntu-latest, macos-latest, windows-latest] + # steps: + # - name: Checkout repository + # uses: actions/checkout@v3 + + # - name: Install Rust + # uses: actions-rs/toolchain@v1 + # with: + # toolchain: stable + # profile: minimal + # override: true + # # TODO: clippy?? + # components: rustfmt, rust-src + + # # TODO: figure out caching for rust deps + + # - name: Generate Prisma client + # uses: ./.github/actions/generate-prisma-client + + # TODO: pnpm setup + # TODO: docker builds -> maybe this helps? 
https://github.com/devture/matrix-corporal/blob/master/.gitlab-ci.yml diff --git a/.gitignore b/.gitignore index 71ed0038e..1d45ed6a7 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,8 @@ node_modules $houdini *.log .eslintcache -dist +**/dist/* +!**/dist/.placeholder build # rust diff --git a/Cargo.lock b/Cargo.lock index eb1c23ea2..4d1a3c731 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1118,7 +1118,7 @@ checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" [[package]] name = "datamodel" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "bigdecimal", "chrono", @@ -1141,7 +1141,7 @@ dependencies = [ [[package]] name = "datamodel-connector" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "diagnostics", "enumflags2", @@ -1188,7 +1188,7 @@ dependencies = [ [[package]] name = "diagnostics" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "colored", "pest", @@ -1285,7 +1285,7 @@ checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" [[package]] name = "dml" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "chrono", "cuid 0.1.0", @@ -1301,7 +1301,7 @@ dependencies = [ [[package]] name = "dmmf" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "bigdecimal", "datamodel", @@ -2475,7 +2475,7 @@ dependencies = [ [[package]] name = "json-rpc-api-build" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "backtrace", "heck 0.3.3", @@ -2837,7 +2837,7 @@ dependencies = [ [[package]] name = "migration-connector" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = 
"git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "chrono", "datamodel", @@ -2851,7 +2851,7 @@ dependencies = [ [[package]] name = "migration-core" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "async-trait", "chrono", @@ -2962,7 +2962,7 @@ dependencies = [ [[package]] name = "native-types" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "serde", "serde_json", @@ -3474,7 +3474,7 @@ dependencies = [ [[package]] name = "parser-database" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "diagnostics", "either", @@ -3750,8 +3750,8 @@ dependencies = [ [[package]] name = "prisma-client-rust" -version = "0.6.1" -source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=79ab6bd700199b92103711f01d4df42e4cef62a6#79ab6bd700199b92103711f01d4df42e4cef62a6" +version = "0.6.3" +source = "git+https://github.com/Brendonovich/prisma-client-rust?tag=0.6.3#e12e6932be7d224410784b52143384cc9235da3b" dependencies = [ "base64 0.13.0", "bigdecimal", @@ -3779,8 +3779,8 @@ dependencies = [ [[package]] name = "prisma-client-rust-cli" -version = "0.6.1" -source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=79ab6bd700199b92103711f01d4df42e4cef62a6#79ab6bd700199b92103711f01d4df42e4cef62a6" +version = "0.6.3" +source = "git+https://github.com/Brendonovich/prisma-client-rust?tag=0.6.3#e12e6932be7d224410784b52143384cc9235da3b" dependencies = [ "datamodel", "prisma-client-rust-sdk", @@ -3790,12 +3790,13 @@ dependencies = [ "serde_json", "serde_path_to_error", "syn", + "thiserror", ] [[package]] name = "prisma-client-rust-sdk" -version = "0.6.1" -source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=79ab6bd700199b92103711f01d4df42e4cef62a6#79ab6bd700199b92103711f01d4df42e4cef62a6" +version = "0.6.3" +source = "git+https://github.com/Brendonovich/prisma-client-rust?tag=0.6.3#e12e6932be7d224410784b52143384cc9235da3b" dependencies = [ "convert_case 0.5.0", "datamodel", @@ -3814,12 +3815,13 @@ dependencies = [ "serde_json", "serde_path_to_error", "syn", + "thiserror", ] [[package]] name = "prisma-models" version = "0.0.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "bigdecimal", "chrono", @@ -3836,7 +3838,7 @@ dependencies = [ [[package]] name = "prisma-value" version = 
"0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "base64 0.12.3", "bigdecimal", @@ -3901,7 +3903,7 @@ dependencies = [ [[package]] name = "psl" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "datamodel", ] @@ -3953,7 +3955,7 @@ dependencies = [ [[package]] name = "query-connector" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "anyhow", "async-trait", @@ -3973,7 +3975,7 @@ dependencies = [ [[package]] name = "query-core" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "async-trait", "base64 0.12.3", @@ -4278,7 +4280,7 @@ dependencies = [ [[package]] name = "request-handlers" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "bigdecimal", "connection-string", @@ -4491,7 +4493,7 @@ dependencies = [ [[package]] name = "schema" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "datamodel-connector", "once_cell", @@ -4501,7 +4503,7 @@ dependencies = [ [[package]] name = "schema-ast" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "diagnostics", "pest", @@ -4511,7 +4513,7 @@ dependencies = [ [[package]] name = "schema-builder" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "datamodel-connector", "itertools", @@ -4965,7 +4967,7 @@ dependencies = [ [[package]] 
name = "sql-datamodel-connector" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "connection-string", "datamodel-connector", @@ -4981,12 +4983,12 @@ dependencies = [ [[package]] name = "sql-ddl" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" [[package]] name = "sql-migration-connector" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "chrono", "connection-string", @@ -5013,7 +5015,7 @@ dependencies = [ [[package]] name = "sql-query-connector" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "anyhow", "async-trait", @@ -5045,7 +5047,7 @@ dependencies = [ [[package]] name = "sql-schema-describer" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "async-trait", "bigdecimal", @@ -6111,7 +6113,7 @@ checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9" [[package]] name = "user-facing-error-macros" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "proc-macro2", "quote", @@ -6121,7 +6123,7 @@ dependencies = [ [[package]] name = "user-facing-errors" version = "0.1.0" -source = "git+https://github.com/Brendonovich/prisma-engines?rev=5bacc96c3527f6a9e50c8011528fb64ac04e350b#5bacc96c3527f6a9e50c8011528fb64ac04e350b" +source = "git+https://github.com/Brendonovich/prisma-engines?rev=06a1b97ff1ca597521ec9f3d10c1e274065f5e93#06a1b97ff1ca597521ec9f3d10c1e274065f5e93" dependencies = [ "backtrace", "indoc", diff --git a/Cargo.toml b/Cargo.toml index 7f8813ad3..3f77d3580 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,13 +14,7 @@ version = "0.0.0" rust-version = "1.64.0" [workspace.dependencies] -# prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.1", features = [ -# 'rspc', -# # 'sqlite-create-many', -# # "migrations", -# # "sqlite", -# ] } -prisma-client-rust = { git = 
"https://github.com/Brendonovich/prisma-client-rust", rev = "79ab6bd700199b92103711f01d4df42e4cef62a6", features = [ +prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.3", features = [ 'rspc', # 'sqlite-create-many', "migrations", diff --git a/apps/desktop/dist/.placeholder b/apps/desktop/dist/.placeholder new file mode 100644 index 000000000..e69de29bb diff --git a/apps/server/src/config/state.rs b/apps/server/src/config/state.rs index 7b4866c08..ded26ccdc 100644 --- a/apps/server/src/config/state.rs +++ b/apps/server/src/config/state.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use axum::Extension; -use stump_core::config::Ctx; +use stump_core::prelude::Ctx; // TODO: I don't feel like I need this module... Unless I add things to it.. diff --git a/apps/server/src/errors.rs b/apps/server/src/errors.rs index f196cc7cb..6fa7273a0 100644 --- a/apps/server/src/errors.rs +++ b/apps/server/src/errors.rs @@ -8,7 +8,7 @@ use prisma_client_rust::{ }; use stump_core::{ event::InternalCoreTask, - types::{errors::ProcessFileError, CoreError}, + prelude::{CoreError, ProcessFileError}, }; use tokio::sync::mpsc; diff --git a/apps/server/src/middleware/auth.rs b/apps/server/src/middleware/auth.rs index 9e187e6e1..d7b410881 100644 --- a/apps/server/src/middleware/auth.rs +++ b/apps/server/src/middleware/auth.rs @@ -12,7 +12,7 @@ use prisma_client_rust::{ prisma_errors::query_engine::{RecordNotFound, UniqueKeyViolation}, QueryError, }; -use stump_core::{config::Ctx, prisma::user, types::User}; +use stump_core::{db::models::User, prelude::Ctx, prisma::user}; use tracing::{error, trace}; use crate::utils::{decode_base64_credentials, verify_password}; diff --git a/apps/server/src/routers/api/auth.rs b/apps/server/src/routers/api/auth.rs index f8f1cf922..44fe185f3 100644 --- a/apps/server/src/routers/api/auth.rs +++ b/apps/server/src/routers/api/auth.rs @@ -4,8 +4,9 @@ use axum::{ }; use axum_sessions::extractors::{ReadableSession, WritableSession}; use stump_core::{ + db::models::User, + prelude::{LoginOrRegisterArgs, UserRole}, prisma::{user, user_preferences}, - types::{enums::UserRole, LoginOrRegisterArgs, User}, }; use crate::{ diff --git a/apps/server/src/routers/api/epub.rs b/apps/server/src/routers/api/epub.rs index bb8f9fe47..b2b80dfda 100644 --- a/apps/server/src/routers/api/epub.rs +++ b/apps/server/src/routers/api/epub.rs @@ -5,9 +5,9 @@ use axum::{ }; use axum_sessions::extractors::ReadableSession; use stump_core::{ + db::models::Epub, fs::epub, prisma::{media, read_progress}, - types::Epub, }; use crate::{ diff --git a/apps/server/src/routers/api/filesystem.rs b/apps/server/src/routers/api/filesystem.rs index 057eb9251..2fdfb6381 100644 --- a/apps/server/src/routers/api/filesystem.rs +++ b/apps/server/src/routers/api/filesystem.rs @@ -1,7 +1,7 @@ use axum::{extract::Query, middleware::from_extractor, routing::post, Json, Router}; use axum_sessions::extractors::ReadableSession; use std::path::Path; -use stump_core::types::{ +use stump_core::prelude::{ DirectoryListing, DirectoryListingFile, DirectoryListingInput, Pageable, PagedRequestParams, }; diff --git a/apps/server/src/routers/api/library.rs b/apps/server/src/routers/api/library.rs index 3aaeeb19c..a4761ff7c 100644 --- a/apps/server/src/routers/api/library.rs +++ b/apps/server/src/routers/api/library.rs @@ -11,18 +11,20 @@ use std::{path, str::FromStr}; use tracing::{debug, error, trace}; use stump_core::{ - db::utils::PrismaCountTrait, + db::{ + models::{LibrariesStats, Library, 
LibraryScanMode, Series}, + utils::PrismaCountTrait, + }, fs::{image, media_file}, job::LibraryScanJob, + prelude::{ + CreateLibraryArgs, Pageable, PagedRequestParams, QueryOrder, UpdateLibraryArgs, + }, prisma::{ library, library_options, media, series::{self, OrderByParam as SeriesOrderByParam}, tag, }, - types::{ - CreateLibraryArgs, FindManyTrait, LibrariesStats, Library, LibraryScanMode, - Pageable, PagedRequestParams, QueryOrder, Series, UpdateLibraryArgs, - }, }; use crate::{ @@ -155,19 +157,19 @@ async fn get_library_series( let order_by_param: SeriesOrderByParam = QueryOrder::from(page_params.clone()).try_into()?; - let base_query = db + let mut query = db .series() // TODO: add media relation count.... .find_many(vec![series::library_id::equals(Some(id.clone()))]) .order_by(order_by_param); - let series = match unpaged { - true => base_query.exec().await?, - false => base_query.paginated(page_params.clone()).exec().await?, - }; + if !unpaged { + let (skip, take) = page_params.get_skip_take(); + query = query.skip(skip).take(take); + } + let series = query.exec().await?; let series_ids = series.iter().map(|s| s.id.clone()).collect(); - let media_counts = db.series_media_count(series_ids).await?; let series = series diff --git a/apps/server/src/routers/api/log.rs b/apps/server/src/routers/api/log.rs index 8fd3c1b3a..08385060e 100644 --- a/apps/server/src/routers/api/log.rs +++ b/apps/server/src/routers/api/log.rs @@ -2,7 +2,7 @@ use axum::{middleware::from_extractor, routing::get, Json, Router}; use axum_sessions::extractors::ReadableSession; use prisma_client_rust::chrono::{DateTime, Utc}; use std::fs::File; -use stump_core::{config::logging::get_log_file, types::LogMetadata}; +use stump_core::{config::logging::get_log_file, db::models::LogMetadata}; use crate::{ errors::{ApiError, ApiResult}, diff --git a/apps/server/src/routers/api/media.rs b/apps/server/src/routers/api/media.rs index 8a9bf7857..45b7cedcd 100644 --- a/apps/server/src/routers/api/media.rs +++ b/apps/server/src/routers/api/media.rs @@ -8,16 +8,16 @@ use axum_sessions::extractors::ReadableSession; use prisma_client_rust::{raw, Direction}; use stump_core::{ config::get_config_dir, - db::utils::PrismaCountTrait, + db::{ + models::{Media, ReadProgress}, + utils::PrismaCountTrait, + }, fs::{image, media_file}, + prelude::{ContentType, Pageable, PagedRequestParams, QueryOrder}, prisma::{ media::{self, OrderByParam as MediaOrderByParam}, read_progress, user, }, - types::{ - ContentType, FindManyTrait, Media, Pageable, PagedRequestParams, QueryOrder, - ReadProgress, - }, }; use tracing::trace; @@ -64,7 +64,7 @@ async fn get_media( let order_by_param: MediaOrderByParam = QueryOrder::from(page_params.clone()).try_into()?; - let base_query = db + let mut query = db .media() .find_many(vec![]) .with(media::read_progresses::fetch(vec![ @@ -72,29 +72,25 @@ async fn get_media( ])) .order_by(order_by_param); - if unpaged { - return Ok(Json( - base_query - .exec() - .await? - .into_iter() - .map(|m| m.into()) - .collect::>() - .into(), - )); + if !unpaged { + let (skip, take) = page_params.get_skip_take(); + query = query.skip(skip).take(take); } - let count = db.media_count().await?; - - let media = base_query - .paginated(page_params.clone()) + let media = query .exec() .await? 
.into_iter() .map(|m| m.into()) .collect::>(); - Ok(Json((media, count, page_params).into())) + if unpaged { + return Ok(Json(Pageable::from(media))); + } + + let count = db.media_count().await?; + + Ok(Json(Pageable::from((media, count, page_params)))) } /// Get all media with identical checksums. This heavily implies duplicate files. @@ -305,7 +301,7 @@ async fn get_media_thumbnail( if webp_path.exists() { trace!("Found webp thumbnail for media {}", id); - return Ok((ContentType::WEBP, image::get_image_bytes(webp_path)?).into()); + return Ok((ContentType::WEBP, image::get_bytes(webp_path)?).into()); } let book = db diff --git a/apps/server/src/routers/api/mod.rs b/apps/server/src/routers/api/mod.rs index 5d8220274..17ca577bb 100644 --- a/apps/server/src/routers/api/mod.rs +++ b/apps/server/src/routers/api/mod.rs @@ -2,7 +2,7 @@ use axum::{ routing::{get, post}, Extension, Json, Router, }; -use stump_core::types::{ClaimResponse, StumpVersion}; +use stump_core::prelude::{ClaimResponse, StumpVersion}; use crate::{config::state::State, errors::ApiResult}; @@ -13,10 +13,10 @@ mod job; mod library; mod log; mod media; +mod reading_list; mod series; mod tag; mod user; -mod reading_list; pub(crate) fn mount() -> Router { Router::new().nest( diff --git a/apps/server/src/routers/api/reading_list.rs b/apps/server/src/routers/api/reading_list.rs index e0a34f8bf..410d9013c 100644 --- a/apps/server/src/routers/api/reading_list.rs +++ b/apps/server/src/routers/api/reading_list.rs @@ -1,126 +1,143 @@ -use axum::{ - routing::{get, post, put, delete}, - extract::Path, - Extension, Json, Router, -}; -use axum_sessions::extractors::{ReadableSession, WritableSession}; -use stump_core::{ - prisma::{reading_list, media, user}, - types::{User, readinglist::ReadingList, Media, readinglist::CreateReadingList}, -}; -use tracing::log::trace; use crate::{ config::state::State, errors::{ApiError, ApiResult}, - utils::{get_session_user}, + utils::get_session_user, }; +use axum::{extract::Path, routing::get, Extension, Json, Router}; +use axum_sessions::extractors::ReadableSession; +use stump_core::{ + db::models::ReadingList, + prelude::CreateReadingList, + prisma::{media, reading_list, user}, +}; +use tracing::log::trace; pub(crate) fn mount() -> Router { - Router::new() - .route("/reading-list", get(get_reading_list).post(create_reading_list)) - .nest( - "/reading-list/:id", - Router::new() - .route("/", get(get_reading_list_by_id).put(update_reading_list).delete(delete_reading_list_by_id)), - ) + Router::new() + .route( + "/reading-list", + get(get_reading_list).post(create_reading_list), + ) + .nest( + "/reading-list/:id", + Router::new().route( + "/", + get(get_reading_list_by_id) + .put(update_reading_list) + .delete(delete_reading_list_by_id), + ), + ) } async fn get_reading_list( - Extension(ctx): State, + Extension(ctx): State, session: ReadableSession, ) -> ApiResult>> { let user_id = get_session_user(&session)?.id; - Ok(Json( - ctx.db - .reading_list() - .find_many(vec![ - reading_list::creating_user_id::equals(user_id), - ]) + Ok(Json( + ctx.db + .reading_list() + .find_many(vec![reading_list::creating_user_id::equals(user_id)]) .exec() .await? 
.into_iter() .map(|u| u.into()) .collect::>(), - )) + )) } async fn create_reading_list( - Extension(ctx): State, + Extension(ctx): State, Json(input): Json, session: ReadableSession, ) -> ApiResult> { let db = ctx.get_db(); let user_id = get_session_user(&session)?.id; - let created_reading_list = db - .reading_list() - .create( - input.id.to_owned(), - user::id::equals(user_id.clone()), - vec![reading_list::media::connect(input.media_ids.iter().map(|id| media::id::equals(id.to_string())).collect())] - ) - .exec() - .await?; - - Ok(Json(created_reading_list.into())) + let created_reading_list = db + .reading_list() + .create( + input.id.to_owned(), + user::id::equals(user_id.clone()), + vec![reading_list::media::connect( + input + .media_ids + .iter() + .map(|id| media::id::equals(id.to_string())) + .collect(), + )], + ) + .exec() + .await?; + + Ok(Json(created_reading_list.into())) } async fn get_reading_list_by_id( Path(id): Path, - Extension(ctx): State, + Extension(ctx): State, session: ReadableSession, ) -> ApiResult> { - let user_id = get_session_user(&session)?.id; + let _user_id = get_session_user(&session)?.id; let db = ctx.get_db(); - let reading_list_id = db - .reading_list() - .find_unique(reading_list::id::equals(id.clone())) - .exec() - .await?; + let reading_list_id = db + .reading_list() + .find_unique(reading_list::id::equals(id.clone())) + .exec() + .await?; - if reading_list_id.is_none() { - return Err(ApiError::NotFound(format!( + if reading_list_id.is_none() { + return Err(ApiError::NotFound(format!( "Reading List with id {} not found", id ))); - } - - Ok(Json(reading_list_id.unwrap().into())) + } + + // TODO: access control for reading lists... + + Ok(Json(reading_list_id.unwrap().into())) } async fn update_reading_list( Path(id): Path, - Extension(ctx): State, + Extension(ctx): State, Json(input): Json, ) -> ApiResult> { let db = ctx.get_db(); - let created_reading_list: _ = db - .reading_list() - .update(reading_list::id::equals(id.clone()), vec![ - reading_list::media::connect(input.media_ids.iter().map(|id| media::id::equals(id.to_string())).collect()) - ]) - .exec() - .await?; - - Ok(Json(created_reading_list.into())) + let created_reading_list: _ = db + .reading_list() + .update( + reading_list::id::equals(id.clone()), + vec![reading_list::media::connect( + input + .media_ids + .iter() + .map(|id| media::id::equals(id.to_string())) + .collect(), + )], + ) + .exec() + .await?; + + Ok(Json(created_reading_list.into())) } async fn delete_reading_list_by_id( Path(id): Path, - Extension(ctx): State, + Extension(ctx): State, ) -> ApiResult> { - let db = ctx.get_db(); + let db = ctx.get_db(); trace!("Attempting to delete reading list with ID {}", &id); - let deleted = db - .reading_list() - .delete(reading_list::id::equals(id.clone())) - .exec() - .await?; + let deleted = db + .reading_list() + .delete(reading_list::id::equals(id.clone())) + .exec() + .await?; Ok(Json(deleted.id)) -} \ No newline at end of file +} diff --git a/apps/server/src/routers/api/series.rs b/apps/server/src/routers/api/series.rs index 3549d3b87..8a655743e 100644 --- a/apps/server/src/routers/api/series.rs +++ b/apps/server/src/routers/api/series.rs @@ -8,16 +8,16 @@ use axum_sessions::extractors::ReadableSession; use prisma_client_rust::Direction; use serde::Deserialize; use stump_core::{ - db::utils::PrismaCountTrait, + db::{ + models::{Media, Series}, + utils::PrismaCountTrait, + }, fs::{image, media_file}, + prelude::{ContentType, Pageable, PagedRequestParams, QueryOrder}, prisma::{ 
media::{self, OrderByParam as MediaOrderByParam}, read_progress, series, }, - types::{ - ContentType, FindManyTrait, Media, Pageable, PagedRequestParams, QueryOrder, - Series, - }, }; use tracing::trace; @@ -167,7 +167,7 @@ async fn get_series_thumbnail( if let Some(webp_path) = image::get_thumbnail_path(&media.id) { trace!("Found webp thumbnail for series {}", &id); - return Ok((ContentType::WEBP, image::get_image_bytes(webp_path)?).into()); + return Ok((ContentType::WEBP, image::get_bytes(webp_path)?).into()); } Ok(media_file::get_page(media.path.as_str(), 1)?.into()) @@ -190,7 +190,7 @@ async fn get_series_media( let order_by_param: MediaOrderByParam = QueryOrder::from(page_params.clone()).try_into()?; - let base_query = db + let mut query = db .media() .find_many(vec![media::series_id::equals(Some(id.clone()))]) .with(media::read_progresses::fetch(vec![ @@ -198,16 +198,20 @@ async fn get_series_media( ])) .order_by(order_by_param); - let media = if unpaged { - base_query.exec().await? - } else { - base_query.paginated(page_params.clone()).exec().await? - }; + if !unpaged { + let (skip, take) = page_params.get_skip_take(); + query = query.skip(skip).take(take); + } - let media = media.into_iter().map(|m| m.into()).collect::>(); + let media = query + .exec() + .await? + .into_iter() + .map(Media::from) + .collect::>(); if unpaged { - return Ok(Json(media.into())); + return Ok(Json(Pageable::from(media))); } // TODO: investigate this, I am getting incorrect counts here... diff --git a/apps/server/src/routers/api/tag.rs b/apps/server/src/routers/api/tag.rs index ffb6456bc..601e92855 100644 --- a/apps/server/src/routers/api/tag.rs +++ b/apps/server/src/routers/api/tag.rs @@ -1,6 +1,6 @@ use axum::{middleware::from_extractor, routing::get, Extension, Json, Router}; use serde::Deserialize; -use stump_core::types::Tag; +use stump_core::db::models::Tag; use crate::{config::state::State, errors::ApiResult, middleware::auth::Auth}; diff --git a/apps/server/src/routers/api/user.rs b/apps/server/src/routers/api/user.rs index 6da6fc7ef..5474d956d 100644 --- a/apps/server/src/routers/api/user.rs +++ b/apps/server/src/routers/api/user.rs @@ -3,8 +3,9 @@ use axum::{ }; use axum_sessions::extractors::ReadableSession; use stump_core::{ + db::models::{User, UserPreferences}, + prelude::{LoginOrRegisterArgs, UserPreferencesUpdate}, prisma::{user, user_preferences}, - types::{LoginOrRegisterArgs, User, UserPreferences, UserPreferencesUpdate}, }; use crate::{ @@ -21,7 +22,11 @@ pub(crate) fn mount() -> Router { .nest( "/users/:id", Router::new() - .route("/", get(get_user_by_id).put(update_user)) + .route( + "/", + // TODO: admin / self guard + get(get_user_by_id).put(update_user).delete(delete_user), + ) .route( "/preferences", get(get_user_preferences).put(update_user_preferences), @@ -36,7 +41,6 @@ async fn get_users( ) -> ApiResult>> { let user = get_session_user(&session)?; - // FIXME: admin middleware if !user.is_admin() { return Err(ApiError::Forbidden( "You do not have permission to access this resource.".into(), @@ -50,8 +54,8 @@ async fn get_users( .exec() .await? 
.into_iter() - .map(|u| u.into()) - .collect::>(), + .map(User::from) + .collect(), )) } @@ -116,6 +120,10 @@ async fn update_user() -> ApiResult<()> { Err(ApiError::NotImplemented) } +async fn delete_user() -> ApiResult<()> { + Err(ApiError::NotImplemented) +} + // FIXME: remove this once I resolve the below 'TODO' async fn get_user_preferences( Path(id): Path, diff --git a/apps/server/src/routers/opds.rs b/apps/server/src/routers/opds.rs index 35e4d0247..a218450c2 100644 --- a/apps/server/src/routers/opds.rs +++ b/apps/server/src/routers/opds.rs @@ -14,8 +14,8 @@ use stump_core::{ feed::OpdsFeed, link::{OpdsLink, OpdsLinkRel, OpdsLinkType}, }, + prelude::PagedRequestParams, prisma::{library, media, read_progress, series}, - types::PagedRequestParams, }; use crate::{ @@ -479,7 +479,7 @@ async fn get_book_page( let book = book.unwrap(); if book.path.ends_with(".epub") && correct_page == 1 { - return Ok(epub::get_epub_cover(&book.path)?.into()); + return Ok(epub::get_cover(&book.path)?.into()); } Ok(media_file::get_page(book.path.as_str(), correct_page)?.into()) diff --git a/apps/server/src/routers/ws.rs b/apps/server/src/routers/ws.rs index 91ae52175..67e4956c5 100644 --- a/apps/server/src/routers/ws.rs +++ b/apps/server/src/routers/ws.rs @@ -8,7 +8,7 @@ use axum::{ }; // use axum_typed_websockets::{Message, WebSocket, WebSocketUpgrade}; use futures_util::{sink::SinkExt, stream::StreamExt}; -use stump_core::config::Ctx; +use stump_core::prelude::Ctx; use tracing::error; use crate::config::state::State; diff --git a/apps/server/src/utils/auth.rs b/apps/server/src/utils/auth.rs index f40847116..5d543cd1a 100644 --- a/apps/server/src/utils/auth.rs +++ b/apps/server/src/utils/auth.rs @@ -1,5 +1,5 @@ use axum_sessions::extractors::ReadableSession; -use stump_core::types::{DecodedCredentials, User}; +use stump_core::{db::models::User, prelude::DecodedCredentials}; use crate::errors::{ApiError, ApiResult, AuthError}; diff --git a/apps/server/src/utils/http.rs b/apps/server/src/utils/http.rs index 11b8064d1..371eb4887 100644 --- a/apps/server/src/utils/http.rs +++ b/apps/server/src/utils/http.rs @@ -8,7 +8,7 @@ use std::{ io, path::{Path, PathBuf}, }; -use stump_core::types::{ContentType, PageParams, PagedRequestParams}; +use stump_core::prelude::{ContentType, PageParams, PagedRequestParams}; use tokio::fs::File; use tokio_util::io::ReaderStream; diff --git a/common/interface/package.json b/common/interface/package.json index 94941e48f..74a0e2c74 100644 --- a/common/interface/package.json +++ b/common/interface/package.json @@ -12,6 +12,7 @@ "./components/*": "./src/components/*" }, "scripts": { + "tsc": "tsc", "check": "tsc --noEmit" }, "dependencies": { diff --git a/core/integration-tests/tests/epub.rs b/core/integration-tests/tests/epub.rs index 098d3aa47..cc129cf2a 100644 --- a/core/integration-tests/tests/epub.rs +++ b/core/integration-tests/tests/epub.rs @@ -3,12 +3,12 @@ use std::path::PathBuf; use crate::utils::{init_test, TempLibrary}; use stump_core::{ - config::Ctx, + db::models::Epub, fs::media_file::epub::{ get_epub_chapter, get_epub_resource, normalize_resource_path, }, + prelude::{ContentType, CoreResult, Ctx}, prisma::media, - types::{models::epub::Epub, ContentType, CoreResult}, }; #[tokio::test] diff --git a/core/integration-tests/tests/rar.rs b/core/integration-tests/tests/rar.rs index c2dceb813..5daf0f16d 100644 --- a/core/integration-tests/tests/rar.rs +++ b/core/integration-tests/tests/rar.rs @@ -1,13 +1,13 @@ use crate::utils::{init_test, make_tmp_file, TempLibrary}; use 
stump_core::{ - config::Ctx, + db::models::{LibraryPattern, LibraryScanMode}, fs::{ checksum, - media_file::rar::{convert_rar_to_zip, rar_sample}, + media_file::rar::{convert_to_zip, sample_size}, }, + prelude::{CoreResult, Ctx}, prisma::media, - types::{CoreResult, LibraryPattern, LibraryScanMode}, }; // TODO: fix these tests... @@ -21,7 +21,7 @@ fn test_rar_to_zip() -> CoreResult<()> { let path = tmp_file.path(); - let result = convert_rar_to_zip(path); + let result = convert_to_zip(path); assert!(result.is_ok()); let zip_path = result.unwrap(); @@ -66,7 +66,7 @@ async fn digest_rars_synchronous() -> CoreResult<()> { } for rar in rars { - let rar_sample_result = rar_sample(&rar.path); + let rar_sample_result = sample_size(&rar.path); assert!(rar_sample_result.is_ok()); let rar_sample = rar_sample_result.unwrap(); diff --git a/core/integration-tests/tests/scanner.rs b/core/integration-tests/tests/scanner.rs index 5097e9e6c..3d593267a 100644 --- a/core/integration-tests/tests/scanner.rs +++ b/core/integration-tests/tests/scanner.rs @@ -1,9 +1,9 @@ use crate::utils::{init_test, run_test_scan, TempLibrary}; use stump_core::{ - config::Ctx, + db::models::{LibraryPattern, LibraryScanMode}, + prelude::{CoreResult, Ctx}, prisma::{library, PrismaClient}, - types::{CoreResult, LibraryPattern, LibraryScanMode}, }; async fn check_library_post_scan( diff --git a/core/integration-tests/tests/utils.rs b/core/integration-tests/tests/utils.rs index 5eca17129..6bfcc9e54 100644 --- a/core/integration-tests/tests/utils.rs +++ b/core/integration-tests/tests/utils.rs @@ -4,12 +4,14 @@ use std::{fs, path::PathBuf}; use tempfile::{Builder, NamedTempFile, TempDir}; use stump_core::{ - config::Ctx, - db::migration::run_migrations, + db::{ + migration::run_migrations, + models::{LibraryPattern, LibraryScanMode}, + }, fs::scanner::library_scanner::{scan_batch, scan_sync}, job::{persist_new_job, runner::RunnerCtx, LibraryScanJob}, + prelude::{CoreResult, Ctx}, prisma::{library, library_options, PrismaClient}, - types::{CoreResult, LibraryPattern, LibraryScanMode}, }; // https://web.mit.edu/rust-lang_v1.25/arch/amd64_ubuntu1404/share/doc/rust/html/book/second-edition/ch11-03-test-organization.html diff --git a/core/integration-tests/tests/zip.rs b/core/integration-tests/tests/zip.rs index 0671758e4..600bb7cc2 100644 --- a/core/integration-tests/tests/zip.rs +++ b/core/integration-tests/tests/zip.rs @@ -1,8 +1,8 @@ use stump_core::{ - config::Ctx, - fs::{checksum, media_file::zip::zip_sample}, + db::models::{LibraryPattern, LibraryScanMode}, + fs::{checksum, media_file::zip}, + prelude::{CoreResult, Ctx}, prisma::media, - types::{CoreResult, LibraryPattern, LibraryScanMode}, }; use crate::utils::{init_test, TempLibrary}; @@ -33,7 +33,7 @@ async fn digest_zips() -> CoreResult<()> { assert_ne!(zips.len(), 0); for zip in zips { - let zip_sample = zip_sample(&zip.path); + let zip_sample = zip::sample_size(&zip.path); let digest_result = checksum::digest(&zip.path, zip_sample); assert!(digest_result.is_ok()); diff --git a/core/prisma/Cargo.toml b/core/prisma/Cargo.toml index f9bdd4ba0..5c07e8ce7 100644 --- a/core/prisma/Cargo.toml +++ b/core/prisma/Cargo.toml @@ -5,13 +5,7 @@ rust-version = "1.64.0" edition = "2021" [dependencies] -# prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.1", features = [ -# 'rspc', -# # 'sqlite-create-many', -# # "migrations", -# # "sqlite", -# ] } -prisma-client-rust-cli = { git = 
"https://github.com/Brendonovich/prisma-client-rust", rev = "79ab6bd700199b92103711f01d4df42e4cef62a6", features = [ +prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.6.3", features = [ 'rspc', # 'sqlite-create-many', "migrations", diff --git a/core/src/config/env.rs b/core/src/config/env.rs index 2fb895f34..669d29a2c 100644 --- a/core/src/config/env.rs +++ b/core/src/config/env.rs @@ -5,7 +5,7 @@ use tracing::debug; use crate::{ config::get_config_dir, - types::{errors::CoreError, CoreResult}, + prelude::{errors::CoreError, CoreResult}, }; /// [`StumpEnvironment`] is the the representation of the Stump configuration file. diff --git a/core/src/config/mod.rs b/core/src/config/mod.rs index 45fa2d4e9..177ac29aa 100644 --- a/core/src/config/mod.rs +++ b/core/src/config/mod.rs @@ -1,8 +1,5 @@ use std::path::{Path, PathBuf}; -pub(crate) mod context; -pub use context::Ctx; - pub mod env; pub mod logging; diff --git a/core/src/db/dao/library_dao.rs b/core/src/db/dao/library_dao.rs new file mode 100644 index 000000000..fbeb30035 --- /dev/null +++ b/core/src/db/dao/library_dao.rs @@ -0,0 +1,91 @@ +use std::sync::Arc; + +use crate::{ + db::models::Library, + prelude::{CoreError, CoreResult}, + prisma::{library, library_options, PrismaClient}, +}; + +use super::{Dao, LibraryOptionsDao}; + +pub struct LibraryDao { + client: Arc, +} + +#[async_trait::async_trait] +impl Dao for LibraryDao { + type Model = Library; + + fn new(client: Arc) -> Self { + Self { client } + } + + async fn insert(&self, data: Self::Model) -> CoreResult { + let library_options_dao = LibraryOptionsDao::new(self.client.clone()); + let library_options = library_options_dao.insert(data.library_options).await?; + + let created_library = self + .client + .library() + .create( + data.name, + data.path, + library_options::id::equals(library_options.id.unwrap()), + vec![], + ) + .with(library::library_options::fetch()) + .exec() + .await?; + + Ok(Library::from(created_library)) + } + + async fn delete(&self, id: &str) -> CoreResult { + let deleted_library = self + .client + .library() + .delete(library::id::equals(id.to_string())) + .exec() + .await?; + + Ok(Library::from(deleted_library)) + } + + async fn find_by_id(&self, id: &str) -> CoreResult { + let library = self + .client + .library() + .find_unique(library::id::equals(id.to_string())) + .with(library::library_options::fetch()) + .exec() + .await?; + + if library.is_none() { + return Err(CoreError::NotFound(format!( + "Library with id {} not found", + id + ))); + } + + Ok(Library::from(library.unwrap())) + } + + async fn find_all(&self) -> CoreResult> { + let libraries = self.client.library().find_many(vec![]).exec().await?; + + Ok(libraries.into_iter().map(Library::from).collect()) + } + + async fn find_paginated(&self, skip: i64, take: i64) -> CoreResult> { + let libraries = self + .client + .library() + .find_many(vec![]) + .skip(skip) + .take(take) + .exec() + .await?; + + Ok(libraries.into_iter().map(Library::from).collect()) + } +} diff --git a/core/src/db/dao/library_options_dao.rs b/core/src/db/dao/library_options_dao.rs new file mode 100644 index 000000000..f05a7494b --- /dev/null +++ b/core/src/db/dao/library_options_dao.rs @@ -0,0 +1,99 @@ +use std::sync::Arc; + +use crate::{ + db::models::LibraryOptions, + prelude::{CoreError, CoreResult}, + prisma::{library_options, PrismaClient}, +}; + +use super::Dao; + +pub struct LibraryOptionsDao { + client: Arc, +} + +#[async_trait::async_trait] +impl Dao for 
LibraryOptionsDao { + type Model = LibraryOptions; + + fn new(client: Arc) -> Self { + Self { client } + } + + async fn insert(&self, data: Self::Model) -> CoreResult { + let created_library_options = self + .client + .library_options() + .create(vec![ + library_options::convert_rar_to_zip::set(data.convert_rar_to_zip), + library_options::hard_delete_conversions::set( + data.hard_delete_conversions, + ), + library_options::create_webp_thumbnails::set(data.create_webp_thumbnails), + library_options::library_pattern::set(data.library_pattern.to_string()), + ]) + .exec() + .await?; + + Ok(LibraryOptions::from(created_library_options)) + } + + async fn delete(&self, id: &str) -> CoreResult { + let deleted_library_options = self + .client + .library_options() + .delete(library_options::id::equals(id.to_string())) + .exec() + .await?; + + Ok(LibraryOptions::from(deleted_library_options)) + } + + async fn find_by_id(&self, id: &str) -> CoreResult { + let library_options = self + .client + .library_options() + .find_unique(library_options::id::equals(id.to_string())) + .exec() + .await?; + + if library_options.is_none() { + return Err(CoreError::NotFound(format!( + "LibraryOptions with id {} not found", + id + ))); + } + + Ok(LibraryOptions::from(library_options.unwrap())) + } + + async fn find_all(&self) -> CoreResult> { + let library_options = self + .client + .library_options() + .find_many(vec![]) + .exec() + .await?; + + Ok(library_options + .into_iter() + .map(LibraryOptions::from) + .collect()) + } + + async fn find_paginated(&self, skip: i64, take: i64) -> CoreResult> { + let library_options = self + .client + .library_options() + .find_many(vec![]) + .skip(skip) + .take(take) + .exec() + .await?; + + Ok(library_options + .into_iter() + .map(LibraryOptions::from) + .collect()) + } +} diff --git a/core/src/db/dao/media_dao.rs b/core/src/db/dao/media_dao.rs new file mode 100644 index 000000000..86f3e5b63 --- /dev/null +++ b/core/src/db/dao/media_dao.rs @@ -0,0 +1,213 @@ +use std::sync::Arc; + +use crate::{ + db::models::Media, + prelude::{CoreError, CoreResult}, + prisma::{media, series, PrismaClient}, +}; + +use super::{Dao, DaoBatch}; + +pub struct MediaDao { + client: Arc, +} + +#[async_trait::async_trait] +impl Dao for MediaDao { + type Model = Media; + + fn new(client: Arc) -> Self { + Self { client } + } + + async fn insert(&self, data: Self::Model) -> CoreResult { + let created_media = self + .client + .media() + .create( + data.name.to_owned(), + data.size, + data.extension.to_owned(), + data.pages, + data.path.to_owned(), + vec![ + media::checksum::set(data.checksum.to_owned()), + media::description::set(data.description.to_owned()), + media::series::connect(series::id::equals(data.series_id.to_owned())), + ], + ) + .exec() + .await?; + + Ok(Media::from(created_media)) + } + + async fn delete(&self, id: &str) -> CoreResult { + let deleted_media = self + .client + .media() + .delete(media::id::equals(id.to_string())) + .exec() + .await?; + + Ok(Media::from(deleted_media)) + } + + async fn find_all(&self) -> CoreResult> { + Ok(self + .client + .media() + .find_many(vec![]) + .exec() + .await? 
+ .into_iter() + .map(Media::from) + .collect()) + } + + async fn find_by_id(&self, id: &str) -> CoreResult { + let media = self + .client + .media() + .find_unique(media::id::equals(id.to_string())) + .exec() + .await?; + + if media.is_none() { + return Err(CoreError::NotFound(format!( + "Media with id {} not found.", + id + ))); + } + + Ok(Media::from(media.unwrap())) + } + + async fn find_paginated(&self, skip: i64, take: i64) -> CoreResult> { + let media = self + .client + .media() + .find_many(vec![]) + .skip(skip) + .take(take) + .exec() + .await?; + + Ok(media.into_iter().map(Media::from).collect()) + } +} + +#[async_trait::async_trait] +impl DaoBatch for MediaDao { + type Model = Media; + + async fn insert_many(&self, data: Vec) -> CoreResult> { + let queries = data.into_iter().map(|media| { + self.client.media().create( + media.name, + media.size, + media.extension, + media.pages, + media.path, + vec![ + media::checksum::set(media.checksum), + media::description::set(media.description), + media::series::connect(series::id::equals(media.series_id)), + ], + ) + }); + + Ok(self + .client + ._batch(queries) + .await? + .into_iter() + .map(Media::from) + .collect()) + } + + async fn _insert_batch(&self, models: T) -> CoreResult> + where + T: Iterator + Send + Sync, + { + let queries = models.map(|media| { + self.client.media().create( + media.name, + media.size, + media.extension, + media.pages, + media.path, + vec![ + media::checksum::set(media.checksum), + media::description::set(media.description), + media::series::connect(series::id::equals(media.series_id)), + ], + ) + }); + + let created_media = self.client._batch(queries).await?; + + Ok(created_media.into_iter().map(Media::from).collect()) + } + + async fn delete_many(&self, ids: Vec) -> CoreResult { + Ok(self + .client + .media() + .delete_many(vec![media::id::in_vec(ids)]) + .exec() + .await?) 
+	}
+
+	async fn _delete_batch(&self, ids: Vec<String>) -> CoreResult<Vec<Self::Model>> {
+		let queries = ids
+			.into_iter()
+			.map(|id| self.client.media().delete(media::id::equals(id)));
+
+		let deleted_media = self.client._batch(queries).await?;
+
+		Ok(deleted_media.into_iter().map(Media::from).collect())
+	}
+}
+
+// #[async_trait::async_trait]
+// impl DaoUpsert for MediaDao {
+// 	type Model = Media;
+
+// 	async fn upsert(&self, data: &Self::Model) -> CoreResult<Self::Model> {
+// 		let client = self.client;
+// 		let resulting_media = client
+// 			.media()
+// 			.upsert(
+// 				media::id::equals(data.id.clone()),
+// 				(
+// 					data.name.clone(),
+// 					data.size,
+// 					data.extension.clone(),
+// 					data.pages,
+// 					data.path.clone(),
+// 					vec![
+// 						media::checksum::set(data.checksum.clone()),
+// 						media::description::set(data.description.clone()),
+// 						media::series::connect(series::id::equals(
+// 							data.series_id.clone(),
+// 						)),
+// 					],
+// 				),
+// 				vec![
+// 					media::name::set(data.name.clone()),
+// 					media::size::set(data.size),
+// 					media::extension::set(data.extension.clone()),
+// 					media::pages::set(data.pages),
+// 					media::path::set(data.path.clone()),
+// 					media::checksum::set(data.checksum.clone()),
+// 					media::description::set(data.description.clone()),
+// 					media::series::connect(series::id::equals(data.series_id.clone())),
+// 				],
+// 			)
+// 			.exec()
+// 			.await?;
+
+// 		Ok(Media::from(resulting_media))
+// 	}
+// }
diff --git a/core/src/db/dao/mod.rs b/core/src/db/dao/mod.rs
new file mode 100644
index 000000000..c3b60202f
--- /dev/null
+++ b/core/src/db/dao/mod.rs
@@ -0,0 +1,107 @@
+use std::sync::Arc;
+
+mod library_dao;
+mod library_options_dao;
+mod media_dao;
+mod read_progress_dao;
+mod reading_list_dao;
+mod series_dao;
+
+pub use library_dao::*;
+pub use library_options_dao::*;
+pub use media_dao::*;
+pub use read_progress_dao::*;
+pub use reading_list_dao::*;
+pub use series_dao::*;
+
+use crate::{prelude::CoreResult, prisma::PrismaClient};
+
+/// The [`Dao`] trait defines the basic DB operations for a model. Update operations are not
+/// included since they are more niche per model, and are not used in the same generic way as
+/// the other operations.
+#[async_trait::async_trait]
+pub trait Dao: Sync + Sized {
+	type Model: Sync;
+
+	/// Creates a new Dao instance.
+	fn new(client: Arc<PrismaClient>) -> Self;
+
+	/// Creates a new record in the database.
+	async fn insert(&self, data: Self::Model) -> CoreResult<Self::Model>;
+
+	/// Deletes a record from the database.
+	async fn delete(&self, id: &str) -> CoreResult<Self::Model>;
+
+	/// Finds a record by its id.
+	async fn find_by_id(&self, id: &str) -> CoreResult<Self::Model>;
+
+	/// Finds all records.
+	async fn find_all(&self) -> CoreResult<Vec<Self::Model>>;
+
+	/// Finds all records, returning a paginated result.
+	async fn find_paginated(&self, skip: i64, take: i64) -> CoreResult<Vec<Self::Model>>;
+}
+
+/// The [`DaoUpdate`] trait defines a single update operation for a model. This is a generic
+/// type signature for the update operation, and since not all models really need this, it is
+/// contained in a separate trait.
+#[async_trait::async_trait]
+pub trait DaoUpdate {
+	type Model: Sync;
+
+	/// Updates a record in the database.
+	async fn update(&self, id: &str, data: Self::Model) -> CoreResult<Self::Model>;
+
+	/// Updates a record in the database, or creates it if it does not exist.
+	async fn upsert(&self, data: Self::Model) -> CoreResult<Self::Model>;
+}
+
+#[async_trait::async_trait]
+pub trait DaoBatch: Sync + Sized {
+	type Model: Sync;
+
+	/// Creates multiple new records in the database.
+	async fn insert_many(&self, data: Vec<Self::Model>) -> CoreResult<Vec<Self::Model>>;
+
+	// async fn _insert_batch<T: BatchContainer<Marker>, Marker>(
+	// 	&self,
+	// 	queries: T,
+	// ) -> CoreResult<Vec<Self::Model>>;
+
+	// FIXME: maybe refactor to take something like IntoIterator<Item = Self::Model>?
+	async fn _insert_batch<T>(
+		&self,
+		models: T,
+	) -> CoreResult<Vec<Self::Model>>
+	where
+		T: Iterator<Item = Self::Model> + Send + Sync;
+
+	// async fn _update_batch<T: BatchContainer<Marker>, Marker>(
+	// 	&self,
+	// 	queries: T,
+	// ) -> CoreResult<Vec<Self::Model>>;
+
+	/// Deletes multiple records from the database. Returns the number of deleted records.
+	async fn delete_many(&self, ids: Vec<String>) -> CoreResult<i64>;
+
+	/// Deletes multiple records from the database. Returns the records that were deleted.
+	async fn _delete_batch(&self, ids: Vec<String>) -> CoreResult<Vec<Self::Model>>;
+}
+
+#[async_trait::async_trait]
+pub trait DaoRestricted: Sync + Sized {
+	type Model: Sync;
+
+	/// Finds a record by its id, if the user has access to it.
+	async fn find_by_id(&self, id: &str, user_id: &str) -> CoreResult<Self::Model>;
+
+	/// Finds all records, if the user has access to them.
+	async fn find_all(&self, user_id: &str) -> CoreResult<Vec<Self::Model>>;
+
+	/// Finds all records, returning a paginated result, if the user has access to them.
+	async fn find_paginated(
+		&self,
+		skip: i64,
+		take: i64,
+		user_id: &str,
+	) -> CoreResult<Vec<Self::Model>>;
+}
diff --git a/core/src/db/dao/read_progress_dao.rs b/core/src/db/dao/read_progress_dao.rs
new file mode 100644
index 000000000..301d3ffb7
--- /dev/null
+++ b/core/src/db/dao/read_progress_dao.rs
@@ -0,0 +1,132 @@
+use std::sync::Arc;
+
+use crate::{
+	db::models::ReadProgress,
+	prelude::{CoreError, CoreResult},
+	prisma::{
+		media,
+		read_progress::{self, UniqueWhereParam},
+		user, PrismaClient,
+	},
+};
+
+use super::{Dao, DaoUpdate};
+
+pub struct ReadProgressDao {
+	client: Arc<PrismaClient>,
+}
+
+#[async_trait::async_trait]
+impl Dao for ReadProgressDao {
+	type Model = ReadProgress;
+
+	fn new(client: Arc<PrismaClient>) -> Self {
+		Self { client }
+	}
+
+	async fn insert(&self, data: Self::Model) -> CoreResult<Self::Model> {
+		if data.media_id.is_empty() {
+			return Err(CoreError::InvalidQuery(
+				"ReadProgress::media_id must be set".to_string(),
+			));
+		} else if data.user_id.is_empty() {
+			return Err(CoreError::InvalidQuery(
+				"ReadProgress::user_id must be set".to_string(),
+			));
+		}
+
+		let created_read_progress = self
+			.client
+			.read_progress()
+			.create(
+				data.page,
+				media::id::equals(data.media_id),
+				user::id::equals(data.user_id),
+				vec![],
+			)
+			.exec()
+			.await?;
+
+		Ok(ReadProgress::from(created_read_progress))
+	}
+
+	async fn delete(&self, id: &str) -> CoreResult<Self::Model> {
+		let deleted_read_progress = self
+			.client
+			.read_progress()
+			.delete(read_progress::id::equals(id.to_string()))
+			.exec()
+			.await?;
+
+		Ok(ReadProgress::from(deleted_read_progress))
+	}
+
+	async fn find_by_id(&self, id: &str) -> CoreResult<Self::Model> {
+		let read_progress = self
+			.client
+			.read_progress()
+			.find_unique(read_progress::id::equals(id.to_string()))
+			.exec()
+			.await?;
+
+		if read_progress.is_none() {
+			return Err(CoreError::NotFound(format!(
+				"ReadProgress with id {} not found",
+				id
+			)));
+		}
+
+		Ok(ReadProgress::from(read_progress.unwrap()))
+	}
+
+	async fn find_all(&self) -> CoreResult<Vec<Self::Model>> {
+		let read_progress = self.client.read_progress().find_many(vec![]).exec().await?;
+
+		Ok(read_progress.into_iter().map(ReadProgress::from).collect())
+	}
+
+	async fn find_paginated(&self, skip: i64, take: i64) -> CoreResult<Vec<Self::Model>> {
+		let read_progress = self
+			.client
+			.read_progress()
+			.find_many(vec![])
+			.skip(skip)
+			.take(take)
+			.exec()
+			.await?;
+
+		Ok(read_progress.into_iter().map(ReadProgress::from).collect())
+	}
+}
+
+#[async_trait::async_trait]
+impl DaoUpdate for ReadProgressDao {
+	type Model = ReadProgress;
+
+	async fn update(&self, _id: &str, _data: Self::Model) -> CoreResult<Self::Model> {
+		unreachable!("ReadProgressDao::update will not be implemented");
+	}
+
+	async fn upsert(&self, data: Self::Model) -> CoreResult<Self::Model> {
+		let read_progress = self
+			.client
+			.read_progress()
+			.upsert(
+				UniqueWhereParam::UserIdMediaIdEquals(
+					data.user_id.clone(),
+					data.media_id.clone(),
+				),
+				(
+					data.page,
+					media::id::equals(data.media_id.clone()),
+					user::id::equals(data.user_id.clone()),
+					vec![],
+				),
+				vec![read_progress::page::set(data.page)],
+			)
+			.exec()
+			.await?;
+
+		Ok(ReadProgress::from(read_progress))
+	}
+}
diff --git a/core/src/db/dao/reading_list_dao.rs b/core/src/db/dao/reading_list_dao.rs
new file mode 100644
index 000000000..7168da3e1
--- /dev/null
+++ b/core/src/db/dao/reading_list_dao.rs
@@ -0,0 +1,109 @@
+use std::sync::Arc;
+
+use crate::{
+	db::models::ReadingList,
+	prelude::{CoreError, CoreResult},
+	prisma::{media, reading_list, user, PrismaClient},
+};
+
+use super::Dao;
+
+pub struct ReadingListDao {
+	client: Arc<PrismaClient>,
+}
+
+#[async_trait::async_trait]
+impl Dao for ReadingListDao {
+	type Model = ReadingList;
+
+	fn new(client: Arc<PrismaClient>) -> Self {
+		Self { client }
+	}
+
+	async fn insert(&self, data: Self::Model) -> CoreResult<Self::Model> {
+		let media_ids = data
+			.media
+			.map(|m| m.into_iter().map(|m| m.id).collect::<Vec<String>>())
+			.unwrap_or_default();
+
+		let mut params = Vec::with_capacity(1);
+		if !media_ids.is_empty() {
+			params.push(reading_list::media::connect(
+				media_ids
+					.iter()
+					.map(|id| media::id::equals(id.to_string()))
+					.collect(),
+			));
+		}
+
+		let reading_list = self
+			.client
+			.reading_list()
+			.create(
+				data.name.to_owned(),
+				user::id::equals(data.creating_user_id.to_owned()),
+				params,
+			)
+			.with(reading_list::media::fetch(vec![]))
+			.exec()
+			.await?;
+
+		Ok(ReadingList::from(reading_list))
+	}
+
+	async fn delete(&self, id: &str) -> CoreResult<Self::Model> {
+		let reading_list = self
+			.client
+			.reading_list()
+			.delete(reading_list::id::equals(id.to_string()))
+			.exec()
+			.await?;
+
+		Ok(ReadingList::from(reading_list))
+	}
+
+	async fn find_by_id(&self, id: &str) -> CoreResult<Self::Model> {
+		let reading_list = self
+			.client
+			.reading_list()
+			.find_unique(reading_list::id::equals(id.to_string()))
+			.with(reading_list::media::fetch(vec![]))
+			.exec()
+			.await?;
+
+		if reading_list.is_none() {
+			return Err(CoreError::NotFound(format!(
+				"Reading list with ID {} not found",
+				id
+			)));
+		}
+
+		Ok(ReadingList::from(reading_list.unwrap()))
+	}
+
+	async fn find_all(&self) -> CoreResult<Vec<Self::Model>> {
+		let reading_lists = self
+			.client
+			.reading_list()
+			.find_many(vec![])
+			.with(reading_list::media::fetch(vec![]))
+			.exec()
+			.await?;
+
+		Ok(reading_lists.into_iter().map(ReadingList::from).collect())
+	}
+
+	async fn find_paginated(&self, skip: i64, take: i64) -> CoreResult<Vec<Self::Model>> {
+		let reading_lists = self
+			.client
+			.reading_list()
+			.find_many(vec![])
+			.with(reading_list::media::fetch(vec![]))
+			.skip(skip)
+			.take(take)
+			.exec()
+			.await?;
+
+		Ok(reading_lists.into_iter().map(ReadingList::from).collect())
+	}
+}
diff --git a/core/src/db/dao/series_dao.rs b/core/src/db/dao/series_dao.rs
new file mode 100644
index 000000000..0eb6fde17
--- /dev/null
+++ b/core/src/db/dao/series_dao.rs
@@ -0,0 +1,88 @@
+use std::sync::Arc;
+
+use crate::{
+	db::models::Series,
+	prelude::{CoreError, CoreResult},
+	prisma::{library, series, PrismaClient},
+};
+
+use super::Dao;
+
+pub struct SeriesDao {
+	client: Arc<PrismaClient>,
+}
+
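Since every DAO exposes the same trait methods, calling code can be written once against the trait rather than per model. What follows is a minimal sketch, not part of this patch, of how the abstraction composes; the `first_page` helper is hypothetical, while `MediaDao::new(ctx.db.clone())` mirrors the `insert_media` rewrite later in this diff.

use crate::{
	db::{Dao, MediaDao, SeriesDao},
	prelude::{CoreResult, Ctx},
};

// Works for any DAO, since `find_paginated` is part of the shared `Dao`
// trait. `skip` and `take` are the raw Prisma offsets, both i64.
async fn first_page<D: Dao>(dao: &D) -> CoreResult<Vec<D::Model>> {
	dao.find_paginated(0, 20).await
}

async fn example(ctx: &Ctx) -> CoreResult<()> {
	// DAOs are constructed from the shared client handle on the context.
	let media_dao = MediaDao::new(ctx.db.clone());
	let series_dao = SeriesDao::new(ctx.db.clone());

	let _media = first_page(&media_dao).await?;
	let _series = first_page(&series_dao).await?;

	Ok(())
}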
+#[async_trait::async_trait] +impl Dao for SeriesDao { + type Model = Series; + + fn new(client: Arc) -> Self { + Self { client } + } + + async fn insert(&self, data: Self::Model) -> CoreResult { + let created_series = self + .client + .series() + .create( + data.name, + data.path, + vec![ + series::library::connect(library::id::equals(data.library_id)), + series::status::set(data.status.to_string()), + ], + ) + .exec() + .await?; + + Ok(Self::Model::from(created_series)) + } + + async fn delete(&self, id: &str) -> CoreResult { + let deleted_series = self + .client + .series() + .delete(series::id::equals(id.to_string())) + .exec() + .await?; + + Ok(Series::from(deleted_series)) + } + + async fn find_by_id(&self, id: &str) -> CoreResult { + let series = self + .client + .series() + .find_unique(series::id::equals(id.to_string())) + .exec() + .await?; + + if series.is_none() { + return Err(CoreError::NotFound(format!( + "Series with id {} not found", + id + ))); + } + + Ok(Series::from(series.unwrap())) + } + + async fn find_all(&self) -> CoreResult> { + let series = self.client.series().find_many(vec![]).exec().await?; + + Ok(series.into_iter().map(Series::from).collect()) + } + + async fn find_paginated(&self, skip: i64, take: i64) -> CoreResult> { + let series = self + .client + .series() + .find_many(vec![]) + .skip(skip) + .take(take) + .exec() + .await?; + + Ok(series.into_iter().map(Series::from).collect()) + } +} diff --git a/core/src/db/migration.rs b/core/src/db/migration.rs index 2c2ee6b64..545d26636 100644 --- a/core/src/db/migration.rs +++ b/core/src/db/migration.rs @@ -1,6 +1,6 @@ use tracing::{debug, info}; -use crate::{prisma, types::CoreResult, CoreError}; +use crate::{prelude::CoreResult, prisma, CoreError}; pub async fn run_migrations(client: &prisma::PrismaClient) -> CoreResult<()> { info!("Running migrations..."); diff --git a/core/src/db/mod.rs b/core/src/db/mod.rs index fc9da54b6..28684deec 100644 --- a/core/src/db/mod.rs +++ b/core/src/db/mod.rs @@ -1,6 +1,10 @@ +pub(crate) mod dao; pub mod migration; +pub mod models; pub mod utils; +pub use dao::*; + use std::path::Path; use tracing::trace; diff --git a/core/src/types/models/epub.rs b/core/src/db/models/epub.rs similarity index 97% rename from core/src/types/models/epub.rs rename to core/src/db/models/epub.rs index e4c26e701..e693f4610 100644 --- a/core/src/types/models/epub.rs +++ b/core/src/db/models/epub.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use specta::Type; use tracing::error; -use crate::{prisma::media, types::errors::ProcessFileError}; +use crate::{prelude::errors::ProcessFileError, prisma::media}; use super::media::Media; diff --git a/core/src/types/models/library.rs b/core/src/db/models/library.rs similarity index 100% rename from core/src/types/models/library.rs rename to core/src/db/models/library.rs diff --git a/core/src/types/models/log.rs b/core/src/db/models/log.rs similarity index 79% rename from core/src/types/models/log.rs rename to core/src/db/models/log.rs index 6e8280352..41a470334 100644 --- a/core/src/types/models/log.rs +++ b/core/src/db/models/log.rs @@ -26,6 +26,12 @@ pub enum LogLevel { Debug, } +impl Default for LogLevel { + fn default() -> Self { + LogLevel::Info + } +} + impl std::fmt::Display for LogLevel { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -37,39 +43,33 @@ impl std::fmt::Display for LogLevel { } } -#[derive(Clone, Serialize, Deserialize, Type)] +#[derive(Clone, Serialize, Deserialize, Default, Type)] pub struct Log { 
pub id: String, pub level: LogLevel, pub message: String, pub created_at: String, - - pub job_id: Option, -} - -/// A helper struct mainly to convert client events to structs easier to persist to DB. -pub struct TentativeLog { - pub level: LogLevel, - pub message: String, pub job_id: Option, } -impl From for TentativeLog { +impl From for Log { fn from(event: CoreEvent) -> Self { match event { - CoreEvent::JobFailed { runner_id, message } => TentativeLog { + CoreEvent::JobFailed { runner_id, message } => Self { level: LogLevel::Error, message, job_id: Some(runner_id), + ..Default::default() }, CoreEvent::CreateEntityFailed { runner_id, path, message, - } => TentativeLog { + } => Self { level: LogLevel::Error, message: format!("{}: {}", path, message), job_id: runner_id, + ..Default::default() }, _ => unimplemented!(), } diff --git a/core/src/types/models/media.rs b/core/src/db/models/media.rs similarity index 60% rename from core/src/types/models/media.rs rename to core/src/db/models/media.rs index a0743cc0c..8a11375e2 100644 --- a/core/src/types/models/media.rs +++ b/core/src/db/models/media.rs @@ -1,13 +1,16 @@ -use std::{path::PathBuf, str::FromStr}; +use std::{path::Path, str::FromStr}; use serde::{Deserialize, Serialize}; use specta::Type; -use crate::{config::context::Ctx, prisma, types::enums::FileStatus}; +use crate::{ + prelude::{enums::FileStatus, CoreResult}, + prisma::media, +}; -use super::{read_progress::ReadProgress, series::Series, tag::Tag}; +use super::{read_progress::ReadProgress, series::Series, tag::Tag, LibraryOptions}; -#[derive(Debug, Clone, Deserialize, Serialize, Type)] +#[derive(Debug, Clone, Deserialize, Serialize, Type, Default)] pub struct Media { pub id: String, /// The name of the media. ex: "The Amazing Spider-Man (2018) #69" @@ -42,39 +45,20 @@ pub struct Media { // pub status: String, } -// Note: used internally... 
-pub struct TentativeMedia { - pub name: String, - pub description: Option, - pub size: i32, - pub extension: String, - pub pages: i32, - pub checksum: Option, - pub path: String, +#[derive(Default)] +pub struct MediaBuilderOptions { pub series_id: String, + pub library_options: LibraryOptions, } -impl TentativeMedia { - pub fn into_action(self, ctx: &Ctx) -> prisma::media::Create { - ctx.db.media().create( - self.name, - self.size, - self.extension, - self.pages, - self.path, - vec![ - prisma::media::checksum::set(self.checksum), - prisma::media::description::set(self.description), - prisma::media::series::connect(prisma::series::id::equals( - self.series_id, - )), - ], - ) - } +pub trait MediaBuilder { + fn build(path: &Path, series_id: &str) -> CoreResult; + fn build_with_options(path: &Path, options: MediaBuilderOptions) + -> CoreResult; } -impl From for Media { - fn from(data: prisma::media::Data) -> Media { +impl From for Media { + fn from(data: media::Data) -> Media { let series = match data.series() { Ok(series) => Some(series.unwrap().to_owned().into()), Err(_e) => None, @@ -121,45 +105,3 @@ impl From for Media { } } } - -// Derived from ComicInfo.xml -#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Type, Default)] - -pub struct MediaMetadata { - #[serde(rename = "Series")] - pub series: Option, - #[serde(rename = "Number")] - pub number: Option, - #[serde(rename = "Web")] - pub web: Option, - #[serde(rename = "Summary")] - pub summary: Option, - #[serde(rename = "Publisher")] - pub publisher: Option, - #[serde(rename = "Genre")] - pub genre: Option, - #[serde(rename = "PageCount")] - pub page_count: Option, -} - -// impl MediaMetadata { -// pub fn default() -> Self { -// Self { -// series: None, -// number: None, -// web: None, -// summary: None, -// publisher: None, -// genre: None, -// page_count: None, -// } -// } -// } - -pub struct ProcessedMediaFile { - pub thumbnail_path: Option, - pub path: PathBuf, - pub checksum: Option, - pub metadata: Option, - pub pages: i32, -} diff --git a/core/src/types/models/mod.rs b/core/src/db/models/mod.rs similarity index 61% rename from core/src/types/models/mod.rs rename to core/src/db/models/mod.rs index 009eadcc0..9c47d37b0 100644 --- a/core/src/types/models/mod.rs +++ b/core/src/db/models/mod.rs @@ -1,21 +1,20 @@ pub mod epub; pub mod library; -pub mod list_directory; pub mod log; pub mod media; pub mod read_progress; +pub mod reading_list; pub mod series; pub mod tag; pub mod user; -pub mod readinglist; -pub use crate::types::models::epub::*; -pub use crate::types::models::log::*; +pub use crate::db::models::epub::*; +pub use crate::db::models::log::*; pub use library::*; -pub use list_directory::*; pub use media::*; pub use read_progress::*; +pub use reading_list::*; pub use series::*; pub use tag::*; pub use user::*; diff --git a/core/src/types/models/read_progress.rs b/core/src/db/models/read_progress.rs similarity index 100% rename from core/src/types/models/read_progress.rs rename to core/src/db/models/read_progress.rs diff --git a/core/src/db/models/reading_list.rs b/core/src/db/models/reading_list.rs new file mode 100644 index 000000000..e84b678a5 --- /dev/null +++ b/core/src/db/models/reading_list.rs @@ -0,0 +1,25 @@ +use serde::{Deserialize, Serialize}; +use specta::Type; + +use crate::{db::models::Media, prisma::reading_list}; + +#[derive(Debug, Clone, Serialize, Deserialize, Type, Default)] +pub struct ReadingList { + pub id: String, + pub name: String, + pub creating_user_id: String, + pub description: Option, + pub 
media: Option>, +} + +impl From for ReadingList { + fn from(data: reading_list::Data) -> ReadingList { + ReadingList { + id: data.id, + name: data.name, + creating_user_id: data.creating_user_id, + description: data.description, + media: None, + } + } +} diff --git a/core/src/types/models/series.rs b/core/src/db/models/series.rs similarity index 98% rename from core/src/types/models/series.rs rename to core/src/db/models/series.rs index 8d1a84429..627342aee 100644 --- a/core/src/types/models/series.rs +++ b/core/src/db/models/series.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use serde::{Deserialize, Serialize}; use specta::Type; -use crate::{prisma, types::enums::FileStatus}; +use crate::{prelude::enums::FileStatus, prisma}; use super::{library::Library, media::Media, tag::Tag}; diff --git a/core/src/types/models/tag.rs b/core/src/db/models/tag.rs similarity index 100% rename from core/src/types/models/tag.rs rename to core/src/db/models/tag.rs diff --git a/core/src/types/models/user.rs b/core/src/db/models/user.rs similarity index 100% rename from core/src/types/models/user.rs rename to core/src/db/models/user.rs diff --git a/core/src/db/utils.rs b/core/src/db/utils.rs index a3a77a9c2..7ff1e6c64 100644 --- a/core/src/db/utils.rs +++ b/core/src/db/utils.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use prisma_client_rust::{raw, PrismaValue}; use serde::Deserialize; -use crate::{prisma::PrismaClient, types::CoreResult}; +use crate::{prelude::CoreResult, prisma::PrismaClient}; #[derive(Deserialize, Debug, Default)] pub struct CountQueryReturn { diff --git a/core/src/event/event_manager.rs b/core/src/event/event_manager.rs index 4fdacaf19..f00ffcb45 100644 --- a/core/src/event/event_manager.rs +++ b/core/src/event/event_manager.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use crate::{config::context::Ctx, event::InternalCoreTask, job::pool::JobPool}; +use crate::{event::InternalCoreTask, job::pool::JobPool, prelude::Ctx}; use tokio::{self, sync::mpsc}; use tracing::error; diff --git a/core/src/event/mod.rs b/core/src/event/mod.rs index ea0f57d47..3fe815f5c 100644 --- a/core/src/event/mod.rs +++ b/core/src/event/mod.rs @@ -6,8 +6,8 @@ use tokio::sync::oneshot; use crate::{ job::{Job, JobReport, JobStatus, JobUpdate}, + prelude::CoreResult, prisma, - types::CoreResult, }; pub enum InternalCoreTask { diff --git a/core/src/fs/image.rs b/core/src/fs/image.rs index 73167b304..7a2886013 100644 --- a/core/src/fs/image.rs +++ b/core/src/fs/image.rs @@ -8,11 +8,13 @@ use std::{ use tracing::{debug, error, trace}; use webp::{Encoder, WebPMemory}; -use crate::{config::get_thumbnails_dir, prisma::media, types::errors::ProcessFileError}; +use crate::{ + config::get_thumbnails_dir, prelude::errors::ProcessFileError, prisma::media, +}; use super::media_file; -pub fn get_image_bytes>(path: P) -> Result, ProcessFileError> { +pub fn get_bytes>(path: P) -> Result, ProcessFileError> { let mut file = File::open(path)?; let mut buf = Vec::new(); diff --git a/core/src/fs/media_file/epub.rs b/core/src/fs/media_file/epub.rs index 974691c21..73ee84182 100644 --- a/core/src/fs/media_file/epub.rs +++ b/core/src/fs/media_file/epub.rs @@ -14,7 +14,7 @@ use crate::{ checksum, media_file::{get_content_type_from_mime, guess_content_type}, }, - types::{errors::ProcessFileError, models::media::ProcessedMediaFile, ContentType}, + prelude::{errors::ProcessFileError, fs::ProcessedMediaFile, ContentType}, }; use epub::doc::EpubDoc; use tracing::{debug, error, warn}; @@ -24,7 +24,7 @@ epubcfi usually starts with /6, referring to spine 
element of package file file has three groups of elements: metadata, manifest and spine. */ // TODO: options: &LibraryOptions -pub fn digest_epub(path: &Path, size: u64) -> Option { +pub fn digest(path: &Path, size: u64) -> Option { let mut bytes_to_read = size; // FIXME: this isn't ideal @@ -48,7 +48,7 @@ fn load_epub(path: &str) -> Result, ProcessFileError> { EpubDoc::new(path).map_err(|e| ProcessFileError::EpubOpenError(e.to_string())) } -pub fn process_epub(path: &Path) -> Result { +pub fn process(path: &Path) -> Result { debug!("Processing Epub: {}", path.display()); let epub_file = load_epub(path.to_str().unwrap())?; @@ -68,14 +68,14 @@ pub fn process_epub(path: &Path) -> Result Ok(ProcessedMediaFile { thumbnail_path: None, path: path.to_path_buf(), - checksum: digest_epub(path, file_size), + checksum: digest(path, file_size), metadata: None, pages, }) } // TODO: change return type to make more sense -pub fn get_epub_cover(file: &str) -> Result<(ContentType, Vec), ProcessFileError> { +pub fn get_cover(file: &str) -> Result<(ContentType, Vec), ProcessFileError> { let mut epub_file = EpubDoc::new(file).map_err(|e| { error!("Failed to open epub file: {}", e); ProcessFileError::EpubOpenError(e.to_string()) diff --git a/core/src/fs/media_file/mod.rs b/core/src/fs/media_file/mod.rs index c7f2eff06..efca5243e 100644 --- a/core/src/fs/media_file/mod.rs +++ b/core/src/fs/media_file/mod.rs @@ -7,19 +7,14 @@ use std::path::Path; use tracing::{debug, warn}; use crate::{ - fs::media_file::{epub::process_epub, rar::process_rar, zip::process_zip}, - types::{ + db::models::LibraryOptions, + prelude::{ errors::ProcessFileError, - models::{ - library::LibraryOptions, - media::{MediaMetadata, ProcessedMediaFile}, - }, + fs::media_file::{MediaMetadata, ProcessedMediaFile}, ContentType, }, }; -use self::{epub::get_epub_cover, rar::get_rar_image, zip::get_zip_image}; - // FIXME: this module does way too much. It should be cleaned up, way too many vaguely // similar things shoved in here with little distinction. 
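With the `process_epub`/`process_zip`/`get_rar_image` family renamed to module-scoped `epub::process`, `zip::process`, and `rar::get_image`, format dispatch now reads uniformly. Below is a rough sketch of the new calling convention, assuming the `epub`, `rar`, and `zip` modules are publicly reachable; it dispatches on file extension purely for illustration, whereas the real `media_file::process` infers the MIME type from the path.

use std::path::Path;

use crate::{
	fs::media_file::{epub, rar, zip},
	prelude::{errors::ProcessFileError, fs::ProcessedMediaFile},
};

// Hypothetical helper: route a file to the right processor by extension.
fn process_by_extension(path: &Path) -> Result<ProcessedMediaFile, ProcessFileError> {
	match path.extension().and_then(|ext| ext.to_str()) {
		Some("cbz") | Some("zip") => zip::process(path),
		Some("cbr") | Some("rar") => rar::process(path),
		Some("epub") => epub::process(path),
		_ => Err(ProcessFileError::UnsupportedFileType(
			path.display().to_string(),
		)),
	}
}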
@@ -133,13 +128,13 @@ pub fn get_page( let mime = guess_mime(Path::new(file)); match mime.as_deref() { - Some("application/zip") => get_zip_image(file, page), - Some("application/vnd.comicbook+zip") => get_zip_image(file, page), - Some("application/vnd.rar") => get_rar_image(file, page), - Some("application/vnd.comicbook-rar") => get_rar_image(file, page), + Some("application/zip") => zip::get_image(file, page), + Some("application/vnd.comicbook+zip") => zip::get_image(file, page), + Some("application/vnd.rar") => rar::get_image(file, page), + Some("application/vnd.comicbook-rar") => rar::get_image(file, page), Some("application/epub+zip") => { if page == 1 { - get_epub_cover(file) + epub::get_cover(file) } else { Err(ProcessFileError::UnsupportedFileType( "You may only request the cover page (first page) for epub files on this endpoint".into() @@ -154,6 +149,18 @@ pub fn get_page( } } +fn process_rar( + convert: bool, + path: &Path, +) -> Result { + if convert { + let zip_path = rar::convert_to_zip(path)?; + zip::process(zip_path.as_path()) + } else { + rar::process(path) + } +} + pub fn process( path: &Path, options: &LibraryOptions, @@ -163,11 +170,13 @@ pub fn process( let mime = infer_mime_from_path(path); match mime.as_deref() { - Some("application/zip") => process_zip(path), - Some("application/vnd.comicbook+zip") => process_zip(path), - Some("application/vnd.rar") => process_rar(path, options), - Some("application/vnd.comicbook-rar") => process_rar(path, options), - Some("application/epub+zip") => process_epub(path), + Some("application/zip") => zip::process(path), + Some("application/vnd.comicbook+zip") => zip::process(path), + Some("application/vnd.rar") => process_rar(options.convert_rar_to_zip, path), + Some("application/vnd.comicbook-rar") => { + process_rar(options.convert_rar_to_zip, path) + }, + Some("application/epub+zip") => epub::process(path), None => Err(ProcessFileError::Unknown(format!( "Unable to determine mime type for file: {:?}", path diff --git a/core/src/fs/media_file/pdf.rs b/core/src/fs/media_file/pdf.rs index ca7346ba5..0c823afbe 100644 --- a/core/src/fs/media_file/pdf.rs +++ b/core/src/fs/media_file/pdf.rs @@ -1,6 +1,6 @@ -use crate::types::{errors::ProcessFileError, ContentType}; +use crate::prelude::{errors::ProcessFileError, ContentType}; -pub fn get_pdf_page( +pub fn get_page( _file: &str, _page: usize, ) -> Result<(ContentType, Vec), ProcessFileError> { diff --git a/core/src/fs/media_file/rar.rs b/core/src/fs/media_file/rar.rs index 8a6675d25..9b046a87b 100644 --- a/core/src/fs/media_file/rar.rs +++ b/core/src/fs/media_file/rar.rs @@ -7,12 +7,10 @@ use crate::{ fs::{ archive::create_zip_archive, checksum::{self, DIGEST_SAMPLE_COUNT, DIGEST_SAMPLE_SIZE}, - media_file::{self, zip, IsImage}, + media_file::{self, IsImage}, }, - types::{ - errors::ProcessFileError, - models::{library::LibraryOptions, media::ProcessedMediaFile}, - ContentType, + prelude::{ + errors::ProcessFileError, fs::media_file::ProcessedMediaFile, ContentType, }, }; @@ -31,7 +29,7 @@ impl IsImage for Entry { } } -pub fn convert_rar_to_zip(path: &Path) -> Result { +pub fn convert_to_zip(path: &Path) -> Result { debug!("Converting {:?} to zip format.", &path); let archive = unrar::Archive::new(path)?; @@ -91,18 +89,7 @@ pub fn convert_rar_to_zip(path: &Path) -> Result { /// Processes a rar file in its entirety. Will return a tuple of the comic info and the list of /// files in the rar. 
-pub fn process_rar(
-	path: &Path,
-	options: &LibraryOptions,
-) -> Result<ProcessedMediaFile, ProcessFileError> {
-	if options.convert_rar_to_zip {
-		let new_path = convert_rar_to_zip(path)?;
-
-		trace!("Using `process_zip` with converted rar.");
-
-		return zip::process_zip(&new_path);
-	}
-
+pub fn process(path: &Path) -> Result<ProcessedMediaFile, ProcessFileError> {
 	// or platform is windows
 	if stump_in_docker() || cfg!(windows) {
 		return Err(ProcessFileError::UnsupportedFileType(
@@ -120,7 +107,7 @@ pub fn process_rar(
 	#[allow(unused_mut)]
 	let mut metadata_buf = Vec::<u8>::new();
 
-	let checksum = digest_rar(&path_str);
+	let checksum = digest(&path_str);
 
 	match archive.list_extract() {
 		Ok(open_archive) => {
@@ -165,7 +152,7 @@ pub fn process_rar(
 
 // FIXME: this is a temporary work around for the issue wonderful people on Discord
 // discovered.
-pub fn rar_sample(file: &str) -> Result<u64, ProcessFileError> {
+pub fn sample_size(file: &str) -> Result<u64, ProcessFileError> {
 	debug!("Calculating checksum sample size for: {}", file);
 
 	let file = std::fs::File::open(file)?;
@@ -204,10 +191,10 @@ pub fn rar_sample(file: &str) -> Result<u64, ProcessFileError> {
 	// 	.fold(0, |acc, e| acc + e.unpacked_size as u64))
 }
 
-pub fn digest_rar(file: &str) -> Option<String> {
+pub fn digest(file: &str) -> Option<String> {
 	debug!("Attempting to generate checksum for: {}", file);
 
-	let sample = rar_sample(file);
+	let sample = sample_size(file);
 
 	// Error handled in `sample_size`
 	if sample.is_err() {
@@ -242,7 +229,7 @@ pub fn digest_rar(file: &str) -> Option<String> {
 // OpenArchive handle stored in Entry is no more. That's why I create another archive to grab what I want before
 // the iterator is done. At least, I *think* that is what is happening.
 // Fix location: https://github.com/aaronleopold/unrar.rs/tree/aleopold--read-bytes
-pub fn get_rar_image(
+pub fn get_image(
 	file: &str,
 	page: i32,
 ) -> Result<(ContentType, Vec<u8>), ProcessFileError> {
diff --git a/core/src/fs/media_file/zip.rs b/core/src/fs/media_file/zip.rs
index 9d6ec5c73..70fd2c2d5 100644
--- a/core/src/fs/media_file/zip.rs
+++ b/core/src/fs/media_file/zip.rs
@@ -7,7 +7,9 @@ use crate::{
 		checksum,
 		media_file::{self, IsImage},
 	},
-	types::{errors::ProcessFileError, models::media::ProcessedMediaFile, ContentType},
+	prelude::{
+		errors::ProcessFileError, fs::media_file::ProcessedMediaFile, ContentType,
+	},
 };
 
 impl<'a> IsImage for ZipFile<'a> {
@@ -31,7 +33,7 @@ impl<'a> IsImage for ZipFile<'a> {
 /// Get the sample size (in bytes) to use for generating a checksum of a zip file. Rather than
 /// computing the sample size via the file size, we instead calculate the sample size by
 /// summing the size of the first 5 files in the zip file.
-pub fn zip_sample(file: &str) -> u64 {
+pub fn sample_size(file: &str) -> u64 {
 	let zip_file = File::open(file).unwrap();
 	let mut archive = zip::ZipArchive::new(zip_file).unwrap();
@@ -53,8 +55,8 @@ }
 
 /// Calls `checksum::digest` to attempt generating a checksum for the zip file.
-pub fn digest_zip(path: &str) -> Option<String> {
-	let size = zip_sample(path);
+pub fn digest(path: &str) -> Option<String> {
+	let size = sample_size(path);
 
 	debug!(
 		"Calculated sample size (in bytes) for generating checksum: {}",
@@ -77,7 +79,7 @@
 /// Processes a zip file in its entirety, includes: metadata, page count, and the
 /// generated checksum for the file.
 // TODO: do I need to pass in the library options here?
-pub fn process_zip(path: &Path) -> Result { +pub fn process(path: &Path) -> Result { debug!("Processing Zip: {}", path.display()); let zip_file = File::open(path)?; @@ -99,10 +101,16 @@ pub fn process_zip(path: &Path) -> Result } } + let checksum = if let Some(path) = path.to_str() { + digest(path) + } else { + None + }; + Ok(ProcessedMediaFile { thumbnail_path: None, path: path.to_path_buf(), - checksum: digest_zip(path.to_str().unwrap()), + checksum, metadata: comic_info, pages, }) @@ -111,7 +119,7 @@ pub fn process_zip(path: &Path) -> Result // FIXME: this solution is terrible, was just fighting with borrow checker and wanted // a quick solve. TODO: rework this! /// Get an image from a zip file by index (page). -pub fn get_zip_image( +pub fn get_image( file: &str, page: i32, ) -> Result<(ContentType, Vec), ProcessFileError> { diff --git a/core/src/fs/scanner/library_scanner.rs b/core/src/fs/scanner/library_scanner.rs index eb7acc146..95405a7dc 100644 --- a/core/src/fs/scanner/library_scanner.rs +++ b/core/src/fs/scanner/library_scanner.rs @@ -15,21 +15,15 @@ use tracing::{debug, error, trace, warn}; use walkdir::{DirEntry, WalkDir}; use crate::{ - config::context::Ctx, + db::models::{LibraryOptions, LibraryScanMode}, event::CoreEvent, - fs::{ - image, - scanner::{ - utils::{insert_series_batch, mark_media_missing}, - ScannedFileTrait, - }, + fs::scanner::{ + utils::{insert_series_batch, mark_media_missing}, + ScannedFileTrait, }, job::{persist_job_start, runner::RunnerCtx, JobUpdate}, + prelude::{CoreError, CoreResult, Ctx, FileStatus}, prisma::{library, media, series}, - types::{ - enums::FileStatus, errors::CoreError, models::library::LibraryOptions, - CoreResult, LibraryScanMode, - }, }; use super::{ @@ -310,7 +304,7 @@ async fn scan_series( Ok(media) => { visited_media.insert(media.path.clone(), true); - ctx.emit_client_event(CoreEvent::CreatedMedia(media.clone())); + // ctx.emit_client_event(CoreEvent::CreatedMedia(media.clone())); }, Err(e) => { error!("Failed to insert media: {:?}", e); @@ -547,9 +541,10 @@ pub async fn scan_batch( // sleep for a bit to let client catch up tokio::time::sleep(Duration::from_millis(50)).await; - if let Err(err) = image::generate_thumbnails(created_media) { - error!("Failed to generate thumbnails: {:?}", err); - } + // FIXME: dao + // if let Err(err) = image::generate_thumbnails(created_media) { + // error!("Failed to generate thumbnails: {:?}", err); + // } } ctx.progress(JobUpdate::job_finishing( diff --git a/core/src/fs/scanner/mod.rs b/core/src/fs/scanner/mod.rs index acdd6bcf5..227b42be5 100644 --- a/core/src/fs/scanner/mod.rs +++ b/core/src/fs/scanner/mod.rs @@ -8,7 +8,7 @@ use walkdir::WalkDir; use crate::{ fs::media_file::{self, guess_mime}, - types::ContentType, + prelude::ContentType, }; // TODO: refactor this trait? 
yes please diff --git a/core/src/fs/scanner/utils.rs b/core/src/fs/scanner/utils.rs index 29a740dee..26c17052f 100644 --- a/core/src/fs/scanner/utils.rs +++ b/core/src/fs/scanner/utils.rs @@ -10,19 +10,80 @@ use tracing::{debug, error, trace}; use walkdir::DirEntry; use crate::{ - config::context::Ctx, + db::{ + models::{LibraryOptions, Media, MediaBuilder, MediaBuilderOptions}, + Dao, DaoBatch, MediaDao, + }, event::CoreEvent, - fs::{image, media_file}, + fs::{image, media_file, scanner::BatchScanOperation}, + prelude::{CoreResult, Ctx, FileStatus, ScanError}, prisma::{library, media, series}, - types::{ - enums::FileStatus, - errors::ScanError, - models::{library::LibraryOptions, media::TentativeMedia}, - CoreResult, - }, }; -use super::BatchScanOperation; +impl MediaBuilder for Media { + fn build(path: &Path, series_id: &str) -> CoreResult { + Media::build_with_options( + path, + MediaBuilderOptions { + series_id: series_id.to_string(), + ..Default::default() + }, + ) + } + + fn build_with_options( + path: &Path, + options: MediaBuilderOptions, + ) -> CoreResult { + let processed_entry = media_file::process(path, &options.library_options)?; + + let pathbuf = processed_entry.path; + let path = pathbuf.as_path(); + + let path_str = path.to_str().unwrap_or_default().to_string(); + + let name = path + .file_stem() + .unwrap_or_default() + .to_str() + .unwrap_or_default() + .to_string(); + + let ext = path + .extension() + .unwrap_or_default() + .to_str() + .unwrap_or_default() + .to_string(); + + // Note: make this return a tuple if I need to grab anything else from metadata. + let size = match path.metadata() { + Ok(metadata) => metadata.len(), + _ => 0, + }; + + let comic_info = processed_entry.metadata.unwrap_or_default(); + + Ok(Media { + name, + description: comic_info.summary, + size: size.try_into().unwrap_or_else(|e| { + error!("Failed to calculate file size: {:?}", e); + + 0 + }), + extension: ext, + pages: match comic_info.page_count { + Some(count) => count as i32, + None => processed_entry.pages, + }, + checksum: processed_entry.checksum, + path: path_str, + series_id: options.series_id, + ..Default::default() + }) + } +} /// Will mark all series and media within the library as MISSING. Requires the /// series and series.media relations to have been loaded to function properly. @@ -55,83 +116,88 @@ pub async fn mark_library_missing(library: library::Data, ctx: &Ctx) -> CoreResu Ok(()) } -pub fn get_tentative_media( - path: &Path, - series_id: String, - library_options: &LibraryOptions, -) -> Result { - let processed_entry = media_file::process(path, library_options)?; - - let pathbuf = processed_entry.path; - let path = pathbuf.as_path(); - - let path_str = path.to_str().unwrap_or_default().to_string(); - - // EW, I hate that I need to do this over and over lol time to make a trait for Path. - let name = path - .file_stem() - .unwrap_or_default() - .to_str() - .unwrap_or_default() - .to_string(); - - let ext = path - .extension() - .unwrap_or_default() - .to_str() - .unwrap_or_default() - .to_string(); - - // Note: make this return a tuple if I need to grab anything else from metadata. 
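The `MediaBuilder` impl above replaces `get_tentative_media`: instead of producing a `TentativeMedia` that is later turned into a Prisma create action, the scanner builds a plain `Media` model and hands it to a DAO. A short sketch of the two entry points follows; the wrapper function itself is hypothetical.

use std::path::Path;

use crate::{
	db::models::{LibraryOptions, Media, MediaBuilder, MediaBuilderOptions},
	prelude::CoreResult,
};

fn build_for_scan(
	path: &Path,
	series_id: &str,
	library_options: &LibraryOptions,
) -> CoreResult<Media> {
	// Short form: fills `MediaBuilderOptions` with defaults.
	let _with_defaults = Media::build(path, series_id)?;

	// Long form: threads the library configuration (e.g.
	// `convert_rar_to_zip`) into `media_file::process`.
	Media::build_with_options(
		path,
		MediaBuilderOptions {
			series_id: series_id.to_string(),
			library_options: library_options.clone(),
		},
	)
}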
- let size = match path.metadata() { - Ok(metadata) => metadata.len(), - _ => 0, - }; - - let comic_info = processed_entry.metadata.unwrap_or_default(); - - Ok(TentativeMedia { - name, - description: comic_info.summary, - size: size.try_into().unwrap_or_else(|e| { - error!("Failed to calculate file size: {:?}", e); - - 0 - }), - extension: ext, - pages: match comic_info.page_count { - Some(count) => count as i32, - None => processed_entry.pages, - }, - checksum: processed_entry.checksum, - path: path_str, - series_id, - }) -} +// pub fn get_tentative_media( +// path: &Path, +// series_id: String, +// library_options: &LibraryOptions, +// ) -> Result { +// let processed_entry = media_file::process(path, library_options)?; + +// let pathbuf = processed_entry.path; +// let path = pathbuf.as_path(); + +// let path_str = path.to_str().unwrap_or_default().to_string(); + +// // EW, I hate that I need to do this over and over lol time to make a trait for Path. +// let name = path +// .file_stem() +// .unwrap_or_default() +// .to_str() +// .unwrap_or_default() +// .to_string(); + +// let ext = path +// .extension() +// .unwrap_or_default() +// .to_str() +// .unwrap_or_default() +// .to_string(); + +// // Note: make this return a tuple if I need to grab anything else from metadata. +// let size = match path.metadata() { +// Ok(metadata) => metadata.len(), +// _ => 0, +// }; + +// let comic_info = processed_entry.metadata.unwrap_or_default(); + +// Ok(TentativeMedia { +// name, +// description: comic_info.summary, +// size: size.try_into().unwrap_or_else(|e| { +// error!("Failed to calculate file size: {:?}", e); + +// 0 +// }), +// extension: ext, +// pages: match comic_info.page_count { +// Some(count) => count as i32, +// None => processed_entry.pages, +// }, +// checksum: processed_entry.checksum, +// path: path_str, +// series_id, +// }) +// } pub async fn insert_media( ctx: &Ctx, path: &Path, series_id: String, library_options: &LibraryOptions, -) -> Result { +) -> CoreResult { let path_str = path.to_str().unwrap_or_default().to_string(); + let media_dao = MediaDao::new(ctx.db.clone()); + let media = Media::build_with_options( + path, + MediaBuilderOptions { + series_id, + library_options: library_options.clone(), + }, + )?; + let created_media = media_dao.insert(media).await?; - let tentative_media = get_tentative_media(path, series_id, library_options)?; - let create_action = tentative_media.into_action(ctx); - let media = create_action.exec().await?; - - trace!("Media entity created: {:?}", media); + trace!("Media entity created: {:?}", created_media); if library_options.create_webp_thumbnails { debug!("Attempting to create WEBP thumbnail"); - let thumbnail_path = image::generate_thumbnail(&media.id, &path_str)?; + let thumbnail_path = image::generate_thumbnail(&created_media.id, &path_str)?; debug!("Created WEBP thumbnail: {:?}", thumbnail_path); } debug!("Media for {} created successfully", path_str); - Ok(media) + Ok(created_media) } pub async fn insert_series( @@ -255,7 +321,8 @@ pub async fn batch_media_operations( ctx: &Ctx, operations: Vec, library_options: &LibraryOptions, -) -> Result, ScanError> { +) -> CoreResult> { + let media_dao = MediaDao::new(ctx.db.clone()); // Note: this won't work if I add any other operations... 
let (create_operations, mark_missing_operations): (Vec<_>, Vec<_>) = operations.into_iter().partition(|operation| { @@ -264,17 +331,20 @@ pub async fn batch_media_operations( let media_creates = create_operations .into_iter() - .map(|operation| { - match operation { - BatchScanOperation::CreateMedia { path, series_id } => { - // let result = insert_media(&ctx, &path, series_id, &library_options).await; - get_tentative_media(&path, series_id, library_options) - }, - _ => unreachable!(), - } + .map(|operation| match operation { + BatchScanOperation::CreateMedia { path, series_id } => { + Media::build_with_options( + &path, + MediaBuilderOptions { + series_id, + library_options: library_options.clone(), + }, + ) + }, + _ => unreachable!(), }) .filter_map(|res| match res { - Ok(entry) => Some(entry.into_action(ctx)), + Ok(entry) => Some(entry), Err(e) => { error!("Failed to create media: {:?}", e); @@ -290,15 +360,9 @@ pub async fn batch_media_operations( }) .collect::>(); - let result = mark_media_missing(ctx, missing_paths).await; - - if let Err(err) = result { - error!("Failed to mark media as MISSING: {:?}", err); - } else { - debug!("Marked {} media as MISSING", result.unwrap()); - } + let _result = mark_media_missing(ctx, missing_paths).await; - Ok(ctx.db._batch(media_creates).await?) + media_dao._insert_batch(media_creates).await } // TODO: error handling, i.e don't unwrap lol diff --git a/core/src/job/jobs.rs b/core/src/job/jobs.rs index 5d0e55828..6bb5a474d 100644 --- a/core/src/job/jobs.rs +++ b/core/src/job/jobs.rs @@ -1,8 +1,7 @@ use super::{Job, RunnerCtx}; use crate::{ - fs::scanner::library_scanner::scan, - job::JobUpdate, - types::{models::library::LibraryScanMode, CoreResult}, + db::models::LibraryScanMode, fs::scanner::library_scanner::scan, job::JobUpdate, + prelude::CoreResult, }; use tracing::info; diff --git a/core/src/job/mod.rs b/core/src/job/mod.rs index e7f96f769..39e114f9f 100644 --- a/core/src/job/mod.rs +++ b/core/src/job/mod.rs @@ -11,11 +11,10 @@ use specta::Type; // use tracing::error; use crate::{ - config::context::Ctx, event::CoreEvent, job::runner::RunnerCtx, + prelude::{errors::CoreError, CoreResult, Ctx}, prisma::{self}, - types::{errors::CoreError, CoreResult}, }; #[async_trait::async_trait] diff --git a/core/src/job/pool.rs b/core/src/job/pool.rs index 9247b8bbe..f5bbb9e66 100644 --- a/core/src/job/pool.rs +++ b/core/src/job/pool.rs @@ -8,10 +8,9 @@ use tracing::error; use super::{persist_job_cancelled, runner::Runner, Job, JobReport}; use crate::{ - config::context::Ctx, event::{CoreEvent, InternalCoreTask}, + prelude::{CoreError, CoreResult, Ctx}, prisma::job, - types::{CoreError, CoreResult}, }; // Note: this is 12 hours diff --git a/core/src/job/runner.rs b/core/src/job/runner.rs index bcef85e65..fe5d4d448 100644 --- a/core/src/job/runner.rs +++ b/core/src/job/runner.rs @@ -5,9 +5,8 @@ use tokio::{self, sync::Mutex}; use tracing::error; use crate::{ - config::context::Ctx, event::CoreEvent, - types::{CoreError, CoreResult}, + prelude::{CoreError, CoreResult, Ctx}, }; use super::{persist_new_job, pool::JobPool, Job, JobUpdate, JobWrapper}; diff --git a/core/src/lib.rs b/core/src/lib.rs index cb2091484..d0c7c3602 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -10,19 +10,14 @@ pub mod fs; pub mod job; pub mod opds; -// TODO: I don't really want this to be pub. I think the only way that is possible is if I -// made ALL the DB operations pub, interfacing with the prisma client directly. 
This way, -// the server invokes those functions, rather than building those queries. I don't see a nice, -// neat way to do this that won't give me a migraine lol. +pub mod prelude; pub mod prisma; -pub mod types; -use config::context::Ctx; use config::env::StumpEnvironment; use config::logging::STUMP_SHADOW_TEXT; use event::{event_manager::EventManager, InternalCoreTask}; +use prelude::{CoreError, CoreResult, Ctx}; use tokio::sync::mpsc::unbounded_channel; -use types::{errors::CoreError, CoreResult}; /// The [`StumpCore`] struct is the main entry point for any server-side Stump /// applications. It is responsible for managing incoming tasks ([`InternalCoreTask`]), diff --git a/core/src/opds/author.rs b/core/src/opds/author.rs index 16fdf4f68..e2f4e7c89 100644 --- a/core/src/opds/author.rs +++ b/core/src/opds/author.rs @@ -1,6 +1,6 @@ use xml::EventWriter; -use crate::types::CoreResult; +use crate::prelude::CoreResult; use super::util; diff --git a/core/src/opds/entry.rs b/core/src/opds/entry.rs index 8e0e21372..d32cbf509 100644 --- a/core/src/opds/entry.rs +++ b/core/src/opds/entry.rs @@ -3,7 +3,7 @@ use prisma_client_rust::chrono::{self, FixedOffset}; use urlencoding::encode; use xml::{writer::XmlEvent, EventWriter}; -use crate::types::CoreResult; +use crate::prelude::CoreResult; use crate::{ opds::link::OpdsStreamLink, prisma::{library, media, series}, diff --git a/core/src/opds/feed.rs b/core/src/opds/feed.rs index ea5b9b375..dd5d425d0 100644 --- a/core/src/opds/feed.rs +++ b/core/src/opds/feed.rs @@ -1,7 +1,7 @@ use crate::{ opds::link::OpdsLink, + prelude::errors::CoreError, prisma::{library, series}, - types::errors::CoreError, }; use prisma_client_rust::chrono; use tracing::warn; diff --git a/core/src/opds/link.rs b/core/src/opds/link.rs index 75c2850cb..3fb5f394e 100644 --- a/core/src/opds/link.rs +++ b/core/src/opds/link.rs @@ -1,6 +1,6 @@ use xml::{writer::XmlEvent, EventWriter}; -use crate::types::CoreResult; +use crate::prelude::CoreResult; use super::util::OpdsEnumStr; diff --git a/core/src/opds/opensearch.rs b/core/src/opds/opensearch.rs index 66bddc025..b83ab6cbb 100644 --- a/core/src/opds/opensearch.rs +++ b/core/src/opds/opensearch.rs @@ -1,6 +1,6 @@ use xml::{writer::XmlEvent, EventWriter}; -use crate::types::CoreResult; +use crate::prelude::CoreResult; use super::{ link::OpdsLinkType, diff --git a/core/src/opds/util.rs b/core/src/opds/util.rs index 425074441..cd5e195ca 100644 --- a/core/src/opds/util.rs +++ b/core/src/opds/util.rs @@ -1,6 +1,6 @@ use xml::{writer::XmlEvent, EventWriter}; -use crate::types::CoreResult; +use crate::prelude::CoreResult; pub trait OpdsEnumStr { fn as_str(&self) -> &'static str; diff --git a/core/src/config/context.rs b/core/src/prelude/context.rs similarity index 95% rename from core/src/config/context.rs rename to core/src/prelude/context.rs index 636acb56f..1f4f28786 100644 --- a/core/src/config/context.rs +++ b/core/src/prelude/context.rs @@ -6,11 +6,10 @@ use tokio::sync::{ }; use crate::{ - db, + db::{self, models::Log}, event::{CoreEvent, InternalCoreTask}, job::Job, prisma, - types::models::log::TentativeLog, }; type InternalSender = UnboundedSender; @@ -156,21 +155,19 @@ impl Ctx { pub async fn handle_failure_event(&self, event: CoreEvent) { use prisma::log; - // TODO: maybe log::error! here? - self.emit_client_event(event.clone()); - let tentative_log = TentativeLog::from(event); + let log = Log::from(event); // FIXME: error handling here... 
let _ = self .db .log() .create( - tentative_log.message, + log.message, vec![ - log::job_id::set(tentative_log.job_id), - log::level::set(tentative_log.level.to_string()), + log::job_id::set(log.job_id), + log::level::set(log.level.to_string()), ], ) .exec() diff --git a/core/src/types/enums.rs b/core/src/prelude/enums.rs similarity index 96% rename from core/src/types/enums.rs rename to core/src/prelude/enums.rs index 4f8bac9df..fc93c804c 100644 --- a/core/src/types/enums.rs +++ b/core/src/prelude/enums.rs @@ -33,6 +33,12 @@ pub enum FileStatus { Missing, } +impl Default for FileStatus { + fn default() -> Self { + Self::Ready + } +} + impl fmt::Display for FileStatus { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { diff --git a/core/src/types/errors.rs b/core/src/prelude/errors.rs similarity index 100% rename from core/src/types/errors.rs rename to core/src/prelude/errors.rs diff --git a/core/src/types/models/list_directory.rs b/core/src/prelude/fs/list_directory.rs similarity index 100% rename from core/src/types/models/list_directory.rs rename to core/src/prelude/fs/list_directory.rs diff --git a/core/src/prelude/fs/media_file.rs b/core/src/prelude/fs/media_file.rs new file mode 100644 index 000000000..38ba10d9b --- /dev/null +++ b/core/src/prelude/fs/media_file.rs @@ -0,0 +1,45 @@ +use std::path::PathBuf; + +use serde::{Deserialize, Serialize}; +use specta::Type; + +pub struct ProcessedMediaFile { + pub thumbnail_path: Option, + pub path: PathBuf, + pub checksum: Option, + pub metadata: Option, + pub pages: i32, +} + +// Derived from ComicInfo.xml +#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Type, Default)] +pub struct MediaMetadata { + #[serde(rename = "Series")] + pub series: Option, + #[serde(rename = "Number")] + pub number: Option, + #[serde(rename = "Web")] + pub web: Option, + #[serde(rename = "Summary")] + pub summary: Option, + #[serde(rename = "Publisher")] + pub publisher: Option, + #[serde(rename = "Genre")] + pub genre: Option, + #[serde(rename = "PageCount")] + pub page_count: Option, +} + +// impl MediaMetadata { +// pub fn default() -> Self { +// Self { +// series: None, +// number: None, +// web: None, +// summary: None, +// publisher: None, +// genre: None, +// page_count: None, +// } +// } +// } diff --git a/core/src/prelude/fs/mod.rs b/core/src/prelude/fs/mod.rs new file mode 100644 index 000000000..ac3fcda85 --- /dev/null +++ b/core/src/prelude/fs/mod.rs @@ -0,0 +1,5 @@ +pub mod list_directory; +pub mod media_file; + +pub use list_directory::*; +pub use media_file::*; diff --git a/core/src/types/mod.rs b/core/src/prelude/mod.rs similarity index 92% rename from core/src/types/mod.rs rename to core/src/prelude/mod.rs index ef15f0a9d..eedb91ad6 100644 --- a/core/src/types/mod.rs +++ b/core/src/prelude/mod.rs @@ -1,13 +1,15 @@ +pub mod context; pub mod enums; pub mod errors; -pub mod models; +pub mod fs; pub mod server; -pub use errors::CoreError; - pub type CoreResult = Result; -pub use models::*; +pub use context::*; +pub use enums::*; +pub use errors::*; +pub use fs::*; pub use server::*; #[allow(unused_imports)] @@ -16,19 +18,17 @@ mod tests { use specta::ts_export; - use crate::{event::*, job::*}; - - use super::{ - enums::*, - errors::*, - inputs::*, - models::{ - epub::*, library::*, list_directory::*, log::*, media::*, read_progress::*, - series::*, tag::*, user::*, readinglist::* + use crate::{ + db::models::{ + epub::*, library::*, log::*, media::*, read_progress::*, reading_list::*, + series::*, tag::*, user::*, }, - 
server::*, + event::*, + job::*, }; + use super::{enums::*, errors::*, fs::*, inputs::*, server::*}; + #[test] #[ignore] fn codegen() -> Result<(), Box> { diff --git a/core/src/types/server/http.rs b/core/src/prelude/server/http.rs similarity index 100% rename from core/src/types/server/http.rs rename to core/src/prelude/server/http.rs diff --git a/core/src/types/server/inputs.rs b/core/src/prelude/server/inputs.rs similarity index 90% rename from core/src/types/server/inputs.rs rename to core/src/prelude/server/inputs.rs index 20c64f6c0..eed174227 100644 --- a/core/src/types/server/inputs.rs +++ b/core/src/prelude/server/inputs.rs @@ -1,10 +1,7 @@ use serde::{Deserialize, Serialize}; use specta::Type; -use crate::types::{ - library::{LibraryOptions, LibraryScanMode}, - tag::Tag, -}; +use crate::db::models::{LibraryOptions, LibraryScanMode, Tag}; #[derive(Debug, Clone, Deserialize, Type)] pub struct UserPreferencesUpdate { @@ -66,3 +63,9 @@ pub struct UpdateLibraryArgs { /// Optional flag to indicate how the library should be automatically scanned after update. Default is `BATCHED`. pub scan_mode: Option, } + +#[derive(Debug, Clone, Serialize, Deserialize, Type)] +pub struct CreateReadingList { + pub id: String, + pub media_ids: Vec, +} diff --git a/core/src/types/server/mod.rs b/core/src/prelude/server/mod.rs similarity index 100% rename from core/src/types/server/mod.rs rename to core/src/prelude/server/mod.rs diff --git a/core/src/types/server/pageable.rs b/core/src/prelude/server/pageable.rs similarity index 94% rename from core/src/types/server/pageable.rs rename to core/src/prelude/server/pageable.rs index 1b0fe81a6..69e2e1795 100644 --- a/core/src/types/server/pageable.rs +++ b/core/src/prelude/server/pageable.rs @@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize}; use specta::Type; use tracing::trace; -use crate::types::DirectoryListing; +use crate::prelude::DirectoryListing; use super::Direction; @@ -37,6 +37,22 @@ impl Default for PageParams { } } +impl PageParams { + /// Returns a tuple of (skip, take) for use in Prisma queries. 
+ pub fn get_skip_take(&self) -> (i64, i64) { + let start = if self.zero_based { + self.page * self.page_size + } else { + (self.page - 1) * self.page_size + } as i64; + + // let end = start + self.page_size; + let take = self.page_size as i64; + + (start, take) + } +} + impl From> for PageParams { fn from(req_params: Option) -> Self { match req_params { diff --git a/core/src/types/server/query.rs b/core/src/prelude/server/query.rs similarity index 75% rename from core/src/types/server/query.rs rename to core/src/prelude/server/query.rs index 80e641662..076752ab5 100644 --- a/core/src/types/server/query.rs +++ b/core/src/prelude/server/query.rs @@ -1,12 +1,9 @@ -use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use specta::Type; -use prisma_client_rust::{query_core::Selection, FindMany, PrismaValue, SerializedWhere}; - use crate::{ + prelude::{errors::CoreError, server::pageable::PageParams}, prisma::{media, series}, - types::{errors::CoreError, server::pageable::PageParams}, }; #[derive(Debug, Serialize, Deserialize, Clone, Type)] @@ -108,28 +105,28 @@ impl TryInto for QueryOrder { } } -pub trait FindManyTrait { - fn paginated(self, page_params: PageParams) -> Self; -} - -impl FindManyTrait - for FindMany<'_, Where, With, OrderBy, Cursor, Set, Data> -where - Where: Into, - With: Into, - OrderBy: Into<(String, PrismaValue)>, - Cursor: Into, - Set: Into<(String, PrismaValue)>, - Data: DeserializeOwned, -{ - fn paginated(self, page_params: PageParams) -> Self { - let skip = match page_params.zero_based { - true => page_params.page * page_params.page_size, - false => (page_params.page - 1) * page_params.page_size, - } as i64; - - let take = page_params.page_size as i64; - - self.skip(skip).take(take) - } -} +// pub trait FindManyTrait { +// fn paginated(self, page_params: PageParams) -> Self; +// } + +// impl FindManyTrait +// for FindMany<'_, Where, With, OrderBy, Cursor, Set, Data> +// where +// Where: Into, +// With: Into, +// OrderBy: Into<(String, PrismaValue)>, +// Cursor: Into, +// Set: Into<(String, PrismaValue)>, +// Data: DeserializeOwned, +// { +// fn paginated(self, page_params: PageParams) -> Self { +// let skip = match page_params.zero_based { +// true => page_params.page * page_params.page_size, +// false => (page_params.page - 1) * page_params.page_size, +// } as i64; + +// let take = page_params.page_size as i64; + +// self.skip(skip).take(take) +// } +// } diff --git a/core/src/types/models/readinglist.rs b/core/src/types/models/readinglist.rs deleted file mode 100644 index 783088aac..000000000 --- a/core/src/types/models/readinglist.rs +++ /dev/null @@ -1,29 +0,0 @@ -use serde::{Deserialize, Serialize}; -use specta::Type; - -use crate::prisma::{self}; - -#[derive(Debug, Clone, Serialize, Deserialize, Type)] -pub struct ReadingList { - pub id: String, - pub name: String, - pub creating_user_id: String, - pub description: Option, -} - -impl From for ReadingList { - fn from(data: prisma::reading_list::Data) -> ReadingList { - ReadingList { - id: data.id, - name: data.name, - creating_user_id: data.creating_user_id, - description: data.description, - } - } -} - -#[derive(Debug, Clone, Serialize, Deserialize, Type)] -pub struct CreateReadingList { - pub id: String, - pub media_ids: Vec -} \ No newline at end of file diff --git a/package.json b/package.json index a8faa658d..1767d668c 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,7 @@ "scripts": { "prepare": "husky install", "setup": "pnpm i && pnpm web build && pnpm core run setup", + "tsc:checks": 
"pnpm -r tsc --noEmit", "checks": "pnpm -r check", "clippy": "cargo clippy -- -D warnings", "tests": "pnpm -r test", diff --git a/scripts/prisma-sed.sh b/scripts/prisma-sed.sh new file mode 100755 index 000000000..7590a9bff --- /dev/null +++ b/scripts/prisma-sed.sh @@ -0,0 +1,4 @@ +# TODO: make more dyanimc of a script +set -ex; \ + sed -i 's|\/.*\/core\/prisma\/schema.prisma|\/app\/core\/prisma\/schema.prisma|g' core/src/prisma.rs; \ + sed -i 's|\/.*\/core\/prisma\/migrations|\/app\/core\/prisma\/migrations|g' core/src/prisma.rs \ No newline at end of file diff --git a/scripts/system-setup.sh b/scripts/system-setup.sh new file mode 100755 index 000000000..6f16ec5be --- /dev/null +++ b/scripts/system-setup.sh @@ -0,0 +1,116 @@ +#!/bin/bash + +_DEV_SETUP=${DEV_SETUP:=1} +_CHECK_CARGO=${CHECK_CARGO:=1} +_CHECK_NODE=${CHECK_NODE:=1} +_FORCE_INSTALL_PNPM=${INSTALL_PNPM:=0} + +dev_setup() { + echo "Installing 'cargo-watch'..." + echo + + cargo install cargo-watch + + echo "Running 'pnpm run setup'..." + echo + + pnpm run setup + + echo +} + +if [ ${_CHECK_CARGO} == 1 ]; then + which cargo &> /dev/null + if [ $? -ne 0 ]; then + echo "Rust could not be found on your system. Visit https://www.rust-lang.org/tools/install" + exit 1 + else + echo "Rust requirement met!" + fi +fi + +if [ ${_CHECK_NODE} == 1 ]; then + which node &> /dev/null + if [ $? -eq 1 ]; then + echo "Node could not be found on your system. Visit https://nodejs.org/en/download/" + exit 1 + else + echo "Node requirement met!" + fi + + which pnpm &> /dev/null + if [ $? -eq 1 ]; then + if [ ${_FORCE_INSTALL_PNPM} == 1 ]; then + echo "Installing pnpm..." + npm install -g pnpm + else + echo "pnpm could not be found on your system. Would you like for this script to attempt to install 'pnpm'? (y/n)" + + can_continue=false + until [ $can_continue = true ]; do + read -p "Choice: " choice + + case $choice in + y) + echo "Attempting to install 'pnpm'..." + npm install -g pnpm + if [ $? -eq 0 ]; then + echo "pnpm installed successfully." + can_continue=true + else + echo "pnpm could not be installed. Please ensure you have node and npm installed." + can_continue=false + exit 1 + fi + ;; + n) + echo "Skipping 'pnpm' installation. Exiting." + can_continue=false + exit 1 + ;; + *) + echo "Invalid choice. Please enter 'y' or 'n'." + can_continue=false + ;; + esac + + echo + echo "Would you like for this script to attempt to install 'pnpm'? (y/n)" + done + fi + else + echo "pnpm requirement met!" + fi +fi + +if [[ "$OSTYPE" == "linux-gnu"* ]]; then + if which apt-get &> /dev/null; then + sudo apt-get -y update + sudo apt-get -y install pkg-config libssl-dev libdbus-1-dev libsoup2.4-dev libwebkit2gtk-4.0-dev curl wget libgtk-3-dev libappindicator3-dev librsvg2-dev build-essential + elif which pacman &> /dev/null; then + sudo pacman -Syu + sudo pacman -S --needed base-devel openssl + elif which dnf &> /dev/null; then + sudo dnf check-update + sudo dnf install "openssl-devel" + sudo dnf group install "C Development Tools and Libraries" + else + echo "Your distro '$(lsb_release -s -d)' is not supported by this script. Please consider adding support for it: https://github.com/aaronleopold/stump/issues" + exit 1 + fi + + if [ {$_DEV_SETUP} == 1 ]; then + dev_setup + fi + + echo "Setup completed! Run 'pnpm dev:web' or 'pnpm start:web' to get started." +elif [[ "$OSTYPE" == "darwin"* ]]; then + if [ {$_DEV_SETUP} == 1 ]; then + dev_setup + fi + + echo "Setup completed! Run 'pnpm dev:web' or 'pnpm start:web' to get started." 
diff --git a/scripts/system-setup.sh b/scripts/system-setup.sh
new file mode 100755
index 000000000..6f16ec5be
--- /dev/null
+++ b/scripts/system-setup.sh
@@ -0,0 +1,116 @@
+#!/bin/bash
+
+_DEV_SETUP=${DEV_SETUP:=1}
+_CHECK_CARGO=${CHECK_CARGO:=1}
+_CHECK_NODE=${CHECK_NODE:=1}
+_FORCE_INSTALL_PNPM=${INSTALL_PNPM:=0}
+
+dev_setup() {
+	echo "Installing 'cargo-watch'..."
+	echo
+
+	cargo install cargo-watch
+
+	echo "Running 'pnpm run setup'..."
+	echo
+
+	pnpm run setup
+
+	echo
+}
+
+if [ ${_CHECK_CARGO} == 1 ]; then
+	which cargo &> /dev/null
+	if [ $? -ne 0 ]; then
+		echo "Rust could not be found on your system. Visit https://www.rust-lang.org/tools/install"
+		exit 1
+	else
+		echo "Rust requirement met!"
+	fi
+fi
+
+if [ ${_CHECK_NODE} == 1 ]; then
+	which node &> /dev/null
+	if [ $? -eq 1 ]; then
+		echo "Node could not be found on your system. Visit https://nodejs.org/en/download/"
+		exit 1
+	else
+		echo "Node requirement met!"
+	fi
+
+	which pnpm &> /dev/null
+	if [ $? -eq 1 ]; then
+		if [ ${_FORCE_INSTALL_PNPM} == 1 ]; then
+			echo "Installing pnpm..."
+			npm install -g pnpm
+		else
+			echo "pnpm could not be found on your system. Would you like for this script to attempt to install 'pnpm'? (y/n)"
+
+			can_continue=false
+			until [ $can_continue = true ]; do
+				read -p "Choice: " choice
+
+				case $choice in
+					y)
+						echo "Attempting to install 'pnpm'..."
+						npm install -g pnpm
+						if [ $? -eq 0 ]; then
+							echo "pnpm installed successfully."
+							can_continue=true
+						else
+							echo "pnpm could not be installed. Please ensure you have node and npm installed."
+							can_continue=false
+							exit 1
+						fi
+						;;
+					n)
+						echo "Skipping 'pnpm' installation. Exiting."
+						can_continue=false
+						exit 1
+						;;
+					*)
+						echo "Invalid choice. Please enter 'y' or 'n'."
+						can_continue=false
+						;;
+				esac
+
+				echo
+				echo "Would you like for this script to attempt to install 'pnpm'? (y/n)"
+			done
+		fi
+	else
+		echo "pnpm requirement met!"
+	fi
+fi
+
+if [[ "$OSTYPE" == "linux-gnu"* ]]; then
+	if which apt-get &> /dev/null; then
+		sudo apt-get -y update
+		sudo apt-get -y install pkg-config libssl-dev libdbus-1-dev libsoup2.4-dev libwebkit2gtk-4.0-dev curl wget libgtk-3-dev libappindicator3-dev librsvg2-dev build-essential
+	elif which pacman &> /dev/null; then
+		sudo pacman -Syu
+		sudo pacman -S --needed base-devel openssl
+	elif which dnf &> /dev/null; then
+		sudo dnf check-update
+		sudo dnf install "openssl-devel"
+		sudo dnf group install "C Development Tools and Libraries"
+	else
+		echo "Your distro '$(lsb_release -s -d)' is not supported by this script. Please consider adding support for it: https://github.com/aaronleopold/stump/issues"
+		exit 1
+	fi
+
+	if [ ${_DEV_SETUP} == 1 ]; then
+		dev_setup
+	fi
+
+	echo "Setup completed! Run 'pnpm dev:web' or 'pnpm start:web' to get started."
+elif [[ "$OSTYPE" == "darwin"* ]]; then
+	if [ ${_DEV_SETUP} == 1 ]; then
+		dev_setup
+	fi
+
+	echo "Setup completed! Run 'pnpm dev:web' or 'pnpm start:web' to get started."
+else
+	echo "Your OS '$OSTYPE' is not supported by the pre-setup script. Please consider adding support for it: https://github.com/aaronleopold/stump/issues"
+	exit 1
+fi
\ No newline at end of file
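A usage note on `scripts/system-setup.sh`: each stage is driven by the environment variables read at the top of the script, so for example `CHECK_CARGO=0 CHECK_NODE=0 ./scripts/system-setup.sh` skips the toolchain checks, `INSTALL_PNPM=1` installs pnpm without prompting, and `DEV_SETUP=0` skips the `cargo-watch` install and `pnpm run setup` step. On Linux it also installs the distro packages (pkg-config, libssl, webkit2gtk, and friends) that the build pulls in, and exits early on distros without apt-get, pacman, or dnf.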