diff --git a/buildDefaults.mjs b/buildDefaults.mjs new file mode 100644 index 000000000000..e3dd86aad72f --- /dev/null +++ b/buildDefaults.mjs @@ -0,0 +1,73 @@ +import path from 'node:path' + +import * as esbuild from 'esbuild' +import fg from 'fast-glob' +import fs from 'fs-extra' + +export const defaultBuildOptions = { + outdir: 'dist', + + platform: 'node', + target: ['node20'], + + format: 'cjs', + + logLevel: 'info', + + // For visualizing dist. See: + // - https://esbuild.github.io/api/#metafile + // - https://esbuild.github.io/analyze/ + metafile: true, +} + +export const defaultPatterns = ['./src/**/*.{ts,js}'] +export const defaultIgnorePatterns = ['**/__tests__', '**/*.test.{ts,js}'] + +/** + * @typedef {{ + * cwd?: string + * buildOptions?: import('esbuild').BuildOptions + * entryPointOptions?: { + * patterns?: string[] + * ignore?: string[] + * } + * metafileName?: string + * }} BuildOptions + * + * @param {BuildOptions} options + */ +export async function build({ + cwd, + buildOptions, + entryPointOptions, + metafileName, +} = {}) { + // Yarn and Nx both set this to the package's root dir path + cwd ??= process.cwd() + + buildOptions ??= defaultBuildOptions + metafileName ??= 'meta.json' + + // If the user didn't explicitly provide entryPoints, + // then we'll use fg to find all the files in `${cwd}/src` + let entryPoints = buildOptions.entryPoints + + if (!entryPoints) { + const patterns = entryPointOptions?.patterns ?? defaultPatterns + const ignore = entryPointOptions?.ignore ?? defaultIgnorePatterns + + entryPoints = await fg(patterns, { + cwd, + ignore, + }) + } + + const result = await esbuild.build({ + entryPoints, + ...buildOptions, + }) + + await fs.writeJSON(path.join(cwd, metafileName), result.metafile, { + spaces: 2, + }) +} diff --git a/nx.json b/nx.json index 207455780f38..d06435391a24 100644 --- a/nx.json +++ b/nx.json @@ -10,6 +10,7 @@ "!{projectRoot}/**/*.test.{js,jsx,ts,tsx}", "{workspaceRoot}/babel.config.js", "{workspaceRoot}/tsconfig.json", + "{workspaceRoot}/buildDefaults.mjs", { "runtime": "node -v" }, diff --git a/packages/api/src/__tests__/normalizeRequest.test.ts b/packages/api/src/__tests__/normalizeRequest.test.ts index e63299e0cab1..c39e316eceda 100644 --- a/packages/api/src/__tests__/normalizeRequest.test.ts +++ b/packages/api/src/__tests__/normalizeRequest.test.ts @@ -1,10 +1,10 @@ import { Headers } from '@whatwg-node/fetch' import type { APIGatewayProxyEvent } from 'aws-lambda' -import { test, expect } from 'vitest' +import { test, expect, describe } from 'vitest' import { normalizeRequest } from '../transforms' -export const createMockedEvent = ( +export const createMockedLambdaEvent = ( httpMethod = 'POST', body: any = undefined, isBase64Encoded = false @@ -54,41 +54,108 @@ export const createMockedEvent = ( } } -test('Normalizes an aws event with base64', () => { - const corsEventB64 = createMockedEvent( - 'POST', - Buffer.from(JSON.stringify({ bazinga: 'hello_world' }), 'utf8').toString( - 'base64' - ), - true - ) - - expect(normalizeRequest(corsEventB64)).toEqual({ - headers: new Headers(corsEventB64.headers), - method: 'POST', - query: null, - body: { - bazinga: 'hello_world', - }, +describe('Lambda Request', () => { + test('Normalizes an aws event with base64', async () => { + const corsEventB64 = createMockedLambdaEvent( + 'POST', + Buffer.from(JSON.stringify({ bazinga: 'hello_world' }), 'utf8').toString( + 'base64' + ), + true + ) + + expect(await normalizeRequest(corsEventB64)).toEqual({ + headers: new Headers(corsEventB64.headers as 
Record), + method: 'POST', + query: null, + jsonBody: { + bazinga: 'hello_world', + }, + }) + }) + + test('Handles CORS requests with and without b64 encoded', async () => { + const corsEventB64 = createMockedLambdaEvent('OPTIONS', undefined, true) + + expect(await normalizeRequest(corsEventB64)).toEqual({ + headers: new Headers(corsEventB64.headers as Record), // headers returned as symbol + method: 'OPTIONS', + query: null, + jsonBody: {}, + }) + + const corsEventWithoutB64 = createMockedLambdaEvent( + 'OPTIONS', + undefined, + false + ) + + expect(await normalizeRequest(corsEventWithoutB64)).toEqual({ + headers: new Headers(corsEventB64.headers as Record), // headers returned as symbol + method: 'OPTIONS', + query: null, + jsonBody: {}, + }) }) }) -test('Handles CORS requests with and without b64 encoded', () => { - const corsEventB64 = createMockedEvent('OPTIONS', undefined, true) +describe('Fetch API Request', () => { + test('Normalizes a fetch event', async () => { + const fetchEvent = new Request( + 'http://localhost:9210/graphql?whatsup=doc&its=bugs', + { + method: 'POST', + headers: { + 'content-type': 'application/json', + }, + body: JSON.stringify({ bazinga: 'kittens_purr_purr' }), + } + ) - expect(normalizeRequest(corsEventB64)).toEqual({ - headers: new Headers(corsEventB64.headers), // headers returned as symbol - method: 'OPTIONS', - query: null, - body: undefined, + const partial = await normalizeRequest(fetchEvent) + + expect(partial).toMatchObject({ + // headers: fetchEvent.headers, + method: 'POST', + query: { + whatsup: 'doc', + its: 'bugs', + }, + jsonBody: { + bazinga: 'kittens_purr_purr', + }, + }) + + expect(partial.headers.get('content-type')).toEqual('application/json') }) - const corsEventWithoutB64 = createMockedEvent('OPTIONS', undefined, false) + test('Handles an empty body', async () => { + const headers = { + 'content-type': 'application/json', + 'x-custom-header': 'bazinga', + } + + const fetchEvent = new Request( + 'http://localhost:9210/graphql?whatsup=doc&its=bugs', + { + method: 'PUT', + headers, + body: '', + } + ) + + const partial = await normalizeRequest(fetchEvent) + + expect(partial).toMatchObject({ + method: 'PUT', + query: { + whatsup: 'doc', + its: 'bugs', + }, + jsonBody: {}, // @NOTE empty body is {} not undefined + }) - expect(normalizeRequest(corsEventWithoutB64)).toEqual({ - headers: new Headers(corsEventB64.headers), // headers returned as symbol - method: 'OPTIONS', - query: null, - body: undefined, + expect(partial.headers.get('content-type')).toEqual(headers['content-type']) + expect(partial.headers.get('x-custom-header')).toEqual('bazinga') }) }) diff --git a/packages/api/src/auth/index.ts b/packages/api/src/auth/index.ts index ecb189696aa7..77fd6e7789f4 100644 --- a/packages/api/src/auth/index.ts +++ b/packages/api/src/auth/index.ts @@ -2,18 +2,22 @@ export * from './parseJWT' import type { APIGatewayProxyEvent, Context as LambdaContext } from 'aws-lambda' +import { getEventHeader } from '../event' + import type { Decoded } from './parseJWT' export type { Decoded } // This is shared by `@redwoodjs/web` const AUTH_PROVIDER_HEADER = 'auth-provider' -export const getAuthProviderHeader = (event: APIGatewayProxyEvent) => { +export const getAuthProviderHeader = ( + event: APIGatewayProxyEvent | Request +) => { const authProviderKey = Object.keys(event?.headers ?? 
{}).find( (key) => key.toLowerCase() === AUTH_PROVIDER_HEADER ) if (authProviderKey) { - return event?.headers[authProviderKey] + return getEventHeader(event, authProviderKey) } return undefined } @@ -27,11 +31,9 @@ export interface AuthorizationHeader { * Split the `Authorization` header into a schema and token part. */ export const parseAuthorizationHeader = ( - event: APIGatewayProxyEvent + event: APIGatewayProxyEvent | Request ): AuthorizationHeader => { - const parts = ( - event.headers?.authorization || event.headers?.Authorization - )?.split(' ') + const parts = getEventHeader(event, 'authorization')?.split(' ') if (parts?.length !== 2) { throw new Error('The `Authorization` header is not valid.') } @@ -42,16 +44,24 @@ export const parseAuthorizationHeader = ( return { schema, token } } +/** @MARK Note that we do not send LambdaContext when making fetch requests + * + * This part is incomplete, as we need to decide how we will make the breaking change to + * 1. getCurrentUser + * 2. authDecoders + + */ + export type AuthContextPayload = [ Decoded, { type: string } & AuthorizationHeader, - { event: APIGatewayProxyEvent; context: LambdaContext } + { event: APIGatewayProxyEvent | Request; context: LambdaContext } ] export type Decoder = ( token: string, type: string, - req: { event: APIGatewayProxyEvent; context: LambdaContext } + req: { event: APIGatewayProxyEvent | Request; context: LambdaContext } ) => Promise /** @@ -64,7 +74,7 @@ export const getAuthenticationContext = async ({ context, }: { authDecoder?: Decoder | Decoder[] - event: APIGatewayProxyEvent + event: APIGatewayProxyEvent | Request context: LambdaContext }): Promise => { const type = getAuthProviderHeader(event) @@ -89,7 +99,12 @@ export const getAuthenticationContext = async ({ let i = 0 while (!decoded && i < authDecoders.length) { - decoded = await authDecoders[i](token, type, { event, context }) + decoded = await authDecoders[i](token, type, { + // @TODO: We will need to make a breaking change to support `Request` objects. + // We can remove this typecast + event: event, + context, + }) i++ } diff --git a/packages/api/src/cors.ts b/packages/api/src/cors.ts index 47953651c871..7a33d55e0c14 100644 --- a/packages/api/src/cors.ts +++ b/packages/api/src/cors.ts @@ -1,6 +1,6 @@ import { Headers } from '@whatwg-node/fetch' -import type { Request } from './transforms' +import type { PartialRequest } from './transforms' export type CorsConfig = { origin?: boolean | string | string[] @@ -59,10 +59,10 @@ export function createCorsContext(cors: CorsConfig | undefined) { } return { - shouldHandleCors(request: Request) { + shouldHandleCors(request: PartialRequest) { return request.method === 'OPTIONS' }, - getRequestHeaders(request: Request): CorsHeaders { + getRequestHeaders(request: PartialRequest): CorsHeaders { const eventHeaders = new Headers(request.headers as HeadersInit) const requestCorsHeaders = new Headers(corsHeaders) diff --git a/packages/api/src/event.ts b/packages/api/src/event.ts new file mode 100644 index 000000000000..90b7e8fa8bbc --- /dev/null +++ b/packages/api/src/event.ts @@ -0,0 +1,15 @@ +import type { APIGatewayProxyEvent } from 'aws-lambda' + +import { isFetchApiRequest } from './transforms' + +// Extracts the header from an event, handling lower and upper case header names. 
+export const getEventHeader = (
+  event: APIGatewayProxyEvent | Request,
+  headerName: string
+) => {
+  if (isFetchApiRequest(event)) {
+    return event.headers.get(headerName)
+  }
+
+  return event.headers[headerName] || event.headers[headerName.toLowerCase()]
+}
diff --git a/packages/api/src/index.ts b/packages/api/src/index.ts
index 62ef5ec955c6..ce15aef5c7a0 100644
--- a/packages/api/src/index.ts
+++ b/packages/api/src/index.ts
@@ -7,6 +7,7 @@
 export * from './types'
 export * from './transforms'
 export * from './cors'
+export * from './event'
 
 // @NOTE: use require, to avoid messing around with tsconfig and nested output dirs
 const packageJson = require('../package.json')
diff --git a/packages/api/src/transforms.ts b/packages/api/src/transforms.ts
index ec784f1c1859..3fb7eb010f86 100644
--- a/packages/api/src/transforms.ts
+++ b/packages/api/src/transforms.ts
@@ -1,10 +1,11 @@
-import { Headers } from '@whatwg-node/fetch'
+import { Headers, Request as PonyfillRequest } from '@whatwg-node/fetch'
 import type { APIGatewayProxyEvent } from 'aws-lambda'
 
-// This is the same interface used by GraphQL Yoga
-// But not importing here to avoid adding a dependency
-export interface Request {
-  body?: any
+// This is part of the request, derived either from a LambdaEvent or a Fetch API Request
+// We do this to keep the API consistent between the two
+// When we support only the Fetch API Request, we should remove this
+export interface PartialRequest<TBody = Record<string, any>> {
+  jsonBody: TBody
   headers: Headers
   method: string
   query: any
 }
 
 /**
  * Extracts and parses body payload from event with base64 encoding check
  */
-export const parseEventBody = (event: APIGatewayProxyEvent) => {
+export const parseLambdaEventBody = (event: APIGatewayProxyEvent) => {
   if (!event.body) {
-    return
+    return {}
   }
 
   if (event.isBase64Encoded) {
@@ -25,14 +26,78 @@
   }
 }
 
-export function normalizeRequest(event: APIGatewayProxyEvent): Request {
-  const body = parseEventBody(event)
+/**
+ * Extracts and parses body payload from a Fetch Request
+ * with a check for an empty body
+ *
+ * NOTE: whatwg/server expects that you will decode the base64 body yourself
+ * see readme here: https://github.com/ardatan/whatwg-node/tree/master/packages/server#aws-lambda
+ */
+export const parseFetchEventBody = async (event: Request) => {
+  if (!event.body) {
+    return {}
+  }
+
+  const body = await event.text()
+
+  return body ? JSON.parse(body) : {}
+}
+
+export const isFetchApiRequest = (
+  event: Request | APIGatewayProxyEvent
+): event is Request => {
+  if (
+    event.constructor.name === 'Request' ||
+    event.constructor.name === PonyfillRequest.name
+  ) {
+    return true
+  }
+
+  // Also do an extra check on type of headers
+  if (Symbol.iterator in Object(event.headers)) {
+    return true
+  }
+
+  return false
+}
+
+function getQueryStringParams(reqUrl: string) {
+  const url = new URL(reqUrl)
+  const params = new URLSearchParams(url.search)
+
+  const paramObject: Record<string, string> = {}
+  for (const entry of params.entries()) {
+    paramObject[entry[0]] = entry[1] // each 'entry' is a [key, value] tuple
+  }
+  return paramObject
+}
+
+/**
+ *
+ * This function returns an object that lets you access _some_ of the request properties in a consistent way
+ * You can give it either a LambdaEvent or a Fetch API Request
+ *
+ * NOTE: It does NOT return a full Request object!
+ */ +export async function normalizeRequest( + event: APIGatewayProxyEvent | Request +): Promise { + if (isFetchApiRequest(event)) { + return { + headers: event.headers, + method: event.method, + query: getQueryStringParams(event.url), + jsonBody: await parseFetchEventBody(event), + } + } + + const jsonBody = parseLambdaEventBody(event) return { headers: new Headers(event.headers as Record), method: event.httpMethod, query: event.queryStringParameters, - body, + jsonBody, } } diff --git a/packages/auth-providers/dbAuth/api/src/DbAuthHandler.ts b/packages/auth-providers/dbAuth/api/src/DbAuthHandler.ts index 0e54d2c95193..772f49e60ab0 100644 --- a/packages/auth-providers/dbAuth/api/src/DbAuthHandler.ts +++ b/packages/auth-providers/dbAuth/api/src/DbAuthHandler.ts @@ -16,8 +16,17 @@ import base64url from 'base64url' import md5 from 'md5' import { v4 as uuidv4 } from 'uuid' -import type { CorsConfig, CorsContext, CorsHeaders } from '@redwoodjs/api' -import { createCorsContext, normalizeRequest } from '@redwoodjs/api' +import type { + CorsConfig, + CorsContext, + CorsHeaders, + PartialRequest, +} from '@redwoodjs/api' +import { + createCorsContext, + isFetchApiRequest, + normalizeRequest, +} from '@redwoodjs/api' import * as DbAuthError from './errors' import { @@ -28,10 +37,10 @@ import { getSession, hashPassword, legacyHashPassword, - isLegacySession, hashToken, webAuthnSession, extractHashingOptions, + isLegacySession, } from './shared' type SetCookieHeader = { 'set-cookie': string } @@ -279,8 +288,11 @@ type Params = AuthenticationResponseJSON & RegistrationResponseJSON & { username?: string password?: string + resetToken?: string method: AuthMethodNames [key: string]: any + } & { + transports?: string // used by webAuthN for something } interface DbAuthSession { @@ -294,22 +306,32 @@ export class DbAuthHandler< TIdType = any, TUserAttributes = Record > { - event: APIGatewayProxyEvent - context: LambdaContext + event: Request | APIGatewayProxyEvent + _normalizedRequest: PartialRequest | undefined + httpMethod: string options: DbAuthHandlerOptions - cookie: string | undefined - params: Params + cookie: string db: PrismaClient dbAccessor: any dbCredentialAccessor: any allowedUserFields: string[] - headerCsrfToken: string | undefined hasInvalidSession: boolean session: DbAuthSession | undefined sessionCsrfToken: string | undefined corsContext: CorsContext | undefined sessionExpiresDate: string webAuthnExpiresDate: string + encryptedSession: string | null = null + + public get normalizedRequest() { + if (!this._normalizedRequest) { + // This is a dev time error, no need to throw a specialized error + throw new Error( + 'dbAuthHandler has not been initialised. Either await dbAuthHandler.invoke() or call await dbAuth.init()' + ) + } + return this._normalizedRequest + } // class constant: list of auth methods that are supported static get METHODS(): AuthMethodNames[] { @@ -373,29 +395,30 @@ export class DbAuthHandler< 'set-cookie': [ `${cookieName(this.options.cookie?.name)}=`, ...this._cookieAttributes({ expires: 'now' }), + // `auth-provider=`, + // ...this._cookieAttributes({ expires: 'now' }), ].join(';'), } } constructor( - event: APIGatewayProxyEvent, - context: LambdaContext, + event: APIGatewayProxyEvent | Request, + _context: LambdaContext, // @TODO: options: DbAuthHandlerOptions ) { - this.event = event - this.context = context this.options = options - this.cookie = extractCookie(this.event) + this.event = event + this.httpMethod = isFetchApiRequest(event) ? 
event.method : event.httpMethod + + this.cookie = extractCookie(event) || '' this._validateOptions() - this.params = this._parseBody() this.db = this.options.db this.dbAccessor = this.db[this.options.authModelAccessor] this.dbCredentialAccessor = this.options.credentialModelAccessor ? this.db[this.options.credentialModelAccessor] : null - this.headerCsrfToken = this.event.headers['csrf-token'] this.hasInvalidSession = false this.allowedUserFields = this.options.allowedUserFields || DEFAULT_ALLOWED_USER_FIELDS @@ -422,9 +445,9 @@ export class DbAuthHandler< } try { - const [session, csrfToken] = decryptSession( - getSession(this.cookie, this.options.cookie?.name) - ) + this.encryptedSession = getSession(this.cookie, this.options.cookie?.name) + + const [session, csrfToken] = decryptSession(this.encryptedSession) this.session = session this.sessionCsrfToken = csrfToken } catch (e) { @@ -438,15 +461,25 @@ export class DbAuthHandler< } } + // Initialize the request object. This is async now, because body in Fetch Request + // is parsed async + async init() { + if (!this._normalizedRequest) { + this._normalizedRequest = (await normalizeRequest( + this.event + )) as PartialRequest + } + } + // Actual function that triggers everything else to happen: `login`, `signup`, // etc. is called from here, after some checks to make sure the request is good async invoke() { - const request = normalizeRequest(this.event) let corsHeaders = {} + await this.init() if (this.corsContext) { - corsHeaders = this.corsContext.getRequestHeaders(request) + corsHeaders = this.corsContext.getRequestHeaders(this.normalizedRequest) // Return CORS headers for OPTIONS requests - if (this.corsContext.shouldHandleCors(request)) { + if (this.corsContext.shouldHandleCors(this.normalizedRequest)) { return this._buildResponseWithCorsHeaders( { body: '', statusCode: 200 }, corsHeaders @@ -464,7 +497,7 @@ export class DbAuthHandler< } try { - const method = this._getAuthMethod() + const method = await this._getAuthMethod() // get the auth method the incoming request is trying to call if (!DbAuthHandler.METHODS.includes(method)) { @@ -472,7 +505,7 @@ export class DbAuthHandler< } // make sure it's using the correct verb, GET vs POST - if (this.event.httpMethod !== DbAuthHandler.VERBS[method]) { + if (this.httpMethod !== DbAuthHandler.VERBS[method]) { return this._buildResponseWithCorsHeaders(this._notFound(), corsHeaders) } @@ -499,14 +532,15 @@ export class DbAuthHandler< async forgotPassword() { const { enabled = true } = this.options.forgotPassword + if (!enabled) { throw new DbAuthError.FlowNotEnabledError( (this.options.forgotPassword as ForgotPasswordFlowOptions)?.errors ?.flowNotEnabled || `Forgot password flow is not enabled` ) } - const { username } = this.params + const { username } = this.normalizedRequest.jsonBody || {} // was the username sent in at all? 
if (!username || username.trim() === '') { throw new DbAuthError.UsernameRequiredError( @@ -596,13 +630,15 @@ export class DbAuthHandler< async login() { const { enabled = true } = this.options.login + if (!enabled) { throw new DbAuthError.FlowNotEnabledError( (this.options.login as LoginFlowOptions)?.errors?.flowNotEnabled || `Login flow is not enabled` ) } - const { username, password } = this.params + + const { username, password } = this.normalizedRequest.jsonBody || {} const dbUser = await this._verifyUser(username, password) const handlerUser = await (this.options.login as LoginFlowOptions).handler( dbUser @@ -630,7 +666,8 @@ export class DbAuthHandler< ?.flowNotEnabled || `Reset password flow is not enabled` ) } - const { password, resetToken } = this.params + + const { password, resetToken } = this.normalizedRequest.jsonBody || {} // is the resetToken present? if (resetToken == null || String(resetToken).trim() === '') { @@ -704,7 +741,7 @@ export class DbAuthHandler< } // check if password is valid - const { password } = this.params + const { password } = this.normalizedRequest.jsonBody || {} ;(this.options.signup as SignupFlowOptions).passwordValidation?.( password as string ) @@ -724,11 +761,9 @@ export class DbAuthHandler< } async validateResetToken() { + const { resetToken } = this.normalizedRequest.jsonBody || {} // is token present at all? - if ( - this.params.resetToken == null || - String(this.params.resetToken).trim() === '' - ) { + if (!resetToken || String(resetToken).trim() === '') { throw new DbAuthError.ResetTokenRequiredError( ( this.options.resetPassword as ResetPasswordFlowOptions @@ -736,7 +771,7 @@ export class DbAuthHandler< ) } - const user = await this._findUserByToken(this.params.resetToken as string) + const user = await this._findUserByToken(resetToken) return [ JSON.stringify(this._sanitizeUser(user)), @@ -751,12 +786,18 @@ export class DbAuthHandler< const { verifyAuthenticationResponse } = require('@simplewebauthn/server') const webAuthnOptions = this.options.webAuthn + const { rawId } = this.normalizedRequest.jsonBody || {} + + if (!rawId) { + throw new DbAuthError.WebAuthnError('Missing Id in request') + } + if (!webAuthnOptions || !webAuthnOptions.enabled) { throw new DbAuthError.WebAuthnError('WebAuthn is not enabled') } const credential = await this.dbCredentialAccessor.findFirst({ - where: { id: this.params.rawId }, + where: { id: rawId }, }) if (!credential) { @@ -773,7 +814,8 @@ export class DbAuthHandler< let verification: VerifiedAuthenticationResponse try { const opts: VerifyAuthenticationResponseOpts = { - response: this.params, + response: this.normalizedRequest + ?.jsonBody as AuthenticationResponseJSON, // by this point jsonBody has been validated expectedChallenge: user[this.options.authFields.challenge as string], expectedOrigin: webAuthnOptions.origin, expectedRPID: webAuthnOptions.domain, @@ -821,7 +863,7 @@ export class DbAuthHandler< // get the regular `login` cookies const [, loginHeaders] = this._loginResponse(user) const cookies = [ - this._webAuthnCookie(this.params.rawId, this.webAuthnExpiresDate), + this._webAuthnCookie(rawId, this.webAuthnExpiresDate), loginHeaders['set-cookie'], ].flat() @@ -835,6 +877,7 @@ export class DbAuthHandler< if (this.options.webAuthn === undefined || !this.options.webAuthn.enabled) { throw new DbAuthError.WebAuthnError('WebAuthn is not enabled') } + const webAuthnOptions = this.options.webAuthn const credentialId = webAuthnSession(this.event) @@ -950,7 +993,7 @@ export class DbAuthHandler< let 
verification: VerifiedRegistrationResponse try { const options: VerifyRegistrationResponseOpts = { - response: this.params, + response: this.normalizedRequest.jsonBody as RegistrationResponseJSON, // by this point jsonBody has been validated expectedChallenge: user[this.options.authFields.challenge as string], expectedOrigin: this.options.webAuthn.origin, expectedRPID: this.options.webAuthn.domain, @@ -977,6 +1020,7 @@ export class DbAuthHandler< }) if (!existingDevice) { + const { transports } = this.normalizedRequest.jsonBody || {} await this.dbCredentialAccessor.create({ data: { [this.options.webAuthn.credentialFields.id]: plainCredentialId, @@ -984,9 +1028,8 @@ export class DbAuthHandler< user[this.options.authFields.id], [this.options.webAuthn.credentialFields.publicKey]: Buffer.from(credentialPublicKey), - [this.options.webAuthn.credentialFields.transports]: this.params - .transports - ? JSON.stringify(this.params.transports) + [this.options.webAuthn.credentialFields.transports]: transports + ? JSON.stringify(transports) : null, [this.options.webAuthn.credentialFields.counter]: counter, }, @@ -1113,20 +1156,8 @@ export class DbAuthHandler< return sanitized } - // parses the event body into JSON, whether it's base64 encoded or not - _parseBody() { - if (this.event.body) { - if (this.event.isBase64Encoded) { - return JSON.parse( - Buffer.from(this.event.body || '', 'base64').toString('utf-8') - ) - } else { - return JSON.parse(this.event.body) - } - } else { - return {} - } - } + // Converts LambdaEvent or FetchRequest to + _decodeEvent() {} // returns all the cookie attributes in an array with the proper expiration date // @@ -1186,6 +1217,8 @@ export class DbAuthHandler< const cookie = [ `${cookieName(this.options.cookie?.name)}=${encrypted}`, ...this._cookieAttributes({ expires: this.sessionExpiresDate }), + // 'auth-provider=dbAuth', + // ...this._cookieAttributes({ expires: this.sessionExpiresDate }), // TODO need this to be not http-only ].join(';') return { 'set-cookie': cookie } @@ -1193,8 +1226,10 @@ export class DbAuthHandler< // checks the CSRF token in the header against the CSRF token in the session // and throw an error if they are not the same (not used yet) - _validateCsrf() { - if (this.sessionCsrfToken !== this.headerCsrfToken) { + async _validateCsrf() { + if ( + this.sessionCsrfToken !== this.normalizedRequest.headers.get('csrf-token') + ) { throw new DbAuthError.CsrfTokenMismatchError() } return true @@ -1383,7 +1418,8 @@ export class DbAuthHandler< // creates and returns a user, first checking that the username/password // values pass validation async _createUser() { - const { username, password, ...userAttributes } = this.params + const { username, password, ...userAttributes } = + this.normalizedRequest.jsonBody || {} if ( this._validateField('username', username) && this._validateField('password', password) @@ -1418,14 +1454,17 @@ export class DbAuthHandler< } // figure out which auth method we're trying to call - _getAuthMethod() { + async _getAuthMethod() { // try getting it from the query string, /.redwood/functions/auth?method=[methodName] - let methodName = this.event.queryStringParameters?.method as AuthMethodNames + let methodName = this.normalizedRequest.query.method as AuthMethodNames - if (!DbAuthHandler.METHODS.includes(methodName) && this.params) { + if ( + !DbAuthHandler.METHODS.includes(methodName) && + this.normalizedRequest.jsonBody + ) { // try getting it from the body in JSON: { method: [methodName] } try { - methodName = this.params.method + 
methodName = this.normalizedRequest.jsonBody.method } catch (e) { // there's no body, or it's not JSON, `handler` will return a 404 } @@ -1465,6 +1504,8 @@ export class DbAuthHandler< sessionData, { 'csrf-token': csrfToken, + // @TODO We need to have multiple Set-Cookie headers + // Not sure how to do this yet! ...this._createSessionHeader(sessionData, csrfToken), }, { statusCode }, @@ -1485,6 +1526,7 @@ export class DbAuthHandler< _ok(body: string, headers = {}, options = { statusCode: 200 }) { return { statusCode: options.statusCode, + // @TODO should we do a null check in body?! body: typeof body === 'string' ? body : JSON.stringify(body), headers: { 'Content-Type': 'application/json', ...headers }, } diff --git a/packages/auth-providers/dbAuth/api/src/__tests__/DbAuthHandler.fetch.test.js b/packages/auth-providers/dbAuth/api/src/__tests__/DbAuthHandler.fetch.test.js new file mode 100644 index 000000000000..7f78bd11097f --- /dev/null +++ b/packages/auth-providers/dbAuth/api/src/__tests__/DbAuthHandler.fetch.test.js @@ -0,0 +1,3337 @@ +import crypto from 'node:crypto' +import path from 'node:path' + +import { + vi, + describe, + it, + expect, + beforeAll, + afterAll, + beforeEach, + afterEach, +} from 'vitest' + +import { DbAuthHandler } from '../DbAuthHandler' +import * as dbAuthError from '../errors' +import { hashToken } from '../shared' + +// mock prisma db client +const DbMock = class { + constructor(accessors) { + accessors.forEach((accessor) => { + this[accessor] = new TableMock(accessor) + }) + } +} + +// creates a mock database table accessor (db.user) +const TableMock = class { + constructor(accessor) { + this.accessor = accessor + this.records = [] + } + + count() { + return this.records.length + } + + create({ data }) { + if (data.id === undefined) { + data.id = Math.round(Math.random() * 10000000) + } + this.records.push(data) + + return JSON.parse(JSON.stringify(data)) + } + + update({ where, data }) { + let record = this.records.find((r) => r.id === where.id) + const index = this.records.indexOf(record) + const newRecord = Object.assign(record, data) + this.records[index] = newRecord + + return JSON.parse(JSON.stringify(newRecord)) + } + + findFirst({ where }) { + const keys = Object.keys(where) + let matchingRecords = this.records + keys.forEach((key) => { + matchingRecords = matchingRecords.filter( + (record) => record[key] === where[key] + ) + }) + return matchingRecords[0] + } + + findUnique({ where }) { + return this.records.find((record) => { + const key = Object.keys(where)[0] + return record[key] === where[key] + }) + } + + findMany({ where }) { + return this.records.filter((record) => { + const key = Object.keys(where)[0] + return record[key] === where[key] + }) + } + + deleteMany() { + const count = this.records.length + this.records = [] + return count + } +} + +// create a mock `db` provider that simulates prisma creating/finding/deleting records +const db = new DbMock(['user', 'userCredential']) + +const UUID_REGEX = + /\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b/ +const SET_SESSION_REGEX = /^session=[a-zA-Z0-9+=/]|[a-zA-Z0-9+=/]+;/ +const UTC_DATE_REGEX = /\w{3}, \d{2} \w{3} \d{4} [\d:]{8} GMT/ +const LOGOUT_COOKIE = 'session=;Expires=Thu, 01 Jan 1970 00:00:00 GMT' +const SESSION_SECRET = '540d03ebb00b441f8f7442cbc39958ad' +const FIXTURE_PATH = path.resolve( + __dirname, + '../../../../../../__fixtures__/example-todo-main' +) + +beforeAll(() => { + process.env.RWJS_CWD = FIXTURE_PATH +}) + +afterAll(() => { + delete 
process.env.RWJS_CWD +}) + +const createDbUser = async (attributes = {}) => { + return await db.user.create({ + data: { + email: 'rob@redwoodjs.com', + // default hashedPassword is from `node:crypto` + hashedPassword: + '230847bea5154b6c7d281d09593ad1be26fa03a93c04a73bcc2b608c073a8213|16384|8|1', + salt: 'ba8b7807c6de6d6a892ef27f4073c603', + ...attributes, + }, + }) +} + +const expectLoggedOutResponse = (response) => { + expect(response[1]['set-cookie']).toEqual(LOGOUT_COOKIE) +} + +const expectLoggedInResponse = (response) => { + expect(response[1]['set-cookie']).toMatch(SET_SESSION_REGEX) +} + +const encryptToCookie = (data) => { + const iv = crypto.randomBytes(16) + const cipher = crypto.createCipheriv( + 'aes-256-cbc', + SESSION_SECRET.substring(0, 32), + iv + ) + let encryptedSession = cipher.update(data, 'utf-8', 'base64') + encryptedSession += cipher.final('base64') + + return `session=${encryptedSession}|${iv.toString('base64')}` +} + +let req, context, options + +describe('dbAuth', () => { + beforeEach(() => { + // hide deprecation warnings during test + vi.spyOn(console, 'warn').mockImplementation(() => {}) + // encryption key so results are consistent regardless of settings in .env + process.env.SESSION_SECRET = SESSION_SECRET + delete process.env.DBAUTH_COOKIE_DOMAIN + + req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: new Headers(), + }) + + context = {} + + options = { + authModelAccessor: 'user', + credentialModelAccessor: 'userCredential', + authFields: { + id: 'id', + username: 'email', + hashedPassword: 'hashedPassword', + salt: 'salt', + resetToken: 'resetToken', + resetTokenExpiresAt: 'resetTokenExpiresAt', + challenge: 'webAuthnChallenge', + }, + db: db, + excludeUserFields: [], + forgotPassword: { + handler: (user) => user, + expires: 10, + }, + login: { + handler: (user) => user, + errors: { + usernameOrPasswordMissing: 'Both username and password are required', + usernameNotFound: 'Username ${username} not found', + incorrectPassword: 'Incorrect password for ${username}', + }, + expires: 60 * 60, + }, + resetPassword: { + handler: (user) => user, + allowReusedPassword: false, + }, + signup: { + handler: ({ username, hashedPassword, salt, userAttributes }) => { + return db.user.create({ + data: { + email: username, + hashedPassword: hashedPassword, + salt: salt, + name: userAttributes.name, + }, + }) + }, + passwordValidation: (_password) => { + return true + }, + errors: { + fieldMissing: '${field} is required', + usernameTaken: 'Username `${username}` already in use', + }, + }, + webAuthn: { + enabled: true, + expires: 60 * 30, + name: 'Webauthn Test', + domain: 'localhost', + origin: 'http://localhost:8910', + type: 'platform', + timeout: 30000, + credentialFields: { + id: 'id', + userId: 'userId', + publicKey: 'publicKey', + transports: 'transports', + counter: 'counter', + }, + }, + cookie: { + name: 'session', + }, + } + }) + + afterEach(async () => { + vi.spyOn(console, 'warn').mockRestore() + await db.user.deleteMany({ + where: { email: 'rob@redwoodjs.com' }, + }) + await db.userCredential.deleteMany() + }) + + describe('CSRF_TOKEN', () => { + it('returns a UUID', () => { + expect(DbAuthHandler.CSRF_TOKEN).toMatch(UUID_REGEX) + }) + + it('returns a unique UUID after each call', () => { + const first = DbAuthHandler.CSRF_TOKEN + const second = DbAuthHandler.CSRF_TOKEN + + expect(first).not.toMatch(second) + }) + }) + + describe('PAST_EXPIRES_DATE', () => { + it('returns the start of epoch as a UTCString', () => { + 
expect(DbAuthHandler.PAST_EXPIRES_DATE).toEqual( + new Date('1970-01-01T00:00:00.000+00:00').toUTCString() + ) + }) + }) + + describe('dbAccessor', () => { + it('returns the prisma db accessor for a model', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + expect(dbAuth.dbAccessor).toEqual(db.user) + }) + }) + + describe('dbCredentialAccessor', () => { + it('returns the prisma db accessor for a UserCredential model', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + expect(dbAuth.dbCredentialAccessor).toEqual(db.userCredential) + }) + }) + + describe('sessionExpiresDate', () => { + it('returns a date in the future as a UTCString', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const expiresAt = new Date() + expiresAt.setSeconds(expiresAt.getSeconds() + options.login.expires) + + expect(dbAuth.sessionExpiresDate).toEqual(expiresAt.toUTCString()) + }) + }) + + describe('webAuthnExpiresDate', () => { + it('returns a date in the future as a UTCString', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const expiresAt = new Date() + expiresAt.setSeconds(expiresAt.getSeconds() + options.webAuthn.expires) + + expect(dbAuth.webAuthnExpiresDate).toEqual(expiresAt.toUTCString()) + }) + }) + + describe('_deleteSessionHeader', () => { + it('returns a Set-Cookie header to delete the session cookie', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const headers = dbAuth._deleteSessionHeader + + expect(Object.keys(headers).length).toEqual(1) + expect(Object.keys(headers)).toContain('set-cookie') + expect(headers['set-cookie']).toEqual(LOGOUT_COOKIE) + }) + }) + + describe('constructor', () => { + it('initializes some variables with passed values', async () => { + req = { headers: {} } + context = { foo: 'bar' } + options = { + db: db, + forgotPassword: { + handler: () => {}, + }, + login: { + handler: () => {}, + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(dbAuth.event).toEqual(req) + expect(dbAuth.options).toEqual(options) + }) + + it('throws an error if no forgotPassword.handler option', () => { + expect( + () => + new DbAuthHandler(req, context, { + login: { + handler: () => {}, + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + }) + ).toThrow(dbAuthError.NoForgotPasswordHandler) + + expect( + () => + new DbAuthHandler(req, context, { + forgotPassword: {}, + login: { + handler: () => {}, + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + }) + ).toThrow(dbAuthError.NoForgotPasswordHandler) + }) + + it('does not throw an error if no forgotPassword.handler option but forgotPassword.enabled set to false', () => { + expect( + () => + new DbAuthHandler(req, context, { + db: db, + login: { + handler: () => {}, + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + forgotPassword: { + enabled: false, + }, + }) + ).not.toThrow(dbAuthError.NoForgotPasswordHandler) + }) + + it('throws an error if login expiration time is not defined', () => { + // login object doesn't exist at all + expect( + () => + new DbAuthHandler(req, context, { + forgotPassword: { + handler: () => {}, + }, + 
resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + }) + ).toThrow(dbAuthError.NoSessionExpirationError) + // login object exists, but not `expires` key + expect( + () => + new DbAuthHandler(req, context, { + forgotPassword: { + handler: () => {}, + }, + login: { + handler: () => {}, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + }) + ).toThrow(dbAuthError.NoSessionExpirationError) + }) + + it('throws an error if no login.handler option', () => { + expect( + () => + new DbAuthHandler(req, context, { + forgotPassword: { + handler: () => {}, + }, + login: { + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + }) + ).toThrow(dbAuthError.NoLoginHandlerError) + }) + + it('does not throw an error if no login.handler option but login.enabled set to false', () => { + expect( + () => + new DbAuthHandler(req, context, { + login: { + enabled: false, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + handler: () => {}, + }, + forgotPassword: { + handler: () => {}, + }, + }) + ).not.toThrow(dbAuthError.NoLoginHandlerError) + }) + + it('throws an error if no signup.handler option', () => { + expect( + () => + new DbAuthHandler(req, context, { + forgotPassword: { + handler: () => {}, + }, + login: { + handler: () => {}, + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + }) + ).toThrow(dbAuthError.NoSignupHandler) + + expect( + () => + new DbAuthHandler(req, context, { + forgotPassword: { + handler: () => {}, + }, + login: { + handler: () => {}, + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + signup: {}, + }) + ).toThrow(dbAuthError.NoSignupHandler) + }) + + it('does not throw an error if no signup.handler option but signup.enabled set to false', () => { + expect( + () => + new DbAuthHandler(req, context, { + db: db, + login: { + handler: () => {}, + expires: 1, + }, + resetPassword: { + handler: () => {}, + }, + signup: { + enabled: false, + }, + forgotPassword: { + handler: () => {}, + }, + }) + ).not.toThrow(dbAuthError.NoSignupHandler) + }) + + it('parses params from a plain text body', async () => { + req = { headers: {}, body: `{"foo":"bar", "baz":123}` } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + // Need to wait for reqq to be parsed + await dbAuth.init() + + expect(dbAuth.normalizedRequest.jsonBody).toEqual({ + foo: 'bar', + baz: 123, + }) + }) + + it('parses an empty plain text body and still sets params', async () => { + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: '', + }) + + context = { foo: 'bar' } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth.init() + + expect(dbAuth.normalizedRequest.jsonBody).toEqual({}) + }) + + it('parses params from an undefined body', async () => { + req = { + isBase64Encoded: false, + headers: {}, + } + context = { foo: 'bar' } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth.init() + + expect(dbAuth.normalizedRequest.jsonBody).toEqual({}) + }) + + it('sets header-based CSRF token', async () => { + req = { headers: { 'csrf-token': 'qwerty' } } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth.init() + expect(dbAuth.normalizedRequest.headers.get('csrf-token')).toEqual( + 'qwerty' + ) + }) + + it('sets session variables to nothing if session cannot be decrypted', async () => { + req = 
{ headers: { 'csrf-token': 'qwerty' } } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(dbAuth.session).toBeUndefined() + expect(dbAuth.sessionCsrfToken).toBeUndefined() + }) + + it('sets session variables to valid session data', async () => { + req = { + headers: { + cookie: + 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==', + }, + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(dbAuth.session).toEqual({ foo: 'bar' }) + expect(dbAuth.sessionCsrfToken).toEqual('abcd') + }) + + it('throws an error if SESSION_SECRET is not defined', () => { + delete process.env.SESSION_SECRET + + expect(() => new DbAuthHandler(req, context, options)).toThrow( + dbAuthError.NoSessionSecretError + ) + }) + }) + + describe('invoke', () => { + it('returns a logout response if session is not valid', async () => { + // event.body = JSON.stringify({ method: 'logout' }) + // event.httpMethod = 'GET' + // event.headers.cookie = 'session=invalid' + + const myEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: new Headers({ + cookie: 'session=invalid', + }), + body: JSON.stringify({ method: 'logout' }), + }) + + const dbAuth = new DbAuthHandler(myEvent, context, options) + const response = await dbAuth.invoke() + + expect(response.headers['set-cookie']).toEqual(LOGOUT_COOKIE) + }) + + it('returns a 404 if using the wrong HTTP verb', async () => { + const fetchEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'PUT', // wrong verb + headers: new Headers({ + cookie: + 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==', + }), + body: JSON.stringify({ method: 'logout' }), + }) + + const dbAuth = new DbAuthHandler(fetchEvent, context, options) + await dbAuth.init() + const response = await dbAuth.invoke() + + expect(response.statusCode).toEqual(404) + }) + + it('returns a 404 for unsupported method name', async () => { + const fetchEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: new Headers({ + cookie: + 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==', + }), + body: JSON.stringify({ method: 'foobar' }), + }) + const dbAuth = new DbAuthHandler(fetchEvent, context, options) + + const response = await dbAuth.invoke() + + expect(response.statusCode).toEqual(404) + }) + + it('returns a 400 for any other errors', async () => { + const fetchEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: new Headers({ + cookie: + 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==', + }), + body: JSON.stringify({ method: 'logout' }), + }) + + const dbAuth = new DbAuthHandler(fetchEvent, context, options) + await dbAuth.init() + + dbAuth.logout = vi.fn(() => { + throw Error('Logout error') + }) + const response = await dbAuth.invoke() + + expect(response.statusCode).toEqual(400) + expect(response.body).toEqual('{"error":"Logout error"}') + }) + + it('handlers CORS OPTIONS request', async () => { + const fetchEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'OPTIONS', + body: JSON.stringify({ method: 'auth' }), + }) + + const dbAuth = new DbAuthHandler(fetchEvent, context, { + ...options, + cors: { + origin: 'https://www.myRedwoodWebSide.com', + credentials: true, + }, + }) + dbAuth.logout = vi.fn(() => { + throw Error('Logout error') + }) + const response = await dbAuth.invoke() + + 
expect(response.statusCode).toEqual(200) + expect(response.headers['access-control-allow-credentials']).toBe('true') + expect(response.headers['access-control-allow-origin']).toBe( + 'https://www.myRedwoodWebSide.com' + ) + }) + + it('calls the appropriate auth function', async () => { + const fetchEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: JSON.stringify({ method: 'logout' }), + headers: { + cookie: + 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==', + }, + }) + + const dbAuth = new DbAuthHandler(fetchEvent, context, options) + await dbAuth.init() + dbAuth.logout = vi.fn(() => ['body', { foo: 'bar' }]) + const response = await dbAuth.invoke() + + expect(dbAuth.logout).toHaveBeenCalled() + expect(response.statusCode).toEqual(200) + expect(response.body).toEqual('body') + expect(response.headers).toEqual({ + 'Content-Type': 'application/json', + foo: 'bar', + }) + }) + }) + + describe('forgotPassword', () => { + it('throws default error when not enabled', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + + const fetchEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.forgotPassword.enabled = false + const dbAuth = new DbAuthHandler(fetchEvent, context, options) + await dbAuth.init() + + try { + await dbAuth.forgotPassword() + } catch (e) { + expect(e.message).toEqual('Forgot password flow is not enabled') + } + expect.assertions(1) + }) + + it('throws custom error when not enabled and message provided', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + + const fetchEvent = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.forgotPassword.enabled = false + options.forgotPassword.errors = { + ...options.forgotPassword.errors, + flowNotEnabled: 'Custom flow not enabled error', + } + const dbAuth = new DbAuthHandler(fetchEvent, context, options) + await dbAuth.init() + + try { + await dbAuth.forgotPassword() + } catch (e) { + expect(e.message).toEqual('Custom flow not enabled error') + } + expect.assertions(1) + }) + + it('throws an error if username is blank', async () => { + // missing completely + const emptyBodyReq = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: JSON.stringify({}), + }) + + let dbAuth = new DbAuthHandler(emptyBodyReq, context, options) + await dbAuth.init() + + try { + await dbAuth.forgotPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameRequiredError) + } + + // empty string + const emptyStringReq = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: JSON.stringify({ username: ' ' }), + }) + + dbAuth = new DbAuthHandler(emptyStringReq, context, options) + await dbAuth.init() + + try { + await dbAuth.forgotPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameRequiredError) + } + + expect.assertions(2) + }) + + it('throws an error if username is not found', async () => { + // missing completely + const body = JSON.stringify({ + username: 'notfound', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.forgotPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameNotFoundError) + } + + expect.assertions(1) + }) + + it('sets 
the resetToken and resetTokenExpiresAt on the user', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: user.email, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(user.resetToken).toEqual(undefined) + expect(user.resetTokenExpiresAt).toEqual(undefined) + + const response = await dbAuth.forgotPassword() + const responseBody = JSON.parse(response[0]) + const resetUser = await db.user.findUnique({ + where: { id: user.id }, + }) + + expect(resetUser.resetToken).not.toEqual(undefined) + // Should be a 64 character hex string for a 256 bit token hash (sha256) + expect(resetUser.resetToken).toMatch(/^\w{64}$/) + expect(resetUser.resetTokenExpiresAt instanceof Date).toEqual(true) + + // response contains data returned from the handler + expect(responseBody.id).toEqual(resetUser.id) + expect(responseBody.email).toEqual(resetUser.email) + + // response data should not include sensitive info + expect(responseBody.resetToken).toBeUndefined() + expect(responseBody.resetTokenExpiresAt).toBeUndefined() + expect(responseBody.hashedPassword).toBeUndefined() + expect(responseBody.salt).toBeUndefined() + }) + + it('returns a logout session cookie', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: user.email, + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.forgotPassword() + + expectLoggedOutResponse(response) + }) + + it('invokes forgotPassword.handler() with the user', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: user.email, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.forgotPassword.handler = (handlerUser) => { + expect(handlerUser.id).toEqual(user.id) + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth.forgotPassword() + expect.assertions(1) + }) + + it('invokes forgotPassword.handler() with the raw resetToken', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: user.email, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.forgotPassword.handler = (handlerUser, token) => { + // tokens should be the raw resetToken NOT the hash + // resetToken consists of 16 base64 characters + expect(handlerUser.resetToken).toBeUndefined() + expect(token).toMatch(/^[A-Za-z0-9/+]{16}$/) + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth.forgotPassword() + expect.assertions(2) + }) + + it('removes the token from the forgotPassword response', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: user.email, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.forgotPassword.handler = (handlerUser) => { + return handlerUser + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.forgotPassword() + const jsonResponse = JSON.parse(response[0]) + + expect(jsonResponse.resetToken).toBeUndefined() + expect(jsonResponse.resetTokenExpiresAt).toBeUndefined() + }) + + it('throws a generic 
error for an invalid client', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: user.email, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + // invalid db client + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + dbAuth.dbAccessor = undefined + + try { + await dbAuth.forgotPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.GenericError) + } + + expect.assertions(1) + }) + }) + + describe('login', () => { + it('throws default error when not enabled', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.login.enabled = false + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.login() + } catch (e) { + expect(e.message).toEqual('Login flow is not enabled') + } + expect.assertions(1) + }) + + it('throws custom error when not enabled and message provided', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.login.enabled = false + options.login.errors = { + ...options.signup.errors, + flowNotEnabled: 'Custom flow not enabled error', + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.login() + } catch (e) { + expect(e.message).toEqual('Custom flow not enabled error') + } + expect.assertions(1) + }) + it('throws an error if username is not found', async () => { + delete options.signup.usernameMatch + delete options.login.usernameMatch + + await createDbUser() + const body = JSON.stringify({ + username: 'missing@redwoodjs.com', + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.login() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UserNotFoundError) + } + + expect.assertions(1) + }) + + it('throws an error if password is wrong', async () => { + await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'incorrect', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.login() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.IncorrectPasswordError) + } + + expect.assertions(1) + }) + + it('throws an error if login.handler throws', async () => { + const _user = await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + options.login.handler = () => { + throw new Error('Cannot log in') + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.login() + } catch (e) { + expect(e).toBeInstanceOf(Error) + } + + expect.assertions(1) + }) + + it('passes the found user to login.handler', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + 
options.login.handler = () => { + expect(user).toEqual(user) + return user + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth.login() + }) + + it('throws an error if login.handler returns null', async () => { + const _user = await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + options.login.handler = () => { + return null + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + try { + await dbAuth.login() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.NoUserIdError) + } + + expect.assertions(1) + }) + + it('throws an error if login.handler returns an object without an id', async () => { + const _user = await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + options.login.handler = () => { + return { name: 'Rob' } + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + try { + await dbAuth.login() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.NoUserIdError) + } + expect.assertions(1) + }) + + it('returns a JSON body of the user that is logged in', async () => { + const user = await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + const response = await dbAuth.login() + + expect(response[0].id).toEqual(user.id) + }) + + it('returns a CSRF token in the header', async () => { + await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + const response = await dbAuth.login() + expect(response[1]['csrf-token']).toMatch(UUID_REGEX) + }) + + it('returns a set-cookie header to create session', async () => { + await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + const response = await dbAuth.login() + + expect(response[1]['csrf-token']).toMatch(UUID_REGEX) + }) + + it('returns a CSRF token in the header', async () => { + await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + const response = await dbAuth.login() + + expectLoggedInResponse(response) + }) + + it('login db check is called with insensitive string when user has provided one in LoginFlowOptions', async () => { + vi.clearAllMocks() + const spy = vi.spyOn(db.user, 'findFirst') + + options.signup.usernameMatch = 'insensitive' + options.login.usernameMatch = 'insensitive' + + await createDbUser() + const body = 
JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.login() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UserNotFoundError) + } + + return expect(spy).toHaveBeenCalledWith({ + where: { + email: expect.objectContaining({ mode: 'insensitive' }), + }, + }) + }) + + it('login db check is not called with insensitive string when user has not provided one in LoginFlowOptions', async () => { + vi.clearAllMocks() + const spy = vi.spyOn(db.user, 'findFirst') + + delete options.signup.usernameMatch + delete options.login.usernameMatch + + await createDbUser() + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await dbAuth.login() + + return expect(spy).not.toHaveBeenCalledWith({ + where: { + email: expect.objectContaining({ mode: 'insensitive' }), + }, + }) + }) + }) + + describe('logout', () => { + it('returns set-cookie header for removing session', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = dbAuth.logout() + + expectLoggedOutResponse(response) + }) + }) + + describe('resetPassword', () => { + it('throws default error when not enabled', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.resetPassword.enabled = false + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e.message).toEqual('Reset password flow is not enabled') + } + expect.assertions(1) + }) + + it('throws custom error when not enabled and message provided', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.resetPassword.enabled = false + options.resetPassword.errors = { + ...options.signup.errors, + flowNotEnabled: 'Custom flow not enabled error', + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e.message).toEqual('Custom flow not enabled error') + } + expect.assertions(1) + }) + it('throws an error if resetToken is blank', async () => { + // missing completely + const emptyBody = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: JSON.stringify({}), + }) + + let dbAuth = new DbAuthHandler(emptyBody, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.ResetTokenRequiredError) + } + + // empty string + const emptyString = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: JSON.stringify({ resetToken: ' ' }), + }) + dbAuth = new DbAuthHandler(emptyString, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.ResetTokenRequiredError) + } + + expect.assertions(2) + }) + + 
it('throws an error if password is blank', async () => { + // missing completely + const noPwd = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: JSON.stringify({ resetToken: '1234' }), + }) + let dbAuth = new DbAuthHandler(noPwd, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.PasswordRequiredError) + } + + // empty string + const pwdEmpty = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: JSON.stringify({ resetToken: '1234', password: ' ' }), + }) + dbAuth = new DbAuthHandler(pwdEmpty, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.PasswordRequiredError) + } + + expect.assertions(2) + }) + + it('throws an error if no user found with resetToken', async () => { + const body = JSON.stringify({ resetToken: '1234', password: 'password' }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.ResetTokenInvalidError) + } + expect.assertions(1) + }) + + it('throws an error if resetToken is expired', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires - 1 + ) + await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + + const body = JSON.stringify({ resetToken: '1234', password: 'password1' }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.ResetTokenExpiredError) + } + expect.assertions(1) + }) + + it('clears out resetToken and resetTokenExpiresAt if expired', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires - 1 + ) + const user = await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + + const body = JSON.stringify({ + resetToken: '1234', + password: 'password1', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.resetPassword() + } catch (e) { + const expiredUser = await db.user.findUnique({ + where: { id: user.id }, + }) + expect(expiredUser.resetToken).toEqual(null) + expect(expiredUser.resetTokenExpiresAt).toEqual(null) + } + expect.assertions(2) + }) + + it('throws allowReusedPassword is false and new password is same as old', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires + 1 + ) + await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + + const body = JSON.stringify({ + resetToken: '1234', + password: 'password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + options.resetPassword.allowReusedPassword = false + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await expect(dbAuth.resetPassword()).rejects.toThrow( + 
dbAuthError.ReusedPasswordError + ) + }) + + it('does not throw if allowReusedPassword is true and new password is same as old', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires + 1 + ) + await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + + const body = JSON.stringify({ + resetToken: '1234', + password: 'password', + }) + options.resetPassword.allowReusedPassword = true + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await expect(dbAuth.resetPassword()).resolves.not.toThrow() + }) + + it('updates the users password', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires + 1 + ) + const user = await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + const body = JSON.stringify({ + resetToken: '1234', + password: 'new-password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await expect(dbAuth.resetPassword()).resolves.not.toThrow() + + const updatedUser = await db.user.findUnique({ + where: { id: user.id }, + }) + + expect(updatedUser.hashedPassword).not.toEqual(user.hashedPassword) + // should not change salt + expect(updatedUser.salt).toEqual(user.salt) + }) + + it('clears resetToken and resetTokenExpiresAt', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires + 1 + ) + const user = await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + const body = JSON.stringify({ + resetToken: '1234', + password: 'new-password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await expect(dbAuth.resetPassword()).resolves.not.toThrow() + + const updatedUser = await db.user.findUnique({ + where: { id: user.id }, + }) + + expect(updatedUser.resetToken).toEqual(null) + expect(updatedUser.resetTokenExpiresAt).toEqual(null) + }) + + it('invokes resetPassword.handler() with the user', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires + 1 + ) + const user = await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + const body = JSON.stringify({ + resetToken: '1234', + password: 'new-password', + }) + options.resetPassword.handler = (handlerUser) => { + expect(handlerUser.id).toEqual(user.id) + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + let dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await dbAuth.resetPassword() + expect.assertions(1) + }) + + it('returns a logout response if handler returns falsy', async () => { + const tokenExpires = new Date() + tokenExpires.setSeconds( + tokenExpires.getSeconds() - options.forgotPassword.expires + 1 + ) + await createDbUser({ + resetToken: hashToken('1234'), + resetTokenExpiresAt: tokenExpires, + }) + const body = JSON.stringify({ + resetToken: '1234', + password: 'new-password', + }) + 
+      options.resetPassword.handler = () => false
+      const req = new Request('http://localhost:8910/_rw_mw', {
+        method: 'POST',
+        body,
+      })
+
+      let dbAuth = new DbAuthHandler(req, context, options)
+      await dbAuth.init()
+
+      const response = await dbAuth.resetPassword()
+
+      expectLoggedOutResponse(response)
+    })
+
+    it('returns a login response if handler returns truthy', async () => {
+      const tokenExpires = new Date()
+      tokenExpires.setSeconds(
+        tokenExpires.getSeconds() - options.forgotPassword.expires + 1
+      )
+      await createDbUser({
+        resetToken: hashToken('1234'),
+        resetTokenExpiresAt: tokenExpires,
+      })
+      const body = JSON.stringify({
+        resetToken: '1234',
+        password: 'new-password',
+      })
+      options.resetPassword.handler = () => true
+      const req = new Request('http://localhost:8910/_rw_mw', {
+        method: 'POST',
+        body,
+      })
+
+      let dbAuth = new DbAuthHandler(req, context, options)
+      await dbAuth.init()
+
+      const response = await dbAuth.resetPassword()
+
+      expectLoggedInResponse(response)
+    })
+  })
+
+  describe('signup', () => {
+    it('bubbles up any error that is raised', async () => {
+      const body = JSON.stringify({
+        username: 'rob@redwoodjs.com',
+        password: 'password',
+        name: 'Rob',
+      })
+      options.signup.handler = () => {
+        throw Error('Cannot signup')
+      }
+      const req = new Request('http://localhost:8910/_rw_mw', {
+        method: 'POST',
+        body,
+      })
+
+      const dbAuth = new DbAuthHandler(req, context, options)
+      await dbAuth.init()
+
+      expect.assertions(1)
+      await expect(dbAuth.signup()).rejects.toThrow('Cannot signup')
+    })
+
+    it('throws default error when not enabled', async () => {
+      const body = JSON.stringify({
+        username: 'rob@redwoodjs.com',
+        password: 'password',
+        name: 'Rob',
+      })
+      options.signup.enabled = false
+      const req = new Request('http://localhost:8910/_rw_mw', {
+        method: 'POST',
+        body,
+      })
+
+      const dbAuth = new DbAuthHandler(req, context, options)
+      await dbAuth.init()
+
+      try {
+        await dbAuth.signup()
+      } catch (e) {
+        expect(e.message).toEqual('Signup flow is not enabled')
+      }
+      expect.assertions(1)
+    })
+
+    it('throws custom error when not enabled and message provided', async () => {
+      const body = JSON.stringify({
+        username: 'rob@redwoodjs.com',
+        password: 'password',
+        name: 'Rob',
+      })
+      options.signup.enabled = false
+      options.signup.errors = {
+        ...options.signup.errors,
+        flowNotEnabled: 'Custom flow not enabled error',
+      }
+      const req = new Request('http://localhost:8910/_rw_mw', {
+        method: 'POST',
+        body,
+      })
+
+      const dbAuth = new DbAuthHandler(req, context, options)
+      await dbAuth.init()
+
+      try {
+        await dbAuth.signup()
+      } catch (e) {
+        expect(e.message).toEqual('Custom flow not enabled error')
+      }
+      expect.assertions(1)
+    })
+
+    it('throws password validation error if password invalid', async () => {
+      const body = JSON.stringify({
+        username: 'rob@redwoodjs.com',
+        password: 'pass',
+        name: 'Rob',
+      })
+      options.signup.passwordValidation = (password) => {
+        if (password.length < 8) {
+          throw new dbAuthError.PasswordValidationError('Password too short')
+        }
+      }
+      const req = new Request('http://localhost:8910/_rw_mw', {
+        method: 'POST',
+        body,
+      })
+
+      const dbAuth = new DbAuthHandler(req, context, options)
+      await dbAuth.init()
+
+      try {
+        await dbAuth.signup()
+      } catch (e) {
+        expect(e.message).toEqual('Password too short')
+      }
+      expect.assertions(1)
+    })
+
+    it('throws no error if password valid', async () => {
+      const body = JSON.stringify({
+        username: 'rob@redwoodjs.com',
+        password: 'password',
+        name: 'Rob',
+      })
options.signup.passwordValidation = (password) => { + if (password.length < 8) { + throw new dbAuthError.PasswordValidationError('Password too short') + } + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(() => dbAuth.signup()).not.toThrow() + }) + + it('throws no error if passwordValidation function is undefined', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + delete options.signup.passwordValidation + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(() => dbAuth.signup()).not.toThrow() + }) + + it('creates a new user and logs them in', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + const oldUserCount = await db.user.count() + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.signup() + const newUserCount = await db.user.count() + + expect(newUserCount).toEqual(oldUserCount + 1) + // returns the user's ID + expect(response[0].id).not.toBeNull() + // logs them in + expectLoggedInResponse(response) + // 201 Created + expect(response[2].statusCode).toEqual(201) + }) + + it('returns a message if a string is returned and does not log in', async () => { + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + options.signup.handler = () => { + return 'Hello, world' + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + const response = await dbAuth.signup() + + // returns message + expect(response[0]).toEqual('{"message":"Hello, world"}') + // does not log them in + expect(response[1]['set-cookie']).toBeUndefined() + // 201 Created + expect(response[2].statusCode).toEqual(201) + }) + }) + + describe('getToken', () => { + it('returns the ID of the logged in user', async () => { + const user = await createDbUser() + const cookie = encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ) + + const headers = { + cookie, + } + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + req.headers.get('cookie') + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.getToken() + + expect(response[0]).toEqual(user.id) + }) + + it('returns nothing if user is not logged in', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.getToken() + + expect(response[0]).toEqual('') + }) + + it('returns any other error', async () => { + req = { + headers: { + cookie: encryptToCookie( + JSON.stringify({ id: 9999999999 }) + ';' + 'token' + ), + }, + } + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.getToken() + + expect(response[0]).toEqual('{"error":"User not found"}') + }) + + it('re-encrypts the session cookie if using the legacy algorithm', async () => { + await createDbUser({ id: 7 }) + req = { + headers: { + // legacy 
session with { id: 7 } for userID + cookie: 'session=U2FsdGVkX1+s7seQJnVgGgInxuXm13l8VvzA3Mg2fYg=', + }, + } + process.env.SESSION_SECRET = + 'QKxN2vFSHAf94XYynK8LUALfDuDSdFowG6evfkFX8uszh4YZqhTiqEdshrhWbwbw' + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const [userId, headers] = await dbAuth.getToken() + + expect(userId).toEqual(7) + expect(headers['set-cookie']).toMatch(SET_SESSION_REGEX) + + // set session back to default + process.env.SESSION_SECRET = SESSION_SECRET + }) + }) + + describe('When a developer has set GraphiQL headers to mock a session cookie', () => { + describe('when in development environment', () => { + const curNodeEnv = process.env.NODE_ENV + + beforeAll(() => { + // Session cookie from graphiQLHeaders only extracted in dev + process.env.NODE_ENV = 'development' + }) + + afterAll(() => { + process.env.NODE_ENV = curNodeEnv + expect(process.env.NODE_ENV).toBe('test') + }) + + it('authenticates the user based on GraphiQL impersonated headers when no cookie present', async () => { + // Making Fetch API Requests with GraphiQL Headers in the body does not work because it's async + // but we can set the new 'rw-studio-impersonation-cookie' header + const dbUser = await createDbUser() + const headers = { + 'auth-provider': 'dbAuth', + 'rw-studio-impersonation-cookie': encryptToCookie( + JSON.stringify({ id: dbUser.id }) + ), + } + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const user = await dbAuth._getCurrentUser() + expect(user.id).toEqual(dbUser.id) + }) + + it('Cookie from GraphiQLHeaders takes precedence over real headers when authenticating user', async () => { + // setup session cookie in GraphiQL header + const dbUser = await createDbUser() + const dbUserId = dbUser.id + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: { + 'auth-provider': 'dbAuth', + authorization: 'Bearer ' + dbUserId, + cookie: encryptToCookie(JSON.stringify({ id: 9999999999 })), // The "real" cookie with an invalid userId + // 👇 The impersonated header takes precendence + 'rw-studio-impersonation-cookie': encryptToCookie( + JSON.stringify({ id: dbUser.id }) + ), + }, + }) + + // should read session from graphiQL header, not from cookie + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const user = await dbAuth._getCurrentUser() + expect(user.id).toEqual(dbUserId) + }) + }) + + describe('when in test/production environment and graphiqlHeader sets a session cookie', () => { + it("isn't used to authenticate a user", async () => { + const dbUser = await createDbUser() + const dbUserId = dbUser.id + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: { + 'auth-provider': 'dbAuth', + 'rw-studio-impersonation-cookie': encryptToCookie( + JSON.stringify({ id: dbUserId }) + ), + authorization: 'Bearer ' + dbUserId, + }, + }) + + try { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth._getCurrentUser() + } catch (e) { + expect(e.message).toEqual( + 'Cannot retrieve user details without being logged in' + ) + } + }) + }) + }) + + describe('webAuthnAuthenticate', () => { + it('throws an error if WebAuthn options are not defined', async () => { + req = { + headers: {}, + } + options.webAuthn = undefined + + try { + const _dbAuth = new DbAuthHandler(req, context, options) + } catch (e) { + 
expect(e).toBeInstanceOf(dbAuthError.NoWebAuthnConfigError) + } + expect.assertions(1) + }) + + it('throws an error if WebAuthn is disabled', async () => { + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: {}, + }) + + options.webAuthn.enabled = false + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect.assertions(1) + await expect(dbAuth.webAuthnAuthenticate()).rejects.toThrow( + dbAuthError.WebAuthnError + ) + }) + + it('throws an error if UserCredential is not found in database', async () => { + const headers = { 'Content-Type': 'application/json' } + const body = + '{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}' + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + headers, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect.assertions(1) + await expect(dbAuth.webAuthnAuthenticate()).rejects.toThrow( + 'Credentials not found' + ) + }) + + it('throws an error if signature verification fails', async () => { + const user = await createDbUser({ + webAuthnChallenge: 'QGdAFmPB711UDnEelZm-OHkLs1UwX6yebPI_jLoSVo', + }) + await db.userCredential.create({ + data: { + id: 'CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA', + userId: user.id, + transports: null, + publicKey: 'foobar', + }, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: '{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}', + headers: { 'Content-Type': 'application/json' }, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect.assertions(1) + await expect(dbAuth.webAuthnAuthenticate()).rejects.toThrow( + 'Unexpected authentication response challenge' + ) + }) + + it('sets challenge in database to null', async () => { + const user = await createDbUser({ + webAuthnChallenge: 'GdAFmPB711UDnEelZm-OHkLs1UwX6yebPI_jLoSVo', + }) + await db.userCredential.create({ + data: { + id: 'CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA', + userId: user.id, + transports: null, + 
publicKey: 'foobar', + }, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: '{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}', + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect.assertions(1) + try { + await dbAuth.webAuthnAuthenticate() + } catch (e) { + const savedUser = await db.user.findFirst({ where: { id: user.id } }) + expect(savedUser.webAuthnChallenge).toEqual(null) + } + }) + + it('sets a webAuthn cookie if valid authentication', async () => { + const user = await createDbUser({ + webAuthnChallenge: 'LtgWphYK_eN5rXc_HdvULvOqpPWyoRvbml2Po00UHag', + }) + await db.userCredential.create({ + data: { + id: 'CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA', + userId: user.id, + publicKey: Buffer.from([ + 165, 1, 2, 3, 38, 32, 1, 33, 88, 32, 24, 136, 169, 77, 11, 126, 129, + 202, 3, 60, 234, 86, 233, 152, 222, 252, 11, 253, 11, 79, 163, 89, + 189, 145, 216, 240, 102, 92, 146, 75, 249, 207, 34, 88, 32, 187, + 235, 12, 104, 222, 236, 198, 241, 195, 234, 111, 64, 60, 86, 40, + 254, 118, 163, 27, 172, 76, 173, 16, 120, 238, 20, 235, 98, 67, 103, + 109, 240, + ]), + transports: null, + counter: 0, + }, + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: '{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}', + headers: { 'Content-Type': 'application/json' }, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + const [body, headers] = await dbAuth.webAuthnAuthenticate() + + expect(body).toEqual(false) + expect(headers['set-cookie'][0]).toMatch( + 'webAuthn=CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA' + ) + }) + }) + + describe('webAuthnAuthOptions', () => { + it('throws an error if user is not logged in', async () => { + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: {}, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.webAuthnAuthOptions() + } catch (e) { + 
expect(e instanceof dbAuthError.NotLoggedInError).toEqual(true) + } + expect.assertions(1) + }) + + it('throws an error if WebAuthn is disabled', async () => { + req = { + headers: {}, + } + options.webAuthn.enabled = false + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.webAuthnAuthOptions() + } catch (e) { + expect(e instanceof dbAuthError.WebAuthnError).toEqual(true) + } + expect.assertions(1) + }) + + it('returns options needed for webAuthn registration', async () => { + const user = await createDbUser() + req = { + headers: { + cookie: encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ), + }, + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.webAuthnAuthOptions() + const regOptions = response[0] + + expect(regOptions.allowCredentials).toEqual([]) + expect(regOptions.challenge).not.toBeUndefined() + expect(regOptions.rpId).toEqual(options.webAuthn.domain) + expect(regOptions.timeout).toEqual(options.webAuthn.timeout) + }) + + it('includes existing devices', async () => { + const user = await createDbUser() + const credential = await db.userCredential.create({ + data: { + id: 'qwertyuiog', + userId: user.id, + transports: null, + }, + }) + + req = { + headers: { + cookie: encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ), + }, + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.webAuthnAuthOptions() + const regOptions = response[0] + + expect(regOptions.allowCredentials[0].id).toEqual(credential.id) + expect(regOptions.allowCredentials[0].transports).toEqual([ + 'usb', + 'ble', + 'nfc', + 'internal', + ]) + expect(regOptions.allowCredentials[0].type).toEqual('public-key') + }) + }) + + describe('webAuthnRegOptions', () => { + it('throws an error if user is not logged in', async () => { + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: {}, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.webAuthnRegOptions() + } catch (e) { + expect(e instanceof dbAuthError.NotLoggedInError).toEqual(true) + } + expect.assertions(1) + }) + + it('throws an error if WebAuthn is disabled', async () => { + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: {}, + }) + + options.webAuthn.enabled = false + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth.webAuthnRegOptions() + } catch (e) { + expect(e instanceof dbAuthError.WebAuthnError).toEqual(true) + } + expect.assertions(1) + }) + + it('returns options needed for webAuthn registration', async () => { + const user = await createDbUser() + const headers = { + cookie: encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ), + } + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.webAuthnRegOptions() + const regOptions = response[0] + + expect(regOptions.attestation).toEqual('none') + expect(regOptions.authenticatorSelection.authenticatorAttachment).toEqual( + options.webAuthn.type + ) + expect(regOptions.excludeCredentials).toEqual([]) + expect(regOptions.rp.name).toEqual(options.webAuthn.name) + expect(regOptions.rp.id).toEqual(options.webAuthn.domain) + 
expect(regOptions.timeout).toEqual(options.webAuthn.timeout) + expect(regOptions.user.id).toEqual(user.id) + expect(regOptions.user.displayName).toEqual(user.email) + expect(regOptions.user.name).toEqual(user.email) + }) + + it('defaults timeout if not set', async () => { + const user = await createDbUser() + const headers = { + cookie: encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ), + } + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + options.webAuthn.timeout = null + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.webAuthnRegOptions() + + expect(response[0].timeout).toEqual(60000) + }) + + it('saves the generated challenge to the user record', async () => { + let user = await createDbUser() + const headers = { + cookie: encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ), + } + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = await dbAuth.webAuthnRegOptions() + user = await db.user.findFirst({ where: { id: user.id } }) + + expect(user.webAuthnChallenge).toEqual(response[0].challenge) + }) + }) + + describe('webAuthnRegister', () => { + it('saves a credential record to the database', async () => { + const user = await createDbUser({ + webAuthnChallenge: 'HuGPrQqK7f53NLwMZMst_DL9Dig2BBivDYWWpawIPVM', + }) + req = { + headers: { + 'Content-Type': 'application/json', + cookie: encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ), + }, + body: '{"method":"webAuthnRegister","id":"GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg","rawId":"GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg","response":{"attestationObject":"o2NmbXRkbm9uZWdhdHRTdG10oGhhdXRoRGF0YVisSZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2NFAAAAAK3OAAI1vMYKZIsLJfHwVQMAKBqo2TrmGKaTmwQ3lZJ263AS5GmvYpkuRCScLQle-NGrFM9uLHQJhhalAQIDJiABIVggGIipTQt-gcoDPOpW6Zje_Av9C0-jWb2R2PBmXJJL-c8iWCC76wxo3uzG8cPqb0A8Vij-dqMbrEytEHjuFOtiQ2dt8A","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uY3JlYXRlIiwiY2hhbGxlbmdlIjoiSHVHUHJRcUs3ZjUzTkx3TVpNc3RfREw5RGlnMkJCaXZEWVdXcGF3SVBWTSIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9"},"type":"public-key","clientExtensionResults":{},"transports":["internal"]}', + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await dbAuth.webAuthnRegister() + + const credential = db.userCredential.findFirst({ + where: { userId: user.id }, + }) + + expect(credential.id).toEqual( + 'GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg' + ) + expect(credential.transports).toEqual('["internal"]') + expect(credential.counter).toEqual(0) + }) + + it('works if event body is base64 encoded', async () => { + const user = await createDbUser({ + webAuthnChallenge: 'HuGPrQqK7f53NLwMZMst_DL9Dig2BBivDYWWpawIPVM', + }) + req = { + headers: { + 'Content-Type': 'application/json', + cookie: encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ), + }, + body: Buffer.from( + 
`{"method":"webAuthnRegister","id":"GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg","rawId":"GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg","response":{"attestationObject":"o2NmbXRkbm9uZWdhdHRTdG10oGhhdXRoRGF0YVisSZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2NFAAAAAK3OAAI1vMYKZIsLJfHwVQMAKBqo2TrmGKaTmwQ3lZJ263AS5GmvYpkuRCScLQle-NGrFM9uLHQJhhalAQIDJiABIVggGIipTQt-gcoDPOpW6Zje_Av9C0-jWb2R2PBmXJJL-c8iWCC76wxo3uzG8cPqb0A8Vij-dqMbrEytEHjuFOtiQ2dt8A","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uY3JlYXRlIiwiY2hhbGxlbmdlIjoiSHVHUHJRcUs3ZjUzTkx3TVpNc3RfREw5RGlnMkJCaXZEWVdXcGF3SVBWTSIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9"},"type":"public-key","clientExtensionResults":{},"transports":["internal"]}`, + 'utf8' + ), + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await dbAuth.webAuthnRegister() + + const credential = db.userCredential.findFirst({ + where: { userId: user.id }, + }) + + expect(credential.id).toEqual( + 'GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg' + ) + }) + }) + + describe('_validateOptions', () => { + it('throws an error if credentialModelAccessor is defined but not webAuthn options', () => { + delete options.webAuthn + try { + const _instance = new DbAuthHandler({ headers: {} }, context, options) + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.NoWebAuthnConfigError) + } + expect.assertions(1) + }) + + it('throws an error if credentialModelAccessor is undefined but webAuthn options exist', () => { + delete options.credentialModelAccessor + try { + const _instance = new DbAuthHandler({ headers: {} }, context, options) + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.NoWebAuthnConfigError) + } + expect.assertions(1) + }) + }) + + describe('_webAuthnCookie', () => { + it('returns the parts needed for the webAuthn cookie, defaulted to future expire', () => { + const dbAuth = new DbAuthHandler({ headers: {} }, context, options) + + expect(dbAuth._webAuthnCookie('1234')).toMatch('webAuthn=1234;Expires=') + }) + + it('returns the parts needed for the expire the webAuthn cookie', () => { + const dbAuth = new DbAuthHandler({ headers: {} }, context, options) + + expect(dbAuth._webAuthnCookie('1234', 'now')).toMatch( + 'webAuthn=1234;Expires=Thu, 01 Jan 1970 00:00:00 GMT' + ) + }) + }) + + describe('_cookieAttributes', () => { + it('returns an array of attributes for the session cookie', () => { + const dbAuth = new DbAuthHandler( + { headers: { referer: 'http://test.host' } }, + context, + { + ...options, + cookie: { + attributes: { + Path: '/', + HttpOnly: true, + SameSite: 'Strict', + Secure: true, + Domain: 'example.com', + }, + }, + } + ) + const attributes = dbAuth._cookieAttributes({}) + + expect(attributes.length).toEqual(6) + expect(attributes[0]).toEqual('Path=/') + expect(attributes[1]).toEqual('HttpOnly') + expect(attributes[2]).toEqual('SameSite=Strict') + expect(attributes[3]).toEqual('Secure') + expect(attributes[4]).toEqual('Domain=example.com') + expect(attributes[5]).toMatch(`Expires=`) + expect(attributes[5]).toMatch(UTC_DATE_REGEX) + }) + + it('includes just a key if option set to `true`', () => { + const dbAuth = new DbAuthHandler(req, context, { + ...options, + cookie: { Secure: true }, + }) + + const attributes = dbAuth._cookieAttributes({}) + + expect(attributes[0]).toEqual('Secure') + }) + + it('does not include a key if 
option set to `false`', () => { + const dbAuth = new DbAuthHandler(req, context, { + ...options, + cookie: { Secure: false }, + }) + const attributes = dbAuth._cookieAttributes({}) + + expect(attributes[0]).not.toEqual('Secure') + }) + + it('includes key=value if property value is set', () => { + const dbAuth = new DbAuthHandler(req, context, { + ...options, + cookie: { Domain: 'example.com' }, + }) + const attributes = dbAuth._cookieAttributes({}) + + expect(attributes[0]).toEqual('Domain=example.com') + }) + + it('includes no cookie attributes if cookie options are empty', () => { + const dbAuth = new DbAuthHandler(req, context, { + ...options, + cookie: {}, + }) + const attributes = dbAuth._cookieAttributes({}) + + expect(attributes.length).toEqual(1) + expect(attributes[0]).toMatch(/Expires=/) + }) + + it('includes no cookie attributes if cookie options not set', () => { + const dbAuth = new DbAuthHandler(req, context, options) + const attributes = dbAuth._cookieAttributes({}) + + expect(attributes.length).toEqual(1) + expect(attributes[0]).toMatch(/Expires=/) + }) + }) + + describe('_createSessionHeader()', () => { + it('returns a Set-Cookie header', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const headers = dbAuth._createSessionHeader({ foo: 'bar' }, 'abcd') + + expect(Object.keys(headers).length).toEqual(1) + expect(headers['set-cookie']).toMatch( + `Expires=${dbAuth.sessionExpiresDate}` + ) + // can't really match on the session value since it will change on every render, + // due to CSRF token generation but we can check that it contains only the + // characters that would be returned by the encrypt function + expect(headers['set-cookie']).toMatch(SET_SESSION_REGEX) + // and we can check that it's a certain number of characters + expect(headers['set-cookie'].split(';')[0].length).toEqual(77) + }) + }) + + describe('_validateCsrf()', () => { + it('returns true if session and header token match', async () => { + const data = { foo: 'bar' } + const token = 'abcd' + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers: { + cookie: encryptToCookie(JSON.stringify(data) + ';' + token), + 'csrf-token': token, + }, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const output = await dbAuth._validateCsrf() + + expect(output).toEqual(true) + }) + + it('throws an error if session and header token do not match', async () => { + const data = { foo: 'bar' } + const token = 'abcd' + req = { + headers: { + cookie: encryptToCookie(JSON.stringify(data) + ';' + token), + 'csrf-token': 'invalid', + }, + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(async () => { + await dbAuth._validateCsrf() + }).rejects.toThrow(dbAuthError.CsrfTokenMismatchError) + }) + }) + + describe('_verifyUser()', () => { + it('throws an error if username is missing', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._verifyUser(null, 'password') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameAndPasswordRequiredError) + } + try { + await dbAuth._verifyUser('', 'password') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameAndPasswordRequiredError) + } + try { + await dbAuth._verifyUser(' ', 'password') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameAndPasswordRequiredError) + } + expect.assertions(3) + }) + + it('throws an error if password is 
missing', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._verifyUser('username') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameAndPasswordRequiredError) + } + + try { + await dbAuth._verifyUser('username', null) + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameAndPasswordRequiredError) + } + + try { + await dbAuth._verifyUser('username', '') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameAndPasswordRequiredError) + } + + try { + await dbAuth._verifyUser('username', ' ') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UsernameAndPasswordRequiredError) + } + + expect.assertions(4) + }) + + it('can throw a custom error message', async () => { + // default error message + const defaultMessage = options.login.errors.usernameOrPasswordMissing + delete options.login.errors.usernameOrPasswordMissing + const dbAuth1 = new DbAuthHandler(req, context, options) + await dbAuth1.init() + try { + await dbAuth1._verifyUser(null, 'password') + } catch (e) { + expect(e.message).toEqual(defaultMessage) + } + + // custom error message + options.login.errors.usernameOrPasswordMissing = 'Missing!' + const customMessage = new DbAuthHandler(req, context, options) + + try { + await customMessage._verifyUser(null, 'password') + } catch (e) { + expect(e.message).toEqual('Missing!') + } + + expect.assertions(2) + }) + + it('throws a default error message if user is not found', async () => { + delete options.login.errors.usernameNotFound + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + try { + await dbAuth._verifyUser('username', 'password') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UserNotFoundError) + expect(e.message).toEqual('Username username not found') + } + + expect.assertions(2) + }) + + it('throws a custom error message if user is not found', async () => { + options.login.errors.usernameNotFound = 'Cannot find ${username}' + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._verifyUser('Alice', 'password') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UserNotFoundError) + expect(e.message).toEqual('Cannot find Alice') + } + + expect.assertions(2) + }) + + it('throws a default error if password is incorrect', async () => { + delete options.login.errors.incorrectPassword + const dbUser = await createDbUser() + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._verifyUser(dbUser.email, 'incorrect') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.IncorrectPasswordError) + expect(e.message).toEqual(`Incorrect password for ${dbUser.email}`) + } + + expect.assertions(2) + }) + + it('throws a custom error if password is incorrect', async () => { + options.login.errors.incorrectPassword = 'Wrong password for ${username}' + const dbUser = await createDbUser() + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._verifyUser(dbUser.email, 'incorrect') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.IncorrectPasswordError) + expect(e.message).toEqual(`Wrong password for ${dbUser.email}`) + } + + expect.assertions(2) + }) + + it('throws a generic error for an invalid client', async () => { + const dbUser = await createDbUser() + // invalid db client + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + dbAuth.dbAccessor = undefined + + try { + 
await dbAuth._verifyUser(dbUser.email, 'password') + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.GenericError) + } + + expect.assertions(1) + }) + + it('returns the user with matching username and password', async () => { + const dbUser = await createDbUser() + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const user = await dbAuth._verifyUser(dbUser.email, 'password') + + expect(user.id).toEqual(dbUser.id) + }) + + it('returns the user if password is hashed with legacy algorithm', async () => { + const dbUser = await createDbUser({ + // CryptoJS hashed password + hashedPassword: + '0c2b24e20ee76a887eac1415cc2c175ff961e7a0f057cead74789c43399dd5ba', + salt: '2ef27f4073c603ba8b7807c6de6d6a89', + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const user = await dbAuth._verifyUser(dbUser.email, 'password') + + expect(user.id).toEqual(dbUser.id) + }) + + it('updates the user hashPassword to the new algorithm', async () => { + const dbUser = await createDbUser({ + // CryptoJS hashed password + hashedPassword: + '0c2b24e20ee76a887eac1415cc2c175ff961e7a0f057cead74789c43399dd5ba', + salt: '2ef27f4073c603ba8b7807c6de6d6a89', + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + await dbAuth._verifyUser(dbUser.email, 'password') + const user = await db.user.findFirst({ where: { id: dbUser.id } }) + + // password now hashed by node:crypto + expect(user.hashedPassword).toEqual( + 'f20d69d478fa1afc85057384e21bd457a76b23b23e2a94f5bd982976f700a552|16384|8|1' + ) + // salt should remain the same + expect(user.salt).toEqual('2ef27f4073c603ba8b7807c6de6d6a89') + }) + }) + + describe('_getCurrentUser()', () => { + it('throw an error if user is not logged in', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._getCurrentUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.NotLoggedInError) + } + + expect.assertions(1) + }) + + it('throw an error if user is not found', async () => { + const data = { id: 999999999999 } + const headers = { + cookie: encryptToCookie(JSON.stringify(data) + ';' + 'token'), + } + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._getCurrentUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.UserNotFoundError) + } + + expect.assertions(1) + }) + + it('throws a generic error for an invalid client', async () => { + const dbUser = await createDbUser() + const headers = { + cookie: encryptToCookie( + JSON.stringify({ id: dbUser.id }) + ';' + 'token' + ), + } + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + // invalid db client + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + dbAuth.dbAccessor = undefined + + try { + await dbAuth._getCurrentUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.GenericError) + } + + expect.assertions(1) + }) + + it('returns the user whos id is in session', async () => { + const dbUser = await createDbUser() + const headers = { + cookie: encryptToCookie( + JSON.stringify({ id: dbUser.id }) + ';' + 'token' + ), + } + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + headers, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const user = await 
dbAuth._getCurrentUser() + + expect(user.id).toEqual(dbUser.id) + }) + }) + + describe('_createUser()', () => { + it('throws a default error message if username is already taken', async () => { + const defaultMessage = options.signup.errors.usernameTaken + delete options.signup.errors.usernameTaken + const dbUser = await createDbUser() + + const body = JSON.stringify({ + username: dbUser.email, + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._createUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.DuplicateUsernameError) + expect(e.message).toEqual( + defaultMessage.replace(/\$\{username\}/, dbUser.email) + ) + } + + expect.assertions(2) + }) + + it('throws a custom error message if username is already taken', async () => { + options.signup.errors.usernameTaken = '${username} taken' + const dbUser = await createDbUser() + const body = JSON.stringify({ + username: dbUser.email, + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._createUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.DuplicateUsernameError) + expect(e.message).toEqual(`${dbUser.email} taken`) + } + + expect.assertions(2) + }) + + it('createUser db check is called with insensitive string when user has provided one in SignupFlowOptions', async () => { + const spy = vi.spyOn(db.user, 'findFirst') + options.signup.usernameMatch = 'insensitive' + + const dbUser = await createDbUser() + const body = JSON.stringify({ + username: dbUser.email, + password: 'password', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + await dbAuth._createUser() + expect(spy).toHaveBeenCalled() + return expect(spy).toHaveBeenCalledWith({ + where: { + email: expect.objectContaining({ mode: 'insensitive' }), + }, + }) + }) + + it('createUser db check is not called with insensitive string when user has not provided one in SignupFlowOptions', async () => { + vi.resetAllMocks() + vi.clearAllMocks() + + const defaultMessage = options.signup.errors.usernameTaken + const spy = vi.spyOn(db.user, 'findFirst') + delete options.signup.usernameMatch + + const dbUser = await createDbUser() + const body = JSON.stringify({ + username: dbUser.email, + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._createUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.DuplicateUsernameError) + expect(e.message).toEqual( + defaultMessage.replace(/\$\{username\}/, dbUser.email) + ) + } + + expect(spy).toHaveBeenCalled() + return expect(spy).not.toHaveBeenCalledWith({ + where: { + email: expect.objectContaining({ mode: 'insensitive' }), + }, + }) + }) + + it('throws a default error message if username is missing', async () => { + const defaultMessage = options.signup.errors.fieldMissing + delete options.signup.errors.fieldMissing + const body = JSON.stringify({ + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const 
dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._createUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.FieldRequiredError) + expect(e.message).toEqual( + defaultMessage.replace(/\$\{field\}/, 'username') + ) + } + + expect.assertions(2) + }) + + it('throws a custom error message if username is missing', async () => { + options.signup.errors.fieldMissing = '${field} blank' + const body = JSON.stringify({ + password: 'password', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._createUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.FieldRequiredError) + expect(e.message).toEqual('username blank') + } + + expect.assertions(2) + }) + + it('throws a default error message if password is missing', async () => { + const defaultMessage = options.signup.errors.fieldMissing + delete options.signup.errors.fieldMissing + const body = JSON.stringify({ + username: 'user@redwdoodjs.com', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + try { + await dbAuth._createUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.FieldRequiredError) + expect(e.message).toEqual( + defaultMessage.replace(/\$\{field\}/, 'password') + ) + } + + expect.assertions(2) + }) + + it('throws a custom error message if password is missing', async () => { + options.signup.errors.fieldMissing = '${field} blank' + const body = JSON.stringify({ + username: 'user@redwdoodjs.com', + }) + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + await dbAuth._createUser() + } catch (e) { + expect(e).toBeInstanceOf(dbAuthError.FieldRequiredError) + expect(e.message).toEqual('password blank') + } + + expect.assertions(2) + }) + + it('creates a new user', async () => { + const headers = { 'Content-Type': 'application/json' } + const body = JSON.stringify({ + username: 'rob@redwoodjs.com', + password: 'password', + name: 'Rob', + }) + + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body, + headers, + }) + + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + try { + const user = await dbAuth._createUser() + expect(user.email).toEqual('rob@redwoodjs.com') + expect(user.hashedPassword).not.toBeNull() + expect(user.salt).not.toBeNull() + expect(user.name).toEqual('Rob') + } catch (e) { + console.info(e) + } + }) + }) + + describe('getAuthMethod', () => { + it('gets methodName out of the query string', async () => { + const req = new Request('http://localhost:8910/_rw_mw?method=logout', { + method: 'POST', + body: '', + headers: {}, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(await dbAuth._getAuthMethod()).toEqual('logout') + }) + + it('gets methodName out of a JSON body', async () => { + req = { + path: '/.redwood/functions/auth', + queryStringParameters: {}, + body: '{"method":"signup"}', + headers: {}, + } + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(await dbAuth._getAuthMethod()).toEqual('signup') + }) + + it('otherwise returns undefined', async () => { + const req = new 
Request('http://localhost:8910/_rw_mw', { + method: 'POST', + body: '', + headers: {}, + }) + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(await dbAuth._getAuthMethod()).toBeUndefined() + }) + }) + + describe('validateField', () => { + it('checks for the presence of a field', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(() => { + dbAuth._validateField('username', null) + }).toThrow(dbAuth.FieldRequiredError) + expect(() => { + dbAuth._validateField('username', '') + }).toThrow(dbAuth.FieldRequiredError) + expect(() => { + dbAuth._validateField('username', ' ') + }).toThrow(dbAuth.FieldRequiredError) + }) + + it('passes validation if everything is present', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + + expect(dbAuth._validateField('username', 'cannikin')).toEqual(true) + }) + }) + + describe('logoutResponse', () => { + it('returns the response array necessary to log user out', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const [body, headers] = dbAuth._logoutResponse() + + expect(body).toEqual('') + expect(headers['set-cookie']).toMatch(/^session=;/) + }) + + it('can accept an object to return in the body', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const [body, _headers] = dbAuth._logoutResponse({ + error: 'error message', + }) + + expect(body).toEqual('{"error":"error message"}') + }) + }) + + describe('ok', () => { + it('returns a 200 response by default', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = dbAuth._ok('', {}) + + expect(response.statusCode).toEqual(200) + }) + + it('can return other status codes', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = dbAuth._ok('', {}, { statusCode: 201 }) + + expect(response.statusCode).toEqual(201) + }) + + it('stringifies a JSON body', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = dbAuth._ok({ foo: 'bar' }, {}, { statusCode: 201 }) + + expect(response.body).toEqual('{"foo":"bar"}') + }) + + it('does not stringify a body that is a string already', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = dbAuth._ok('{"foo":"bar"}', {}, { statusCode: 201 }) + + expect(response.body).toEqual('{"foo":"bar"}') + }) + }) + + describe('_notFound', () => { + it('returns a 404 response', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = dbAuth._notFound() + + expect(response.statusCode).toEqual(404) + expect(response.body).toEqual(undefined) + }) + }) + + describe('_badRequest', () => { + it('returns a 400 response', async () => { + const dbAuth = new DbAuthHandler(req, context, options) + await dbAuth.init() + const response = dbAuth._badRequest('bad') + + expect(response.statusCode).toEqual(400) + expect(response.body).toEqual('{"error":"bad"}') + }) + }) +}) diff --git a/packages/auth-providers/dbAuth/api/src/__tests__/DbAuthHandler.test.js b/packages/auth-providers/dbAuth/api/src/__tests__/DbAuthHandler.test.js index 3fa67ac70b80..72ba44f25e9e 100644 --- a/packages/auth-providers/dbAuth/api/src/__tests__/DbAuthHandler.test.js +++ 
b/packages/auth-providers/dbAuth/api/src/__tests__/DbAuthHandler.test.js @@ -261,22 +261,25 @@ describe('dbAuth', () => { }) describe('dbAccessor', () => { - it('returns the prisma db accessor for a model', () => { + it('returns the prisma db accessor for a model', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(dbAuth.dbAccessor).toEqual(db.user) }) }) describe('dbCredentialAccessor', () => { - it('returns the prisma db accessor for a UserCredential model', () => { + it('returns the prisma db accessor for a UserCredential model', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(dbAuth.dbCredentialAccessor).toEqual(db.userCredential) }) }) describe('sessionExpiresDate', () => { - it('returns a date in the future as a UTCString', () => { + it('returns a date in the future as a UTCString', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const expiresAt = new Date() expiresAt.setSeconds(expiresAt.getSeconds() + options.login.expires) @@ -285,8 +288,9 @@ describe('dbAuth', () => { }) describe('webAuthnExpiresDate', () => { - it('returns a date in the future as a UTCString', () => { + it('returns a date in the future as a UTCString', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const expiresAt = new Date() expiresAt.setSeconds(expiresAt.getSeconds() + options.webAuthn.expires) @@ -295,8 +299,9 @@ describe('dbAuth', () => { }) describe('_deleteSessionHeader', () => { - it('returns a Set-Cookie header to delete the session cookie', () => { + it('returns a Set-Cookie header to delete the session cookie', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const headers = dbAuth._deleteSessionHeader expect(Object.keys(headers).length).toEqual(1) @@ -306,7 +311,7 @@ describe('dbAuth', () => { }) describe('constructor', () => { - it('initializes some variables with passed values', () => { + it('initializes some variables with passed values', async () => { event = { headers: {} } context = { foo: 'bar' } options = { @@ -326,9 +331,9 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(dbAuth.event).toEqual(event) - expect(dbAuth.context).toEqual(context) expect(dbAuth.options).toEqual(options) }) @@ -522,55 +527,74 @@ describe('dbAuth', () => { ).not.toThrow(dbAuthError.NoSignupHandler) }) - it('parses params from a plain text body', () => { + it('parses params from a plain text body', async () => { event = { headers: {}, body: `{"foo":"bar", "baz":123}` } const dbAuth = new DbAuthHandler(event, context, options) - expect(dbAuth.params).toEqual({ foo: 'bar', baz: 123 }) + // Need to wait for reqq to be parsed + await dbAuth.init() + + expect(dbAuth.normalizedRequest.jsonBody).toEqual({ + foo: 'bar', + baz: 123, + }) }) - it('parses an empty plain text body and still sets params', () => { - event = { isBase64Encoded: false, headers: {}, body: '' } + it('parses an empty plain text body and still sets params', async () => { + event = { + isBase64Encoded: false, + headers: { + bazomga: 'yo', + }, + body: '', + } context = { foo: 'bar' } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(dbAuth.params).toEqual({}) + expect(dbAuth.normalizedRequest.jsonBody).toEqual({}) }) - it('parses params from an undefined body when isBase64Encoded == false', () => { + 
it('parses params from an undefined body when isBase64Encoded == false', async () => { event = { isBase64Encoded: false, headers: {}, } context = { foo: 'bar' } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(dbAuth.params).toEqual({}) + expect(dbAuth.normalizedRequest.jsonBody).toEqual({}) }) - it('parses params from a base64 encoded body', () => { + it('parses params from a base64 encoded body', async () => { event = { isBase64Encoded: true, headers: {}, body: Buffer.from(`{"foo":"bar", "baz":123}`, 'utf8'), } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(dbAuth.params).toEqual({ foo: 'bar', baz: 123 }) + expect(dbAuth.normalizedRequest.jsonBody).toEqual({ + foo: 'bar', + baz: 123, + }) }) - it('parses params from an undefined body when isBase64Encoded == true', () => { + it('parses params from an undefined body when isBase64Encoded == true', async () => { event = { isBase64Encoded: true, headers: {}, } context = { foo: 'bar' } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(dbAuth.params).toEqual({}) + expect(dbAuth.normalizedRequest.jsonBody).toEqual({}) }) - it('parses params from an empty body when isBase64Encoded == true', () => { + it('parses params from an empty body when isBase64Encoded == true', async () => { event = { isBase64Encoded: true, headers: {}, @@ -578,26 +602,30 @@ describe('dbAuth', () => { } context = { foo: 'bar' } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(dbAuth.params).toEqual({}) + expect(dbAuth.normalizedRequest.jsonBody).toEqual({}) }) - it('sets header-based CSRF token', () => { + it('sets header-based CSRF token', async () => { event = { headers: { 'csrf-token': 'qwerty' } } const dbAuth = new DbAuthHandler(event, context, options) - - expect(dbAuth.headerCsrfToken).toEqual('qwerty') + await dbAuth.init() + expect(dbAuth.normalizedRequest.headers.get('csrf-token')).toEqual( + 'qwerty' + ) }) - it('sets session variables to nothing if session cannot be decrypted', () => { + it('sets session variables to nothing if session cannot be decrypted', async () => { event = { headers: { 'csrf-token': 'qwerty' } } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(dbAuth.session).toBeUndefined() expect(dbAuth.sessionCsrfToken).toBeUndefined() }) - it('sets session variables to valid session data', () => { + it('sets session variables to valid session data', async () => { event = { headers: { cookie: @@ -605,6 +633,7 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(dbAuth.session).toEqual({ foo: 'bar' }) expect(dbAuth.sessionCsrfToken).toEqual('abcd') @@ -625,6 +654,7 @@ describe('dbAuth', () => { event.httpMethod = 'GET' event.headers.cookie = 'session=invalid' const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.invoke() expect(response.headers['set-cookie']).toEqual(LOGOUT_COOKIE) @@ -636,6 +666,7 @@ describe('dbAuth', () => { event.headers.cookie = 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==' const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.invoke() expect(response.statusCode).toEqual(404) @@ -647,6 +678,7 @@ describe('dbAuth', () => { event.headers.cookie = 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==' 
const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.invoke() expect(response.statusCode).toEqual(404) @@ -658,6 +690,7 @@ describe('dbAuth', () => { event.headers.cookie = 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==' const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() dbAuth.logout = vi.fn(() => { throw Error('Logout error') }) @@ -678,6 +711,8 @@ describe('dbAuth', () => { credentials: true, }, }) + await dbAuth.init() + dbAuth.logout = vi.fn(() => { throw Error('Logout error') }) @@ -696,6 +731,7 @@ describe('dbAuth', () => { event.headers.cookie = 'session=ko6iXKV11DSjb6kFJ4iwcf1FEqa5wPpbL1sdtKiV51Y=|cQaYkOPG/r3ILxWiFiz90w==' const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() dbAuth.logout = vi.fn(() => ['body', { foo: 'bar' }]) const response = await dbAuth.invoke() @@ -718,6 +754,7 @@ describe('dbAuth', () => { }) options.forgotPassword.enabled = false const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.forgotPassword() @@ -739,6 +776,7 @@ describe('dbAuth', () => { flowNotEnabled: 'Custom flow not enabled error', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.forgotPassword() @@ -751,7 +789,8 @@ describe('dbAuth', () => { it('throws an error if username is blank', async () => { // missing completely event.body = JSON.stringify({}) - let dbAuth = new DbAuthHandler(event, context, options) + const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.forgotPassword() @@ -761,10 +800,10 @@ describe('dbAuth', () => { // empty string event.body = JSON.stringify({ username: ' ' }) - dbAuth = new DbAuthHandler(event, context, options) - + const dbAuth2 = new DbAuthHandler(event, context, options) + await dbAuth2.init() try { - await dbAuth.forgotPassword() + await dbAuth2.forgotPassword() } catch (e) { expect(e).toBeInstanceOf(dbAuthError.UsernameRequiredError) } @@ -778,6 +817,7 @@ describe('dbAuth', () => { username: 'notfound', }) let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.forgotPassword() @@ -794,6 +834,7 @@ describe('dbAuth', () => { username: user.email, }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(user.resetToken).toEqual(undefined) expect(user.resetTokenExpiresAt).toEqual(undefined) @@ -826,6 +867,7 @@ describe('dbAuth', () => { username: user.email, }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.forgotPassword() expectLoggedOutResponse(response) @@ -841,6 +883,7 @@ describe('dbAuth', () => { expect(token).toMatch(/^[A-Za-z0-9/+]{16}$/) } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth.forgotPassword() expect.assertions(2) }) @@ -857,6 +900,7 @@ describe('dbAuth', () => { expect(token).toMatch(/^[A-Za-z0-9/+]{16}$/) } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth.forgotPassword() expect.assertions(2) }) @@ -870,6 +914,7 @@ describe('dbAuth', () => { return handlerUser } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.forgotPassword() const jsonResponse = JSON.parse(response[0]) @@ -884,6 +929,7 @@ describe('dbAuth', () => { }) // invalid db client const dbAuth = new 
DbAuthHandler(event, context, options) + await dbAuth.init() dbAuth.dbAccessor = undefined try { await dbAuth.forgotPassword() @@ -903,6 +949,7 @@ describe('dbAuth', () => { }) options.login.enabled = false const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.login() @@ -924,6 +971,7 @@ describe('dbAuth', () => { flowNotEnabled: 'Custom flow not enabled error', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.login() @@ -942,6 +990,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.login() @@ -959,6 +1008,7 @@ describe('dbAuth', () => { password: 'incorrect', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.login() @@ -979,6 +1029,7 @@ describe('dbAuth', () => { throw new Error('Cannot log in') } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.login() @@ -1000,6 +1051,7 @@ describe('dbAuth', () => { return user } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth.login() }) @@ -1013,7 +1065,7 @@ describe('dbAuth', () => { return null } const dbAuth = new DbAuthHandler(event, context, options) - + await dbAuth.init() try { await dbAuth.login() } catch (e) { @@ -1033,6 +1085,7 @@ describe('dbAuth', () => { return { name: 'Rob' } } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.login() } catch (e) { @@ -1048,6 +1101,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.login() @@ -1061,6 +1115,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.login() expect(response[1]['csrf-token']).toMatch(UUID_REGEX) @@ -1073,6 +1128,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.login() @@ -1086,6 +1142,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.login() @@ -1106,6 +1163,7 @@ describe('dbAuth', () => { }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.login() @@ -1134,6 +1192,7 @@ describe('dbAuth', () => { }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth.login() @@ -1148,6 +1207,7 @@ describe('dbAuth', () => { describe('logout', () => { it('returns set-cookie header for removing session', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = dbAuth.logout() expectLoggedOutResponse(response) @@ -1163,6 +1223,7 @@ describe('dbAuth', () => { }) options.resetPassword.enabled = false const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1184,6 +1245,7 @@ describe('dbAuth', () => { flowNotEnabled: 'Custom flow not enabled error', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1196,7 +1258,7 @@ describe('dbAuth', () => { // missing completely event.body = 
JSON.stringify({}) let dbAuth = new DbAuthHandler(event, context, options) - + await dbAuth.init() try { await dbAuth.resetPassword() } catch (e) { @@ -1206,6 +1268,7 @@ describe('dbAuth', () => { // empty string event.body = JSON.stringify({ resetToken: ' ' }) dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1220,6 +1283,7 @@ describe('dbAuth', () => { // missing completely event.body = JSON.stringify({ resetToken: '1234' }) let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1230,6 +1294,7 @@ describe('dbAuth', () => { // empty string event.body = JSON.stringify({ resetToken: '1234', password: ' ' }) dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1243,6 +1308,7 @@ describe('dbAuth', () => { it('throws an error if no user found with resetToken', async () => { event.body = JSON.stringify({ resetToken: '1234', password: 'password' }) let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1264,6 +1330,7 @@ describe('dbAuth', () => { event.body = JSON.stringify({ resetToken: '1234', password: 'password1' }) let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1288,6 +1355,7 @@ describe('dbAuth', () => { password: 'password1', }) let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.resetPassword() @@ -1317,6 +1385,7 @@ describe('dbAuth', () => { }) options.resetPassword.allowReusedPassword = false let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await expect(dbAuth.resetPassword()).rejects.toThrow( dbAuthError.ReusedPasswordError @@ -1339,6 +1408,7 @@ describe('dbAuth', () => { }) options.resetPassword.allowReusedPassword = true let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await expect(dbAuth.resetPassword()).resolves.not.toThrow() }) @@ -1357,6 +1427,7 @@ describe('dbAuth', () => { password: 'new-password', }) let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await expect(dbAuth.resetPassword()).resolves.not.toThrow() @@ -1383,6 +1454,7 @@ describe('dbAuth', () => { password: 'new-password', }) let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await expect(dbAuth.resetPassword()).resolves.not.toThrow() @@ -1411,6 +1483,7 @@ describe('dbAuth', () => { expect(handlerUser.id).toEqual(user.id) } let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth.resetPassword() expect.assertions(1) @@ -1431,6 +1504,7 @@ describe('dbAuth', () => { }) options.resetPassword.handler = () => false let dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.resetPassword() @@ -1452,7 +1526,7 @@ describe('dbAuth', () => { }) options.resetPassword.handler = () => true let dbAuth = new DbAuthHandler(event, context, options) - + await dbAuth.init() const response = await dbAuth.resetPassword() expectLoggedInResponse(response) @@ -1470,6 +1544,7 @@ describe('dbAuth', () => { throw Error('Cannot signup') } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect.assertions(1) await expect(dbAuth.signup()).rejects.toThrow('Cannot signup') @@ -1483,6 +1558,7 @@ describe('dbAuth', () => { }) options.signup.enabled = false const dbAuth = 
new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.signup() @@ -1504,6 +1580,7 @@ describe('dbAuth', () => { flowNotEnabled: 'Custom flow not enabled error', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.signup() @@ -1525,6 +1602,7 @@ describe('dbAuth', () => { } } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.signup() @@ -1546,6 +1624,7 @@ describe('dbAuth', () => { } } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(() => dbAuth.signup()).not.toThrow() }) @@ -1558,6 +1637,7 @@ describe('dbAuth', () => { }) delete options.signup.passwordValidation const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(() => dbAuth.signup()).not.toThrow() }) @@ -1570,6 +1650,7 @@ describe('dbAuth', () => { }) const oldUserCount = await db.user.count() const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.signup() const newUserCount = await db.user.count() @@ -1592,6 +1673,7 @@ describe('dbAuth', () => { return 'Hello, world' } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.signup() @@ -1605,16 +1687,19 @@ describe('dbAuth', () => { }) describe('getToken', () => { - it('returns the ID of the logged in user', async () => { + it('returns the token from the cookie', async () => { const user = await createDbUser() + const cookie = encryptToCookie( + JSON.stringify({ id: user.id }) + ';' + 'token' + ) + event = { headers: { - cookie: encryptToCookie( - JSON.stringify({ id: user.id }) + ';' + 'token' - ), + cookie, }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.getToken() expect(response[0]).toEqual(user.id) @@ -1622,6 +1707,7 @@ describe('dbAuth', () => { it('returns nothing if user is not logged in', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.getToken() expect(response[0]).toEqual('') @@ -1637,6 +1723,7 @@ describe('dbAuth', () => { } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.getToken() expect(response[0]).toEqual('{"error":"User not found"}') @@ -1654,6 +1741,7 @@ describe('dbAuth', () => { 'QKxN2vFSHAf94XYynK8LUALfDuDSdFowG6evfkFX8uszh4YZqhTiqEdshrhWbwbw' const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const [userId, headers] = await dbAuth.getToken() expect(userId).toEqual(7) @@ -1692,6 +1780,7 @@ describe('dbAuth', () => { }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const user = await dbAuth._getCurrentUser() expect(user.id).toEqual(dbUser.id) }) @@ -1718,6 +1807,7 @@ describe('dbAuth', () => { // should read session from graphiQL header, not from cookie const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const user = await dbAuth._getCurrentUser() expect(user.id).toEqual(dbUserId) }) @@ -1740,6 +1830,7 @@ describe('dbAuth', () => { try { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth._getCurrentUser() } catch (e) { expect(e.message).toEqual( @@ -1771,6 +1862,7 @@ describe('dbAuth', () => { } options.webAuthn.enabled = false const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect.assertions(1) 
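// Illustrative summary (a sketch, not copied from any single test): the recurring
// pattern in these updated specs is that request parsing has moved out of the
// constructor and into an async init() step, and parsed values are read from the
// normalized request:
//
//   const dbAuth = new DbAuthHandler(event, context, options)
//   await dbAuth.init() // parses the body/headers into dbAuth.normalizedRequest
//   dbAuth.normalizedRequest.jsonBody // instead of the old dbAuth.params
//   dbAuth.normalizedRequest.headers.get('csrf-token') // instead of dbAuth.headerCsrfToken
//
// which is why every handler call in these tests is preceded by an awaited init().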
await expect(dbAuth.webAuthnAuthenticate()).rejects.toThrow( @@ -1784,6 +1876,7 @@ describe('dbAuth', () => { body: '{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect.assertions(1) await expect(dbAuth.webAuthnAuthenticate()).rejects.toThrow( @@ -1808,6 +1901,7 @@ describe('dbAuth', () => { body: '{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect.assertions(1) await expect(dbAuth.webAuthnAuthenticate()).rejects.toThrow( @@ -1832,6 +1926,7 @@ describe('dbAuth', () => { body: '{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect.assertions(1) try { @@ -1868,6 +1963,7 @@ describe('dbAuth', () => { body: 
'{"method":"webAuthnAuthenticate","id":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","rawId":"CxMJqILwYufSaEQsJX6rKHw_LkMXAGU64PaKU55l6ejZ4FNO5kBLiA","response":{"authenticatorData":"SZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2MFAAAAAA","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uZ2V0IiwiY2hhbGxlbmdlIjoiTHRnV3BoWUtfZU41clhjX0hkdlVMdk9xcFBXeW9SdmJtbDJQbzAwVUhhZyIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9","signature":"MEUCIQD3NOM7Aw0HxPw6EFGf86iwf2yd3p4NncNNLcjd-86zgwIgHuh80bLNV7EcwBi4IAcH57iueLg0X2gLtO5_Y6PMCFE","userHandle":"2"},"type":"public-key","clientExtensionResults":{}}', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const [body, headers] = await dbAuth.webAuthnAuthenticate() @@ -1884,6 +1980,7 @@ describe('dbAuth', () => { headers: {}, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.webAuthnAuthOptions() @@ -1899,6 +1996,7 @@ describe('dbAuth', () => { } options.webAuthn.enabled = false const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.webAuthnAuthOptions() @@ -1918,6 +2016,7 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.webAuthnAuthOptions() const regOptions = response[0] @@ -1945,6 +2044,7 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.webAuthnAuthOptions() const regOptions = response[0] @@ -1965,6 +2065,7 @@ describe('dbAuth', () => { headers: {}, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.webAuthnRegOptions() @@ -1980,6 +2081,7 @@ describe('dbAuth', () => { } options.webAuthn.enabled = false const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth.webAuthnRegOptions() @@ -1999,6 +2101,7 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.webAuthnRegOptions() const regOptions = response[0] @@ -2026,6 +2129,7 @@ describe('dbAuth', () => { } options.webAuthn.timeout = null const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.webAuthnRegOptions() expect(response[0].timeout).toEqual(60000) @@ -2041,6 +2145,7 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = await dbAuth.webAuthnRegOptions() user = await db.user.findFirst({ where: { id: user.id } }) @@ -2063,6 +2168,7 @@ describe('dbAuth', () => { body: 
'{"method":"webAuthnRegister","id":"GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg","rawId":"GqjZOuYYppObBDeVknbrcBLkaa9imS5EJJwtCV740asUz24sdAmGFg","response":{"attestationObject":"o2NmbXRkbm9uZWdhdHRTdG10oGhhdXRoRGF0YVisSZYN5YgOjGh0NBcPZHZgW4_krrmihjLHmVzzuoMdl2NFAAAAAK3OAAI1vMYKZIsLJfHwVQMAKBqo2TrmGKaTmwQ3lZJ263AS5GmvYpkuRCScLQle-NGrFM9uLHQJhhalAQIDJiABIVggGIipTQt-gcoDPOpW6Zje_Av9C0-jWb2R2PBmXJJL-c8iWCC76wxo3uzG8cPqb0A8Vij-dqMbrEytEHjuFOtiQ2dt8A","clientDataJSON":"eyJ0eXBlIjoid2ViYXV0aG4uY3JlYXRlIiwiY2hhbGxlbmdlIjoiSHVHUHJRcUs3ZjUzTkx3TVpNc3RfREw5RGlnMkJCaXZEWVdXcGF3SVBWTSIsIm9yaWdpbiI6Imh0dHA6Ly9sb2NhbGhvc3Q6ODkxMCIsImNyb3NzT3JpZ2luIjpmYWxzZSwib3RoZXJfa2V5c19jYW5fYmVfYWRkZWRfaGVyZSI6ImRvIG5vdCBjb21wYXJlIGNsaWVudERhdGFKU09OIGFnYWluc3QgYSB0ZW1wbGF0ZS4gU2VlIGh0dHBzOi8vZ29vLmdsL3lhYlBleCJ9"},"type":"public-key","clientExtensionResults":{},"transports":["internal"]}', } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth.webAuthnRegister() @@ -2094,6 +2200,7 @@ describe('dbAuth', () => { ), } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth.webAuthnRegister() @@ -2130,14 +2237,16 @@ describe('dbAuth', () => { }) describe('_webAuthnCookie', () => { - it('returns the parts needed for the webAuthn cookie, defaulted to future expire', () => { + it('returns the parts needed for the webAuthn cookie, defaulted to future expire', async () => { const dbAuth = new DbAuthHandler({ headers: {} }, context, options) + await dbAuth.init() expect(dbAuth._webAuthnCookie('1234')).toMatch('webAuthn=1234;Expires=') }) - it('returns the parts needed for the expire the webAuthn cookie', () => { + it('returns the parts needed for the expire the webAuthn cookie', async () => { const dbAuth = new DbAuthHandler({ headers: {} }, context, options) + await dbAuth.init() expect(dbAuth._webAuthnCookie('1234', 'now')).toMatch( 'webAuthn=1234;Expires=Thu, 01 Jan 1970 00:00:00 GMT' @@ -2163,6 +2272,7 @@ describe('dbAuth', () => { }, } ) + const attributes = dbAuth._cookieAttributes({}) expect(attributes.length).toEqual(6) @@ -2226,8 +2336,9 @@ describe('dbAuth', () => { }) describe('_createSessionHeader()', () => { - it('returns a Set-Cookie header', () => { + it('returns a Set-Cookie header', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const headers = dbAuth._createSessionHeader({ foo: 'bar' }, 'abcd') expect(Object.keys(headers).length).toEqual(1) @@ -2244,7 +2355,7 @@ describe('dbAuth', () => { }) describe('_validateCsrf()', () => { - it('returns true if session and header token match', () => { + it('returns true if session and header token match', async () => { const data = { foo: 'bar' } const token = 'abcd' event = { @@ -2254,11 +2365,14 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(dbAuth._validateCsrf()).toEqual(true) + const output = await dbAuth._validateCsrf() + + expect(output).toEqual(true) }) - it('throws an error if session and header token do not match', () => { + it('throws an error if session and header token do not match', async () => { const data = { foo: 'bar' } const token = 'abcd' event = { @@ -2268,16 +2382,18 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(() => { - dbAuth._validateCsrf() - }).toThrow(dbAuthError.CsrfTokenMismatchError) + expect(async () => { + await dbAuth._validateCsrf() + 
}).rejects.toThrow(dbAuthError.CsrfTokenMismatchError) }) }) describe('_verifyUser()', () => { it('throws an error if username is missing', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._verifyUser(null, 'password') @@ -2299,6 +2415,7 @@ describe('dbAuth', () => { it('throws an error if password is missing', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._verifyUser('username') @@ -2329,6 +2446,7 @@ describe('dbAuth', () => { const defaultMessage = options.login.errors.usernameOrPasswordMissing delete options.login.errors.usernameOrPasswordMissing const dbAuth1 = new DbAuthHandler(event, context, options) + await dbAuth1.init() try { await dbAuth1._verifyUser(null, 'password') } catch (e) { @@ -2350,7 +2468,7 @@ describe('dbAuth', () => { it('throws a default error message if user is not found', async () => { delete options.login.errors.usernameNotFound const dbAuth = new DbAuthHandler(event, context, options) - + await dbAuth.init() try { await dbAuth._verifyUser('username', 'password') } catch (e) { @@ -2364,6 +2482,7 @@ describe('dbAuth', () => { it('throws a custom error message if user is not found', async () => { options.login.errors.usernameNotFound = 'Cannot find ${username}' const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._verifyUser('Alice', 'password') @@ -2379,6 +2498,7 @@ describe('dbAuth', () => { delete options.login.errors.incorrectPassword const dbUser = await createDbUser() const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._verifyUser(dbUser.email, 'incorrect') @@ -2394,6 +2514,7 @@ describe('dbAuth', () => { options.login.errors.incorrectPassword = 'Wrong password for ${username}' const dbUser = await createDbUser() const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._verifyUser(dbUser.email, 'incorrect') @@ -2409,6 +2530,7 @@ describe('dbAuth', () => { const dbUser = await createDbUser() // invalid db client const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() dbAuth.dbAccessor = undefined try { await dbAuth._verifyUser(dbUser.email, 'password') @@ -2421,6 +2543,7 @@ describe('dbAuth', () => { it('returns the user with matching username and password', async () => { const dbUser = await createDbUser() const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const user = await dbAuth._verifyUser(dbUser.email, 'password') expect(user.id).toEqual(dbUser.id) @@ -2434,6 +2557,7 @@ describe('dbAuth', () => { salt: '2ef27f4073c603ba8b7807c6de6d6a89', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const user = await dbAuth._verifyUser(dbUser.email, 'password') expect(user.id).toEqual(dbUser.id) @@ -2447,6 +2571,7 @@ describe('dbAuth', () => { salt: '2ef27f4073c603ba8b7807c6de6d6a89', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth._verifyUser(dbUser.email, 'password') const user = await db.user.findFirst({ where: { id: dbUser.id } }) @@ -2462,6 +2587,8 @@ describe('dbAuth', () => { describe('_getCurrentUser()', () => { it('throw an error if user is not logged in', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() + try { await dbAuth._getCurrentUser() } catch (e) { @@ -2478,6 +2605,7 @@ describe('dbAuth', () => { }, } const 
dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._getCurrentUser() @@ -2498,6 +2626,7 @@ describe('dbAuth', () => { } // invalid db client const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() dbAuth.dbAccessor = undefined try { await dbAuth._getCurrentUser() @@ -2517,6 +2646,7 @@ describe('dbAuth', () => { }, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const user = await dbAuth._getCurrentUser() expect(user.id).toEqual(dbUser.id) @@ -2533,6 +2663,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._createUser() @@ -2554,6 +2685,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._createUser() @@ -2575,6 +2707,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() await dbAuth._createUser() expect(spy).toHaveBeenCalled() @@ -2599,6 +2732,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._createUser() @@ -2624,6 +2758,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._createUser() @@ -2643,6 +2778,7 @@ describe('dbAuth', () => { password: 'password', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._createUser() @@ -2661,7 +2797,7 @@ describe('dbAuth', () => { username: 'user@redwdoodjs.com', }) const dbAuth = new DbAuthHandler(event, context, options) - + await dbAuth.init() try { await dbAuth._createUser() } catch (e) { @@ -2680,6 +2816,7 @@ describe('dbAuth', () => { username: 'user@redwdoodjs.com', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { await dbAuth._createUser() @@ -2699,6 +2836,7 @@ describe('dbAuth', () => { name: 'Rob', }) const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() try { const user = await dbAuth._createUser() @@ -2713,7 +2851,7 @@ describe('dbAuth', () => { }) describe('getAuthMethod', () => { - it('gets methodName out of the query string', () => { + it('gets methodName out of the query string', async () => { event = { path: '/.redwood/functions/auth', queryStringParameters: { method: 'logout' }, @@ -2721,11 +2859,12 @@ describe('dbAuth', () => { headers: {}, } const dbAuth = new DbAuthHandler(event, context, options) - - expect(dbAuth._getAuthMethod()).toEqual('logout') + await dbAuth.init() + const method = await dbAuth._getAuthMethod() + expect(method).toEqual('logout') }) - it('gets methodName out of a JSON body', () => { + it('gets methodName out of a JSON body', async () => { event = { path: '/.redwood/functions/auth', queryStringParameters: {}, @@ -2733,11 +2872,13 @@ describe('dbAuth', () => { headers: {}, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() + const method = await dbAuth._getAuthMethod() - expect(dbAuth._getAuthMethod()).toEqual('signup') + expect(method).toEqual('signup') }) - it('otherwise returns undefined', () => { + it('otherwise returns undefined', async () => { event = { path: '/.redwood/functions/auth', queryStringParameters: {}, @@ -2745,14 +2886,17 @@ describe('dbAuth', () => { 
headers: {}, } const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() - expect(dbAuth._getAuthMethod()).toBeUndefined() + const method = await dbAuth._getAuthMethod() + expect(method).toBeUndefined() }) }) describe('validateField', () => { - it('checks for the presence of a field', () => { + it('checks for the presence of a field', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(() => { dbAuth._validateField('username', null) @@ -2765,24 +2909,27 @@ describe('dbAuth', () => { }).toThrow(dbAuth.FieldRequiredError) }) - it('passes validation if everything is present', () => { + it('passes validation if everything is present', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() expect(dbAuth._validateField('username', 'cannikin')).toEqual(true) }) }) describe('logoutResponse', () => { - it('returns the response array necessary to log user out', () => { + it('returns the response array necessary to log user out', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const [body, headers] = dbAuth._logoutResponse() expect(body).toEqual('') expect(headers['set-cookie']).toMatch(/^session=;/) }) - it('can accept an object to return in the body', () => { + it('can accept an object to return in the body', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const [body, _headers] = dbAuth._logoutResponse({ error: 'error message', }) @@ -2792,29 +2939,33 @@ describe('dbAuth', () => { }) describe('ok', () => { - it('returns a 200 response by default', () => { + it('returns a 200 response by default', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = dbAuth._ok('', {}) expect(response.statusCode).toEqual(200) }) - it('can return other status codes', () => { + it('can return other status codes', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = dbAuth._ok('', {}, { statusCode: 201 }) expect(response.statusCode).toEqual(201) }) - it('stringifies a JSON body', () => { + it('stringifies a JSON body', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = dbAuth._ok({ foo: 'bar' }, {}, { statusCode: 201 }) expect(response.body).toEqual('{"foo":"bar"}') }) - it('does not stringify a body that is a string already', () => { + it('does not stringify a body that is a string already', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = dbAuth._ok('{"foo":"bar"}', {}, { statusCode: 201 }) expect(response.body).toEqual('{"foo":"bar"}') @@ -2822,8 +2973,9 @@ describe('dbAuth', () => { }) describe('_notFound', () => { - it('returns a 404 response', () => { + it('returns a 404 response', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = dbAuth._notFound() expect(response.statusCode).toEqual(404) @@ -2832,8 +2984,9 @@ describe('dbAuth', () => { }) describe('_badRequest', () => { - it('returns a 400 response', () => { + it('returns a 400 response', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const response = dbAuth._badRequest('bad') expect(response.statusCode).toEqual(400) @@ -2842,8 +2995,9 @@ describe('dbAuth', () => { }) describe('_sanitizeUser', () => { - it('removes all but 
the default fields [id, email] on user', () => { + it('removes all but the default fields [id, email] on user', async () => { const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const user = { id: 1, email: 'rob@redwoodjs.com', @@ -2855,9 +3009,10 @@ describe('dbAuth', () => { expect(dbAuth._sanitizeUser(user).secret).toBeUndefined() }) - it('removes any fields not explictly allowed in allowedUserFields', () => { + it('removes any fields not explictly allowed in allowedUserFields', async () => { options.allowedUserFields = ['foo'] const dbAuth = new DbAuthHandler(event, context, options) + await dbAuth.init() const user = { id: 1, email: 'rob@redwoodjs.com', diff --git a/packages/auth-providers/dbAuth/api/src/__tests__/shared.test.ts b/packages/auth-providers/dbAuth/api/src/__tests__/shared.test.ts index 5e4a9f16b048..094a59e83dc3 100644 --- a/packages/auth-providers/dbAuth/api/src/__tests__/shared.test.ts +++ b/packages/auth-providers/dbAuth/api/src/__tests__/shared.test.ts @@ -304,7 +304,7 @@ describe('session cookie extraction', () => { expect(extractCookie(event)).toBeUndefined() }) - it('extracts GraphiQL cookie from the header extensions', () => { + it('extracts GraphiQL cookie from the body extensions', () => { const dbUserId = 42 const cookie = encryptToCookie(JSON.stringify({ id: dbUserId })) @@ -321,31 +321,46 @@ describe('session cookie extraction', () => { expect(extractCookie(event)).toEqual(cookie) }) - it('overwrites cookie with event header GraphiQL when in dev', () => { - const sessionCookie = encryptToCookie( - JSON.stringify({ id: 9999999999 }) + ';' + 'token' + it('extracts GraphiQL cookie from the rw-studio header (Fetch request)', () => { + const dbUserId = 42 + + const impersonatedCookie = encryptToCookie( + JSON.stringify({ id: dbUserId }) ) - event = { + const req = new Request('http://localhost:8910/_rw_mw', { + method: 'POST', headers: { - cookie: sessionCookie, + 'auth-provider': 'dbAuth', + 'rw-studio-impersonation-cookie': impersonatedCookie, + authorization: 'Bearer ' + dbUserId, }, - } + }) + + expect(extractCookie(req)).toEqual(impersonatedCookie) + }) + + it('impersonation cookie takes precendence', () => { + const sessionCookie = encryptToCookie( + JSON.stringify({ id: 9999999999 }) + ';' + 'token' + ) const dbUserId = 42 - const cookie = encryptToCookie(JSON.stringify({ id: dbUserId })) - event.body = JSON.stringify({ - extensions: { - headers: { - 'auth-provider': 'dbAuth', - cookie, - authorization: 'Bearer ' + dbUserId, - }, + const impersonatedCookie = encryptToCookie( + JSON.stringify({ id: dbUserId }) + ) + + event = { + headers: { + cookie: sessionCookie, // This user doesn't exist + 'auth-provider': 'dbAuth', + 'rw-studio-impersonation-cookie': impersonatedCookie, + authorization: 'Bearer ' + dbUserId, }, - }) + } - expect(extractCookie(event)).toEqual(cookie) + expect(extractCookie(event)).toEqual(impersonatedCookie) }) }) }) diff --git a/packages/auth-providers/dbAuth/api/src/decoder.ts b/packages/auth-providers/dbAuth/api/src/decoder.ts index 0657a2b622fa..d479f64025b8 100644 --- a/packages/auth-providers/dbAuth/api/src/decoder.ts +++ b/packages/auth-providers/dbAuth/api/src/decoder.ts @@ -5,28 +5,23 @@ import type { Decoder } from '@redwoodjs/api' import { dbAuthSession } from './shared' export const createAuthDecoder = (cookieNameOption: string): Decoder => { - return async (token, type, req) => { + return async (_token, type, req) => { if (type !== 'dbAuth') { return null } const session = 
dbAuthSession(req.event, cookieNameOption) - const authHeaderUserId = token - - if (session.id.toString() !== authHeaderUserId) { - console.error('Authorization header does not match decrypted user ID') - throw new Error('Authorization header does not match decrypted user ID') - } + // We no longer compare the session id with the bearer token return session } } /** @deprecated use `createAuthDecoder` */ export const authDecoder: Decoder = async ( - authHeaderValue: string, + _authHeaderValue: string, type: string, - req: { event: APIGatewayProxyEvent } + req: { event: APIGatewayProxyEvent | Request } ) => { if (type !== 'dbAuth') { return null @@ -36,12 +31,6 @@ export const authDecoder: Decoder = async ( // it fall back to the default cookie name `session`, making it backwards // compatible with existing RW apps. const session = dbAuthSession(req.event, undefined) - const authHeaderUserId = authHeaderValue - - if (session.id.toString() !== authHeaderUserId) { - console.error('Authorization header does not match decrypted user ID') - throw new Error('Authorization header does not match decrypted user ID') - } return session } diff --git a/packages/auth-providers/dbAuth/api/src/shared.ts b/packages/auth-providers/dbAuth/api/src/shared.ts index c10cf02aa4c0..cf2cfef29e03 100644 --- a/packages/auth-providers/dbAuth/api/src/shared.ts +++ b/packages/auth-providers/dbAuth/api/src/shared.ts @@ -2,6 +2,7 @@ import crypto from 'node:crypto' import type { APIGatewayProxyEvent } from 'aws-lambda' +import { getEventHeader, isFetchApiRequest } from '@redwoodjs/api' import { getConfig, getConfigPath } from '@redwoodjs/project-config' import * as DbAuthError from './errors' @@ -22,11 +23,6 @@ const DEFAULT_SCRYPT_OPTIONS: ScryptOptions = { parallelization: 1, } -// Extracts the cookie from an event, handling lower and upper case header names. -const eventHeadersCookie = (event: APIGatewayProxyEvent) => { - return event.headers.cookie || event.headers.Cookie -} - const getPort = () => { let configPath @@ -42,21 +38,28 @@ const getPort = () => { // When in development environment, check for auth impersonation cookie // if user has generated graphiql headers -const eventGraphiQLHeadersCookie = (event: APIGatewayProxyEvent) => { +const eventGraphiQLHeadersCookie = (event: APIGatewayProxyEvent | Request) => { if (process.env.NODE_ENV === 'development') { - if (event.headers['rw-studio-impersonation-cookie']) { - return event.headers['rw-studio-impersonation-cookie'] + const impersationationHeader = getEventHeader( + event, + 'rw-studio-impersonation-cookie' + ) + + if (impersationationHeader) { + return impersationationHeader } // TODO: Remove code below when we remove the old way of passing the cookie // from Studio, and decide it's OK to break compatibility with older Studio // versions try { - const jsonBody = JSON.parse(event.body ?? '{}') - return ( - jsonBody?.extensions?.headers?.cookie || - jsonBody?.extensions?.headers?.Cookie - ) + if (!isFetchApiRequest(event)) { + const jsonBody = JSON.parse(event.body ?? '{}') + return ( + jsonBody?.extensions?.headers?.cookie || + jsonBody?.extensions?.headers?.Cookie + ) + } } catch { // sometimes the event body isn't json return @@ -90,10 +93,9 @@ const legacyDecryptSession = (encryptedText: string) => { // Extracts the session cookie from an event, handling both // development environment GraphiQL headers and production environment headers. 
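// Illustrative usage sketch (standalone example, with assumed inputs; it also assumes
// getEventHeader is case-insensitive for Lambda event headers, like the removed
// eventHeadersCookie helper was):
const prodCookie = extractCookie({
  headers: { cookie: 'session=abc123' },
} as unknown as APIGatewayProxyEvent)
// => 'session=abc123' (falls through to the plain Cookie header)

// In development, a Studio impersonation cookie takes precedence over the session cookie:
const devCookie = extractCookie({
  headers: {
    cookie: 'session=abc123',
    'rw-studio-impersonation-cookie': 'session=impersonated',
  },
} as unknown as APIGatewayProxyEvent)
// => 'session=impersonated' when NODE_ENV === 'development'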
-export const extractCookie = (event: APIGatewayProxyEvent) => { - return eventGraphiQLHeadersCookie(event) || eventHeadersCookie(event) +export const extractCookie = (event: APIGatewayProxyEvent | Request) => { + return eventGraphiQLHeadersCookie(event) || getEventHeader(event, 'Cookie') } - // whether this encrypted session was made with the old CryptoJS algorithm export const isLegacySession = (text: string | undefined) => { if (!text) { @@ -178,12 +180,15 @@ export const getSession = ( // at once. Accepts the `event` argument from a Lambda function call and the // name of the dbAuth session cookie export const dbAuthSession = ( - event: APIGatewayProxyEvent, + event: APIGatewayProxyEvent | Request, cookieNameOption: string | undefined ) => { - if (extractCookie(event)) { + const sessionCookie = extractCookie(event) + + if (sessionCookie) { + // i.e. Browser making a request const [session, _csrfToken] = decryptSession( - getSession(extractCookie(event), cookieNameOption) + getSession(sessionCookie, cookieNameOption) ) return session } else { @@ -191,12 +196,14 @@ export const dbAuthSession = ( } } -export const webAuthnSession = (event: APIGatewayProxyEvent) => { - if (!event.headers.cookie) { +export const webAuthnSession = (event: APIGatewayProxyEvent | Request) => { + const cookieHeader = extractCookie(event) + + if (!cookieHeader) { return null } - const webAuthnCookie = event.headers.cookie.split(';').find((cook) => { + const webAuthnCookie = cookieHeader.split(';').find((cook: string) => { return cook.split('=')[0].trim() === 'webAuthn' }) diff --git a/packages/babel-config/build.mjs b/packages/babel-config/build.mjs index 7f99112baa8e..75e861b63346 100644 --- a/packages/babel-config/build.mjs +++ b/packages/babel-config/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs/promises' +import { build } from '../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -const sourceFiles = await fg.glob(['./src/**/*.ts'], { - ignore: ['./src/**/__tests__'], -}) - -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing the bundle. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. 
- metafile: true, -}) - -await fs.writeFile('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/babel-config/package.json b/packages/babel-config/package.json index 1b3275eff0f9..018a3999d0cd 100644 --- a/packages/babel-config/package.json +++ b/packages/babel-config/package.json @@ -49,7 +49,6 @@ "@types/babel__core": "7.20.4", "@types/node": "20.10.4", "babel-plugin-tester": "11.0.4", - "esbuild": "0.19.9", "jest": "29.7.0" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/cli-helpers/.babelrc.js b/packages/cli-helpers/.babelrc.js deleted file mode 100644 index 3b2c815712d9..000000000000 --- a/packages/cli-helpers/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../babel.config.js' } diff --git a/packages/cli-helpers/__mocks__/fs.js b/packages/cli-helpers/__mocks__/fs.js index 99fec09d82ed..de739ddd902a 100644 --- a/packages/cli-helpers/__mocks__/fs.js +++ b/packages/cli-helpers/__mocks__/fs.js @@ -1,220 +1,4 @@ -import path from 'path' +import * as memfs from 'memfs' -const fs = { - ...jest.requireActual('fs'), -} - -let mockFiles = {} - -const pathSeparator = path.sep - -const getParentDir = (path) => { - return path.substring(0, path.lastIndexOf(pathSeparator)) -} - -const makeParentDirs = (path) => { - const parentDir = getParentDir(path) - if (parentDir && !(parentDir in mockFiles)) { - mockFiles[parentDir] = undefined - makeParentDirs(parentDir) - } -} - -/** - * This is a custom function that our tests can use during setup to specify - * what the files on the "mock" filesystem should look like when any of the - * `fs` APIs are used. - * - * Sets the state of the mocked file system - * @param newMockFiles - {[filepath]: contents} - */ -fs.__setMockFiles = (newMockFiles) => { - mockFiles = { ...newMockFiles } - - // Generate all the directories which implicitly exist - Object.keys(mockFiles).forEach((mockPath) => { - if (mockPath.includes(pathSeparator)) { - makeParentDirs(mockPath) - } - }) -} - -fs.__getMockFiles = () => { - return mockFiles -} - -fs.readFileSync = (path) => { - // In prisma v4.3.0, prisma format uses a Wasm module. See https://github.com/prisma/prisma/releases/tag/4.3.0. - // We shouldn't mock this, so we'll use the real fs.readFileSync. 
- if (path.includes('prisma_fmt_build_bg.wasm')) { - return jest.requireActual('fs').readFileSync(path) - } - - if (path in mockFiles) { - return mockFiles[path] - } else { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, open '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'open' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } -} - -fs.writeFileSync = (path, contents) => { - const parentDir = getParentDir(path) - if (parentDir && !fs.existsSync(parentDir)) { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, open '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'open' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } - mockFiles[path] = contents -} - -fs.appendFileSync = (path, contents) => { - if (path in mockFiles) { - mockFiles[path] = mockFiles[path] + contents - } else { - fs.writeFileSync(path, contents) - } -} - -fs.rmSync = (path, options = {}) => { - if (fs.existsSync(path)) { - if (options.recursive) { - Object.keys(mockFiles).forEach((mockedPath) => { - if (mockedPath.startsWith(path)) { - delete mockFiles[mockedPath] - } - }) - } else { - if (mockFiles[path] === undefined) { - const children = fs.readdirSync(path) - if (children.length !== 0) { - const fakeError = new Error( - `NodeError [SystemError]: Path is a directory: rm returned EISDIR (is a directory) ${path}` - ) - fakeError.errno = 21 - fakeError.syscall = 'rm' - fakeError.code = 'ERR_FS_EISDIR' - fakeError.path = path - throw fakeError - } - } - delete mockFiles[path] - } - } else { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, stat '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'stat' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } -} - -fs.unlinkSync = (path) => { - if (path in mockFiles) { - delete mockFiles[path] - } else { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, stat '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'unlink' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } -} - -fs.existsSync = (path) => { - return path in mockFiles -} - -fs.copyFileSync = (src, dist) => { - fs.writeFileSync(dist, fs.readFileSync(src)) -} - -fs.readdirSync = (path) => { - if (!fs.existsSync(path)) { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, scandir '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'scandir' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } - - if (mockFiles[path] !== undefined) { - const fakeError = new Error( - `Error: ENOTDIR: not a directory, scandir '${path}'` - ) - fakeError.errno = -20 - fakeError.syscall = 'scandir' - fakeError.code = 'ENOTDIR' - fakeError.path = path - throw fakeError - } - - const content = [] - Object.keys(mockFiles).forEach((mockedPath) => { - const childPath = mockedPath.substring(path.length + 1) - if ( - mockedPath.startsWith(path) && - !childPath.includes(pathSeparator) && - childPath - ) { - content.push(childPath) - } - }) - return content -} - -fs.mkdirSync = (path, options = {}) => { - if (options.recursive) { - makeParentDirs(path) - } - // Directories are represented as paths with an "undefined" value - fs.writeFileSync(path, undefined) -} - -fs.rmdirSync = (path, options = {}) => { - if (!fs.existsSync(path)) { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, rmdir '${path}'` - ) - fakeError.errno = -2 - 
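// Illustrative sketch (assumed test setup, not taken from this diff): with the
// hand-rolled mock above replaced by a re-export of memfs, a Vitest spec can opt in
// with vi.mock('fs') and seed the in-memory volume directly:
import { vol } from 'memfs'
import { beforeEach, vi } from 'vitest'

vi.mock('fs') // resolves to the __mocks__/fs.js manual mock, which re-exports memfs

beforeEach(() => {
  vol.reset() // start every test from an empty in-memory filesystem
  vol.fromJSON({
    '/redwood-app/redwood.toml': '# hypothetical project config used by the test',
  })
})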
fakeError.syscall = 'rmdir' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } - - if (mockFiles[path] !== undefined) { - const fakeError = new Error( - `Error: ENOTDIR: not a directory, rmdir '${path}'` - ) - fakeError.errno = -20 - fakeError.syscall = 'rmdir' - fakeError.code = 'ENOTDIR' - fakeError.path = path - throw fakeError - } - - fs.rmSync(path, options) -} - -module.exports = fs +export * from 'memfs' +export default memfs.fs diff --git a/packages/cli-helpers/build.js b/packages/cli-helpers/build.js new file mode 100644 index 000000000000..3a01088401ee --- /dev/null +++ b/packages/cli-helpers/build.js @@ -0,0 +1,26 @@ +import * as esbuild from 'esbuild' + +const options = { + entryPoints: ['./src/index.ts'], + outdir: 'dist', + + platform: 'node', + target: ['node20'], + bundle: true, + packages: 'external', + + logLevel: 'info', + metafile: true, +} + +await esbuild.build({ + ...options, + format: 'esm', + outExtension: { '.js': '.mjs' }, +}) + +await esbuild.build({ + ...options, + format: 'cjs', + outExtension: { '.js': '.cjs' }, +}) diff --git a/packages/cli-helpers/jest.config.js b/packages/cli-helpers/jest.config.js deleted file mode 100644 index 4b24969ced25..000000000000 --- a/packages/cli-helpers/jest.config.js +++ /dev/null @@ -1,4 +0,0 @@ -/** @type {import('@jest/types').Config.InitialOptions} */ -module.exports = { - testPathIgnorePatterns: ['fixtures', 'dist', 'mockFsFiles'], -} diff --git a/packages/cli-helpers/package.json b/packages/cli-helpers/package.json index 8bdc55d1ef3e..d7678c25d735 100644 --- a/packages/cli-helpers/package.json +++ b/packages/cli-helpers/package.json @@ -7,30 +7,32 @@ "directory": "packages/cli-helpers" }, "license": "MIT", - "main": "./dist/index.js", + "type": "module", + "exports": { + "types": "./dist/index.d.ts", + "import": "./dist/index.mjs", + "default": "./dist/index.cjs" + }, "types": "./dist/index.d.ts", "files": [ "dist" ], "scripts": { - "build": "yarn build:js && yarn build:types", - "build:js": "babel src -d dist --extensions \".js,.jsx,.ts,.tsx\"", + "build": "yarn node ./build.js && yarn build:types", "build:pack": "yarn pack -o redwoodjs-cli-helpers.tgz", "build:types": "tsc --build --verbose", "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build", - "test": "jest src", - "test:watch": "yarn test --watch" + "test": "vitest run", + "test:watch": "vitest watch" }, "dependencies": { "@babel/core": "^7.22.20", - "@babel/runtime-corejs3": "7.23.6", "@iarna/toml": "2.2.5", "@opentelemetry/api": "1.7.0", "@redwoodjs/project-config": "6.0.7", "@redwoodjs/telemetry": "6.0.7", "chalk": "4.1.2", - "core-js": "3.34.0", "dotenv": "16.3.1", "execa": "5.1.1", "listr2": "6.6.1", @@ -41,12 +43,11 @@ "terminal-link": "2.1.1" }, "devDependencies": { - "@babel/cli": "7.23.4", "@types/lodash": "4.14.201", "@types/pascalcase": "1.0.3", "@types/yargs": "17.0.32", - "jest": "29.7.0", - "typescript": "5.3.3" + "typescript": "5.3.3", + "vitest": "1.2.1" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" } diff --git a/packages/cli-helpers/src/auth/__tests__/__snapshots__/authTasks.test.ts.snap b/packages/cli-helpers/src/auth/__tests__/__snapshots__/authTasks.test.ts.snap index 585825351020..abc555315d2b 100644 --- a/packages/cli-helpers/src/auth/__tests__/__snapshots__/authTasks.test.ts.snap +++ b/packages/cli-helpers/src/auth/__tests__/__snapshots__/authTasks.test.ts.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, 
https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`authTasks Components with props Should add useAuth on the same line for single line components, and separate line for multiline components 1`] = ` +exports[`authTasks > Components with props > Should add useAuth on the same line for single line components, and separate line for multiline components 1`] = ` "import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' @@ -27,7 +27,7 @@ export default App " `; -exports[`authTasks Components with props Should add useAuth on the same line for single line components, and separate line for multiline components 2`] = ` +exports[`authTasks > Components with props > Should add useAuth on the same line for single line components, and separate line for multiline components 2`] = ` "// In this file, all Page components from 'src/pages\` are auto-imported. Nested // directories are supported, and should be uppercase. Each subdirectory will be // prepended onto the component name. @@ -62,7 +62,7 @@ export default Routes " `; -exports[`authTasks Components with props Should not add useAuth if one already exists 1`] = ` +exports[`authTasks > Components with props > Should not add useAuth if one already exists 1`] = ` "import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' @@ -89,7 +89,7 @@ export default App " `; -exports[`authTasks Components with props Should not add useAuth if one already exists 2`] = ` +exports[`authTasks > Components with props > Should not add useAuth if one already exists 2`] = ` "// In this file, all Page components from 'src/pages\` are auto-imported. Nested // directories are supported, and should be uppercase. Each subdirectory will be // prepended onto the component name. @@ -128,7 +128,7 @@ export default Routes " `; -exports[`authTasks Customized App.js Should add auth config when using explicit return 1`] = ` +exports[`authTasks > Customized App.js > Should add auth config when using explicit return 1`] = ` "import { useEffect } from 'react' import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' @@ -166,7 +166,7 @@ export default App " `; -exports[`authTasks Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Auth0) 1`] = ` +exports[`authTasks > Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Auth0) 1`] = ` "import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' @@ -193,7 +193,7 @@ export default App " `; -exports[`authTasks Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Auth0) 2`] = ` +exports[`authTasks > Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Auth0) 2`] = ` "import { Auth0Client } from '@auth0/auth0-spa-js' import { createAuth } from '@redwoodjs/auth-auth0-web' @@ -221,7 +221,7 @@ export const { AuthProvider, useAuth } = createAuth(auth0) " `; -exports[`authTasks Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Auth0) 3`] = ` +exports[`authTasks > Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Auth0) 3`] = ` "// In this file, all Page components from 'src/pages\` are auto-imported. Nested // directories are supported, and should be uppercase. Each subdirectory will be // prepended onto the component name. 
@@ -247,7 +247,7 @@ export default Routes " `; -exports[`authTasks Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Clerk) 1`] = ` +exports[`authTasks > Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Clerk) 1`] = ` "import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' import { RedwoodApolloProvider } from '@redwoodjs/web/apollo' @@ -274,7 +274,7 @@ export default App " `; -exports[`authTasks Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Clerk) 2`] = ` +exports[`authTasks > Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Clerk) 2`] = ` "import React, { useEffect } from 'react' import { ClerkLoaded, ClerkProvider, useUser } from '@clerk/clerk-react' @@ -327,7 +327,7 @@ export const AuthProvider = ({ children }: Props) => { " `; -exports[`authTasks Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Clerk) 3`] = ` +exports[`authTasks > Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Clerk) 3`] = ` "// In this file, all Page components from 'src/pages\` are auto-imported. Nested // directories are supported, and should be uppercase. Each subdirectory will be // prepended onto the component name. @@ -353,7 +353,7 @@ export default Routes " `; -exports[`authTasks Should update App.tsx for legacy apps 1`] = ` +exports[`authTasks > Should update App.tsx for legacy apps 1`] = ` "import netlifyIdentity from 'netlify-identity-widget' import { isBrowser } from '@redwoodjs/prerender/browserUtils' @@ -385,7 +385,7 @@ export default App " `; -exports[`authTasks Swapped out GraphQL client Should add auth config when app is missing RedwoodApolloProvider 1`] = ` +exports[`authTasks > Swapped out GraphQL client > Should add auth config when app is missing RedwoodApolloProvider 1`] = ` "import { FatalErrorBoundary, RedwoodProvider } from '@redwoodjs/web' import FatalErrorPage from 'src/pages/FatalErrorPage' @@ -415,7 +415,7 @@ export default App " `; -exports[`authTasks addApiConfig Adds authDecoder arg to default graphql.ts file 1`] = ` +exports[`authTasks > addApiConfig > Adds authDecoder arg to default graphql.ts file 1`] = ` "import { authDecoder } from 'test-auth-api' import { createGraphQLHandler } from '@redwoodjs/graphql-server' @@ -442,7 +442,7 @@ export const handler = createGraphQLHandler({ " `; -exports[`authTasks addApiConfig Doesn't add authDecoder arg if one already exists 1`] = ` +exports[`authTasks > addApiConfig > Doesn't add authDecoder arg if one already exists 1`] = ` "import { authDecoder } from 'test-auth-api' import { createGraphQLHandler } from '@redwoodjs/graphql-server' @@ -469,7 +469,7 @@ export const handler = createGraphQLHandler({ " `; -exports[`authTasks addApiConfig Doesn't add authDecoder arg if one already exists, even with a non-standard import name and arg placement 1`] = ` +exports[`authTasks > addApiConfig > Doesn't add authDecoder arg if one already exists, even with a non-standard import name and arg placement 1`] = ` "import { authDecoder } from 'test-auth-api' import { createGraphQLHandler } from '@redwoodjs/graphql-server' @@ -496,6 +496,6 @@ export const handler = createGraphQLHandler({ " `; -exports[`authTasks writes an auth.js file for JS projects 1`] = `"// web auth template"`; +exports[`authTasks > writes an auth.js file for JS projects 1`] = `"// web auth template"`; -exports[`authTasks writes an auth.ts file for TS projects 1`] = `"// web auth template"`; +exports[`authTasks > writes an auth.ts file for TS projects 1`] = `"// web auth template"`; diff --git 
a/packages/cli-helpers/src/auth/__tests__/authFiles.test.ts b/packages/cli-helpers/src/auth/__tests__/authFiles.test.ts index b7ca9a3d778e..2c4f6fe395d5 100644 --- a/packages/cli-helpers/src/auth/__tests__/authFiles.test.ts +++ b/packages/cli-helpers/src/auth/__tests__/authFiles.test.ts @@ -1,13 +1,14 @@ // Have to use `var` here to avoid "Temporal Dead Zone" issues let mockBasePath = '' -let mockIsTypeScriptProject = true globalThis.__dirname = __dirname -jest.mock('../../lib/paths', () => { +vi.mock('../../lib/paths', async (importOriginal) => { const path = require('path') + // eslint-disable-next-line @typescript-eslint/consistent-type-imports + const orginalPaths = await importOriginal() return { - ...jest.requireActual('../../lib/paths'), + ...orginalPaths, getPaths: () => { const base = mockBasePath || '/mock/base/path' @@ -22,17 +23,20 @@ jest.mock('../../lib/paths', () => { } }) -jest.mock('../../lib/project', () => ({ - isTypeScriptProject: () => mockIsTypeScriptProject, +vi.mock('../../lib/project', () => ({ + isTypeScriptProject: vi.fn(), })) import path from 'path' +import { vi, beforeEach, it, expect } from 'vitest' + import { getPaths } from '../../lib/paths' +import { isTypeScriptProject } from '../../lib/project' import { apiSideFiles, generateUniqueFileNames } from '../authFiles' beforeEach(() => { - mockIsTypeScriptProject = true + vi.mocked(isTypeScriptProject).mockReturnValue(true) }) it('generates a record of TS files', () => { @@ -51,7 +55,7 @@ it('generates a record of TS files', () => { }) it('generates a record of JS files', () => { - mockIsTypeScriptProject = false + vi.mocked(isTypeScriptProject).mockReturnValue(false) const filePaths = Object.keys( apiSideFiles({ diff --git a/packages/cli-helpers/src/auth/__tests__/authTasks.test.ts b/packages/cli-helpers/src/auth/__tests__/authTasks.test.ts index 70f6b77603e8..6d7f007b506b 100644 --- a/packages/cli-helpers/src/auth/__tests__/authTasks.test.ts +++ b/packages/cli-helpers/src/auth/__tests__/authTasks.test.ts @@ -1,72 +1,67 @@ -// Have to use `var` here to avoid "Temporal Dead Zone" issues -// eslint-disable-next-line -var mockIsTypeScriptProject = true - -jest.mock('../../lib/project', () => ({ - isTypeScriptProject: () => mockIsTypeScriptProject, -})) - -jest.mock('../../lib', () => ({ +vi.mock('../../lib', () => ({ transformTSToJS: (_path: string, data: string) => data, })) // mock Telemetry for CLI commands so they don't try to spawn a process -jest.mock('@redwoodjs/telemetry', () => { +vi.mock('@redwoodjs/telemetry', () => { return { - errorTelemetry: () => jest.fn(), - timedTelemetry: () => jest.fn(), + errorTelemetry: () => vi.fn(), + timedTelemetry: () => vi.fn(), } }) -jest.mock('../../lib/paths', () => { - const path = require('path') - const actualPaths = jest.requireActual('../../lib/paths') - const basedir = '/mock/setup/path' - const app = mockIsTypeScriptProject ? 'App.tsx' : 'App.jsx' - const routes = mockIsTypeScriptProject ? 
'Routes.tsx' : 'Routes.jsx' - +vi.mock('../../lib/paths', () => { return { - resolveFile: actualPaths.resolveFile, - getPaths: () => ({ - api: { - functions: '', - src: '', - lib: '', - graphql: path.join(basedir, 'api/src/functions/graphql.ts'), - }, - web: { - src: path.join(basedir, 'web/src'), - app: path.join(basedir, `web/src/${app}`), - routes: path.join(basedir, `web/src/${routes}`), - }, - base: path.join(basedir), - }), + getPaths: vi.fn(), } }) -jest.mock('../../lib/project', () => { +vi.mock('../../lib/project', async () => { + const { getPaths } = await import('../../lib/paths') return { - isTypeScriptProject: () => mockIsTypeScriptProject, + isTypeScriptProject: vi.fn(), getGraphqlPath: () => { - const { getPaths } = require('../../lib/paths') return getPaths().api.graphql }, } }) // This will load packages/cli-helpers/__mocks__/fs.js -jest.mock('fs') +vi.mock('fs') +vi.mock('node:fs', async () => { + const memfs = await import('memfs') + return { + ...memfs.fs, + default: memfs.fs, + } +}) -const mockFS = fs as unknown as Omit, 'readdirSync'> & { - __setMockFiles: (files: Record) => void - __getMockFiles: () => Record - readdirSync: () => string[] +const mockedPathGenerator = (app: string, routes: string) => { + const basedir = '/mock/setup/path' + return { + api: { + functions: '', + src: '', + lib: '', + graphql: path.join(basedir, 'api/src/functions/graphql.ts'), + }, + web: { + src: path.join(basedir, 'web/src'), + app: path.join(basedir, `web/src/${app}`), + routes: path.join(basedir, `web/src/${routes}`), + }, + base: path.join(basedir), + } } import fs from 'fs' import path from 'path' +import { vol } from 'memfs' +import { vi, beforeEach, describe, it, expect, test } from 'vitest' + import { getPaths } from '../../lib/paths' +import { isTypeScriptProject } from '../../lib/project' import type { AuthGeneratorCtx } from '../authTasks' import { addApiConfig, @@ -98,10 +93,14 @@ function platformPath(filePath: string) { } beforeEach(() => { - mockIsTypeScriptProject = true - jest.restoreAllMocks() - - mockFS.__setMockFiles({ + vi.restoreAllMocks() + vi.mocked(isTypeScriptProject).mockReturnValue(true) + vi.mocked(getPaths).mockReturnValue( + // @ts-expect-error - We are not returning a full set of mock paths here + mockedPathGenerator('App.tsx', 'Routes.tsx') + ) + + vol.fromJSON({ [path.join( getPaths().base, platformPath('/templates/web/auth.ts.template') @@ -110,10 +109,6 @@ beforeEach(() => { [getPaths().api.graphql]: graphqlTs, [getPaths().web.routes]: routesTsx, }) - - mockFS.readdirSync = () => { - return ['auth.ts.template'] - } }) describe('authTasks', () => { @@ -123,8 +118,8 @@ describe('authTasks', () => { platformPath('/templates/web/auth.ts.template') ) - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [templatePath]: auth0WebAuthTsTemplate, }) @@ -139,9 +134,9 @@ describe('authTasks', () => { const authTsPath = path.join(getPaths().web.src, 'auth.ts') - expect(fs.readFileSync(getPaths().web.app)).toMatchSnapshot() - expect(fs.readFileSync(authTsPath)).toMatchSnapshot() - expect(fs.readFileSync(getPaths().web.routes)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.app, 'utf-8')).toMatchSnapshot() + expect(fs.readFileSync(authTsPath, 'utf-8')).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.routes, 'utf-8')).toMatchSnapshot() }) it('Should update App.{jsx,tsx}, Routes.{jsx,tsx} and add auth.ts (Clerk)', () => { @@ -150,13 +145,18 @@ describe('authTasks', () => { 
platformPath('/templates/web/auth.tsx.template') ) - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + // NOTE: We reset here because we had to remove the `auth.ts.template` + // file that would be here as a result of the `beforeEach` above. + // The previous implementation of this test was mocking the `fs` module to + // return only `auth.tsx.template` and not the `auth.ts.template` file even + // though it was on the mock filesystem. + vol.reset() + vol.fromJSON({ + [getPaths().web.app]: webAppTsx, + [getPaths().api.graphql]: graphqlTs, + [getPaths().web.routes]: routesTsx, [templatePath]: clerkWebAuthTsTemplate, }) - mockFS.readdirSync = () => { - return ['auth.tsx.template'] - } const ctx: AuthGeneratorCtx = { provider: 'clerk', @@ -169,14 +169,14 @@ describe('authTasks', () => { const authTsPath = path.join(getPaths().web.src, 'auth.tsx') - expect(fs.readFileSync(getPaths().web.app)).toMatchSnapshot() - expect(fs.readFileSync(authTsPath)).toMatchSnapshot() - expect(fs.readFileSync(getPaths().web.routes)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.app, 'utf-8')).toMatchSnapshot() + expect(fs.readFileSync(authTsPath, 'utf-8')).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.routes, 'utf-8')).toMatchSnapshot() }) it('Should update App.tsx for legacy apps', () => { - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().web.app]: legacyAuthWebAppTsx, }) @@ -187,13 +187,13 @@ describe('authTasks', () => { addConfigToWebApp().task(ctx, {} as any) - expect(fs.readFileSync(getPaths().web.app)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.app, 'utf-8')).toMatchSnapshot() }) describe('Components with props', () => { it('Should add useAuth on the same line for single line components, and separate line for multiline components', () => { - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().web.app]: customApolloAppTsx, [getPaths().web.routes]: customPropsRoutesTsx, }) @@ -206,13 +206,13 @@ describe('authTasks', () => { addConfigToWebApp().task(ctx, {} as any) addConfigToRoutes().task() - expect(fs.readFileSync(getPaths().web.app)).toMatchSnapshot() - expect(fs.readFileSync(getPaths().web.routes)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.app, 'utf-8')).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.routes, 'utf-8')).toMatchSnapshot() }) it('Should not add useAuth if one already exists', () => { - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().web.app]: customApolloAppTsx, [getPaths().web.routes]: useAuthRoutesTsx, }) @@ -225,15 +225,15 @@ describe('authTasks', () => { addConfigToWebApp().task(ctx, {} as any) addConfigToRoutes().task() - expect(fs.readFileSync(getPaths().web.app)).toMatchSnapshot() - expect(fs.readFileSync(getPaths().web.routes)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.app, 'utf-8')).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.routes, 'utf-8')).toMatchSnapshot() }) }) describe('Customized App.js', () => { it('Should add auth config when using explicit return', () => { - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().web.app]: explicitReturnAppTsx, }) @@ -244,14 +244,14 @@ describe('authTasks', () => { addConfigToWebApp().task(ctx, {} as any) - expect(fs.readFileSync(getPaths().web.app)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.app, 'utf-8')).toMatchSnapshot() 
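// A condensed sketch of the mocking idiom the rewritten tests above rely on, using the same
// module path as those tests: export `vi.fn()` stubs from the module factory, then drive
// their return values per test with the type-safe `vi.mocked()` helper. This is what
// replaces the old hoisted `var mockIsTypeScriptProject` workaround; the assertion below is
// illustrative only.
import { vi, beforeEach, test, expect } from 'vitest'

import { isTypeScriptProject } from '../../lib/project'

vi.mock('../../lib/project', () => ({
  isTypeScriptProject: vi.fn(),
}))

beforeEach(() => {
  vi.mocked(isTypeScriptProject).mockReturnValue(true)
})

test('falls back to .js artifacts for JavaScript projects', () => {
  vi.mocked(isTypeScriptProject).mockReturnValue(false)

  const extension = isTypeScriptProject() ? 'ts' : 'js'
  expect(extension).toBe('js')
})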
}) }) describe('Swapped out GraphQL client', () => { it('Should add auth config when app is missing RedwoodApolloProvider', () => { - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().web.app]: withoutRedwoodApolloAppTsx, }) @@ -264,7 +264,7 @@ describe('authTasks', () => { addConfigToWebApp().task(ctx, task) expect(task.output).toMatch(/GraphQL.*useAuth/) - expect(fs.readFileSync(getPaths().web.app)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().web.app, 'utf-8')).toMatchSnapshot() }) }) @@ -275,12 +275,12 @@ describe('authTasks', () => { authDecoderImport: "import { authDecoder } from 'test-auth-api'", }) - expect(fs.readFileSync(getPaths().api.graphql)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().api.graphql, 'utf-8')).toMatchSnapshot() }) it("Doesn't add authDecoder arg if one already exists", () => { - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().api.graphql]: withAuthDecoderGraphqlTs, }) @@ -289,12 +289,12 @@ describe('authTasks', () => { authDecoderImport: "import { authDecoder } from 'test-auth-api'", }) - expect(fs.readFileSync(getPaths().api.graphql)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().api.graphql, 'utf-8')).toMatchSnapshot() }) it("Doesn't add authDecoder arg if one already exists, even with a non-standard import name and arg placement", () => { - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().api.graphql]: nonStandardAuthDecoderGraphqlTs, }) @@ -303,7 +303,7 @@ describe('authTasks', () => { authDecoderImport: "import { authDecoder } from 'test-auth-api'", }) - expect(fs.readFileSync(getPaths().api.graphql)).toMatchSnapshot() + expect(fs.readFileSync(getPaths().api.graphql, 'utf-8')).toMatchSnapshot() }) }) @@ -632,18 +632,23 @@ describe('authTasks', () => { provider: 'auth0', setupMode: 'FORCE', } + + // NOTE: The current fs related mocking leaves this file around from previous tests so we + // must delete it here. 
This should be fixed in a future refactoring of the entire test suite + fs.rmSync(path.join(getPaths().base, 'templates/web/auth.tsx.template')) + createWebAuth(getPaths().base, false).task(ctx) expect( - fs.readFileSync(path.join(getPaths().web.src, 'auth.ts')) + fs.readFileSync(path.join(getPaths().web.src, 'auth.ts'), 'utf-8') ).toMatchSnapshot() }) it('writes an auth.js file for JS projects', () => { - mockIsTypeScriptProject = false + vi.mocked(isTypeScriptProject).mockReturnValue(false) - mockFS.__setMockFiles({ - ...mockFS.__getMockFiles(), + vol.fromJSON({ + ...vol.toJSON(), [getPaths().web.app]: webAppTsx, }) @@ -654,7 +659,7 @@ describe('authTasks', () => { createWebAuth(getPaths().base, false).task(ctx) expect( - fs.readFileSync(path.join(getPaths().web.src, 'auth.js')) + fs.readFileSync(path.join(getPaths().web.src, 'auth.js'), 'utf-8') ).toMatchSnapshot() }) }) diff --git a/packages/cli-helpers/src/auth/__tests__/setupHelpers.test.ts b/packages/cli-helpers/src/auth/__tests__/setupHelpers.test.ts index eba30cdb464a..b549baf7a916 100644 --- a/packages/cli-helpers/src/auth/__tests__/setupHelpers.test.ts +++ b/packages/cli-helpers/src/auth/__tests__/setupHelpers.test.ts @@ -1,14 +1,14 @@ globalThis.__dirname = __dirname // mock Telemetry for CLI commands so they don't try to spawn a process -jest.mock('@redwoodjs/telemetry', () => { +vi.mock('@redwoodjs/telemetry', () => { return { - errorTelemetry: () => jest.fn(), - timedTelemetry: () => jest.fn(), + errorTelemetry: () => vi.fn(), + timedTelemetry: () => vi.fn(), } }) -jest.mock('../../lib/paths', () => { +vi.mock('../../lib/paths', () => { const path = require('path') const __dirname = path.resolve() @@ -28,42 +28,45 @@ jest.mock('../../lib/paths', () => { } }) -jest.mock('../../lib/project', () => ({ +vi.mock('../../lib/project', () => ({ isTypeScriptProject: () => true, })) -jest.mock('execa', () => {}) -jest.mock('listr2') -jest.mock('prompts', () => jest.fn(() => ({ answer: true }))) +vi.mock('execa') +vi.mock('listr2') +vi.mock('prompts', () => ({ + default: vi.fn(() => ({ answer: true })), +})) import fs from 'fs' import path from 'path' import { Listr } from 'listr2' import prompts from 'prompts' +import { vi, describe, afterEach, it, expect } from 'vitest' // import * as auth from '../auth' import { standardAuthHandler } from '../setupHelpers' describe('Auth generator tests', () => { - const processExitSpy = jest + const processExitSpy = vi .spyOn(process, 'exit') .mockImplementation((_code: any) => {}) - const mockListrRun = jest.fn() + const mockListrRun = vi.fn() - ;(Listr as jest.MockedFunction).mockImplementation(() => { + ;(Listr as vi.MockedFunction).mockImplementation(() => { return { run: mockListrRun, } }) - const fsSpy = jest.spyOn(fs, 'writeFileSync').mockImplementation(() => {}) + const fsSpy = vi.spyOn(fs, 'writeFileSync').mockImplementation(() => {}) afterEach(() => { processExitSpy.mockReset() fsSpy.mockReset() - ;(prompts as unknown as jest.Mock).mockClear() + ;(prompts as unknown as vi.Mock).mockClear() mockListrRun.mockClear() }) diff --git a/packages/cli-helpers/src/auth/authFiles.ts b/packages/cli-helpers/src/auth/authFiles.ts index 85bd5f812075..c64f48f8141a 100644 --- a/packages/cli-helpers/src/auth/authFiles.ts +++ b/packages/cli-helpers/src/auth/authFiles.ts @@ -3,9 +3,9 @@ import path from 'path' import pascalcase from 'pascalcase' -import { transformTSToJS } from '../lib' -import { getPaths } from '../lib/paths' -import { isTypeScriptProject } from '../lib/project' +import { transformTSToJS } 
from '../lib/index.js' +import { getPaths } from '../lib/paths.js' +import { isTypeScriptProject } from '../lib/project.js' interface FilesArgs { basedir: string diff --git a/packages/cli-helpers/src/auth/authTasks.ts b/packages/cli-helpers/src/auth/authTasks.ts index 69d19239cb73..c4d515c7e6d2 100644 --- a/packages/cli-helpers/src/auth/authTasks.ts +++ b/packages/cli-helpers/src/auth/authTasks.ts @@ -5,17 +5,17 @@ import type { ListrRenderer, ListrTask, ListrTaskWrapper } from 'listr2' import { resolveFile } from '@redwoodjs/project-config' -import type { ExistingFiles } from '../lib' -import { transformTSToJS, writeFilesTask } from '../lib' -import { colors } from '../lib/colors' -import { getPaths } from '../lib/paths' +import { colors } from '../lib/colors.js' +import type { ExistingFiles } from '../lib/index.js' +import { transformTSToJS, writeFilesTask } from '../lib/index.js' +import { getPaths } from '../lib/paths.js' import { getGraphqlPath, graphFunctionDoesExist, isTypeScriptProject, -} from '../lib/project' +} from '../lib/project.js' -import { apiSideFiles, generateUniqueFileNames } from './authFiles' +import { apiSideFiles, generateUniqueFileNames } from './authFiles.js' const AUTH_PROVIDER_HOOK_IMPORT = `import { AuthProvider, useAuth } from './auth'` const AUTH_HOOK_IMPORT = `import { useAuth } from './auth'` diff --git a/packages/cli-helpers/src/auth/setupHelpers.ts b/packages/cli-helpers/src/auth/setupHelpers.ts index 5c572a7c6427..0daaeae51e3a 100644 --- a/packages/cli-helpers/src/auth/setupHelpers.ts +++ b/packages/cli-helpers/src/auth/setupHelpers.ts @@ -1,18 +1,18 @@ import type { ListrTask } from 'listr2' import { Listr } from 'listr2' import terminalLink from 'terminal-link' -import type yargs from 'yargs' +import type { Argv } from 'yargs' import { errorTelemetry } from '@redwoodjs/telemetry' -import { colors } from '../lib/colors' +import { colors } from '../lib/colors.js' import { addApiPackages, addWebPackages, installPackages, -} from '../lib/installHelpers' +} from '../lib/installHelpers.js' -import type { AuthGeneratorCtx } from './authTasks' +import type { AuthGeneratorCtx } from './authTasks.js' import { addAuthConfigToGqlApi, addConfigToRoutes, @@ -20,9 +20,9 @@ import { setAuthSetupMode, createWebAuth, generateAuthApiFiles, -} from './authTasks' +} from './authTasks.js' -export const standardAuthBuilder = (yargs: yargs.Argv) => { +export const standardAuthBuilder = (yargs: Argv) => { return yargs .option('force', { alias: 'f', diff --git a/packages/cli-helpers/src/index.ts b/packages/cli-helpers/src/index.ts index 04e8c7a96987..35f5d335f459 100644 --- a/packages/cli-helpers/src/index.ts +++ b/packages/cli-helpers/src/index.ts @@ -1,13 +1,13 @@ // @WARN: This export is going to cause memory problems in the CLI. 
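// The `.js` suffixes on the relative specifiers in these hunks are deliberate, not typos:
// with "type": "module" in package.json and TypeScript's NodeNext module resolution (see
// the tsconfig.json change further down in this diff), relative ESM imports must spell out
// a file extension, and TypeScript expects the `.js` form, mapping it back to the `.ts`
// source when type checking. A hypothetical two-file sketch of the rule (both files are
// invented for illustration and are not part of this package):

// hypothetical src/example/emphasize.ts
export const emphasize = (msg: string): string => `*** ${msg} ***`

// hypothetical src/example/index.ts -- the sibling above is a .ts source,
// but under NodeNext it is imported by its emitted .js name:
import { emphasize } from './emphasize.js'

export const banner = () => emphasize('cli-helpers')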
// We need to split this into smaller packages, or use export aliasing (like in packages/testing/cache) -export * from './lib' -export * from './lib/colors' -export * from './lib/paths' -export * from './lib/project' -export * from './lib/version' -export * from './auth/setupHelpers' +export * from './lib/index.js' +export * from './lib/colors.js' +export * from './lib/paths.js' +export * from './lib/project.js' +export * from './lib/version.js' +export * from './auth/setupHelpers.js' -export * from './lib/installHelpers' +export * from './lib/installHelpers.js' -export * from './telemetry/index' +export * from './telemetry/index.js' diff --git a/packages/cli-helpers/src/lib/__tests__/__snapshots__/index.test.ts.snap b/packages/cli-helpers/src/lib/__tests__/__snapshots__/index.test.ts.snap index 1b01f64e6fa0..63ae56f46b8b 100644 --- a/packages/cli-helpers/src/lib/__tests__/__snapshots__/index.test.ts.snap +++ b/packages/cli-helpers/src/lib/__tests__/__snapshots__/index.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html exports[`prettify formats tsx content 1`] = ` "import React from 'react' diff --git a/packages/cli-helpers/src/lib/__tests__/__snapshots__/project.test.ts.snap b/packages/cli-helpers/src/lib/__tests__/__snapshots__/project.test.ts.snap index 6da535eaf61e..c3d1be489a33 100644 --- a/packages/cli-helpers/src/lib/__tests__/__snapshots__/project.test.ts.snap +++ b/packages/cli-helpers/src/lib/__tests__/__snapshots__/project.test.ts.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`addEnvVar addEnvVar adds environment variables as part of a setup task should add a comment that the existing environment variable value was not changed, but include its new value as a comment 1`] = ` +exports[`addEnvVar > addEnvVar adds environment variables as part of a setup task > should add a comment that the existing environment variable value was not changed, but include its new value as a comment 1`] = ` "EXISTING_VAR=value # CommentedVar=123 @@ -10,7 +10,7 @@ exports[`addEnvVar addEnvVar adds environment variables as part of a setup task " `; -exports[`addEnvVar addEnvVar adds environment variables as part of a setup task should add a new environment variable when it does not exist 1`] = ` +exports[`addEnvVar > addEnvVar adds environment variables as part of a setup task > should add a new environment variable when it does not exist 1`] = ` "EXISTING_VAR = value # CommentedVar = 123 @@ -19,7 +19,7 @@ NEW_VAR = new_value " `; -exports[`addEnvVar addEnvVar adds environment variables as part of a setup task should add a new environment variable when it does not exist when existing envars have no spacing 1`] = ` +exports[`addEnvVar > addEnvVar adds environment variables as part of a setup task > should add a new environment variable when it does not exist when existing envars have no spacing 1`] = ` "EXISTING_VAR=value # CommentedVar = 123 @@ -28,7 +28,7 @@ NEW_VAR = new_value " `; -exports[`addEnvVar addEnvVar adds environment variables as part of a setup task should handle existing environment variables and new value with quoted values by not updating the original value 1`] = ` +exports[`addEnvVar > addEnvVar adds environment variables as part of a setup task > should handle existing environment variables and new value with quoted values by not updating the original value 1`] = ` "EXISTING_VAR = "value" # CommentedVar = 123 @@ 
-38,19 +38,19 @@ exports[`addEnvVar addEnvVar adds environment variables as part of a setup task " `; -exports[`addEnvVar addEnvVar adds environment variables as part of a setup task should handle existing environment variables with quoted values 1`] = ` +exports[`addEnvVar > addEnvVar adds environment variables as part of a setup task > should handle existing environment variables with quoted values 1`] = ` "EXISTING_VAR = "value" # CommentedVar = 123 " `; -exports[`addEnvVar addEnvVar adds environment variables as part of a setup task should handle existing environment variables with quoted values and no spacing 1`] = ` +exports[`addEnvVar > addEnvVar adds environment variables as part of a setup task > should handle existing environment variables with quoted values and no spacing 1`] = ` "EXISTING_VAR="value" # CommentedVar=123 " `; -exports[`updateTomlConfig updateTomlConfig configures a new CLI plugin adds package but keeps autoInstall false 1`] = ` +exports[`updateTomlConfig > updateTomlConfig configures a new CLI plugin > adds package but keeps autoInstall false 1`] = ` "[web] title = "Redwood App" port = 8_910 @@ -69,7 +69,7 @@ enabled = true " `; -exports[`updateTomlConfig updateTomlConfig configures a new CLI plugin adds when experimental cli has some plugins configured 1`] = ` +exports[`updateTomlConfig > updateTomlConfig configures a new CLI plugin > adds when experimental cli has some plugins configured 1`] = ` "[web] title = "Redwood App" port = 8_910 @@ -91,7 +91,7 @@ enabled = true " `; -exports[`updateTomlConfig updateTomlConfig configures a new CLI plugin adds when experimental cli is not configured 1`] = ` +exports[`updateTomlConfig > updateTomlConfig configures a new CLI plugin > adds when experimental cli is not configured 1`] = ` "[web] title = "Redwood App" port = 8_910 @@ -110,7 +110,7 @@ autoInstall = true " `; -exports[`updateTomlConfig updateTomlConfig configures a new CLI plugin adds when experimental cli is setup but has no plugins configured 1`] = ` +exports[`updateTomlConfig > updateTomlConfig configures a new CLI plugin > adds when experimental cli is setup but has no plugins configured 1`] = ` "[web] title = "Redwood App" port = 8_910 @@ -129,7 +129,7 @@ enabled = true " `; -exports[`updateTomlConfig updateTomlConfig configures a new CLI plugin does not add duplicate place when experimental cli has that plugin configured 1`] = ` +exports[`updateTomlConfig > updateTomlConfig configures a new CLI plugin > does not add duplicate place when experimental cli has that plugin configured 1`] = ` "[web] title = "Redwood App" port = 8_910 diff --git a/packages/cli-helpers/src/lib/__tests__/index.test.ts b/packages/cli-helpers/src/lib/__tests__/index.test.ts index 3fae865c059f..3c9c608c1734 100644 --- a/packages/cli-helpers/src/lib/__tests__/index.test.ts +++ b/packages/cli-helpers/src/lib/__tests__/index.test.ts @@ -1,6 +1,8 @@ +import { vi, test, expect } from 'vitest' + import { prettify } from '../index' -jest.mock('../paths', () => { +vi.mock('../paths', () => { return { getPaths: () => { return { diff --git a/packages/cli-helpers/src/lib/__tests__/project.addTomlSetting.test.ts b/packages/cli-helpers/src/lib/__tests__/project.addTomlSetting.test.ts index a6ed682047e3..c46185cf261f 100644 --- a/packages/cli-helpers/src/lib/__tests__/project.addTomlSetting.test.ts +++ b/packages/cli-helpers/src/lib/__tests__/project.addTomlSetting.test.ts @@ -1,7 +1,14 @@ -jest.mock('fs', () => require('memfs').fs) -jest.mock('node:fs', () => require('memfs').fs) +vi.mock('fs') 
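// A minimal sketch of the memfs-backed mocking pattern that this mock pair sets up, assuming
// a package-level `__mocks__/fs.js` that simply re-exports memfs (as added elsewhere in this
// diff): `vi.mock('fs')` swaps in that manual mock, the async factory for 'node:fs' points
// the builtin specifier at the same in-memory filesystem, and tests seed and reset the shared
// volume with memfs's `vol` helpers instead of the old `__setMockFiles`/`__getMockFiles`.
// The path and file contents below are illustrative only.
import fs from 'node:fs'

import { vol } from 'memfs'
import { vi, beforeEach, test, expect } from 'vitest'

vi.mock('fs')
vi.mock('node:fs', async () => {
  const memfs = await import('memfs')
  return { ...memfs.fs, default: memfs.fs }
})

beforeEach(() => {
  // Drop anything a previous test wrote, then seed a fresh volume
  vol.reset()
  vol.fromJSON({ '/redwood-app/redwood.toml': '[web]\n  port = 8910\n' })
})

test('reads the seeded file from the in-memory volume', () => {
  expect(fs.readFileSync('/redwood-app/redwood.toml', 'utf-8')).toContain('port = 8910')
})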
+vi.mock('node:fs', async () => { + const memfs = await import('memfs') + return { + ...memfs.fs, + default: memfs.fs, + } +}) import { vol } from 'memfs' +import { vi, beforeAll, afterAll, it, expect } from 'vitest' import { setTomlSetting } from '../project' @@ -16,8 +23,8 @@ beforeAll(() => { afterAll(() => { process.env.RWJS_CWD = original_RWJS_CWD - jest.restoreAllMocks() - jest.resetModules() + vi.restoreAllMocks() + vi.resetModules() }) it('should add `fragments = true` to empty redwood.toml', () => { diff --git a/packages/cli-helpers/src/lib/__tests__/project.test.ts b/packages/cli-helpers/src/lib/__tests__/project.test.ts index a8aa58145848..f5e45301e0ef 100644 --- a/packages/cli-helpers/src/lib/__tests__/project.test.ts +++ b/packages/cli-helpers/src/lib/__tests__/project.test.ts @@ -1,9 +1,16 @@ -jest.mock('fs') -jest.mock('node:fs') +vi.mock('fs') +vi.mock('node:fs', async () => { + const memfs = await import('memfs') + return { + ...memfs.fs, + default: memfs.fs, + } +}) import * as fs from 'node:fs' import * as toml from '@iarna/toml' +import { vi, describe, beforeEach, afterEach, it, expect } from 'vitest' import { updateTomlConfig, addEnvVar } from '../project' @@ -23,7 +30,7 @@ const getRedwoodToml = () => { return defaultRedwoodToml } -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', () => { return { getPaths: () => { return { @@ -47,22 +54,22 @@ describe('addEnvVar', () => { describe('addEnvVar adds environment variables as part of a setup task', () => { beforeEach(() => { - jest.spyOn(fs, 'existsSync').mockImplementation(() => { + vi.spyOn(fs, 'existsSync').mockImplementation(() => { return true }) - jest.spyOn(fs, 'readFileSync').mockImplementation(() => { + vi.spyOn(fs, 'readFileSync').mockImplementation(() => { return envFileContent }) - jest.spyOn(fs, 'writeFileSync').mockImplementation((envPath, envFile) => { + vi.spyOn(fs, 'writeFileSync').mockImplementation((envPath, envFile) => { expect(envPath).toContain('.env') return envFile }) }) afterEach(() => { - jest.restoreAllMocks() + vi.restoreAllMocks() envFileContent = '' }) @@ -121,24 +128,22 @@ describe('addEnvVar', () => { describe('updateTomlConfig', () => { describe('updateTomlConfig configures a new CLI plugin', () => { beforeEach(() => { - jest.spyOn(fs, 'existsSync').mockImplementation(() => { + vi.spyOn(fs, 'existsSync').mockImplementation(() => { return true }) - jest.spyOn(fs, 'readFileSync').mockImplementation(() => { + vi.spyOn(fs, 'readFileSync').mockImplementation(() => { return toml.stringify(defaultRedwoodToml) }) - jest - .spyOn(fs, 'writeFileSync') - .mockImplementation((tomlPath, tomlFile) => { - expect(tomlPath).toContain('redwood.toml') - return tomlFile - }) + vi.spyOn(fs, 'writeFileSync').mockImplementation((tomlPath, tomlFile) => { + expect(tomlPath).toContain('redwood.toml') + return tomlFile + }) }) afterEach(() => { - jest.restoreAllMocks() + vi.restoreAllMocks() }) it('adds when experimental cli is not configured', () => { diff --git a/packages/cli-helpers/src/lib/__tests__/version.test.ts b/packages/cli-helpers/src/lib/__tests__/version.test.ts index cbcd8bb43e68..057e2463ccf8 100644 --- a/packages/cli-helpers/src/lib/__tests__/version.test.ts +++ b/packages/cli-helpers/src/lib/__tests__/version.test.ts @@ -1,4 +1,4 @@ -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', () => { return { getPaths: () => { return { @@ -7,10 +7,12 @@ jest.mock('@redwoodjs/project-config', () => { }, } }) -jest.mock('fs') +vi.mock('fs') 
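// A condensed sketch of the spy-based style used in the surrounding config and version
// tests, where individual functions are stubbed on the real objects with `vi.spyOn` rather
// than replacing whole modules, and everything is restored between tests. The packument and
// package.json shapes below are illustrative only.
import fs from 'node:fs'

import { vi, afterEach, test, expect } from 'vitest'

afterEach(() => {
  vi.restoreAllMocks()
})

test('stubs fetch and fs.readFileSync with vi.spyOn', async () => {
  vi.spyOn(global, 'fetch').mockImplementation(async () => {
    // Only the pieces the code under test reads need to exist on the fake response
    return { json: async () => ({ 'dist-tags': { latest: '1.0.0' } }) } as any
  })
  vi.spyOn(fs, 'readFileSync').mockImplementation(() =>
    JSON.stringify({ devDependencies: { '@redwoodjs/core': '^6.0.0' } })
  )

  const response = await fetch('https://registry.npmjs.org/@scope/package-name')
  expect((await response.json())['dist-tags'].latest).toBe('1.0.0')
  expect(fs.readFileSync('package.json', 'utf-8')).toContain('@redwoodjs/core')
})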
import fs from 'fs' +import { vi, describe, test, expect, beforeEach } from 'vitest' + import { getCompatibilityData } from '../version' const EXAMPLE_PACKUMENT = { @@ -187,7 +189,7 @@ const EXAMPLE_PACKUMENT = { describe('version compatibility detection', () => { beforeEach(() => { - jest.spyOn(global, 'fetch').mockImplementation(() => { + vi.spyOn(global, 'fetch').mockImplementation(() => { return { json: () => { return EXAMPLE_PACKUMENT @@ -195,7 +197,7 @@ describe('version compatibility detection', () => { } as any }) - jest.spyOn(fs, 'readFileSync').mockImplementation(() => { + vi.spyOn(fs, 'readFileSync').mockImplementation(() => { return JSON.stringify({ devDependencies: { '@redwoodjs/core': '^6.0.0', @@ -206,15 +208,17 @@ describe('version compatibility detection', () => { test('throws for some fetch related error', async () => { // Mock the fetch function to throw an error - jest.spyOn(global, 'fetch').mockImplementation(() => { + vi.spyOn(global, 'fetch').mockImplementation(() => { throw new Error('Some fetch related error') }) await expect( getCompatibilityData('some-package', 'latest') - ).rejects.toThrowErrorMatchingInlineSnapshot(`"Some fetch related error"`) + ).rejects.toThrowErrorMatchingInlineSnapshot( + `[Error: Some fetch related error]` + ) // Mock the json parsing to throw an error - jest.spyOn(global, 'fetch').mockImplementation(() => { + vi.spyOn(global, 'fetch').mockImplementation(() => { return { json: () => { throw new Error('Some json parsing error') @@ -224,11 +228,13 @@ describe('version compatibility detection', () => { await expect( getCompatibilityData('some-package', 'latest') - ).rejects.toThrowErrorMatchingInlineSnapshot(`"Some json parsing error"`) + ).rejects.toThrowErrorMatchingInlineSnapshot( + `[Error: Some json parsing error]` + ) }) test('throws for some packument related error', async () => { - jest.spyOn(global, 'fetch').mockImplementation(() => { + vi.spyOn(global, 'fetch').mockImplementation(() => { return { json: () => { return { @@ -241,7 +247,7 @@ describe('version compatibility detection', () => { await expect( getCompatibilityData('some-package', 'latest') ).rejects.toThrowErrorMatchingInlineSnapshot( - `"Some packument related error"` + `[Error: Some packument related error]` ) }) @@ -249,7 +255,7 @@ describe('version compatibility detection', () => { await expect( getCompatibilityData('@scope/package-name', '0.0.4') ).rejects.toThrowErrorMatchingInlineSnapshot( - `"The package '@scope/package-name' does not have a version '0.0.4'"` + `[Error: The package '@scope/package-name' does not have a version '0.0.4']` ) }) @@ -257,12 +263,12 @@ describe('version compatibility detection', () => { await expect( getCompatibilityData('@scope/package-name', 'next') ).rejects.toThrowErrorMatchingInlineSnapshot( - `"The package '@scope/package-name' does not have a tag 'next'"` + `[Error: The package '@scope/package-name' does not have a tag 'next']` ) }) test('throws if no latest version could be found', async () => { - jest.spyOn(global, 'fetch').mockImplementation(() => { + vi.spyOn(global, 'fetch').mockImplementation(() => { return { json: () => { return { @@ -276,7 +282,7 @@ describe('version compatibility detection', () => { await expect( getCompatibilityData('@scope/package-name', 'latest') ).rejects.toThrowErrorMatchingInlineSnapshot( - `"The package '@scope/package-name' does not have a tag 'latest'"` + `[Error: The package '@scope/package-name' does not have a tag 'latest']` ) }) @@ -320,7 +326,7 @@ describe('version compatibility detection', 
() => { } ) - jest.spyOn(fs, 'readFileSync').mockImplementation(() => { + vi.spyOn(fs, 'readFileSync').mockImplementation(() => { return JSON.stringify({ devDependencies: { '@redwoodjs/core': '5.2.0', @@ -343,7 +349,7 @@ describe('version compatibility detection', () => { }) test('throws if no compatible version could be found', async () => { - jest.spyOn(fs, 'readFileSync').mockImplementation(() => { + vi.spyOn(fs, 'readFileSync').mockImplementation(() => { return JSON.stringify({ devDependencies: { '@redwoodjs/core': '7.0.0', @@ -354,7 +360,7 @@ describe('version compatibility detection', () => { expect( getCompatibilityData('@scope/package-name', 'latest') ).rejects.toThrowErrorMatchingInlineSnapshot( - `"No compatible version of '@scope/package-name' was found"` + `[Error: No compatible version of '@scope/package-name' was found]` ) }) }) diff --git a/packages/cli-helpers/src/lib/index.ts b/packages/cli-helpers/src/lib/index.ts index 1b294a27f9e5..1bc77ed61bea 100644 --- a/packages/cli-helpers/src/lib/index.ts +++ b/packages/cli-helpers/src/lib/index.ts @@ -10,8 +10,8 @@ import type { import { Listr } from 'listr2' import { format } from 'prettier' -import { colors } from './colors' -import { getPaths } from './paths' +import { colors } from './colors.js' +import { getPaths } from './paths.js' // TODO: Move this into `generateTemplate` when all templates have TS support /* diff --git a/packages/cli-helpers/src/lib/installHelpers.ts b/packages/cli-helpers/src/lib/installHelpers.ts index c8016953dee1..f70ae4e4c36f 100644 --- a/packages/cli-helpers/src/lib/installHelpers.ts +++ b/packages/cli-helpers/src/lib/installHelpers.ts @@ -1,6 +1,6 @@ import execa from 'execa' -import { getPaths } from './paths' +import { getPaths } from './paths.js' export const addWebPackages = (webPackages: string[]) => ({ title: 'Adding required web packages...', diff --git a/packages/cli-helpers/src/lib/paths.ts b/packages/cli-helpers/src/lib/paths.ts index 1f026d6ebc5c..7ad0f79cf755 100644 --- a/packages/cli-helpers/src/lib/paths.ts +++ b/packages/cli-helpers/src/lib/paths.ts @@ -1,6 +1,6 @@ import { getPaths as _getPaths } from '@redwoodjs/project-config' -import { colors } from './colors' +import { colors } from './colors.js' function isErrorWithMessage(e: any): e is { message: string } { return !!e.message diff --git a/packages/cli-helpers/src/lib/project.ts b/packages/cli-helpers/src/lib/project.ts index f1cd4d2b7225..8c79ffff5d29 100644 --- a/packages/cli-helpers/src/lib/project.ts +++ b/packages/cli-helpers/src/lib/project.ts @@ -13,8 +13,8 @@ import { resolveFile, } from '@redwoodjs/project-config' -import { colors } from './colors' -import { getPaths } from './paths' +import { colors } from './colors.js' +import { getPaths } from './paths.js' export const getGraphqlPath = () => { return resolveFile(path.join(getPaths().api.functions, 'graphql')) diff --git a/packages/cli-helpers/tsconfig.json b/packages/cli-helpers/tsconfig.json index 5e7f7fd919f2..e0e245aba844 100644 --- a/packages/cli-helpers/tsconfig.json +++ b/packages/cli-helpers/tsconfig.json @@ -1,7 +1,8 @@ { "extends": "../../tsconfig.compilerOption.json", "compilerOptions": { - "strict": true, + "moduleResolution": "NodeNext", + "module": "NodeNext", "baseUrl": ".", "rootDir": "src", "outDir": "dist" diff --git a/packages/cli-helpers/vitest.config.mts b/packages/cli-helpers/vitest.config.mts new file mode 100644 index 000000000000..55b4842e1875 --- /dev/null +++ b/packages/cli-helpers/vitest.config.mts @@ -0,0 +1,7 @@ +import { defineConfig, 
configDefaults } from 'vitest/config' + +export default defineConfig({ + test: { + exclude: [...configDefaults.exclude, '**/fixtures', '**/mockFsFiles'], + }, +}) diff --git a/packages/cli-packages/dataMigrate/build.mjs b/packages/cli-packages/dataMigrate/build.mjs index 02fe92e4bb11..a0d3e3a4ac7a 100644 --- a/packages/cli-packages/dataMigrate/build.mjs +++ b/packages/cli-packages/dataMigrate/build.mjs @@ -1,57 +1,27 @@ -import fs from 'node:fs/promises' - -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// ─── Package ───────────────────────────────────────────────────────────────── -// -// Types don't need to be transformed by esbuild, and the bin is bundled later. - -const sourceFiles = await fg.glob(['./src/**/*.ts'], { - ignore: ['./src/__tests__', './src/types.ts', './src/bin.ts'], -}) - -let result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing the bundle. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, +import { + build, + defaultBuildOptions, + defaultIgnorePatterns, +} from '../../../buildDefaults.mjs' + +// Build the package. +await build({ + entryPointOptions: { + ignore: [...defaultIgnorePatterns, './src/types.ts', './src/bin.ts'], + }, }) -await fs.writeFile('meta.json', JSON.stringify(result.metafile, null, 2)) - -// ─── Bin ───────────────────────────────────────────────────────────────────── -// -// We build the bin differently because it doesn't have to asynchronously import the handler. - -result = await esbuild.build({ - entryPoints: ['./src/bin.ts'], - outdir: 'dist', - - banner: { - js: '#!/usr/bin/env node', +// Build the bin. +await build({ + buildOptions: { + ...defaultBuildOptions, + banner: { + js: '#!/usr/bin/env node', + }, + bundle: true, + entryPoints: ['./src/bin.ts'], + minify: true, + packages: 'external', }, - - bundle: true, - minify: true, - - platform: 'node', - target: ['node20'], - packages: 'external', - - logLevel: 'info', - - // For visualizing the bundle. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, + metafileName: 'meta.bin.json', }) - -await fs.writeFile('meta.bins.json', JSON.stringify(result.metafile, null, 2)) diff --git a/packages/cli-packages/dataMigrate/package.json b/packages/cli-packages/dataMigrate/package.json index 8011d75494c2..d3e88a2b55f2 100644 --- a/packages/cli-packages/dataMigrate/package.json +++ b/packages/cli-packages/dataMigrate/package.json @@ -39,8 +39,6 @@ "@prisma/client": "5.7.0", "@types/fs-extra": "11.0.4", "@types/yargs": "17.0.32", - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "jest": "29.7.0", "memfs": "4.6.0", "typescript": "5.3.3" diff --git a/packages/cli-packages/storybook/build.mjs b/packages/cli-packages/storybook/build.mjs index f173e7ab9024..14b2d70d4a73 100644 --- a/packages/cli-packages/storybook/build.mjs +++ b/packages/cli-packages/storybook/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts']) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. 
- // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/cli-packages/storybook/package.json b/packages/cli-packages/storybook/package.json index 577fa3f4458d..4a3dce4ad0c6 100644 --- a/packages/cli-packages/storybook/package.json +++ b/packages/cli-packages/storybook/package.json @@ -19,11 +19,6 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@redwoodjs/cli-helpers": "6.0.7", "@redwoodjs/project-config": "6.0.7", @@ -40,9 +35,6 @@ }, "devDependencies": { "@types/yargs": "17.0.32", - "esbuild": "0.19.9", - "fast-glob": "3.3.2", - "jest": "29.7.0", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/cli/__mocks__/@vercel/nft.js b/packages/cli/__mocks__/@vercel/nft.js deleted file mode 100644 index 76aa20c931cc..000000000000 --- a/packages/cli/__mocks__/@vercel/nft.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - nodeFileTrace: jest.fn(), -} diff --git a/packages/cli/__mocks__/fs-extra.js b/packages/cli/__mocks__/fs-extra.js new file mode 100644 index 000000000000..de739ddd902a --- /dev/null +++ b/packages/cli/__mocks__/fs-extra.js @@ -0,0 +1,4 @@ +import * as memfs from 'memfs' + +export * from 'memfs' +export default memfs.fs diff --git a/packages/cli/__mocks__/fs.js b/packages/cli/__mocks__/fs.js index c7bf119b4118..de739ddd902a 100644 --- a/packages/cli/__mocks__/fs.js +++ b/packages/cli/__mocks__/fs.js @@ -1,224 +1,4 @@ -import path from 'path' +import * as memfs from 'memfs' -const fs = { - ...jest.requireActual('fs'), -} - -let mockFiles = {} - -const pathSeparator = path.sep - -const getParentDir = (path) => { - return path.substring(0, path.lastIndexOf(pathSeparator)) -} - -const makeParentDirs = (path) => { - const parentDir = getParentDir(path) - if (parentDir && !(parentDir in mockFiles)) { - mockFiles[parentDir] = undefined - makeParentDirs(parentDir) - } -} - -/** - * This is a custom function that our tests can use during setup to specify - * what the files on the "mock" filesystem should look like when any of the - * `fs` APIs are used. - * - * Sets the state of the mocked file system - * @param newMockFiles - {[filepath]: contents} - */ -fs.__setMockFiles = (newMockFiles) => { - mockFiles = { ...newMockFiles } - - // Generate all the directories which implicitly exist - Object.keys(mockFiles).forEach((mockPath) => { - if (mockPath.includes(pathSeparator)) { - makeParentDirs(mockPath) - } - }) -} - -fs.__getMockFiles = () => { - return mockFiles -} - -fs.readFileSync = (path) => { - // In prisma v4.3.0, prisma format uses a Wasm module. See https://github.com/prisma/prisma/releases/tag/4.3.0. - // We shouldn't mock this, so we'll use the real fs.readFileSync. - // Prisma v5.0.0 seems to have added the schema_build Wasm module. 
- if ( - path.includes('prisma_fmt_build_bg.wasm') || - path.includes('prisma_schema_build_bg.wasm') - ) { - return jest.requireActual('fs').readFileSync(path) - } - - if (path in mockFiles) { - return mockFiles[path] - } else { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, open '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'open' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } -} - -fs.writeFileSync = (path, contents) => { - const parentDir = getParentDir(path) - if (parentDir && !fs.existsSync(parentDir)) { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, open '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'open' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } - mockFiles[path] = contents -} - -fs.appendFileSync = (path, contents) => { - if (path in mockFiles) { - mockFiles[path] = mockFiles[path] + contents - } else { - fs.writeFileSync(path, contents) - } -} - -fs.rmSync = (path, options = {}) => { - if (fs.existsSync(path)) { - if (options.recursive) { - Object.keys(mockFiles).forEach((mockedPath) => { - if (mockedPath.startsWith(path)) { - delete mockFiles[mockedPath] - } - }) - } else { - if (mockFiles[path] === undefined) { - const children = fs.readdirSync(path) - if (children.length !== 0) { - const fakeError = new Error( - `NodeError [SystemError]: Path is a directory: rm returned EISDIR (is a directory) ${path}` - ) - fakeError.errno = 21 - fakeError.syscall = 'rm' - fakeError.code = 'ERR_FS_EISDIR' - fakeError.path = path - throw fakeError - } - } - delete mockFiles[path] - } - } else { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, stat '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'stat' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } -} - -fs.unlinkSync = (path) => { - if (path in mockFiles) { - delete mockFiles[path] - } else { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, stat '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'unlink' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } -} - -fs.existsSync = (path) => { - return path in mockFiles -} - -fs.copyFileSync = (src, dist) => { - fs.writeFileSync(dist, fs.readFileSync(src)) -} - -fs.readdirSync = (path) => { - if (!fs.existsSync(path)) { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, scandir '${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'scandir' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } - - if (mockFiles[path] !== undefined) { - const fakeError = new Error( - `Error: ENOTDIR: not a directory, scandir '${path}'` - ) - fakeError.errno = -20 - fakeError.syscall = 'scandir' - fakeError.code = 'ENOTDIR' - fakeError.path = path - throw fakeError - } - - const content = [] - Object.keys(mockFiles).forEach((mockedPath) => { - const childPath = mockedPath.substring(path.length + 1) - if ( - mockedPath.startsWith(path) && - !childPath.includes(pathSeparator) && - childPath - ) { - content.push(childPath) - } - }) - return content -} - -fs.mkdirSync = (path, options = {}) => { - if (options.recursive) { - makeParentDirs(path) - } - // Directories are represented as paths with an "undefined" value - fs.writeFileSync(path, undefined) -} - -fs.rmdirSync = (path, options = {}) => { - if (!fs.existsSync(path)) { - const fakeError = new Error( - `Error: ENOENT: no such file or directory, rmdir 
'${path}'` - ) - fakeError.errno = -2 - fakeError.syscall = 'rmdir' - fakeError.code = 'ENOENT' - fakeError.path = path - throw fakeError - } - - if (mockFiles[path] !== undefined) { - const fakeError = new Error( - `Error: ENOTDIR: not a directory, rmdir '${path}'` - ) - fakeError.errno = -20 - fakeError.syscall = 'rmdir' - fakeError.code = 'ENOTDIR' - fakeError.path = path - throw fakeError - } - - fs.rmSync(path, options) -} - -module.exports = fs +export * from 'memfs' +export default memfs.fs diff --git a/packages/cli/jest.config.ts b/packages/cli/jest.config.ts deleted file mode 100644 index 92eb1ec9cce9..000000000000 --- a/packages/cli/jest.config.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { Config } from 'jest' - -const config: Config = { - projects: [ - { - displayName: 'root', - testMatch: ['**/__tests__/**/*.[jt]s?(x)', '**/*.test.[jt]s?(x)'], - testPathIgnorePatterns: [ - '__fixtures__', - '__testfixtures__', - '__codemod_tests__', - '__tests__/utils/*', - '__tests__/fixtures/*', - '.d.ts', - 'dist', - ], - moduleNameMapper: { - '^src/(.*)': '/src/$1', - }, - setupFilesAfterEnv: ['./jest.setup.js'], - }, - { - displayName: 'setup codemods', - testMatch: ['**/commands/setup/**/__codemod_tests__/*.ts'], - testPathIgnorePatterns: [ - '__fixtures__', - '__testfixtures__', - '__tests__/utils/*', - '__tests__/fixtures/*', - '.d.ts', - 'dist', - ], - setupFilesAfterEnv: ['./src/jest.codemods.setup.ts'], - }, - ], - testTimeout: 20_000, -} - -export default config diff --git a/packages/cli/jest.setup.js b/packages/cli/jest.setup.js deleted file mode 100644 index d131f52765de..000000000000 --- a/packages/cli/jest.setup.js +++ /dev/null @@ -1,2 +0,0 @@ -// Disable telemetry within framework tests -process.env.REDWOOD_DISABLE_TELEMETRY = 1 diff --git a/packages/cli/package.json b/packages/cli/package.json index 2be393b13122..e5b05180e820 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -25,8 +25,8 @@ "dev": "RWJS_CWD=../../__fixtures__/example-todo-main node dist/index.js", "fix:permissions": "chmod +x dist/index.js dist/rwfw.js", "prepublishOnly": "yarn build", - "test": "jest", - "test:watch": "yarn test --watch" + "test": "vitest run", + "test:watch": "vitest watch" }, "dependencies": { "@babel/runtime-corejs3": "7.23.6", @@ -87,8 +87,9 @@ "@babel/cli": "7.23.4", "@babel/core": "^7.22.20", "@types/archiver": "^6", - "jest": "29.7.0", - "typescript": "5.3.3" + "memfs": "4.6.0", + "typescript": "5.3.3", + "vitest": "1.2.1" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" } diff --git a/packages/cli/src/__tests__/__snapshots__/plugin.test.js.snap b/packages/cli/src/__tests__/__snapshots__/plugin.test.js.snap index b72bcb8b9f18..d24aeb89006a 100644 --- a/packages/cli/src/__tests__/__snapshots__/plugin.test.js.snap +++ b/packages/cli/src/__tests__/__snapshots__/plugin.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`plugin loading correct loading for @redwoodjs namespace help ('') 1`] = ` +exports[`plugin loading > correct loading for @redwoodjs namespace help ('') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -41,7 +41,7 @@ exports[`plugin loading correct loading for @redwoodjs namespace help ('') 1`] = ] `; -exports[`plugin loading correct loading for @redwoodjs namespace help ('--help') 1`] = ` +exports[`plugin loading > correct loading for @redwoodjs namespace help ('--help') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -82,7 +82,7 @@ exports[`plugin 
loading correct loading for @redwoodjs namespace help ('--help') ] `; -exports[`plugin loading correct loading for @redwoodjs namespace help ('-h') 1`] = ` +exports[`plugin loading > correct loading for @redwoodjs namespace help ('-h') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -123,7 +123,7 @@ exports[`plugin loading correct loading for @redwoodjs namespace help ('-h') 1`] ] `; -exports[`plugin loading correct loading for known redwood command (with cache) 1`] = ` +exports[`plugin loading > correct loading for known redwood command (with cache) 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -152,7 +152,7 @@ exports[`plugin loading correct loading for known redwood command (with cache) 1 ] `; -exports[`plugin loading correct loading for known redwood command (without cache) 1`] = ` +exports[`plugin loading > correct loading for known redwood command (without cache) 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -181,7 +181,7 @@ exports[`plugin loading correct loading for known redwood command (without cache ] `; -exports[`plugin loading correct loading for known third party command (with cache) 1`] = ` +exports[`plugin loading > correct loading for known third party command (with cache) 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -210,7 +210,7 @@ exports[`plugin loading correct loading for known third party command (with cach ] `; -exports[`plugin loading correct loading for known third party command (without cache) 1`] = ` +exports[`plugin loading > correct loading for known third party command (without cache) 1`] = ` [ [ "@bluewoodjs/cli-some-package", @@ -239,7 +239,7 @@ exports[`plugin loading correct loading for known third party command (without c ] `; -exports[`plugin loading correct loading for root help ('') 1`] = ` +exports[`plugin loading > correct loading for root help ('') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -280,7 +280,7 @@ exports[`plugin loading correct loading for root help ('') 1`] = ` ] `; -exports[`plugin loading correct loading for root help ('--help') 1`] = ` +exports[`plugin loading > correct loading for root help ('--help') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -321,7 +321,7 @@ exports[`plugin loading correct loading for root help ('--help') 1`] = ` ] `; -exports[`plugin loading correct loading for root help ('-h') 1`] = ` +exports[`plugin loading > correct loading for root help ('-h') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -362,7 +362,7 @@ exports[`plugin loading correct loading for root help ('-h') 1`] = ` ] `; -exports[`plugin loading correct loading for third party namespace help ('') 1`] = ` +exports[`plugin loading > correct loading for third party namespace help ('') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -391,7 +391,7 @@ exports[`plugin loading correct loading for third party namespace help ('') 1`] ] `; -exports[`plugin loading correct loading for third party namespace help ('--help') 1`] = ` +exports[`plugin loading > correct loading for third party namespace help ('--help') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -420,7 +420,7 @@ exports[`plugin loading correct loading for third party namespace help ('--help' ] `; -exports[`plugin loading correct loading for third party namespace help ('-h') 1`] = ` +exports[`plugin loading > correct loading for third party namespace help ('-h') 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -449,7 +449,7 @@ exports[`plugin loading correct loading for third party namespace help ('-h') 1` ] `; -exports[`plugin loading correct loading for unknown namespace (no command) 1`] = ` 
+exports[`plugin loading > correct loading for unknown namespace (no command) 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -490,7 +490,7 @@ exports[`plugin loading correct loading for unknown namespace (no command) 1`] = ] `; -exports[`plugin loading correct loading for unknown namespace (with command) 1`] = ` +exports[`plugin loading > correct loading for unknown namespace (with command) 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -531,7 +531,7 @@ exports[`plugin loading correct loading for unknown namespace (with command) 1`] ] `; -exports[`plugin loading correct loading for unknown redwood command 1`] = ` +exports[`plugin loading > correct loading for unknown redwood command 1`] = ` [ [ "@redwoodjs/cli-some-package", @@ -572,7 +572,7 @@ exports[`plugin loading correct loading for unknown redwood command 1`] = ` ] `; -exports[`plugin loading correct loading for unknown third party command 1`] = ` +exports[`plugin loading > correct loading for unknown third party command 1`] = ` [ [ "@redwoodjs/cli-some-package", diff --git a/packages/cli/src/__tests__/cwd.test.js b/packages/cli/src/__tests__/cwd.test.js index 14826e7cdb1a..0c9347877030 100644 --- a/packages/cli/src/__tests__/cwd.test.js +++ b/packages/cli/src/__tests__/cwd.test.js @@ -1,6 +1,8 @@ import { spawnSync } from 'child_process' import path from 'path' +import { describe, it, expect } from 'vitest' + describe('The CLI sets `cwd` correctly', () => { describe('--cwd', () => { it('lets the user set the cwd via the `--cwd` option', async () => { diff --git a/packages/cli/src/__tests__/fs.test.js b/packages/cli/src/__tests__/fs.test.js deleted file mode 100644 index ad793de93484..000000000000 --- a/packages/cli/src/__tests__/fs.test.js +++ /dev/null @@ -1,140 +0,0 @@ -jest.mock('fs') - -import path from 'path' - -import fs from 'fs-extra' - -const INITIAL_FS = { - file_a: 'content_a', - [path.join('fake_dir', 'mock_dir', 'made_up_file')]: 'made_up_content', -} - -describe('setup', () => { - beforeEach(() => { - fs.__setMockFiles(INITIAL_FS) - }) - - test('correct initial mock', () => { - const originalMock = fs.__getMockFiles() - const pathFixedMock = {} - - for (const [key, value] of Object.entries(originalMock)) { - const fixedKey = key.replaceAll(path.sep, '/') - pathFixedMock[fixedKey] = value - } - - expect(pathFixedMock).toMatchInlineSnapshot(` - { - "fake_dir": undefined, - "fake_dir/mock_dir": undefined, - "fake_dir/mock_dir/made_up_file": "made_up_content", - "file_a": "content_a", - } - `) - }) -}) - -describe('files', () => { - beforeEach(() => { - fs.__setMockFiles(INITIAL_FS) - }) - - test('exists', () => { - expect(fs.existsSync('file_a')).toBe(true) - expect(fs.existsSync('file_b')).toBe(false) - }) - - test('reading', () => { - expect(fs.readFileSync('file_a')).toBe('content_a') - expect(() => fs.readFileSync('file_b')).toThrowError() - }) - - test('writing', () => { - fs.writeFileSync('file_a', 'content_a_new') - expect(fs.readFileSync('file_a')).toBe('content_a_new') - fs.writeFileSync('file_b', 'content_b') - expect(fs.readFileSync('file_b')).toBe('content_b') - - expect(() => - fs.writeFileSync(path.join('non_existing_dir', 'test'), 'test') - ).toThrowError() - }) - - test('appending', () => { - fs.appendFileSync('file_a', '_new') - expect(fs.readFileSync('file_a')).toBe('content_a_new') - fs.appendFileSync('file_b', 'content_b') - expect(fs.readFileSync('file_b')).toBe('content_b') - - expect(() => - fs.appendFileSync(path.join('non_existing_dir', 'test'), 'test') - ).toThrowError() - }) - - test('deleting', 
() => { - fs.rmSync('file_a') - expect(() => fs.readFileSync('file_a')).toThrowError() - - fs.writeFileSync('file_a', 'content_a') - fs.unlinkSync('file_a') - expect(() => fs.readFileSync('file_a')).toThrowError() - - expect(() => fs.rmSync('file_b')).toThrowError() - expect(() => fs.unlinkSync('file_b')).toThrowError() - }) - - test('copy', () => { - fs.copyFileSync('file_a', 'file_b') - expect(fs.readFileSync('file_a')).toBe('content_a') - expect(fs.readFileSync('file_b')).toBe('content_a') - expect(() => fs.copyFileSync('file_c', 'file_d')).toThrowError() - }) -}) - -describe('directories', () => { - beforeEach(() => { - fs.__setMockFiles(INITIAL_FS) - }) - - test('exists', () => { - expect(fs.existsSync('fake_dir')).toBe(true) - expect(fs.existsSync('not_a_dir')).toBe(false) - expect(fs.existsSync(path.join('fake_dir', 'mock_dir'))).toBe(true) - expect(fs.existsSync(path.join('fake_dir', 'not_a_mock_dir'))).toBe(false) - }) - - test('reading', () => { - expect(fs.readdirSync('fake_dir')).toStrictEqual(['mock_dir']) - expect(fs.readdirSync(path.join('fake_dir', 'mock_dir'))).toStrictEqual([ - 'made_up_file', - ]) - expect(() => fs.readdirSync('not_a_fake_dir')).toThrowError() - expect(() => - fs.readdirSync(path.join('fake_dir', 'mock_dir', 'made_up_file')) - ).toThrowError() - }) - - test('writing', () => { - fs.mkdirSync('new_fake_dir') - expect(fs.existsSync('new_fake_dir')).toBe(true) - expect(fs.readdirSync('new_fake_dir')).toStrictEqual([]) - }) - - test('deleting', () => { - fs.mkdirSync('new_fake_dir') - expect(fs.existsSync('new_fake_dir')).toBe(true) - fs.rmdirSync('new_fake_dir') - expect(fs.existsSync('new_fake_dir')).toBe(false) - - expect(() => fs.rmdirSync('not_a_fake_dir')).toThrowError() - - expect(() => fs.rmdirSync(path.join('fake_dir', 'mock_dir'))).toThrowError() - - expect(() => - fs.rmdirSync(path.join('fake_dir', 'mock_dir'), { recursive: true }) - ).not.toThrowError() - expect(fs.readdirSync('fake_dir')).toStrictEqual([]) - - expect(() => fs.rmdirSync('fake_a')).toThrowError() - }) -}) diff --git a/packages/cli/src/__tests__/plugin.test.js b/packages/cli/src/__tests__/plugin.test.js index 692d098634af..e318292f6f01 100644 --- a/packages/cli/src/__tests__/plugin.test.js +++ b/packages/cli/src/__tests__/plugin.test.js @@ -1,4 +1,14 @@ -import fs from 'fs-extra' +import { vol } from 'memfs' +import { + vi, + describe, + beforeEach, + test, + expect, + beforeAll, + afterEach, + afterAll, +} from 'vitest' import yargs from 'yargs' import { hideBin } from 'yargs/helpers' @@ -7,17 +17,19 @@ import { getConfig, getPaths } from '@redwoodjs/project-config' import * as pluginLib from '../lib/plugin' import { loadPlugins } from '../plugin' -jest.mock('fs') -jest.mock('@redwoodjs/project-config', () => { +vi.mock('fs-extra') +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() return { - getPaths: jest.fn(), - getConfig: jest.fn(), + ...originalProjectConfig, + getPaths: vi.fn(), + getConfig: vi.fn(), } }) -jest.mock('../lib/packages', () => { +vi.mock('../lib/packages', () => { return { - installModule: jest.fn(), - isModuleInstalled: jest.fn().mockReturnValue(true), + installModule: vi.fn(), + isModuleInstalled: vi.fn().mockReturnValue(true), } }) @@ -68,7 +80,7 @@ describe('command information caching', () => { }, }, } - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ ...exampleCacheEntry, ...anExistingDefaultCacheEntry, @@ -86,7 +98,7 @@ describe('command information 
caching', () => { describe('plugin loading', () => { beforeAll(() => { - jest.spyOn(console, 'log').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) beforeEach(() => { @@ -96,10 +108,10 @@ describe('plugin loading', () => { }, }) - jest.spyOn(pluginLib, 'loadCommadCache') - jest.spyOn(pluginLib, 'loadPluginPackage') - jest.spyOn(pluginLib, 'checkPluginListAndWarn') - jest.spyOn(pluginLib, 'saveCommandCache') + vi.spyOn(pluginLib, 'loadCommadCache') + vi.spyOn(pluginLib, 'loadPluginPackage') + vi.spyOn(pluginLib, 'checkPluginListAndWarn') + vi.spyOn(pluginLib, 'saveCommandCache') }) afterEach(() => { @@ -179,7 +191,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -194,7 +206,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -279,7 +291,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -294,7 +306,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -379,7 +391,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -394,7 +406,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -467,7 +479,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -482,7 +494,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -563,7 +575,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -578,7 +590,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -660,7 +672,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -675,7 +687,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -765,7 +777,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -780,7 +792,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package', () => { return { @@ -795,7 +807,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({}), }) @@ -893,7 +905,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -908,7 +920,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: 
JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -1029,7 +1041,7 @@ describe('plugin loading', () => { }, }, }) - jest.mock( + vi.mock( '@redwoodjs/cli-some-package-not-in-cache', () => { return { @@ -1044,7 +1056,7 @@ describe('plugin loading', () => { }, { virtual: true } ) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { @@ -1137,7 +1149,7 @@ describe('plugin loading', () => { }, }, }) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({}), }) @@ -1235,7 +1247,7 @@ describe('plugin loading', () => { }, }, }) - fs.__setMockFiles({ + vol.fromJSON({ ['commandCache.json']: JSON.stringify({ '@redwoodjs/cli-some-package': { 'some-command': { diff --git a/packages/cli/src/commands/__tests__/build.test.js b/packages/cli/src/commands/__tests__/build.test.js index 2a9906caa2bc..8c257288207a 100644 --- a/packages/cli/src/commands/__tests__/build.test.js +++ b/packages/cli/src/commands/__tests__/build.test.js @@ -1,5 +1,7 @@ -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() return { + ...originalProjectConfig, getPaths: () => { return { api: { @@ -22,19 +24,23 @@ jest.mock('@redwoodjs/project-config', () => { }) import { Listr } from 'listr2' -jest.mock('listr2') +import { vi, afterEach, test, expect } from 'vitest' + +vi.mock('listr2') // Make sure prerender doesn't get triggered -jest.mock('execa', () => - jest.fn((cmd, params) => ({ +vi.mock('execa', () => ({ + default: vi.fn((cmd, params) => ({ cmd, params, - })) -) + })), +})) import { handler } from '../build' -afterEach(() => jest.clearAllMocks()) +afterEach(() => { + vi.clearAllMocks() +}) test('the build tasks are in the correct sequence', async () => { await handler({}) @@ -49,12 +55,12 @@ test('the build tasks are in the correct sequence', async () => { `) }) -jest.mock('@redwoodjs/prerender/detection', () => { +vi.mock('@redwoodjs/prerender/detection', () => { return { detectPrerenderRoutes: () => [] } }) test('Should run prerender for web', async () => { - const consoleSpy = jest.spyOn(console, 'log').mockImplementation(() => {}) + const consoleSpy = vi.spyOn(console, 'log').mockImplementation(() => {}) await handler({ side: ['web'], prerender: true }) expect(Listr.mock.calls[0][0].map((x) => x.title)).toMatchInlineSnapshot(` diff --git a/packages/cli/src/commands/__tests__/dev.test.js b/packages/cli/src/commands/__tests__/dev.test.js index 595e453b7644..861900942def 100644 --- a/packages/cli/src/commands/__tests__/dev.test.js +++ b/packages/cli/src/commands/__tests__/dev.test.js @@ -1,8 +1,8 @@ import '../../lib/mockTelemetry' -jest.mock('concurrently', () => ({ +vi.mock('concurrently', () => ({ __esModule: true, // this property makes it work - default: jest.fn().mockReturnValue({ + default: vi.fn().mockReturnValue({ result: { catch: () => {}, }, @@ -10,25 +10,28 @@ jest.mock('concurrently', () => ({ })) // dev checks for existence of api/src and web/src folders -jest.mock('fs', () => { +vi.mock('fs-extra', async () => { + const actualFs = await vi.importActual('fs-extra') return { - ...jest.requireActual('fs'), - readFileSync: () => 'File content', - existsSync: () => true, + default: { + ...actualFs, + readFileSync: () => 'File content', + existsSync: () => true, + }, } }) -jest.mock('@redwoodjs/internal/dist/dev', () => { +vi.mock('@redwoodjs/internal/dist/dev', () => { return { - 
shutdownPort: jest.fn(), + shutdownPort: vi.fn(), } }) -jest.mock('@redwoodjs/project-config', () => { - const actualProjectConfig = jest.requireActual('@redwoodjs/project-config') +vi.mock('@redwoodjs/project-config', async () => { + const actualProjectConfig = await vi.importActual('@redwoodjs/project-config') return { - getConfig: jest.fn(), + getConfig: vi.fn(), getConfigPath: () => '/mocked/project/redwood.toml', resolveFile: actualProjectConfig.resolveFile, getPaths: () => { @@ -47,13 +50,13 @@ jest.mock('@redwoodjs/project-config', () => { } }) -jest.mock('../../lib/generatePrismaClient', () => { +vi.mock('../../lib/generatePrismaClient', () => { return { - generatePrismaClient: jest.fn().mockResolvedValue(true), + generatePrismaClient: vi.fn().mockResolvedValue(true), } }) -jest.mock('../../lib/ports', () => { +vi.mock('../../lib/ports', () => { return { // We're not actually going to use the port, so it's fine to just say it's // free. It prevents the tests from failing if the ports are already in use @@ -64,6 +67,7 @@ jest.mock('../../lib/ports', () => { import concurrently from 'concurrently' import { find } from 'lodash' +import { vi, describe, afterEach, it, expect } from 'vitest' import { getConfig } from '@redwoodjs/project-config' @@ -72,7 +76,7 @@ import { handler } from '../dev' describe('yarn rw dev', () => { afterEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) it('Should run api and web dev servers, and generator watcher by default', async () => { diff --git a/packages/cli/src/commands/__tests__/prisma.test.js b/packages/cli/src/commands/__tests__/prisma.test.js index 8a322e89970b..81800a6914d4 100644 --- a/packages/cli/src/commands/__tests__/prisma.test.js +++ b/packages/cli/src/commands/__tests__/prisma.test.js @@ -1,5 +1,7 @@ -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() return { + ...originalProjectConfig, getPaths: () => { return { api: { @@ -12,30 +14,36 @@ jest.mock('@redwoodjs/project-config', () => { } }) -jest.mock('execa', () => ({ - sync: jest.fn((cmd, params, options) => { - return { - cmd, - params, - options, - } - }), +vi.mock('execa', () => ({ + default: { + sync: vi.fn((cmd, params, options) => { + return { + cmd, + params, + options, + } + }), + }, })) -jest.mock('fs', () => { +vi.mock('fs-extra', async (importOriginal) => { + const originalFsExtra = await importOriginal() return { - ...jest.requireActual('fs'), - existsSync: () => true, + default: { + ...originalFsExtra, + existsSync: () => true, + }, } }) import execa from 'execa' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import { handler } from '../prisma' beforeEach(() => { - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { diff --git a/packages/cli/src/commands/__tests__/serve.test.js b/packages/cli/src/commands/__tests__/serve.test.js index f45e60380c43..184ac5d60755 100644 --- a/packages/cli/src/commands/__tests__/serve.test.js +++ b/packages/cli/src/commands/__tests__/serve.test.js @@ -1,8 +1,10 @@ globalThis.__dirname = __dirname // We mock these to skip the check for web/dist and api/dist -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await 
importOriginal() return { + ...originalProjectConfig, getPaths: () => { return { api: { @@ -23,40 +25,46 @@ jest.mock('@redwoodjs/project-config', () => { } }) -jest.mock('fs', () => { +vi.mock('fs-extra', async (importOriginal) => { + const originalFsExtra = await importOriginal() return { - ...jest.requireActual('fs'), - existsSync: (p) => { - // Don't detect the experimental server file, can't use path.sep here so the replaceAll is used - if (p.replaceAll('\\', '/') === '/mocked/project/api/dist/server.js') { - return false - } - return true + default: { + ...originalFsExtra, + existsSync: (p) => { + // Don't detect the experimental server file, can't use path.sep here so the replaceAll is used + if (p.replaceAll('\\', '/') === '/mocked/project/api/dist/server.js') { + return false + } + return true + }, }, } }) -jest.mock('../serveApiHandler', () => { +vi.mock('../serveApiHandler', async (importOriginal) => { + const originalHandler = await importOriginal() return { - ...jest.requireActual('../serveApiHandler'), - apiServerHandler: jest.fn(), + ...originalHandler, + apiServerHandler: vi.fn(), } }) -jest.mock('../serveBothHandler', () => { +vi.mock('../serveBothHandler', async (importOriginal) => { + const originalHandler = await importOriginal() return { - ...jest.requireActual('../serveBothHandler'), - bothServerHandler: jest.fn(), + ...originalHandler, + bothServerHandler: vi.fn(), } }) -jest.mock('execa', () => - jest.fn((cmd, params) => ({ +vi.mock('execa', () => ({ + default: vi.fn((cmd, params) => ({ cmd, params, - })) -) + })), +})) import execa from 'execa' -import yargs from 'yargs' +import { vi, describe, afterEach, it, expect } from 'vitest' +import yargs from 'yargs/yargs' import { builder } from '../serve' import { apiServerHandler } from '../serveApiHandler' @@ -64,11 +72,11 @@ import { bothServerHandler } from '../serveBothHandler' describe('yarn rw serve', () => { afterEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) it('Should proxy serve api with params to api-server handler', async () => { - const parser = yargs.command('serve [side]', false, builder) + const parser = yargs().command('serve [side]', false, builder) await parser.parse('serve api --port 5555 --apiRootPath funkyFunctions') @@ -81,7 +89,7 @@ describe('yarn rw serve', () => { }) it('Should proxy serve api with params to api-server handler (alias and slashes in path)', async () => { - const parser = yargs.command('serve [side]', false, builder) + const parser = yargs().command('serve [side]', false, builder) await parser.parse( 'serve api --port 5555 --rootPath funkyFunctions/nested/' @@ -96,7 +104,7 @@ describe('yarn rw serve', () => { }) it('Should proxy serve web with params to web server handler', async () => { - const parser = yargs.command('serve [side]', false, builder) + const parser = yargs().command('serve [side]', false, builder) await parser.parse( 'serve web --port 9898 --socket abc --apiHost https://myapi.redwood/api' @@ -118,7 +126,7 @@ describe('yarn rw serve', () => { }) it('Should proxy rw serve with params to appropriate handler', async () => { - const parser = yargs.command('serve [side]', false, builder) + const parser = yargs().command('serve [side]', false, builder) await parser.parse('serve --port 9898 --socket abc') diff --git a/packages/cli/src/commands/__tests__/studioHandler.test.js b/packages/cli/src/commands/__tests__/studioHandler.test.js new file mode 100644 index 000000000000..cfe609feacd1 --- /dev/null +++ 
b/packages/cli/src/commands/__tests__/studioHandler.test.js @@ -0,0 +1,157 @@ +// Have to use `var` here to avoid "Temporal Dead Zone" issues +// eslint-disable-next-line +var mockedRedwoodVersion = '0.0.0' + +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() + return { + ...originalProjectConfig, + getPaths: () => ({ base: '' }), + } +}) + +vi.mock('fs-extra', () => ({ + default: { + readJSONSync: () => ({ + devDependencies: { + '@redwoodjs/core': mockedRedwoodVersion, + }, + }), + }, +})) + +import { vi, describe, it, afterEach, afterAll, expect } from 'vitest' + +import { assertRedwoodVersion } from '../studioHandler' + +describe('studioHandler', () => { + describe('assertRedwoodVersion', () => { + const exitSpy = vi.spyOn(process, 'exit').mockImplementation((code) => { + throw new Error(`process.exit(${code})`) + }) + + vi.spyOn(console, 'error').mockImplementation() + + afterEach(() => { + vi.clearAllMocks() + }) + + afterAll(() => { + vi.restoreAllMocks() + }) + + const minVersions = ['7.0.0-canary.874', '7.x', '8.0.0-0'] + + it('exits on RW v6', () => { + mockedRedwoodVersion = '6.6.2' + + expect(() => assertRedwoodVersion(minVersions)).toThrow() + expect(exitSpy).toHaveBeenCalledWith(1) + }) + + it('exits on RW v7.0.0-canary.785', () => { + mockedRedwoodVersion = '7.0.0-canary.785' + + expect(() => assertRedwoodVersion(minVersions)).toThrow() + expect(exitSpy).toHaveBeenCalledWith(1) + }) + + it('exits on RW v7.0.0-canary.785+fcb9d66b5', () => { + mockedRedwoodVersion = '7.0.0-canary.785+fcb9d66b5' + + expect(() => assertRedwoodVersion(minVersions)).toThrow() + expect(exitSpy).toHaveBeenCalledWith(1) + }) + + it('exits on RW v0.0.0-experimental.999', () => { + mockedRedwoodVersion = '0.0.0-experimental.999' + + expect(() => assertRedwoodVersion(minVersions)).toThrow() + expect(exitSpy).toHaveBeenCalledWith(1) + }) + + it('exits on RW v7.0.0-alpha.999', () => { + mockedRedwoodVersion = '7.0.0-alpha.999' + + expect(() => assertRedwoodVersion(minVersions)).toThrow() + expect(exitSpy).toHaveBeenCalledWith(1) + }) + + it('exits on RW v7.0.0-rc.999', () => { + mockedRedwoodVersion = '7.0.0-rc.999' + + expect(() => assertRedwoodVersion(minVersions)).toThrow() + expect(exitSpy).toHaveBeenCalledWith(1) + }) + + it('allows RW v7.0.0-canary.874', () => { + mockedRedwoodVersion = '7.0.0-canary.874' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v7.0.0-canary.874+fcb9d66b5', () => { + mockedRedwoodVersion = '7.0.0-canary.874+fcb9d66b5' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v7.0.0', () => { + mockedRedwoodVersion = '7.0.0' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v8.0.0', () => { + mockedRedwoodVersion = '8.0.0' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v7.0.1', () => { + mockedRedwoodVersion = '7.0.1' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v8.0.0-canary.1', () => { + mockedRedwoodVersion = '8.0.0-canary.1' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v8.0.0-rc.1', () => { + mockedRedwoodVersion = 
'8.0.0-rc.1' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v8.0.0', () => { + mockedRedwoodVersion = '8.0.0' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v8.0.1', () => { + mockedRedwoodVersion = '8.0.1' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + + it('allows RW v9.1.0', () => { + mockedRedwoodVersion = '9.1.0' + + expect(() => assertRedwoodVersion(minVersions)).not.toThrow() + expect(exitSpy).not.toHaveBeenCalled() + }) + }) +}) diff --git a/packages/cli/src/commands/__tests__/test.test.js b/packages/cli/src/commands/__tests__/test.test.js index a73490d98a2c..3a3cef335395 100644 --- a/packages/cli/src/commands/__tests__/test.test.js +++ b/packages/cli/src/commands/__tests__/test.test.js @@ -1,18 +1,19 @@ globalThis.__dirname = __dirname import '../../lib/test' -jest.mock('execa', () => - jest.fn((cmd, params) => ({ +vi.mock('execa', () => ({ + default: vi.fn((cmd, params) => ({ cmd, params, - })) -) + })), +})) import execa from 'execa' +import { vi, afterEach, test, expect } from 'vitest' import { handler } from '../test' -jest.mock('@redwoodjs/structure', () => { +vi.mock('@redwoodjs/structure', () => { return { getProject: () => ({ sides: ['web', 'api'], @@ -21,15 +22,18 @@ jest.mock('@redwoodjs/structure', () => { }) // Before rw tests run, api/ and web/ `jest.config.js` is confirmed via existsSync() -jest.mock('fs', () => { +vi.mock('fs-extra', async (importOriginal) => { + const originalFsExtra = await importOriginal() return { - ...jest.requireActual('fs'), - existsSync: () => true, + default: { + ...originalFsExtra, + existsSync: () => true, + }, } }) afterEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) test('Runs tests for all available sides if no filter passed', async () => { diff --git a/packages/cli/src/commands/__tests__/type-check.test.js b/packages/cli/src/commands/__tests__/type-check.test.js index dfa7543fec25..8caa836b94d5 100644 --- a/packages/cli/src/commands/__tests__/type-check.test.js +++ b/packages/cli/src/commands/__tests__/type-check.test.js @@ -1,21 +1,19 @@ -jest.mock('execa', () => - jest.fn((cmd, params, options) => { +vi.mock('execa', () => ({ + default: vi.fn((cmd, params, options) => { return { cmd, params, options, } - }) -) + }), +})) -jest.mock('concurrently', () => - jest.fn((commands, options) => { - return { - commands, - options, - } - }) -) +vi.mock('concurrently', () => ({ + default: vi.fn((commands, options) => ({ + commands, + options, + })), +})) import '../../lib/mockTelemetry' @@ -25,10 +23,11 @@ let mockedRedwoodConfig = { browser: {}, } -jest.mock('../../lib', () => { +vi.mock('../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../lib'), - runCommandTask: jest.fn((commands) => { + ...originalLib, + runCommandTask: vi.fn((commands) => { return commands.map(({ cmd, args }) => `${cmd} ${args?.join(' ')}`) }), getPaths: () => ({ @@ -44,7 +43,7 @@ jest.mock('../../lib', () => { } }) -jest.mock('../../commands/upgrade', () => { +vi.mock('../../commands/upgrade', () => { return { getCmdMajorVersion: () => 3, } @@ -54,17 +53,18 @@ import path from 'path' import concurrently from 'concurrently' import execa from 'execa' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import { runCommandTask } from '../../lib' 
import { handler } from '../type-check' beforeEach(() => { - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() console.info.mockRestore() console.log.mockRestore() }) diff --git a/packages/cli/src/commands/deploy/__tests__/baremetal.test.js b/packages/cli/src/commands/deploy/__tests__/baremetal.test.js index 3a076da64799..01b1a6dd98af 100644 --- a/packages/cli/src/commands/deploy/__tests__/baremetal.test.js +++ b/packages/cli/src/commands/deploy/__tests__/baremetal.test.js @@ -1,7 +1,10 @@ import { Listr } from 'listr2' +import { vi, describe, it, expect } from 'vitest' -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() return { + ...originalProjectConfig, getPaths: () => ({ base: `${__dirname}/fixtures`, }), diff --git a/packages/cli/src/commands/deploy/__tests__/nftPack.test.js b/packages/cli/src/commands/deploy/__tests__/nftPack.test.js index b1d90a728ce8..dc081e00889f 100644 --- a/packages/cli/src/commands/deploy/__tests__/nftPack.test.js +++ b/packages/cli/src/commands/deploy/__tests__/nftPack.test.js @@ -1,8 +1,16 @@ +import { vi, test, expect } from 'vitest' + import { findApiDistFunctions } from '@redwoodjs/internal/dist/files' import * as nftPacker from '../packing/nft' -jest.mock('@redwoodjs/internal/dist/files', () => { +vi.mock('@vercel/nft', () => { + return { + nodeFileTrace: vi.fn(), + } +}) + +vi.mock('@redwoodjs/internal/dist/files', () => { return { findApiDistFunctions: () => { return [ @@ -16,7 +24,7 @@ jest.mock('@redwoodjs/internal/dist/files', () => { } }) -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', () => { return { getPaths: () => { return { @@ -30,7 +38,7 @@ jest.mock('@redwoodjs/project-config', () => { }) test('Check packager detects all functions', () => { - const packageFileMock = jest + const packageFileMock = vi .spyOn(nftPacker, 'packageSingleFunction') .mockResolvedValue(true) diff --git a/packages/cli/src/commands/destroy/cell/__tests__/cell.test.js b/packages/cli/src/commands/destroy/cell/__tests__/cell.test.js index 2cde53ce55a2..c9e772ef60ef 100644 --- a/packages/cli/src/commands/destroy/cell/__tests__/cell.test.js +++ b/packages/cli/src/commands/destroy/cell/__tests__/cell.test.js @@ -1,14 +1,15 @@ globalThis.__dirname = __dirname -jest.mock('fs') -jest.mock('../../../../lib', () => { +vi.mock('fs-extra') +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) -jest.mock('@redwoodjs/structure', () => { +vi.mock('@redwoodjs/structure', () => { return { getProject: () => ({ cells: [{ queryOperationName: undefined }], @@ -17,6 +18,8 @@ jest.mock('@redwoodjs/structure', () => { }) import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import '../../../../lib/test' @@ -24,20 +27,19 @@ import { files } from '../../../generate/cell/cell' import { tasks } from '../cell' beforeEach(() => { - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vi.spyOn(console, 'info').mockImplementation(() 
=> {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() console.info.mockRestore() console.log.mockRestore() }) test('destroys cell files', async () => { - fs.__setMockFiles(await files({ name: 'User' })) - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + vol.fromJSON(await files({ name: 'User' })) + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'cell', filesFn: files, @@ -53,8 +55,8 @@ test('destroys cell files', async () => { }) test('destroys cell files with stories and tests', async () => { - fs.__setMockFiles(await files({ name: 'User', stories: true, tests: true })) - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + vol.fromJSON(await files({ name: 'User', stories: true, tests: true })) + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'cell', filesFn: files, diff --git a/packages/cli/src/commands/destroy/component/__tests__/component.test.js b/packages/cli/src/commands/destroy/component/__tests__/component.test.js index 95d2013ca909..93ad25f7b2a0 100644 --- a/packages/cli/src/commands/destroy/component/__tests__/component.test.js +++ b/packages/cli/src/commands/destroy/component/__tests__/component.test.js @@ -1,13 +1,16 @@ globalThis.__dirname = __dirname -jest.mock('fs') -jest.mock('../../../../lib', () => { +vi.mock('fs-extra') +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import '../../../../lib/test' @@ -15,20 +18,20 @@ import { files } from '../../../generate/component/component' import { tasks } from '../component' beforeEach(() => { - fs.__setMockFiles(files({ name: 'About' })) - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vol.fromJSON(files({ name: 'About' })) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() + vi.spyOn(fs, 'unlinkSync').mockClear() console.info.mockRestore() console.log.mockRestore() }) test('destroys component files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'component', filesFn: files, name: 'About' }) t.options.renderer = 'silent' @@ -40,8 +43,8 @@ test('destroys component files', async () => { }) test('destroys component files including stories and tests', async () => { - fs.__setMockFiles(files({ name: 'About', stories: true, tests: true })) - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + vol.fromJSON(files({ name: 'About', stories: true, tests: true })) + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'component', filesFn: files, diff --git a/packages/cli/src/commands/destroy/directive/__tests__/directive.test.js b/packages/cli/src/commands/destroy/directive/__tests__/directive.test.js index b1b4552b5f1e..2531b9b5a94d 100644 --- a/packages/cli/src/commands/destroy/directive/__tests__/directive.test.js +++ b/packages/cli/src/commands/destroy/directive/__tests__/directive.test.js @@ -1,14 +1,17 @@ globalThis.__dirname = __dirname 
-jest.mock('fs') -jest.mock('../../../../lib', () => { +vi.mock('fs-extra') +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import '../../../../lib/test' @@ -16,22 +19,20 @@ import { files } from '../../../generate/directive/directive' import { tasks } from '../directive' beforeEach(() => { - fs.__setMockFiles( - files({ name: 'require-admin', type: 'validator', tests: true }) - ) - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vol.fromJSON(files({ name: 'require-admin', type: 'validator', tests: true })) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() + vi.spyOn(fs, 'unlinkSync').mockClear() console.info.mockRestore() console.log.mockRestore() }) test('destroys directive files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'directive', filesFn: (args) => files({ ...args, type: 'validator' }), diff --git a/packages/cli/src/commands/destroy/function/__tests__/function.test.js b/packages/cli/src/commands/destroy/function/__tests__/function.test.js index fa302c5935a0..bf13eb620247 100644 --- a/packages/cli/src/commands/destroy/function/__tests__/function.test.js +++ b/packages/cli/src/commands/destroy/function/__tests__/function.test.js @@ -1,13 +1,16 @@ globalThis.__dirname = __dirname -jest.mock('fs') -jest.mock('../../../../lib', () => { +vi.mock('fs-extra') +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import '../../../../lib/test' @@ -15,20 +18,20 @@ import { files } from '../../../generate/function/function' import { tasks } from '../function' beforeEach(async () => { - fs.__setMockFiles(files({ name: 'sendMail' })) - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vol.fromJSON(files({ name: 'sendMail' })) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() + vi.spyOn(fs, 'unlinkSync').mockClear() console.info.mockRestore() console.log.mockRestore() }) test('destroys service files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'service', filesFn: files, diff --git a/packages/cli/src/commands/destroy/layout/__tests__/layout.test.js b/packages/cli/src/commands/destroy/layout/__tests__/layout.test.js index 751e54ceac00..e43e6530a223 100644 --- a/packages/cli/src/commands/destroy/layout/__tests__/layout.test.js +++ b/packages/cli/src/commands/destroy/layout/__tests__/layout.test.js @@ -1,13 +1,16 @@ globalThis.__dirname = __dirname -jest.mock('fs') -jest.mock('../../../../lib', () => { 
+vi.mock('fs-extra') +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import '../../../../lib/test' @@ -15,20 +18,20 @@ import { files } from '../../../generate/layout/layout' import { tasks } from '../layout' beforeEach(() => { - fs.__setMockFiles(files({ name: 'Blog' })) - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vol.fromJSON(files({ name: 'Blog' })) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() + vi.spyOn(fs, 'unlinkSync').mockClear() console.info.mockRestore() console.log.mockRestore() }) test('destroys layout files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'layout', filesFn: files, name: 'Blog' }) t.options.renderer = 'silent' @@ -40,8 +43,8 @@ test('destroys layout files', async () => { }) test('destroys layout files with stories and tests', async () => { - fs.__setMockFiles(files({ name: 'Blog', stories: true, tests: true })) - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + vol.fromJSON(files({ name: 'Blog', stories: true, tests: true })) + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'layout', filesFn: files, diff --git a/packages/cli/src/commands/destroy/page/__tests__/page.test.js b/packages/cli/src/commands/destroy/page/__tests__/page.test.js index 6e2fd1e6b752..e219734d4d07 100644 --- a/packages/cli/src/commands/destroy/page/__tests__/page.test.js +++ b/packages/cli/src/commands/destroy/page/__tests__/page.test.js @@ -1,13 +1,16 @@ globalThis.__dirname = __dirname -jest.mock('fs') -jest.mock('../../../../lib', () => { +vi.mock('fs-extra') +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, beforeEach, afterEach, test, expect } from 'vitest' import '../../../../lib/test' @@ -16,7 +19,7 @@ import { files } from '../../../generate/page/page' import { tasks } from '../page' beforeEach(() => { - fs.__setMockFiles({ + vol.fromJSON({ ...files({ name: 'About' }), [getPaths().web.routes]: [ '', @@ -29,12 +32,12 @@ beforeEach(() => { }) afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() + vi.spyOn(fs, 'unlinkSync').mockClear() }) test('destroys page files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ name: 'About' }) t.options.renderer = 'silent' @@ -47,7 +50,7 @@ test('destroys page files', async () => { test('destroys page files with stories and tests', async () => { const fileOptions = { name: 'About', stories: true, tests: true } - fs.__setMockFiles({ + vol.fromJSON({ ...files(fileOptions), [getPaths().web.routes]: [ '', @@ -58,7 +61,7 @@ test('destroys page files with stories and tests', async () => { ].join('\n'), }) - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = 
vi.spyOn(fs, 'unlinkSync') const t = tasks(fileOptions) t.options.renderer = 'silent' @@ -74,7 +77,7 @@ test('cleans up route from Routes.js', async () => { t.options.renderer = 'silent' return t.tasks[1].run().then(() => { - const routes = fs.readFileSync(getPaths().web.routes) + const routes = fs.readFileSync(getPaths().web.routes, 'utf-8') expect(routes).toEqual( [ '', @@ -91,7 +94,7 @@ test('cleans up route with a custom path from Routes.js', async () => { t.options.renderer = 'silent' return t.tasks[1].run().then(() => { - const routes = fs.readFileSync(getPaths().web.routes) + const routes = fs.readFileSync(getPaths().web.routes, 'utf-8') expect(routes).toEqual( [ '', diff --git a/packages/cli/src/commands/destroy/scaffold/__tests__/scaffold.test.js b/packages/cli/src/commands/destroy/scaffold/__tests__/scaffold.test.js index 61ae251c0742..2c6a7587e2a5 100644 --- a/packages/cli/src/commands/destroy/scaffold/__tests__/scaffold.test.js +++ b/packages/cli/src/commands/destroy/scaffold/__tests__/scaffold.test.js @@ -3,6 +3,8 @@ globalThis.__dirname = __dirname import path from 'path' import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, test, describe, beforeEach, afterEach, expect } from 'vitest' import '../../../../lib/test' @@ -14,20 +16,22 @@ import { import { files } from '../../../generate/scaffold/scaffold' import { tasks } from '../scaffold' -jest.mock('fs') -jest.mock('execa') +vi.mock('fs-extra') +vi.mock('execa') -jest.mock('../../../../lib', () => { +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) -jest.mock('../../../../lib/schemaHelpers', () => { +vi.mock('../../../../lib/schemaHelpers', async (importOriginal) => { + const originalSchemaHelpers = await importOriginal() const path = require('path') return { - ...jest.requireActual('../../../../lib/schemaHelpers'), + ...originalSchemaHelpers, getSchema: () => require(path.join(globalThis.__dirname, 'fixtures', 'post.json')), } @@ -48,21 +52,20 @@ const templateDirectories = templateDirectoryNames.map((name) => { }) }) const scaffoldTemplates = {} +const actualFs = await vi.importActual('fs-extra') templateDirectories.forEach((directory) => { - const files = jest.requireActual('fs').readdirSync(directory) + const files = actualFs.readdirSync(directory) files.forEach((file) => { const filePath = path.join(directory, file) - scaffoldTemplates[filePath] = jest - .requireActual('fs') - .readFileSync(filePath, { encoding: 'utf8', flag: 'r' }) + scaffoldTemplates[filePath] = actualFs.readFileSync(filePath, 'utf-8') }) }) describe('rw destroy scaffold', () => { describe('destroy scaffold post', () => { beforeEach(async () => { - fs.__setMockFiles(scaffoldTemplates) - fs.__setMockFiles({ + vol.fromJSON(scaffoldTemplates) + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ -84,12 +87,12 @@ describe('rw destroy scaffold', () => { }) afterEach(() => { - fs.__setMockFiles(scaffoldTemplates) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.fromJSON(scaffoldTemplates) + vi.spyOn(fs, 'unlinkSync').mockClear() }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', tests: false, @@ -114,9 +117,10 @@ describe('rw destroy scaffold', () => { describe('for typescript files', () => { beforeEach(async () => { // clear filesystem so 
files call works as expected - fs.__setMockFiles(scaffoldTemplates) + vol.reset() + vol.fromJSON(scaffoldTemplates) - fs.__setMockFiles({ + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ -139,7 +143,7 @@ describe('rw destroy scaffold', () => { }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', tests: false, @@ -174,7 +178,7 @@ describe('rw destroy scaffold', () => { t.options.renderer = 'silent' return t.tasks[1].run().then(() => { - const routes = fs.readFileSync(getPaths().web.routes) + const routes = fs.readFileSync(getPaths().web.routes, 'utf-8') expect(routes).toEqual( [ '', @@ -189,8 +193,8 @@ describe('rw destroy scaffold', () => { describe('destroy namespaced scaffold post', () => { beforeEach(async () => { - fs.__setMockFiles(scaffoldTemplates) - fs.__setMockFiles({ + vol.fromJSON(scaffoldTemplates) + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ -212,12 +216,12 @@ describe('rw destroy scaffold', () => { }) afterEach(() => { - fs.__setMockFiles(scaffoldTemplates) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.fromJSON(scaffoldTemplates) + vi.spyOn(fs, 'unlinkSync').mockClear() }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', path: 'admin', @@ -244,9 +248,9 @@ describe('rw destroy scaffold', () => { describe('for typescript files', () => { beforeEach(async () => { // clear filesystem so files call works as expected - fs.__setMockFiles(scaffoldTemplates) + vol.fromJSON(scaffoldTemplates) - fs.__setMockFiles({ + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ -267,7 +271,7 @@ describe('rw destroy scaffold', () => { }) }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', path: 'admin', @@ -304,7 +308,7 @@ describe('rw destroy scaffold', () => { t.options.renderer = 'silent' return t.tasks[1].run().then(() => { - const routes = fs.readFileSync(getPaths().web.routes) + const routes = fs.readFileSync(getPaths().web.routes, 'utf-8') expect(routes).toEqual( [ '', diff --git a/packages/cli/src/commands/destroy/scaffold/__tests__/scaffoldNoNest.test.js b/packages/cli/src/commands/destroy/scaffold/__tests__/scaffoldNoNest.test.js index 09af6b5d0c6b..fb96fd021d49 100644 --- a/packages/cli/src/commands/destroy/scaffold/__tests__/scaffoldNoNest.test.js +++ b/packages/cli/src/commands/destroy/scaffold/__tests__/scaffoldNoNest.test.js @@ -3,6 +3,8 @@ globalThis.__dirname = __dirname import path from 'path' import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, test, describe, beforeEach, afterEach, expect } from 'vitest' import '../../../../lib/test' @@ -14,20 +16,22 @@ import { import { files } from '../../../generate/scaffold/scaffold' import { tasks } from '../scaffold' -jest.mock('fs') -jest.mock('execa') +vi.mock('fs-extra') +vi.mock('execa') -jest.mock('../../../../lib', () => { +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) -jest.mock('../../../../lib/schemaHelpers', () => { +vi.mock('../../../../lib/schemaHelpers', async (importOriginal) => { + const 
originalSchemaHelpers = await importOriginal() const path = require('path') return { - ...jest.requireActual('../../../../lib/schemaHelpers'), + ...originalSchemaHelpers, getSchema: () => require(path.join(globalThis.__dirname, 'fixtures', 'post.json')), } @@ -48,21 +52,20 @@ const templateDirectories = templateDirectoryNames.map((name) => { }) }) const scaffoldTemplates = {} -templateDirectories.forEach((directory) => { - const files = jest.requireActual('fs').readdirSync(directory) +const actualFs = await vi.importActual('fs-extra') +templateDirectories.forEach(async (directory) => { + const files = actualFs.readdirSync(directory) files.forEach((file) => { const filePath = path.join(directory, file) - scaffoldTemplates[filePath] = jest - .requireActual('fs') - .readFileSync(filePath, { encoding: 'utf8', flag: 'r' }) + scaffoldTemplates[filePath] = actualFs.readFileSync(filePath, 'utf-8') }) }) describe('rw destroy scaffold', () => { describe('destroy scaffold post', () => { beforeEach(async () => { - fs.__setMockFiles(scaffoldTemplates) - fs.__setMockFiles({ + vol.fromJSON(scaffoldTemplates) + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ -84,12 +87,12 @@ describe('rw destroy scaffold', () => { }) afterEach(() => { - fs.__setMockFiles(scaffoldTemplates) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.fromJSON(scaffoldTemplates) + vi.spyOn(fs, 'unlinkSync').mockClear() }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', tests: false, @@ -113,8 +116,9 @@ describe('rw destroy scaffold', () => { describe('for typescript files', () => { beforeEach(async () => { - fs.__setMockFiles(scaffoldTemplates) // clear filesystem so files call works as expected - fs.__setMockFiles({ + vol.reset() + vol.fromJSON(scaffoldTemplates) + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ -137,7 +141,7 @@ describe('rw destroy scaffold', () => { }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', tests: false, @@ -172,7 +176,7 @@ describe('rw destroy scaffold', () => { t.options.renderer = 'silent' return t.tasks[1].run().then(() => { - const routes = fs.readFileSync(getPaths().web.routes) + const routes = fs.readFileSync(getPaths().web.routes, 'utf-8') expect(routes).toEqual( [ '', @@ -187,8 +191,7 @@ describe('rw destroy scaffold', () => { describe('destroy namespaced scaffold post', () => { beforeEach(async () => { - fs.__setMockFiles(scaffoldTemplates) - fs.__setMockFiles({ + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ -210,12 +213,12 @@ describe('rw destroy scaffold', () => { }) afterEach(() => { - fs.__setMockFiles(scaffoldTemplates) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.fromJSON(scaffoldTemplates) + vi.spyOn(fs, 'unlinkSync').mockClear() }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', path: 'admin', @@ -241,8 +244,7 @@ describe('rw destroy scaffold', () => { describe('for typescript files', () => { beforeEach(async () => { - fs.__setMockFiles(scaffoldTemplates) // clear filesystem so files call works as expected - fs.__setMockFiles({ + vol.fromJSON({ ...scaffoldTemplates, ...(await files({ ...getDefaultArgs(defaults), @@ 
-263,7 +265,7 @@ describe('rw destroy scaffold', () => { }) }) test('destroys files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post', path: 'admin', @@ -300,7 +302,7 @@ describe('rw destroy scaffold', () => { t.options.renderer = 'silent' return t.tasks[1].run().then(() => { - const routes = fs.readFileSync(getPaths().web.routes) + const routes = fs.readFileSync(getPaths().web.routes, 'utf-8') expect(routes).toEqual( [ '', diff --git a/packages/cli/src/commands/destroy/sdl/__tests__/sdl.test.js b/packages/cli/src/commands/destroy/sdl/__tests__/sdl.test.js index b1577c9a0370..8d2fec9f9058 100644 --- a/packages/cli/src/commands/destroy/sdl/__tests__/sdl.test.js +++ b/packages/cli/src/commands/destroy/sdl/__tests__/sdl.test.js @@ -1,6 +1,8 @@ globalThis.__dirname = __dirname import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, beforeEach, afterEach, test, expect, describe } from 'vitest' import '../../../../lib/test' @@ -8,19 +10,21 @@ import { getDefaultArgs } from '../../../../lib' import { builder, files } from '../../../generate/sdl/sdl' import { tasks } from '../sdl' -jest.mock('fs') +vi.mock('fs-extra') -jest.mock('../../../../lib', () => { +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) -jest.mock('../../../../lib/schemaHelpers', () => { +vi.mock('../../../../lib/schemaHelpers', async (importOriginal) => { + const originalSchemaHelpers = await importOriginal() const path = require('path') return { - ...jest.requireActual('../../../../lib/schemaHelpers'), + ...originalSchemaHelpers, getSchema: () => require(path.join(globalThis.__dirname, 'fixtures', 'post.json')), } @@ -28,19 +32,17 @@ jest.mock('../../../../lib/schemaHelpers', () => { describe('rw destroy sdl', () => { afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() + vi.spyOn(fs, 'unlinkSync').mockClear() }) describe('for javascript files', () => { beforeEach(async () => { - fs.__setMockFiles( - await files({ ...getDefaultArgs(builder), name: 'Post' }) - ) + vol.fromJSON(await files({ ...getDefaultArgs(builder), name: 'Post' })) }) test('destroys sdl files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post' }) t.options.renderer = 'silent' @@ -56,7 +58,7 @@ describe('rw destroy sdl', () => { describe('for typescript files', () => { beforeEach(async () => { - fs.__setMockFiles( + vol.fromJSON( await files({ ...getDefaultArgs(builder), typescript: true, @@ -66,7 +68,7 @@ describe('rw destroy sdl', () => { }) test('destroys sdl files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ model: 'Post' }) t.options.renderer = 'silent' diff --git a/packages/cli/src/commands/destroy/service/__tests__/service.test.js b/packages/cli/src/commands/destroy/service/__tests__/service.test.js index 6973bb303a96..3e9db329c0c1 100644 --- a/packages/cli/src/commands/destroy/service/__tests__/service.test.js +++ b/packages/cli/src/commands/destroy/service/__tests__/service.test.js @@ -1,5 +1,7 @@ globalThis.__dirname = __dirname import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, describe, beforeEach, afterEach, test, expect } from 'vitest' import 
'../../../../lib/test' @@ -7,19 +9,21 @@ import { getDefaultArgs } from '../../../../lib' import { builder, files } from '../../../generate/service/service' import { tasks } from '../service' -jest.mock('fs') +vi.mock('fs-extra') -jest.mock('../../../../lib', () => { +vi.mock('../../../../lib', async (importOriginal) => { + const originalLib = await importOriginal() return { - ...jest.requireActual('../../../../lib'), + ...originalLib, generateTemplate: () => '', } }) -jest.mock('../../../../lib/schemaHelpers', () => { +vi.mock('../../../../lib/schemaHelpers', async (importOriginal) => { + const originalSchemaHelpers = await importOriginal() const path = require('path') return { - ...jest.requireActual('../../../../lib/schemaHelpers'), + ...originalSchemaHelpers, getSchema: () => require(path.join(globalThis.__dirname, 'fixtures', 'post.json')), } @@ -27,25 +31,23 @@ jest.mock('../../../../lib/schemaHelpers', () => { describe('rw destroy service', () => { beforeEach(() => { - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { - fs.__setMockFiles({}) - jest.spyOn(fs, 'unlinkSync').mockClear() + vol.reset() + vi.spyOn(fs, 'unlinkSync').mockClear() console.info.mockRestore() console.log.mockRestore() }) describe('for javascript files', () => { beforeEach(async () => { - fs.__setMockFiles( - await files({ ...getDefaultArgs(builder), name: 'User' }) - ) + vol.fromJSON(await files({ ...getDefaultArgs(builder), name: 'User' })) }) test('destroys service files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'service', filesFn: files, @@ -65,7 +67,7 @@ describe('rw destroy service', () => { describe('for typescript files', () => { beforeEach(async () => { - fs.__setMockFiles( + vol.fromJSON( await files({ ...getDefaultArgs(builder), typescript: true, @@ -75,7 +77,7 @@ describe('rw destroy service', () => { }) test('destroys service files', async () => { - const unlinkSpy = jest.spyOn(fs, 'unlinkSync') + const unlinkSpy = vi.spyOn(fs, 'unlinkSync') const t = tasks({ componentName: 'service', filesFn: files, diff --git a/packages/cli/src/commands/experimental/__tests__/setupDocker.test.js b/packages/cli/src/commands/experimental/__tests__/setupDocker.test.js index 5a1803bad42a..7c96f33c050a 100644 --- a/packages/cli/src/commands/experimental/__tests__/setupDocker.test.js +++ b/packages/cli/src/commands/experimental/__tests__/setupDocker.test.js @@ -1,12 +1,14 @@ +import { vi, test, describe, expect } from 'vitest' + import { recordTelemetryAttributes } from '@redwoodjs/cli-helpers' import { command, description, builder, handler } from '../setupDocker' -jest.mock('../setupDockerHandler.js') +vi.mock('../setupDockerHandler.js') -jest.mock('@redwoodjs/cli-helpers', () => { +vi.mock('@redwoodjs/cli-helpers', () => { return { - recordTelemetryAttributes: jest.fn(), + recordTelemetryAttributes: vi.fn(), } }) @@ -23,8 +25,8 @@ describe('setupDocker', () => { test('builder configures command options force and verbose ', () => { const yargs = { - option: jest.fn(() => yargs), - epilogue: jest.fn(() => yargs), + option: vi.fn(() => yargs), + epilogue: vi.fn(() => yargs), } builder(yargs) diff --git a/packages/cli/src/commands/generate/__mocks__/@redwoodjs/structure.js 
b/packages/cli/src/commands/generate/__mocks__/@redwoodjs/structure.js deleted file mode 100644 index 184f2179d39d..000000000000 --- a/packages/cli/src/commands/generate/__mocks__/@redwoodjs/structure.js +++ /dev/null @@ -1,12 +0,0 @@ -// We need this because we check for typescript automatically in generate - -const mockedStructure = { - ...jest.requireActual('@redwoodjs/structure'), - getProject: () => { - return { - isTypeScriptProject: false, - } - }, -} - -module.exports = mockedStructure diff --git a/packages/cli/src/commands/generate/__tests__/createYargsForComponentGeneration.test.js b/packages/cli/src/commands/generate/__tests__/createYargsForComponentGeneration.test.js index 26856d1bc26a..ef247cdc283f 100644 --- a/packages/cli/src/commands/generate/__tests__/createYargsForComponentGeneration.test.js +++ b/packages/cli/src/commands/generate/__tests__/createYargsForComponentGeneration.test.js @@ -2,8 +2,9 @@ globalThis.__dirname = __dirname import '../../../lib/test' -jest.mock('listr2') +vi.mock('listr2') import { Listr } from 'listr2' +import { vi, test, expect } from 'vitest' import * as helpers from '../helpers' @@ -15,7 +16,7 @@ test('createYargsForComponentGeneration generates a yargs handler as expected', return [ { title: 'Cool beans, with rad sauce', - task: jest.fn(), + task: vi.fn(), enabled: () => true, }, ] diff --git a/packages/cli/src/commands/generate/__tests__/helpers.test.js b/packages/cli/src/commands/generate/__tests__/helpers.test.js index 5ba670cb4772..5383ea2156fc 100644 --- a/packages/cli/src/commands/generate/__tests__/helpers.test.js +++ b/packages/cli/src/commands/generate/__tests__/helpers.test.js @@ -1,6 +1,7 @@ import path from 'path' import fs from 'fs-extra' +import { vi, test, expect, describe, it } from 'vitest' // Setup test mocks globalThis.__dirname = __dirname @@ -48,7 +49,7 @@ test('customOrDefaultTemplatePath returns the default path if no custom template test('customOrDefaultTemplatePath returns the app path if a custom template exists', () => { // pretend the custom template exists - jest.spyOn(fs, 'existsSync').mockImplementationOnce(() => true) + vi.spyOn(fs, 'existsSync').mockImplementationOnce(() => true) const output = helpers.customOrDefaultTemplatePath({ side: 'web', @@ -63,7 +64,7 @@ test('customOrDefaultTemplatePath returns the app path if a custom template exis test('customOrDefaultTemplatePath returns the app path with proper side, generator and path', () => { // pretend the custom template exists - jest.spyOn(fs, 'existsSync').mockImplementationOnce(() => true) + vi.spyOn(fs, 'existsSync').mockImplementationOnce(() => true) const output = helpers.customOrDefaultTemplatePath({ side: 'api', diff --git a/packages/cli/src/commands/generate/cell/__tests__/__snapshots__/cell.test.js.snap b/packages/cli/src/commands/generate/cell/__tests__/__snapshots__/cell.test.js.snap index fdea4f2615ef..9ddbaad1d041 100644 --- a/packages/cli/src/commands/generate/cell/__tests__/__snapshots__/cell.test.js.snap +++ b/packages/cli/src/commands/generate/cell/__tests__/__snapshots__/cell.test.js.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html exports[`"equipment" with list flag 1`] = ` "export const QUERY = gql\` @@ -52,7 +52,7 @@ export const Success = ({ equipment }) => { " `; -exports[`Custom Id Field files List cell creates a cell list component with a custom id field 1`] = ` +exports[`Custom Id Field files > List cell > creates a cell list component with a custom id 
field 1`] = ` "export const QUERY = gql\` query CustomIdFieldsQuery { customIdFields { @@ -81,7 +81,7 @@ export const Success = ({ customIdFields }) => { " `; -exports[`Custom Id Field files List cell creates a cell list mock with a custom id field 1`] = ` +exports[`Custom Id Field files > List cell > creates a cell list mock with a custom id field 1`] = ` "// Define your own mock data here: export const standard = (/* vars, { ctx, req } */) => ({ customIdFields: [{ uuid: '42' }, { uuid: '43' }, { uuid: '44' }], @@ -89,7 +89,7 @@ export const standard = (/* vars, { ctx, req } */) => ({ " `; -exports[`Custom Id Field files List cell creates a cell list stories with a custom id field 1`] = ` +exports[`Custom Id Field files > List cell > creates a cell list stories with a custom id field 1`] = ` "import { Loading, Empty, Failure, Success } from './CustomIdFieldsCell' import { standard } from './CustomIdFieldsCell.mock' @@ -126,7 +126,7 @@ export const success = { " `; -exports[`Custom Id Field files List cell creates a cell list test with a custom id field 1`] = ` +exports[`Custom Id Field files > List cell > creates a cell list test with a custom id field 1`] = ` "import { render } from '@redwoodjs/testing/web' import { Loading, Empty, Failure, Success } from './CustomIdFieldsCell' import { standard } from './CustomIdFieldsCell.mock' @@ -171,7 +171,7 @@ describe('CustomIdFieldsCell', () => { " `; -exports[`Custom Id Field files Single cell creates a cell component with a custom id field 1`] = ` +exports[`Custom Id Field files > Single cell > creates a cell component with a custom id field 1`] = ` "export const QUERY = gql\` query FindCustomIdFieldQuery($id: String!) { customIdField: customIdField(uuid: $id) { @@ -194,7 +194,7 @@ export const Success = ({ customIdField }) => { " `; -exports[`Custom Id Field files Single cell creates a cell mock with a custom id field 1`] = ` +exports[`Custom Id Field files > Single cell > creates a cell mock with a custom id field 1`] = ` "// Define your own mock data here: export const standard = (/* vars, { ctx, req } */) => ({ customIdField: { @@ -204,7 +204,7 @@ export const standard = (/* vars, { ctx, req } */) => ({ " `; -exports[`Custom Id Field files Single cell creates a cell stories with a custom id field 1`] = ` +exports[`Custom Id Field files > Single cell > creates a cell stories with a custom id field 1`] = ` "import { Loading, Empty, Failure, Success } from './CustomIdFieldCell' import { standard } from './CustomIdFieldCell.mock' @@ -241,7 +241,7 @@ export const success = { " `; -exports[`Custom Id Field files Single cell creates a cell test with a custom id field 1`] = ` +exports[`Custom Id Field files > Single cell > creates a cell test with a custom id field 1`] = ` "import { render } from '@redwoodjs/testing/web' import { Loading, Empty, Failure, Success } from './CustomIdFieldCell' import { standard } from './CustomIdFieldCell.mock' @@ -286,7 +286,7 @@ describe('CustomIdFieldCell', () => { " `; -exports[`Kebab case words creates a cell component with a kebabCase word name 1`] = ` +exports[`Kebab case words > creates a cell component with a kebabCase word name 1`] = ` "export const QUERY = gql\` query FindUserProfileQuery($id: Int!) 
{ userProfile: userProfile(id: $id) { @@ -309,7 +309,7 @@ export const Success = ({ userProfile }) => { " `; -exports[`Kebab case words creates a cell mock with a kebabCase word name 1`] = ` +exports[`Kebab case words > creates a cell mock with a kebabCase word name 1`] = ` "// Define your own mock data here: export const standard = (/* vars, { ctx, req } */) => ({ userProfile: { @@ -319,7 +319,7 @@ export const standard = (/* vars, { ctx, req } */) => ({ " `; -exports[`Kebab case words creates a cell stories with a kebabCase word name 1`] = ` +exports[`Kebab case words > creates a cell stories with a kebabCase word name 1`] = ` "import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' @@ -356,7 +356,7 @@ export const success = { " `; -exports[`Kebab case words creates a cell test with a kebabCase word name 1`] = ` +exports[`Kebab case words > creates a cell test with a kebabCase word name 1`] = ` "import { render } from '@redwoodjs/testing/web' import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' @@ -401,7 +401,7 @@ describe('UserProfileCell', () => { " `; -exports[`Multiword files creates a cell component with a multi word name 1`] = ` +exports[`Multiword files > creates a cell component with a multi word name 1`] = ` "export const QUERY = gql\` query FindUserProfileQuery($id: Int!) { userProfile: userProfile(id: $id) { @@ -424,7 +424,7 @@ export const Success = ({ userProfile }) => { " `; -exports[`Multiword files creates a cell mock with a multi word name 1`] = ` +exports[`Multiword files > creates a cell mock with a multi word name 1`] = ` "// Define your own mock data here: export const standard = (/* vars, { ctx, req } */) => ({ userProfile: { @@ -434,7 +434,7 @@ export const standard = (/* vars, { ctx, req } */) => ({ " `; -exports[`Multiword files creates a cell stories with a multi word name 1`] = ` +exports[`Multiword files > creates a cell stories with a multi word name 1`] = ` "import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' @@ -471,7 +471,7 @@ export const success = { " `; -exports[`Multiword files creates a cell test with a multi word name 1`] = ` +exports[`Multiword files > creates a cell test with a multi word name 1`] = ` "import { render } from '@redwoodjs/testing/web' import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' @@ -516,7 +516,7 @@ describe('UserProfileCell', () => { " `; -exports[`Single word files creates a cell component with a single word name 1`] = ` +exports[`Single word files > creates a cell component with a single word name 1`] = ` "export const QUERY = gql\` query FindUserQuery($id: Int!) 
{ user: user(id: $id) { @@ -539,7 +539,7 @@ export const Success = ({ user }) => { " `; -exports[`Single word files creates a cell mock with a single word name 1`] = ` +exports[`Single word files > creates a cell mock with a single word name 1`] = ` "// Define your own mock data here: export const standard = (/* vars, { ctx, req } */) => ({ user: { @@ -549,7 +549,7 @@ export const standard = (/* vars, { ctx, req } */) => ({ " `; -exports[`Single word files creates a cell stories with a single word name 1`] = ` +exports[`Single word files > creates a cell stories with a single word name 1`] = ` "import { Loading, Empty, Failure, Success } from './UserCell' import { standard } from './UserCell.mock' @@ -586,7 +586,7 @@ export const success = { " `; -exports[`Single word files creates a cell test with a single word name 1`] = ` +exports[`Single word files > creates a cell test with a single word name 1`] = ` "import { render } from '@redwoodjs/testing/web' import { Loading, Empty, Failure, Success } from './UserCell' import { standard } from './UserCell.mock' @@ -631,7 +631,7 @@ describe('UserCell', () => { " `; -exports[`Snake case words creates a cell component with a snakeCase word name 1`] = ` +exports[`Snake case words > creates a cell component with a snakeCase word name 1`] = ` "export const QUERY = gql\` query FindUserProfileQuery($id: Int!) { userProfile: userProfile(id: $id) { @@ -654,7 +654,7 @@ export const Success = ({ userProfile }) => { " `; -exports[`Snake case words creates a cell mock with a snakeCase word name 1`] = ` +exports[`Snake case words > creates a cell mock with a snakeCase word name 1`] = ` "// Define your own mock data here: export const standard = (/* vars, { ctx, req } */) => ({ userProfile: { @@ -664,7 +664,7 @@ export const standard = (/* vars, { ctx, req } */) => ({ " `; -exports[`Snake case words creates a cell stories with a snakeCase word name 1`] = ` +exports[`Snake case words > creates a cell stories with a snakeCase word name 1`] = ` "import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' @@ -701,7 +701,7 @@ export const success = { " `; -exports[`Snake case words creates a cell test with a snakeCase word name 1`] = ` +exports[`Snake case words > creates a cell test with a snakeCase word name 1`] = ` "import { render } from '@redwoodjs/testing/web' import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' @@ -918,7 +918,7 @@ export const Success = ({ members }: CellSuccessProps) => { " `; -exports[`camelCase words creates a cell component with a camelCase word name 1`] = ` +exports[`camelCase words > creates a cell component with a camelCase word name 1`] = ` "export const QUERY = gql\` query FindUserProfileQuery($id: Int!) 
{ userProfile: userProfile(id: $id) { @@ -941,7 +941,7 @@ export const Success = ({ userProfile }) => { " `; -exports[`camelCase words creates a cell mock with a camelCase word name 1`] = ` +exports[`camelCase words > creates a cell mock with a camelCase word name 1`] = ` "// Define your own mock data here: export const standard = (/* vars, { ctx, req } */) => ({ userProfile: { @@ -951,7 +951,7 @@ export const standard = (/* vars, { ctx, req } */) => ({ " `; -exports[`camelCase words creates a cell stories with a camelCase word name 1`] = ` +exports[`camelCase words > creates a cell stories with a camelCase word name 1`] = ` "import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' @@ -988,7 +988,7 @@ export const success = { " `; -exports[`camelCase words creates a cell test with a camelCase word name 1`] = ` +exports[`camelCase words > creates a cell test with a camelCase word name 1`] = ` "import { render } from '@redwoodjs/testing/web' import { Loading, Empty, Failure, Success } from './UserProfileCell' import { standard } from './UserProfileCell.mock' diff --git a/packages/cli/src/commands/generate/cell/__tests__/cell.test.js b/packages/cli/src/commands/generate/cell/__tests__/cell.test.js index cd72445c048b..f77f53e160c9 100644 --- a/packages/cli/src/commands/generate/cell/__tests__/cell.test.js +++ b/packages/cli/src/commands/generate/cell/__tests__/cell.test.js @@ -1,11 +1,13 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, it, expect, test, beforeAll } from 'vitest' + // Load mocks import '../../../../lib/test' import * as cell from '../cell' -jest.mock('@redwoodjs/structure', () => { +vi.mock('@redwoodjs/structure', () => { return { getProject: () => ({ cells: [{ queryOperationName: 'AlreadyDefinedQueryName' }], diff --git a/packages/cli/src/commands/generate/component/__tests__/__snapshots__/component.test.ts.snap b/packages/cli/src/commands/generate/component/__tests__/__snapshots__/component.test.ts.snap index 3d2c13a8415d..3af4940c6757 100644 --- a/packages/cli/src/commands/generate/component/__tests__/__snapshots__/component.test.ts.snap +++ b/packages/cli/src/commands/generate/component/__tests__/__snapshots__/component.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html exports[`creates a TS component and test 1`] = ` "const TypescriptUser = () => { diff --git a/packages/cli/src/commands/generate/component/__tests__/component.test.ts b/packages/cli/src/commands/generate/component/__tests__/component.test.ts index 6df97bcb84cc..7e23a479e5d9 100644 --- a/packages/cli/src/commands/generate/component/__tests__/component.test.ts +++ b/packages/cli/src/commands/generate/component/__tests__/component.test.ts @@ -1,7 +1,8 @@ globalThis.__dirname = __dirname import path from 'path' -import yargs from 'yargs' +import { beforeAll, test, expect } from 'vitest' +import yargs from 'yargs/yargs' // Shared mocks for paths, etc. 
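// Sketch, not taken from this PR: the Jest __mocks__/@redwoodjs/structure.js file
// deleted earlier in this diff can be expressed inline with a vi.mock factory,
// with importOriginal standing in for jest.requireActual. Assumes only getProject
// needs overriding, as in the deleted manual mock.
import { vi } from 'vitest'

vi.mock('@redwoodjs/structure', async (importOriginal) => {
  const actual = await importOriginal()
  return {
    ...actual,
    // The generators check this flag automatically, so the mock pins it to
    // JavaScript mode for the tests.
    getProject: () => ({ isTypeScriptProject: false }),
  }
})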
import '../../../../lib/test' @@ -57,7 +58,7 @@ test('returns exactly 3 files', () => { }) test('keeps Component in name', () => { - const { name } = yargs + const { name } = yargs() .command('component ', false, component.builder) .parse('component BazingaComponent') diff --git a/packages/cli/src/commands/generate/dataMigration/__tests__/__snapshots__/dataMigration.test.js.snap b/packages/cli/src/commands/generate/dataMigration/__tests__/__snapshots__/dataMigration.test.js.snap index 59b39cd27169..4419632fcdf6 100644 --- a/packages/cli/src/commands/generate/dataMigration/__tests__/__snapshots__/dataMigration.test.js.snap +++ b/packages/cli/src/commands/generate/dataMigration/__tests__/__snapshots__/dataMigration.test.js.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html exports[`can generate a TS file with expected contents 1`] = ` "import type { PrismaClient } from '@prisma/client' diff --git a/packages/cli/src/commands/generate/dataMigration/__tests__/dataMigration.test.js b/packages/cli/src/commands/generate/dataMigration/__tests__/dataMigration.test.js index 0b322d7302d0..e2326a1e292f 100644 --- a/packages/cli/src/commands/generate/dataMigration/__tests__/dataMigration.test.js +++ b/packages/cli/src/commands/generate/dataMigration/__tests__/dataMigration.test.js @@ -2,6 +2,8 @@ globalThis.__dirname = __dirname import path from 'path' import '../../../../lib/test' +import { afterEach, test, expect } from 'vitest' + import * as generator from '../dataMigration' const asyncForEach = async (array, callback) => { diff --git a/packages/cli/src/commands/generate/dbAuth/__tests__/__snapshots__/dbAuth.test.js.snap b/packages/cli/src/commands/generate/dbAuth/__tests__/__snapshots__/dbAuth.test.js.snap index 2229d11ab60c..33504e3400bc 100644 --- a/packages/cli/src/commands/generate/dbAuth/__tests__/__snapshots__/dbAuth.test.js.snap +++ b/packages/cli/src/commands/generate/dbAuth/__tests__/__snapshots__/dbAuth.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`dbAuth handler exits when all files are skipped 1`] = ` +exports[`dbAuth > handler > exits when all files are skipped 1`] = ` [ " No files to generate. 
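// Sketch, not from the PR: the `yargs()` change just above reflects the move from
// the stateful `yargs` singleton to the `yargs/yargs` factory, which hands each
// test a fresh parser. `builder` here is an assumed stand-in for the generator's
// real builder export.
import { test, expect } from 'vitest'
import yargs from 'yargs/yargs'

const builder = (y) =>
  y.positional('name', { type: 'string', description: 'Name of the component' })

test('parses the positional name through the builder', () => {
  const { name } = yargs()
    .command('component <name>', false, builder)
    .parse('component BazingaComponent')

  expect(name).toEqual('BazingaComponent')
})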
@@ -8,7 +8,7 @@ exports[`dbAuth handler exits when all files are skipped 1`] = ` ] `; -exports[`dbAuth handler produces the correct files with custom password set via flag 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via flag 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -102,7 +102,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom password set via flag 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via flag 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -240,7 +240,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom password set via flag 3`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via flag 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -363,7 +363,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom password set via flag 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via flag 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -496,7 +496,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with custom password set via prompt 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via prompt 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -590,7 +590,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom password set via prompt 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via prompt 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -728,7 +728,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom password set via prompt 3`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via prompt 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -851,7 +851,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom password set via prompt 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom password set via prompt 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -984,7 +984,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via flag 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via flag 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -1078,7 +1078,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via flag 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via flag 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -1216,7 +1216,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via flag 3`] = ` 
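// Sketch of why every key in these .snap files is renamed: Vitest joins nested
// describe/test titles with " > " when it builds snapshot keys, where Jest joined
// them with plain spaces. A test shaped like this...
import { describe, it, expect } from 'vitest'

describe('dbAuth', () => {
  describe('handler', () => {
    it('exits when all files are skipped', () => {
      expect('No files to generate.').toMatchSnapshot()
    })
  })
})

// ...lands in the snapshot file under
//   exports[`dbAuth > handler > exits when all files are skipped 1`]
// so the snapshot bodies in these hunks are unchanged; only the keys (and the
// "Vitest Snapshot v1" header) move.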
+exports[`dbAuth > handler > produces the correct files with custom username and password set via flag 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -1339,7 +1339,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via flag 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via flag 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -1472,7 +1472,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -1566,7 +1566,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -1704,7 +1704,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt 3`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -1827,7 +1827,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -1960,7 +1960,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via flag 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt and with webauthn enabled via flag 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -2054,7 +2054,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via flag 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt and with webauthn enabled via flag 2`] = ` "import { useRef, useState } from 'react' import { useEffect } from 'react' @@ -2324,7 +2324,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via flag 3`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt and with webauthn enabled via flag 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -2447,7 +2447,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via flag 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via 
prompt and with webauthn enabled via flag 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -2580,7 +2580,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -2674,7 +2674,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -2812,7 +2812,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 3`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -2935,7 +2935,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom username and password set via prompt and with webauthn enabled via prompt 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -3068,7 +3068,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with custom username set via flag 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via flag 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -3166,7 +3166,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username set via flag 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via flag 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -3304,7 +3304,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom username set via flag 3`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via flag 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -3429,7 +3429,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username set via flag 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via flag 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -3562,7 +3562,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with custom username set via prompt 1`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via prompt 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -3660,7 +3660,7 @@ export 
default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username set via prompt 2`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via prompt 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -3798,7 +3798,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with custom username set via prompt 3`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via prompt 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -3923,7 +3923,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with custom username set via prompt 4`] = ` +exports[`dbAuth > handler > produces the correct files with custom username set via prompt 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -4056,7 +4056,7 @@ export default SignupPage " `; -exports[`dbAuth handler produces the correct files with default labels 1`] = ` +exports[`dbAuth > handler > produces the correct files with default labels 1`] = ` "import { useEffect, useRef } from 'react' import { Form, Label, TextField, Submit, FieldError } from '@redwoodjs/forms' @@ -4154,7 +4154,7 @@ export default ForgotPasswordPage " `; -exports[`dbAuth handler produces the correct files with default labels 2`] = ` +exports[`dbAuth > handler > produces the correct files with default labels 2`] = ` "import { useRef } from 'react' import { useEffect } from 'react' @@ -4292,7 +4292,7 @@ export default LoginPage " `; -exports[`dbAuth handler produces the correct files with default labels 3`] = ` +exports[`dbAuth > handler > produces the correct files with default labels 3`] = ` "import { useEffect, useRef, useState } from 'react' import { @@ -4417,7 +4417,7 @@ export default ResetPasswordPage " `; -exports[`dbAuth handler produces the correct files with default labels 4`] = ` +exports[`dbAuth > handler > produces the correct files with default labels 4`] = ` "import { useRef } from 'react' import { useEffect } from 'react' diff --git a/packages/cli/src/commands/generate/dbAuth/__tests__/dbAuth.test.js b/packages/cli/src/commands/generate/dbAuth/__tests__/dbAuth.test.js index 5031ad25c8a7..0050ee66add4 100644 --- a/packages/cli/src/commands/generate/dbAuth/__tests__/dbAuth.test.js +++ b/packages/cli/src/commands/generate/dbAuth/__tests__/dbAuth.test.js @@ -1,15 +1,17 @@ global.__dirname = __dirname -jest.mock('fs') +vi.mock('fs-extra') import path from 'path' // Load mocks import '../../../../lib/test' -const realfs = jest.requireActual('fs') +const realfs = await vi.importActual('fs-extra') import Enquirer from 'enquirer' import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, describe, it, expect, beforeEach } from 'vitest' import { getPaths } from '../../../../lib' import * as dbAuth from '../dbAuth' @@ -61,7 +63,8 @@ mockFiles[getPaths().web.app] = realfs describe('dbAuth', () => { beforeEach(() => { - fs.__setMockFiles(mockFiles) + vol.reset() + vol.fromJSON(mockFiles) }) it('creates a login page', () => { @@ -86,8 +89,8 @@ describe('dbAuth', () => { describe('handler', () => { it('exits when all files are skipped', async () => { - const mockExit = jest.spyOn(process, 'exit').mockImplementation() - const mockConsoleInfo = jest.spyOn(console, 'info').mockImplementation() + const mockExit = vi.spyOn(process, 'exit').mockImplementation() + const mockConsoleInfo = vi.spyOn(console, 'info').mockImplementation() 
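// Sketch, assumptions noted inline: the dbAuth test above swaps Jest's manual fs
// mock (fs.__setMockFiles) for memfs. The PR's bare vi.mock('fs-extra') presumably
// resolves to a manual mock elsewhere; the inline factory below is a minimal
// stand-in so the idea is self-contained.
import fs from 'fs-extra'
import { vol } from 'memfs'
import { vi, beforeEach, afterEach, test, expect } from 'vitest'

vi.mock('fs-extra', async () => {
  const memfs = await vi.importActual('memfs')
  // Route fs-extra's core fs API to the in-memory volume.
  return { ...memfs.fs, default: memfs.fs }
})

beforeEach(() => {
  vol.reset()
  // Seed the in-memory filesystem: keys are paths, values are file contents.
  vol.fromJSON({ '/redwood-app/web/src/Routes.jsx': '<Router></Router>\n' })
})

afterEach(() => {
  vol.reset()
  vi.restoreAllMocks()
})

test('reads seeded files back as strings', () => {
  // memfs returns a Buffer unless an encoding is given, which is why this diff
  // adds 'utf-8' to the readFileSync calls it touches.
  const routes = fs.readFileSync('/redwood-app/web/src/Routes.jsx', 'utf-8')
  expect(routes).toContain('<Router>')
})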
await dbAuth.handler({ listr2: { silentRendererCondition: true }, diff --git a/packages/cli/src/commands/generate/directive/__tests__/__snapshots__/directive.test.ts.snap b/packages/cli/src/commands/generate/directive/__tests__/__snapshots__/directive.test.ts.snap index c320bb9432bd..d8cf3b8f3821 100644 --- a/packages/cli/src/commands/generate/directive/__tests__/__snapshots__/directive.test.ts.snap +++ b/packages/cli/src/commands/generate/directive/__tests__/__snapshots__/directive.test.ts.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`creates a JavaScript validator directive: js directive 1`] = ` +exports[`creates a JavaScript validator directive > js directive 1`] = ` "import { createValidatorDirective } from '@redwoodjs/graphql-server' import { logger } from 'src/lib/logger' @@ -42,7 +42,7 @@ export default requireAdmin " `; -exports[`creates a JavaScript validator directive: js directive test 1`] = ` +exports[`creates a JavaScript validator directive > js directive test 1`] = ` "import { mockRedwoodDirective, getDirectiveName } from '@redwoodjs/testing/api' import requireAdmin from './requireAdmin' @@ -64,7 +64,7 @@ describe('requireAdmin directive', () => { " `; -exports[`creates a TypeScript transformer directive: ts directive 1`] = ` +exports[`creates a TypeScript transformer directive > ts directive 1`] = ` "import { createTransformerDirective, TransformerDirectiveFunc, @@ -109,7 +109,7 @@ export default bazingaFooBar " `; -exports[`creates a TypeScript transformer directive: ts directive test 1`] = ` +exports[`creates a TypeScript transformer directive > ts directive test 1`] = ` "import { mockRedwoodDirective, getDirectiveName } from '@redwoodjs/testing/api' import bazingaFooBar from './bazingaFooBar' diff --git a/packages/cli/src/commands/generate/directive/__tests__/directive.test.ts b/packages/cli/src/commands/generate/directive/__tests__/directive.test.ts index b8faabb31f97..dabf7a243b12 100644 --- a/packages/cli/src/commands/generate/directive/__tests__/directive.test.ts +++ b/packages/cli/src/commands/generate/directive/__tests__/directive.test.ts @@ -4,7 +4,8 @@ import '../../../../lib/test' import path from 'path' -import yargs from 'yargs' +import { test, expect } from 'vitest' +import yargs from 'yargs/yargs' import * as directive from '../directive' @@ -51,7 +52,7 @@ test('creates a TypeScript transformer directive', () => { }) test('keeps Directive in name', () => { - const { name } = yargs + const { name } = yargs() .command('directive ', false, directive.builder) .parse('directive BazingaDirective') diff --git a/packages/cli/src/commands/generate/function/__tests__/__snapshots__/function.test.ts.snap b/packages/cli/src/commands/generate/function/__tests__/__snapshots__/function.test.ts.snap index 76f30b5566e3..347b5cf901bd 100644 --- a/packages/cli/src/commands/generate/function/__tests__/__snapshots__/function.test.ts.snap +++ b/packages/cli/src/commands/generate/function/__tests__/__snapshots__/function.test.ts.snap @@ -1,41 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`Single word default files creates a single word function file 1`] = ` -"import { logger } from 'src/lib/logger' - -/** - * The handler function is your code that processes http request events. - * You can use return and throw to send a response or error, respectively. 
- * - * Important: When deployed, a custom serverless function is an open API endpoint and - * is your responsibility to secure appropriately. - * - * @see {@link https://redwoodjs.com/docs/serverless-functions#security-considerations|Serverless Function Considerations} - * in the RedwoodJS documentation for more information. - * - * @typedef { import('aws-lambda').APIGatewayEvent } APIGatewayEvent - * @typedef { import('aws-lambda').Context } Context - * @param { APIGatewayEvent } event - an object which contains information from the invoker. - * @param { Context } context - contains information about the invocation, - * function, and execution environment. - */ -export const handler = async (event, _context) => { - logger.info(\`\${event.httpMethod} \${event.path}: foo function\`) - - return { - statusCode: 200, - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - data: 'foo function', - }), - } -} -" -`; - -exports[`Single word default files creates a single word function file: Scenario snapshot 1`] = ` +exports[`Single word default files > creates a single word function file > Scenario snapshot 1`] = ` "export const standard = defineScenario({ // Define the "fixture" to write into your test database here // See guide: https://redwoodjs.com/docs/testing#scenarios @@ -43,7 +8,7 @@ exports[`Single word default files creates a single word function file: Scenario " `; -exports[`Single word default files creates a single word function file: Test snapshot 1`] = ` +exports[`Single word default files > creates a single word function file > Test snapshot 1`] = ` "import { mockHttpEvent } from '@redwoodjs/testing/api' import { handler } from './foo' @@ -76,6 +41,41 @@ describe('foo function', () => { " `; +exports[`Single word default files > creates a single word function file 1`] = ` +"import { logger } from 'src/lib/logger' + +/** + * The handler function is your code that processes http request events. + * You can use return and throw to send a response or error, respectively. + * + * Important: When deployed, a custom serverless function is an open API endpoint and + * is your responsibility to secure appropriately. + * + * @see {@link https://redwoodjs.com/docs/serverless-functions#security-considerations|Serverless Function Considerations} + * in the RedwoodJS documentation for more information. + * + * @typedef { import('aws-lambda').APIGatewayEvent } APIGatewayEvent + * @typedef { import('aws-lambda').Context } Context + * @param { APIGatewayEvent } event - an object which contains information from the invoker. + * @param { Context } context - contains information about the invocation, + * function, and execution environment. 
+ */ +export const handler = async (event, _context) => { + logger.info(\`\${event.httpMethod} \${event.path}: foo function\`) + + return { + statusCode: 200, + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + data: 'foo function', + }), + } +} +" +`; + exports[`creates a .js file if --javascript=true 1`] = ` "import { logger } from 'src/lib/logger' diff --git a/packages/cli/src/commands/generate/function/__tests__/function.test.ts b/packages/cli/src/commands/generate/function/__tests__/function.test.ts index 29abd3aa9dab..03ac2ddcd248 100644 --- a/packages/cli/src/commands/generate/function/__tests__/function.test.ts +++ b/packages/cli/src/commands/generate/function/__tests__/function.test.ts @@ -4,7 +4,8 @@ import '../../../../lib/test' import path from 'path' -import yargs from 'yargs' +import { describe, it, expect, test } from 'vitest' +import yargs from 'yargs/yargs' import * as functionGenerator from '../function' @@ -55,7 +56,7 @@ describe('Single word default files', () => { test('Keeps Function in name', () => { // @ts-expect-error Not sure how to pass generic to yargs here - const { name } = yargs + const { name } = yargs() .command('function ', false, functionGenerator.builder) .parse('function BazingaFunction') diff --git a/packages/cli/src/commands/generate/layout/__tests__/__snapshots__/layout.test.ts.snap b/packages/cli/src/commands/generate/layout/__tests__/__snapshots__/layout.test.ts.snap index f992b50aeda9..2fe095bbdd3d 100644 --- a/packages/cli/src/commands/generate/layout/__tests__/__snapshots__/layout.test.ts.snap +++ b/packages/cli/src/commands/generate/layout/__tests__/__snapshots__/layout.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html exports[`JavaScript: includes skip link when --skipLink is set to true 1`] = ` "import { SkipNavLink, SkipNavContent } from '@redwoodjs/router' @@ -27,7 +27,7 @@ export default A11yLayout " `; -exports[`Multi word default files creates a multi word layout component 1`] = ` +exports[`Multi word default files > creates a multi word layout component 1`] = ` "const SinglePageLayout = ({ children }) => { return <>{children} } @@ -36,7 +36,7 @@ export default SinglePageLayout " `; -exports[`Multi word default files creates a multi word layout test 1`] = ` +exports[`Multi word default files > creates a multi word layout test 1`] = ` "import { render } from '@redwoodjs/testing/web' import SinglePageLayout from './SinglePageLayout' @@ -54,7 +54,7 @@ describe('SinglePageLayout', () => { " `; -exports[`Multi word default files creates a multi word layout test 2`] = ` +exports[`Multi word default files > creates a multi word layout test 2`] = ` "import SinglePageLayout from './SinglePageLayout' const meta = { @@ -67,7 +67,7 @@ export const Primary = {} " `; -exports[`Single Word default files creates a single word layout component 1`] = ` +exports[`Single Word default files > creates a single word layout component 1`] = ` "const AppLayout = ({ children }) => { return <>{children} } @@ -76,7 +76,7 @@ export default AppLayout " `; -exports[`Single Word default files creates a single word layout stories 1`] = ` +exports[`Single Word default files > creates a single word layout stories 1`] = ` "import AppLayout from './AppLayout' const meta = { @@ -89,7 +89,7 @@ export const Primary = {} " `; -exports[`Single Word default files creates a single word layout test 1`] = ` +exports[`Single Word default files > creates a single word 
layout test 1`] = ` "import { render } from '@redwoodjs/testing/web' import AppLayout from './AppLayout' diff --git a/packages/cli/src/commands/generate/layout/__tests__/layout.test.ts b/packages/cli/src/commands/generate/layout/__tests__/layout.test.ts index 82decf5e227f..24cbbc4232c0 100644 --- a/packages/cli/src/commands/generate/layout/__tests__/layout.test.ts +++ b/packages/cli/src/commands/generate/layout/__tests__/layout.test.ts @@ -1,6 +1,8 @@ globalThis.__dirname = __dirname import path from 'path' +import { describe, test, it, expect } from 'vitest' + // Load shared mocks import '../../../../lib/test' diff --git a/packages/cli/src/commands/generate/page/__tests__/__snapshots__/page.test.js.snap b/packages/cli/src/commands/generate/page/__tests__/__snapshots__/page.test.js.snap index 7557712c9a30..29751dcdd8e2 100644 --- a/packages/cli/src/commands/generate/page/__tests__/__snapshots__/page.test.js.snap +++ b/packages/cli/src/commands/generate/page/__tests__/__snapshots__/page.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`Plural word files creates a page component with a plural word for name 1`] = ` +exports[`Plural word files > creates a page component with a plural word for name 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -25,7 +25,7 @@ export default CatsPage " `; -exports[`Single world files creates a page component 1`] = ` +exports[`Single world files > creates a page component 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -50,7 +50,7 @@ export default HomePage " `; -exports[`Single world files creates a page story 1`] = ` +exports[`Single world files > creates a page story 1`] = ` "import HomePage from './HomePage' const meta = { @@ -63,7 +63,7 @@ export const Primary = {} " `; -exports[`Single world files creates a page test 1`] = ` +exports[`Single world files > creates a page test 1`] = ` "import { render } from '@redwoodjs/testing/web' import HomePage from './HomePage' @@ -81,7 +81,7 @@ describe('HomePage', () => { " `; -exports[`TS Files TS Params 1`] = ` +exports[`TS Files > TS Params 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -112,7 +112,7 @@ export default TsParamFilesPage " `; -exports[`TS Files TS Params with type 1`] = ` +exports[`TS Files > TS Params with type 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -149,7 +149,7 @@ export default TsParamTypeFilesPage " `; -exports[`TS Files generates typescript pages 1`] = ` +exports[`TS Files > generates typescript pages 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -174,7 +174,7 @@ export default TsFilesPage " `; -exports[`TS Files generates typescript pages 2`] = ` +exports[`TS Files > generates typescript pages 2`] = ` "import type { Meta, StoryObj } from '@storybook/react' import TsFilesPage from './TsFilesPage' @@ -191,7 +191,7 @@ export const Primary: Story = {} " `; -exports[`TS Files generates typescript pages 3`] = ` +exports[`TS Files > generates typescript pages 3`] = ` "import { render } from '@redwoodjs/testing/web' import TsFilesPage from './TsFilesPage' @@ -209,7 +209,7 @@ describe('TsFilesPage', () => { " `; -exports[`handler file generation 1`] = ` +exports[`handler > file generation 1`] = ` { "fileContent": "import 
HomePage from './HomePage' @@ -225,7 +225,7 @@ export const Primary = {} } `; -exports[`handler file generation 2`] = ` +exports[`handler > file generation 2`] = ` { "fileContent": "import { render } from '@redwoodjs/testing/web' @@ -246,7 +246,7 @@ describe('HomePage', () => { } `; -exports[`handler file generation 3`] = ` +exports[`handler > file generation 3`] = ` { "fileContent": "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -274,7 +274,7 @@ export default HomePage } `; -exports[`handler file generation 4`] = ` +exports[`handler > file generation 4`] = ` { "fileContent": "import { Router, Route } from '@redwoodjs/router' @@ -293,7 +293,7 @@ export default Routes", } `; -exports[`handler file generation with route params 1`] = ` +exports[`handler > file generation with route params 1`] = ` { "fileContent": "import PostPage from './PostPage' @@ -309,7 +309,7 @@ export const Primary = {} } `; -exports[`handler file generation with route params 2`] = ` +exports[`handler > file generation with route params 2`] = ` { "fileContent": "import { render } from '@redwoodjs/testing/web' @@ -330,7 +330,7 @@ describe('PostPage', () => { } `; -exports[`handler file generation with route params 3`] = ` +exports[`handler > file generation with route params 3`] = ` { "fileContent": "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -359,7 +359,7 @@ export default PostPage } `; -exports[`handler file generation with route params 4`] = ` +exports[`handler > file generation with route params 4`] = ` { "fileContent": "import { Router, Route } from '@redwoodjs/router' @@ -378,7 +378,7 @@ export default Routes", } `; -exports[`multiWorldFiles creates a page component 1`] = ` +exports[`multiWorldFiles > creates a page component 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -403,7 +403,7 @@ export default ContactUsPage " `; -exports[`multiWorldFiles creates a page story 1`] = ` +exports[`multiWorldFiles > creates a page story 1`] = ` "import ContactUsPage from './ContactUsPage' const meta = { @@ -416,7 +416,7 @@ export const Primary = {} " `; -exports[`multiWorldFiles creates a test for a component with multiple words for a name 1`] = ` +exports[`multiWorldFiles > creates a test for a component with multiple words for a name 1`] = ` "import { render } from '@redwoodjs/testing/web' import ContactUsPage from './ContactUsPage' @@ -434,7 +434,7 @@ describe('ContactUsPage', () => { " `; -exports[`paramFiles creates a page component with params 1`] = ` +exports[`paramFiles > creates a page component with params 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Metadata } from '@redwoodjs/web' @@ -460,7 +460,7 @@ export default PostPage " `; -exports[`paramFiles creates a test for page component with params 1`] = ` +exports[`paramFiles > creates a test for page component with params 1`] = ` "import { render } from '@redwoodjs/testing/web' import PostPage from './PostPage' diff --git a/packages/cli/src/commands/generate/page/__tests__/page.test.js b/packages/cli/src/commands/generate/page/__tests__/page.test.js index 7836abe2f8dc..cc2d62eda128 100644 --- a/packages/cli/src/commands/generate/page/__tests__/page.test.js +++ b/packages/cli/src/commands/generate/page/__tests__/page.test.js @@ -3,35 +3,37 @@ globalThis.__dirname = __dirname globalThis.mockFs = false let mockFiles = {} -jest.mock('fs', () => { - const actual = jest.requireActual('fs') 
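// Sketch, simplified from the page.test.js hunk being rewritten here: when a
// module is consumed as `import fs from 'fs-extra'`, a Vitest mock factory must
// return the mocked surface under a `default` key, and importOriginal takes over
// the role of jest.requireActual.
import fs from 'fs-extra'
import { vi, test, expect } from 'vitest'

vi.mock('fs-extra', async (importOriginal) => {
  const actual = await importOriginal()
  return {
    default: {
      ...actual,
      // Override only what the test needs; everything else stays real.
      existsSync: vi.fn(() => false),
    },
  }
})

test('the default import picks up the override', () => {
  expect(vi.isMockFunction(fs.existsSync)).toBe(true)
  expect(fs.existsSync('/any/path')).toBe(false)
})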
+vi.mock('fs-extra', async (importOriginal) => { + const originalFsExtra = await importOriginal() return { - ...actual, - existsSync: (...args) => { - if (!globalThis.mockFs) { - return actual.existsSync.apply(null, args) - } - return false - }, - mkdirSync: (...args) => { - if (!globalThis.mockFs) { - return actual.mkdirSync.apply(null, args) - } - }, - writeFileSync: (target, contents) => { - if (!globalThis.mockFs) { - return actual.writeFileSync.call(null, target, contents) - } - }, - readFileSync: (path) => { - if (!globalThis.mockFs) { - return actual.readFileSync.call(null, path) - } - - const mockedContent = mockFiles[path] - - return mockedContent || actual.readFileSync.call(null, path) + default: { + ...originalFsExtra, + existsSync: (...args) => { + if (!globalThis.mockFs) { + return originalFsExtra.existsSync.apply(null, args) + } + return false + }, + mkdirSync: (...args) => { + if (!globalThis.mockFs) { + return originalFsExtra.mkdirSync.apply(null, args) + } + }, + writeFileSync: (target, contents) => { + if (!globalThis.mockFs) { + return originalFsExtra.writeFileSync.call(null, target, contents) + } + }, + readFileSync: (path) => { + if (!globalThis.mockFs) { + return originalFsExtra.readFileSync.call(null, path) + } + + const mockedContent = mockFiles[path] + + return mockedContent || originalFsExtra.readFileSync.call(null, path) + }, }, } }) @@ -39,6 +41,7 @@ jest.mock('fs', () => { import path from 'path' import fs from 'fs-extra' +import { vi, describe, it, test, expect, beforeEach, afterEach } from 'vitest' // Load mocks import '../../../../lib/test' @@ -339,8 +342,8 @@ test('paramVariants paramType defaults to string', () => { describe('handler', () => { beforeEach(() => { - jest.spyOn(console, 'info').mockImplementation(() => {}) - jest.spyOn(console, 'log').mockImplementation(() => {}) + vi.spyOn(console, 'info').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) }) afterEach(() => { @@ -366,7 +369,7 @@ describe('handler', () => { ].join('\n'), } - const spy = jest.spyOn(fs, 'writeFileSync') + const spy = vi.spyOn(fs, 'writeFileSync') globalThis.mockFs = true @@ -411,7 +414,7 @@ describe('handler', () => { ].join('\n'), } - const spy = jest.spyOn(fs, 'writeFileSync') + const spy = vi.spyOn(fs, 'writeFileSync') globalThis.mockFs = true await page.handler({ diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffold.test.js.snap b/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffold.test.js.snap index 4293c83e2c5d..3ccc477e41b3 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffold.test.js.snap +++ b/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffold.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`in javascript (default) mode creates a edit page 1`] = ` +exports[`in javascript (default) mode > creates a edit page 1`] = ` "import EditPostCell from 'src/components/Post/EditPostCell' const EditPostPage = ({ id }) => { @@ -11,7 +11,7 @@ export default EditPostPage " `; -exports[`in javascript (default) mode creates a form component 1`] = ` +exports[`in javascript (default) mode > creates a form component 1`] = ` "import { Form, FormError, @@ -273,7 +273,7 @@ export default PostForm " `; -exports[`in javascript (default) mode creates a formatters function file 1`] = ` +exports[`in javascript (default) mode > creates a formatters 
function file 1`] = ` "import React from 'react' import humanize from 'humanize-string' @@ -335,7 +335,7 @@ export const checkboxInputTag = (checked) => { " `; -exports[`in javascript (default) mode creates a formatters function test file 1`] = ` +exports[`in javascript (default) mode > creates a formatters function test file 1`] = ` "import { render, waitFor, screen } from '@redwoodjs/testing/web' import { @@ -531,7 +531,7 @@ describe('checkboxInputTag', () => { " `; -exports[`in javascript (default) mode creates a index page 1`] = ` +exports[`in javascript (default) mode > creates a index page 1`] = ` "import PostsCell from 'src/components/Post/PostsCell' const PostsPage = () => { @@ -542,7 +542,7 @@ export default PostsPage " `; -exports[`in javascript (default) mode creates a layout 1`] = ` +exports[`in javascript (default) mode > creates a layout 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Toaster } from '@redwoodjs/web/toast' @@ -575,7 +575,7 @@ export default ScaffoldLayout " `; -exports[`in javascript (default) mode creates a new component 1`] = ` +exports[`in javascript (default) mode > creates a new component 1`] = ` "import { navigate, routes } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -622,7 +622,7 @@ export default NewPost " `; -exports[`in javascript (default) mode creates a new component with int foreign keys converted in onSave 1`] = ` +exports[`in javascript (default) mode > creates a new component with int foreign keys converted in onSave 1`] = ` "import { navigate, routes } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -672,7 +672,7 @@ export default NewUserProfile " `; -exports[`in javascript (default) mode creates a new page 1`] = ` +exports[`in javascript (default) mode > creates a new page 1`] = ` "import NewPost from 'src/components/Post/NewPost' const NewPostPage = () => { @@ -683,7 +683,7 @@ export default NewPostPage " `; -exports[`in javascript (default) mode creates a show cell 1`] = ` +exports[`in javascript (default) mode > creates a show cell 1`] = ` "import Post from 'src/components/Post/Post' export const QUERY = gql\` @@ -720,7 +720,7 @@ export const Success = ({ post }) => { " `; -exports[`in javascript (default) mode creates a show component 1`] = ` +exports[`in javascript (default) mode > creates a show component 1`] = ` "import { Link, routes, navigate } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -841,7 +841,7 @@ export default Post " `; -exports[`in javascript (default) mode creates a show page 1`] = ` +exports[`in javascript (default) mode > creates a show page 1`] = ` "import PostCell from 'src/components/Post/PostCell' const PostPage = ({ id }) => { @@ -852,7 +852,7 @@ export default PostPage " `; -exports[`in javascript (default) mode creates a stylesheet 1`] = ` +exports[`in javascript (default) mode > creates a stylesheet 1`] = ` "/* normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */ @@ -1253,9 +1253,9 @@ exports[`in javascript (default) mode creates a stylesheet 1`] = ` " `; -exports[`in javascript (default) mode creates an edit cell 1`] = `undefined`; +exports[`in javascript (default) mode > creates an edit cell 1`] = `undefined`; -exports[`in javascript (default) mode creates an edit component with int foreign keys converted in onSave 1`] = ` +exports[`in javascript (default) mode > creates an edit component with int foreign keys converted in onSave 1`] = ` "import { navigate, routes } from '@redwoodjs/router' 
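// Sketch with hypothetical helper names; it mirrors the page generator handler
// tests earlier in this diff, which silence console output and assert against a
// writeFileSync spy instead of touching the disk.
import fs from 'fs-extra'
import { vi, beforeEach, afterEach, test, expect } from 'vitest'

// Stand-in for the real generator handler under test (assumption, illustration only).
async function generatePage({ name }) {
  fs.writeFileSync(
    `/web/src/pages/${name}Page/${name}Page.jsx`,
    `const ${name}Page = () => null\n\nexport default ${name}Page\n`
  )
}

beforeEach(() => {
  vi.spyOn(console, 'info').mockImplementation(() => {})
  vi.spyOn(console, 'log').mockImplementation(() => {})
})

afterEach(() => {
  vi.restoreAllMocks()
})

test('handler writes the expected files', async () => {
  // mockImplementation keeps the spy from hitting the real filesystem.
  const spy = vi.spyOn(fs, 'writeFileSync').mockImplementation(() => {})

  await generatePage({ name: 'Home' })

  expect(spy).toHaveBeenCalledTimes(1)
  const [target, contents] = spy.mock.calls[0]
  expect(target).toContain('HomePage')
  expect(contents).toContain('export default HomePage')
})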
import { useMutation } from '@redwoodjs/web' @@ -1331,7 +1331,7 @@ export const Success = ({ userProfile }) => { " `; -exports[`in javascript (default) mode creates an index cell 1`] = ` +exports[`in javascript (default) mode > creates an index cell 1`] = ` "import { Link, routes } from '@redwoodjs/router' import Posts from 'src/components/Post/Posts' @@ -1379,7 +1379,7 @@ export const Success = ({ posts }) => { " `; -exports[`in javascript (default) mode creates an index component 1`] = ` +exports[`in javascript (default) mode > creates an index component 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -1497,7 +1497,7 @@ export default PostsList " `; -exports[`in typescript mode creates a edit page 1`] = ` +exports[`in typescript mode > creates a edit page 1`] = ` "import EditPostCell from 'src/components/Post/EditPostCell' type PostPageProps = { @@ -1512,7 +1512,7 @@ export default EditPostPage " `; -exports[`in typescript mode creates a form component 1`] = ` +exports[`in typescript mode > creates a form component 1`] = ` "import type { EditPostById, UpdatePostInput } from 'types/graphql' import type { RWGqlError } from '@redwoodjs/forms' @@ -1786,7 +1786,7 @@ export default PostForm " `; -exports[`in typescript mode creates a formatters function file 1`] = ` +exports[`in typescript mode > creates a formatters function file 1`] = ` "import React from 'react' import humanize from 'humanize-string' @@ -1848,7 +1848,7 @@ export const checkboxInputTag = (checked: boolean) => { " `; -exports[`in typescript mode creates a formatters function test file 1`] = ` +exports[`in typescript mode > creates a formatters function test file 1`] = ` "import { render, waitFor, screen } from '@redwoodjs/testing/web' import { @@ -2044,7 +2044,7 @@ describe('checkboxInputTag', () => { " `; -exports[`in typescript mode creates a index page 1`] = ` +exports[`in typescript mode > creates a index page 1`] = ` "import PostsCell from 'src/components/Post/PostsCell' const PostsPage = () => { @@ -2055,7 +2055,7 @@ export default PostsPage " `; -exports[`in typescript mode creates a layout 1`] = ` +exports[`in typescript mode > creates a layout 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Toaster } from '@redwoodjs/web/toast' @@ -2096,7 +2096,7 @@ export default ScaffoldLayout " `; -exports[`in typescript mode creates a new component 1`] = ` +exports[`in typescript mode > creates a new component 1`] = ` "import type { CreatePostMutation, CreatePostInput, @@ -2152,7 +2152,7 @@ export default NewPost " `; -exports[`in typescript mode creates a new component with int foreign keys converted in onSave 1`] = ` +exports[`in typescript mode > creates a new component with int foreign keys converted in onSave 1`] = ` "import type { CreateUserProfileMutation, CreateUserProfileInput, @@ -2211,7 +2211,7 @@ export default NewUserProfile " `; -exports[`in typescript mode creates a new page 1`] = ` +exports[`in typescript mode > creates a new page 1`] = ` "import NewPost from 'src/components/Post/NewPost' const NewPostPage = () => { @@ -2222,7 +2222,7 @@ export default NewPostPage " `; -exports[`in typescript mode creates a show cell 1`] = ` +exports[`in typescript mode > creates a show cell 1`] = ` "import type { FindPostById, FindPostByIdVariables } from 'types/graphql' import type { @@ -2272,7 +2272,7 @@ export const Success = ({ " `; -exports[`in typescript mode creates a show component 1`] = ` +exports[`in typescript mode > creates a show component 
1`] = ` "import type { DeletePostMutation, DeletePostMutationVariables, @@ -2406,7 +2406,7 @@ export default Post " `; -exports[`in typescript mode creates a show page 1`] = ` +exports[`in typescript mode > creates a show page 1`] = ` "import PostCell from 'src/components/Post/PostCell' type PostPageProps = { @@ -2421,7 +2421,7 @@ export default PostPage " `; -exports[`in typescript mode creates a stylesheet 1`] = ` +exports[`in typescript mode > creates a stylesheet 1`] = ` "/* normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */ @@ -2822,7 +2822,7 @@ exports[`in typescript mode creates a stylesheet 1`] = ` " `; -exports[`in typescript mode creates an edit cell 1`] = ` +exports[`in typescript mode > creates an edit cell 1`] = ` "import type { EditPostById, UpdatePostInput, @@ -2920,7 +2920,7 @@ export const Success = ({ post }: CellSuccessProps) => { " `; -exports[`in typescript mode creates an edit component with int foreign keys converted in onSave 1`] = ` +exports[`in typescript mode > creates an edit component with int foreign keys converted in onSave 1`] = ` "import type { EditUserProfileById, UpdateUserProfileInput, @@ -3014,7 +3014,7 @@ export const Success = ({ " `; -exports[`in typescript mode creates an index cell 1`] = ` +exports[`in typescript mode > creates an index cell 1`] = ` "import type { FindPosts, FindPostsVariables } from 'types/graphql' import { Link, routes } from '@redwoodjs/router' @@ -3071,7 +3071,7 @@ export const Success = ({ " `; -exports[`in typescript mode creates an index component 1`] = ` +exports[`in typescript mode > creates an index component 1`] = ` "import type { DeletePostMutation, DeletePostMutationVariables, @@ -3198,7 +3198,7 @@ export default PostsList " `; -exports[`tailwind flag set to \`false\` generates a scaffold.css with raw CSS 1`] = ` +exports[`tailwind flag > set to \`false\` generates a scaffold.css with raw CSS 1`] = ` "/* normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */ @@ -3599,7 +3599,7 @@ exports[`tailwind flag set to \`false\` generates a scaffold.css with raw CSS 1` " `; -exports[`tailwind flag set to \`true\` generates a scaffold.css with Tailwind components 1`] = ` +exports[`tailwind flag > set to \`true\` generates a scaffold.css with Tailwind components 1`] = ` ".rw-scaffold { @apply bg-white text-gray-600; } diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffoldNoNest.test.js.snap b/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffoldNoNest.test.js.snap index 818e8bc936b4..ba8b0b735e86 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffoldNoNest.test.js.snap +++ b/packages/cli/src/commands/generate/scaffold/__tests__/__snapshots__/scaffoldNoNest.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`in javascript (default) mode creates a edit page 1`] = ` +exports[`in javascript (default) mode > creates a edit page 1`] = ` "import EditPostCell from 'src/components/EditPostCell' const EditPostPage = ({ id }) => { @@ -11,7 +11,7 @@ export default EditPostPage " `; -exports[`in javascript (default) mode creates a form component 1`] = ` +exports[`in javascript (default) mode > creates a form component 1`] = ` "import { Form, FormError, @@ -273,7 +273,7 @@ export default PostForm " `; -exports[`in javascript (default) mode creates a index page 1`] = ` +exports[`in javascript (default) mode > creates a 
index page 1`] = ` "import PostsCell from 'src/components/PostsCell' const PostsPage = () => { @@ -284,7 +284,7 @@ export default PostsPage " `; -exports[`in javascript (default) mode creates a layout 1`] = ` +exports[`in javascript (default) mode > creates a layout 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Toaster } from '@redwoodjs/web/toast' @@ -317,9 +317,9 @@ export default ScaffoldLayout " `; -exports[`in javascript (default) mode creates a new component 1`] = `undefined`; +exports[`in javascript (default) mode > creates a new component 1`] = `undefined`; -exports[`in javascript (default) mode creates a new component with int foreign keys converted in onSave 1`] = ` +exports[`in javascript (default) mode > creates a new component with int foreign keys converted in onSave 1`] = ` "import { navigate, routes } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -369,7 +369,7 @@ export default NewUserProfile " `; -exports[`in javascript (default) mode creates a new page 1`] = ` +exports[`in javascript (default) mode > creates a new page 1`] = ` "import NewPost from 'src/components/NewPost' const NewPostPage = () => { @@ -380,7 +380,7 @@ export default NewPostPage " `; -exports[`in javascript (default) mode creates a show cell 1`] = ` +exports[`in javascript (default) mode > creates a show cell 1`] = ` "import Post from 'src/components/Post' export const QUERY = gql\` @@ -417,7 +417,7 @@ export const Success = ({ post }) => { " `; -exports[`in javascript (default) mode creates a show component 1`] = ` +exports[`in javascript (default) mode > creates a show component 1`] = ` "import { Link, routes, navigate } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -538,7 +538,7 @@ export default Post " `; -exports[`in javascript (default) mode creates a show page 1`] = ` +exports[`in javascript (default) mode > creates a show page 1`] = ` "import PostCell from 'src/components/PostCell' const PostPage = ({ id }) => { @@ -549,7 +549,7 @@ export default PostPage " `; -exports[`in javascript (default) mode creates a stylesheet 1`] = ` +exports[`in javascript (default) mode > creates a stylesheet 1`] = ` "/* normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */ @@ -950,9 +950,9 @@ exports[`in javascript (default) mode creates a stylesheet 1`] = ` " `; -exports[`in javascript (default) mode creates an edit cell 1`] = `undefined`; +exports[`in javascript (default) mode > creates an edit cell 1`] = `undefined`; -exports[`in javascript (default) mode creates an edit component with int foreign keys converted in onSave 1`] = ` +exports[`in javascript (default) mode > creates an edit component with int foreign keys converted in onSave 1`] = ` "import { navigate, routes } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -1028,7 +1028,7 @@ export const Success = ({ userProfile }) => { " `; -exports[`in javascript (default) mode creates an index cell 1`] = ` +exports[`in javascript (default) mode > creates an index cell 1`] = ` "import { Link, routes } from '@redwoodjs/router' import Posts from 'src/components/Posts' @@ -1076,7 +1076,7 @@ export const Success = ({ posts }) => { " `; -exports[`in javascript (default) mode creates an index component 1`] = ` +exports[`in javascript (default) mode > creates an index component 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { useMutation } from '@redwoodjs/web' @@ -1194,7 +1194,7 @@ export default PostsList " `; -exports[`in typescript 
mode creates a edit page 1`] = ` +exports[`in typescript mode > creates a edit page 1`] = ` "import EditPostCell from 'src/components/EditPostCell' type PostPageProps = { @@ -1209,7 +1209,7 @@ export default EditPostPage " `; -exports[`in typescript mode creates a form component 1`] = ` +exports[`in typescript mode > creates a form component 1`] = ` "import type { EditPostById, UpdatePostInput } from 'types/graphql' import type { RWGqlError } from '@redwoodjs/forms' @@ -1483,7 +1483,7 @@ export default PostForm " `; -exports[`in typescript mode creates a index page 1`] = ` +exports[`in typescript mode > creates a index page 1`] = ` "import PostsCell from 'src/components/PostsCell' const PostsPage = () => { @@ -1494,7 +1494,7 @@ export default PostsPage " `; -exports[`in typescript mode creates a layout 1`] = ` +exports[`in typescript mode > creates a layout 1`] = ` "import { Link, routes } from '@redwoodjs/router' import { Toaster } from '@redwoodjs/web/toast' @@ -1535,9 +1535,9 @@ export default ScaffoldLayout " `; -exports[`in typescript mode creates a new component 1`] = `undefined`; +exports[`in typescript mode > creates a new component 1`] = `undefined`; -exports[`in typescript mode creates a new component with int foreign keys converted in onSave 1`] = ` +exports[`in typescript mode > creates a new component with int foreign keys converted in onSave 1`] = ` "import type { CreateUserProfileMutation, CreateUserProfileInput, @@ -1596,7 +1596,7 @@ export default NewUserProfile " `; -exports[`in typescript mode creates a new page 1`] = ` +exports[`in typescript mode > creates a new page 1`] = ` "import NewPost from 'src/components/NewPost' const NewPostPage = () => { @@ -1607,7 +1607,7 @@ export default NewPostPage " `; -exports[`in typescript mode creates a show cell 1`] = ` +exports[`in typescript mode > creates a show cell 1`] = ` "import type { FindPostById, FindPostByIdVariables } from 'types/graphql' import type { @@ -1657,7 +1657,7 @@ export const Success = ({ " `; -exports[`in typescript mode creates a show component 1`] = ` +exports[`in typescript mode > creates a show component 1`] = ` "import type { DeletePostMutation, DeletePostMutationVariables, @@ -1791,7 +1791,7 @@ export default Post " `; -exports[`in typescript mode creates a show page 1`] = ` +exports[`in typescript mode > creates a show page 1`] = ` "import PostCell from 'src/components/PostCell' type PostPageProps = { @@ -1806,7 +1806,7 @@ export default PostPage " `; -exports[`in typescript mode creates a stylesheet 1`] = ` +exports[`in typescript mode > creates a stylesheet 1`] = ` "/* normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */ @@ -2207,7 +2207,7 @@ exports[`in typescript mode creates a stylesheet 1`] = ` " `; -exports[`in typescript mode creates an edit cell 1`] = ` +exports[`in typescript mode > creates an edit cell 1`] = ` "import type { EditPostById, UpdatePostInput, @@ -2305,7 +2305,7 @@ export const Success = ({ post }: CellSuccessProps) => { " `; -exports[`in typescript mode creates an edit component with int foreign keys converted in onSave 1`] = ` +exports[`in typescript mode > creates an edit component with int foreign keys converted in onSave 1`] = ` "import type { EditUserProfileById, UpdateUserProfileInput, @@ -2399,7 +2399,7 @@ export const Success = ({ " `; -exports[`in typescript mode creates an index cell 1`] = ` +exports[`in typescript mode > creates an index cell 1`] = ` "import type { FindPosts, FindPostsVariables } from 'types/graphql' import { Link, routes } from 
'@redwoodjs/router' @@ -2456,7 +2456,7 @@ export const Success = ({ " `; -exports[`in typescript mode creates an index component 1`] = ` +exports[`in typescript mode > creates an index component 1`] = ` "import type { DeletePostMutation, DeletePostMutationVariables, diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/editableColumns.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/editableColumns.test.js index 23dc45553873..91409e62cdbc 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/editableColumns.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/editableColumns.test.js @@ -4,11 +4,13 @@ import path from 'path' // Load mocks import '../../../../lib/test' +import { vi, describe, beforeAll, test, expect } from 'vitest' + import { getDefaultArgs } from '../../../../lib' import { yargsDefaults as defaults } from '../../helpers' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('editable columns', () => { let files diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/scaffold.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffold.test.js index 0dd3b19681cc..7e63881aaf18 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffold.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffold.test.js @@ -1,6 +1,8 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, test, expect, beforeAll } from 'vitest' + // Load mocks import '../../../../lib/test' @@ -8,7 +10,7 @@ import { getDefaultArgs } from '../../../../lib' import { yargsDefaults as defaults } from '../../helpers' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('in javascript (default) mode', () => { let files diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldCustomIdName.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldCustomIdName.test.js index a138fae5ff9c..fdfc8a9e7fa1 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldCustomIdName.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldCustomIdName.test.js @@ -1,6 +1,8 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + // Load mocks import '../../../../lib/test' @@ -8,7 +10,7 @@ import { getDefaultArgs } from '../../../../lib' import { yargsDefaults as defaults } from '../../helpers' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('support custom @id name', () => { let files diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldNoNest.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldNoNest.test.js index ffd8290baf1e..d6d267f4a905 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldNoNest.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldNoNest.test.js @@ -1,6 +1,8 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + // Load mocks import '../../../../lib/test' @@ -8,7 +10,7 @@ import { getDefaultArgs } from '../../../../lib' import { yargsDefaults as defaults } from '../../helpers' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('in javascript (default) mode', () => { let files diff --git 
a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPath.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPath.test.js index 01b1b29e6b76..a62d34b52cea 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPath.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPath.test.js @@ -1,11 +1,13 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, it, expect } from 'vitest' + import '../../../../lib/test' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('admin/post', () => { let filesLower diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMulti.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMulti.test.js index b1b27e736cac..ee1f5e980514 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMulti.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMulti.test.js @@ -1,11 +1,13 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + import '../../../../lib/test' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('admin/pages/post', () => { let filesNestedLower diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiNoNest.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiNoNest.test.js index 3d5b462a7f71..c24383e6ebec 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiNoNest.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiNoNest.test.js @@ -1,11 +1,13 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + import '../../../../lib/test' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('admin/pages/post', () => { let filesNestedLower diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiword.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiword.test.js index 67bd4a6f71c6..4cf4a90a35d5 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiword.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiword.test.js @@ -1,11 +1,13 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + import '../../../../lib/test' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('AdminPages/Post', () => { let filesMultiwordUpper diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiwordNoNest.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiwordNoNest.test.js index 7d640a7d3d9c..f80468486808 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiwordNoNest.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathMultiwordNoNest.test.js @@ -1,11 +1,13 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + import '../../../../lib/test' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('AdminPages/Post', () => { let filesMultiwordUpper diff --git 
a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathNoNest.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathNoNest.test.js index 043eb483011b..0129fdda8076 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathNoNest.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/scaffoldPathNoNest.test.js @@ -1,11 +1,13 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + import '../../../../lib/test' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('admin/Post', () => { let filesLower diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseEmptyAsUndefined.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseEmptyAsUndefined.test.js index 10cec9fd9908..2335234e4966 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseEmptyAsUndefined.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseEmptyAsUndefined.test.js @@ -1,6 +1,8 @@ globalThis.__dirname = __dirname import path from 'path' +import { vi, describe, beforeAll, test, expect } from 'vitest' + // Load mocks import '../../../../lib/test' @@ -8,7 +10,7 @@ import { getDefaultArgs } from '../../../../lib' import { yargsDefaults as defaults } from '../../helpers' import * as scaffold from '../scaffold' -jest.mock('execa') +vi.mock('execa') describe('relational form field', () => { let form diff --git a/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseTailwindCSS.test.js b/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseTailwindCSS.test.js index 7af86d2d1794..9b4dbbabf338 100644 --- a/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseTailwindCSS.test.js +++ b/packages/cli/src/commands/generate/scaffold/__tests__/shouldUseTailwindCSS.test.js @@ -1,27 +1,27 @@ globalThis.__dirname = __dirname import fs from 'fs-extra' +import { vi, describe, expect, test, afterEach } from 'vitest' import '../../../../lib/test' import { shouldUseTailwindCSS } from '../scaffold' -jest.mock('fs', () => { - const fs = jest.requireActual('fs') - return { - ...fs, - existsSync: jest.fn(), - } +vi.mock('fs-extra') + +let existsSyncSpy = vi.spyOn(fs, 'existsSync') +afterEach(() => { + existsSyncSpy.mockClear() }) describe('with --tailwind flag not set', () => { test('having a tailwind config file present', () => { - fs.existsSync.mockReturnValue(true) + existsSyncSpy.mockReturnValue(true) expect(shouldUseTailwindCSS(undefined)).toEqual(true) }) test('not having a tailwind config file present', () => { - fs.existsSync.mockReturnValue(false) + existsSyncSpy.mockReturnValue(false) expect(shouldUseTailwindCSS(undefined)).toEqual(false) }) @@ -29,14 +29,14 @@ describe('with --tailwind flag not set', () => { describe('with --tailwind flag set', () => { test('having a tailwind config file', () => { - fs.existsSync.mockReturnValue(true) + existsSyncSpy.mockReturnValue(true) expect(shouldUseTailwindCSS(true)).toEqual(true) expect(shouldUseTailwindCSS(false)).toEqual(false) }) test('not having a tailwind config file present', () => { - fs.existsSync.mockReturnValue(false) + existsSyncSpy.mockReturnValue(false) expect(shouldUseTailwindCSS(true)).toEqual(true) expect(shouldUseTailwindCSS(false)).toEqual(false) diff --git a/packages/cli/src/commands/generate/script/__tests__/__snapshots__/script.test.ts.snap 
b/packages/cli/src/commands/generate/script/__tests__/__snapshots__/script.test.ts.snap index a3586e722eb2..ffde76735dc6 100644 --- a/packages/cli/src/commands/generate/script/__tests__/__snapshots__/script.test.ts.snap +++ b/packages/cli/src/commands/generate/script/__tests__/__snapshots__/script.test.ts.snap @@ -1,4 +1,4 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html exports[`creates a JavaScript function to execute 1`] = ` "// To access your database diff --git a/packages/cli/src/commands/generate/script/__tests__/script.test.ts b/packages/cli/src/commands/generate/script/__tests__/script.test.ts index 73744cec071b..5ecf120af207 100644 --- a/packages/cli/src/commands/generate/script/__tests__/script.test.ts +++ b/packages/cli/src/commands/generate/script/__tests__/script.test.ts @@ -4,12 +4,11 @@ import '../../../../lib/test' import path from 'path' +import { test, expect } from 'vitest' import yargs from 'yargs' import * as script from '../script' -beforeAll(() => {}) - test('creates a JavaScript function to execute', () => { const output = script.files({ name: 'scriptyMcScript', @@ -46,7 +45,7 @@ test('creates a TypeScript function to execute', () => { }) test('keeps Script in name', () => { - const { name } = yargs + const { name } = yargs() .command('script <name>', false, script.builder) .parse('script BazingaScript') diff --git a/packages/cli/src/commands/generate/sdl/__tests__/__snapshots__/sdl.test.js.snap b/packages/cli/src/commands/generate/sdl/__tests__/__snapshots__/sdl.test.js.snap index bbe1df8bc373..9459e6f4dd78 100644 --- a/packages/cli/src/commands/generate/sdl/__tests__/__snapshots__/sdl.test.js.snap +++ b/packages/cli/src/commands/generate/sdl/__tests__/__snapshots__/sdl.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`handler can be called with PascalCase model name 1`] = ` +exports[`handler > can be called with PascalCase model name 1`] = ` { "fileContent": "export const schema = gql\` type User { @@ -39,7 +39,7 @@ exports[`handler can be called with PascalCase model name 1`] = ` } `; -exports[`handler can be called with PascalCase model name 2`] = ` +exports[`handler > can be called with PascalCase model name 2`] = ` { "fileContent": "export const standard = defineScenario({ user: { @@ -52,7 +52,7 @@ exports[`handler can be called with PascalCase model name 2`] = ` } `; -exports[`handler can be called with PascalCase model name 3`] = ` +exports[`handler > can be called with PascalCase model name 3`] = ` { "fileContent": "import { users, user, createUser, updateUser, deleteUser } from './users' @@ -105,7 +105,7 @@ describe('users', () => { } `; -exports[`handler can be called with PascalCase model name 4`] = ` +exports[`handler > can be called with PascalCase model name 4`] = ` { "fileContent": "import { db } from 'src/lib/db' @@ -148,7 +148,7 @@ export const User = { } `; -exports[`handler can be called with PascalCase model name 5`] = ` +exports[`handler > can be called with PascalCase model name 5`] = ` { "fileContent": "export const schema = gql\` type CustomData { @@ -181,7 +181,7 @@ exports[`handler can be called with PascalCase model name 5`] = ` } `; -exports[`handler can be called with PascalCase model name 6`] = ` +exports[`handler > can be called with PascalCase model name 6`] = ` { "fileContent": "export const standard = defineScenario({ customData: { @@ -194,7 +194,7 @@ exports[`handler can be called
with PascalCase model name 6`] = ` } `; -exports[`handler can be called with PascalCase model name 7`] = ` +exports[`handler > can be called with PascalCase model name 7`] = ` { "fileContent": "import { customDatums, @@ -257,7 +257,7 @@ describe('customDatums', () => { } `; -exports[`handler can be called with PascalCase model name 8`] = ` +exports[`handler > can be called with PascalCase model name 8`] = ` { "fileContent": "import { db } from 'src/lib/db' @@ -294,7 +294,7 @@ export const deleteCustomData = ({ id }) => { } `; -exports[`handler can be called with camelCase model name 1`] = ` +exports[`handler > can be called with camelCase model name 1`] = ` { "fileContent": "export const schema = gql\` type User { @@ -333,7 +333,7 @@ exports[`handler can be called with camelCase model name 1`] = ` } `; -exports[`handler can be called with camelCase model name 2`] = ` +exports[`handler > can be called with camelCase model name 2`] = ` { "fileContent": "export const standard = defineScenario({ user: { @@ -346,7 +346,7 @@ exports[`handler can be called with camelCase model name 2`] = ` } `; -exports[`handler can be called with camelCase model name 3`] = ` +exports[`handler > can be called with camelCase model name 3`] = ` { "fileContent": "import { users, user, createUser, updateUser, deleteUser } from './users' @@ -399,7 +399,7 @@ describe('users', () => { } `; -exports[`handler can be called with camelCase model name 4`] = ` +exports[`handler > can be called with camelCase model name 4`] = ` { "fileContent": "import { db } from 'src/lib/db' @@ -442,7 +442,7 @@ export const User = { } `; -exports[`handler can be called with camelCase model name 5`] = ` +exports[`handler > can be called with camelCase model name 5`] = ` { "fileContent": "export const schema = gql\` type CustomData { @@ -475,7 +475,7 @@ exports[`handler can be called with camelCase model name 5`] = ` } `; -exports[`handler can be called with camelCase model name 6`] = ` +exports[`handler > can be called with camelCase model name 6`] = ` { "fileContent": "export const standard = defineScenario({ customData: { @@ -488,7 +488,7 @@ exports[`handler can be called with camelCase model name 6`] = ` } `; -exports[`handler can be called with camelCase model name 7`] = ` +exports[`handler > can be called with camelCase model name 7`] = ` { "fileContent": "import { customDatums, @@ -551,7 +551,7 @@ describe('customDatums', () => { } `; -exports[`handler can be called with camelCase model name 8`] = ` +exports[`handler > can be called with camelCase model name 8`] = ` { "fileContent": "import { db } from 'src/lib/db' @@ -588,7 +588,7 @@ export const deleteCustomData = ({ id }) => { } `; -exports[`with graphql documentations in javascript mode creates a multi word sdl file 1`] = ` +exports[`with graphql documentations > in javascript mode > creates a multi word sdl file 1`] = ` "export const schema = gql\` """ Representation of UserProfile. @@ -658,7 +658,7 @@ exports[`with graphql documentations in javascript mode creates a multi word sdl " `; -exports[`with graphql documentations in javascript mode creates a multi word sdl file with CRUD actions 1`] = ` +exports[`with graphql documentations > in javascript mode > creates a multi word sdl file with CRUD actions 1`] = ` "export const schema = gql\` """ Representation of UserProfile. 
@@ -728,7 +728,7 @@ exports[`with graphql documentations in javascript mode creates a multi word sdl " `; -exports[`with graphql documentations in javascript mode creates a sdl file with Byte definitions 1`] = ` +exports[`with graphql documentations > in javascript mode > creates a sdl file with Byte definitions 1`] = ` "export const schema = gql\` """ Representation of Key. @@ -785,7 +785,7 @@ exports[`with graphql documentations in javascript mode creates a sdl file with " `; -exports[`with graphql documentations in javascript mode creates a sdl file with enum definitions 1`] = ` +exports[`with graphql documentations > in javascript mode > creates a sdl file with enum definitions 1`] = ` "export const schema = gql\` """ A shoe worn by a user. @@ -851,7 +851,7 @@ exports[`with graphql documentations in javascript mode creates a sdl file with " `; -exports[`with graphql documentations in javascript mode creates a sdl file with json definitions 1`] = ` +exports[`with graphql documentations > in javascript mode > creates a sdl file with json definitions 1`] = ` "export const schema = gql\` """ A photograph taken by a user. @@ -917,7 +917,7 @@ exports[`with graphql documentations in javascript mode creates a sdl file with " `; -exports[`with graphql documentations in javascript mode creates a single word sdl file 1`] = ` +exports[`with graphql documentations > in javascript mode > creates a single word sdl file 1`] = ` "export const schema = gql\` """ Representation of User. @@ -978,7 +978,7 @@ exports[`with graphql documentations in javascript mode creates a single word sd " `; -exports[`with graphql documentations in javascript mode creates a single word sdl file with CRUD actions 1`] = ` +exports[`with graphql documentations > in javascript mode > creates a single word sdl file with CRUD actions 1`] = ` "export const schema = gql\` """ Representation of Post. @@ -1080,7 +1080,7 @@ exports[`with graphql documentations in javascript mode creates a single word sd " `; -exports[`with graphql documentations in typescript mode creates a multi word sdl file 1`] = ` +exports[`with graphql documentations > in typescript mode > creates a multi word sdl file 1`] = ` "export const schema = gql\` """ Representation of UserProfile. @@ -1150,7 +1150,7 @@ exports[`with graphql documentations in typescript mode creates a multi word sdl " `; -exports[`with graphql documentations in typescript mode creates a multi word sdl file with CRUD actions 1`] = ` +exports[`with graphql documentations > in typescript mode > creates a multi word sdl file with CRUD actions 1`] = ` "export const schema = gql\` """ Representation of UserProfile. @@ -1220,7 +1220,7 @@ exports[`with graphql documentations in typescript mode creates a multi word sdl " `; -exports[`with graphql documentations in typescript mode creates a sdl file with Byte definitions 1`] = ` +exports[`with graphql documentations > in typescript mode > creates a sdl file with Byte definitions 1`] = ` "export const schema = gql\` """ Representation of Key. @@ -1277,7 +1277,7 @@ exports[`with graphql documentations in typescript mode creates a sdl file with " `; -exports[`with graphql documentations in typescript mode creates a sdl file with enum definitions 1`] = ` +exports[`with graphql documentations > in typescript mode > creates a sdl file with enum definitions 1`] = ` "export const schema = gql\` """ A shoe worn by a user. 
@@ -1343,7 +1343,7 @@ exports[`with graphql documentations in typescript mode creates a sdl file with " `; -exports[`with graphql documentations in typescript mode creates a sdl file with json definitions 1`] = ` +exports[`with graphql documentations > in typescript mode > creates a sdl file with json definitions 1`] = ` "export const schema = gql\` """ A photograph taken by a user. @@ -1409,7 +1409,7 @@ exports[`with graphql documentations in typescript mode creates a sdl file with " `; -exports[`with graphql documentations in typescript mode creates a single word sdl file 1`] = ` +exports[`with graphql documentations > in typescript mode > creates a single word sdl file 1`] = ` "export const schema = gql\` """ Representation of User. @@ -1470,7 +1470,7 @@ exports[`with graphql documentations in typescript mode creates a single word sd " `; -exports[`with graphql documentations in typescript mode creates a single word sdl file with CRUD actions 1`] = ` +exports[`with graphql documentations > in typescript mode > creates a single word sdl file with CRUD actions 1`] = ` "export const schema = gql\` """ Representation of Post. @@ -1572,7 +1572,7 @@ exports[`with graphql documentations in typescript mode creates a single word sd " `; -exports[`without graphql documentations in javascript mode creates a multi word sdl file 1`] = ` +exports[`without graphql documentations > in javascript mode > creates a multi word sdl file 1`] = ` "export const schema = gql\` type UserProfile { id: Int! @@ -1606,7 +1606,7 @@ exports[`without graphql documentations in javascript mode creates a multi word " `; -exports[`without graphql documentations in javascript mode creates a multi word sdl file with CRUD actions 1`] = ` +exports[`without graphql documentations > in javascript mode > creates a multi word sdl file with CRUD actions 1`] = ` "export const schema = gql\` type UserProfile { id: Int! @@ -1640,7 +1640,7 @@ exports[`without graphql documentations in javascript mode creates a multi word " `; -exports[`without graphql documentations in javascript mode creates a sdl file with Byte definitions 1`] = ` +exports[`without graphql documentations > in javascript mode > creates a sdl file with Byte definitions 1`] = ` "export const schema = gql\` type Key { id: Int! @@ -1669,7 +1669,7 @@ exports[`without graphql documentations in javascript mode creates a sdl file wi " `; -exports[`without graphql documentations in javascript mode creates a sdl file with enum definitions 1`] = ` +exports[`without graphql documentations > in javascript mode > creates a sdl file with enum definitions 1`] = ` "export const schema = gql\` type Shoe { id: Int! @@ -1704,7 +1704,7 @@ exports[`without graphql documentations in javascript mode creates a sdl file wi " `; -exports[`without graphql documentations in javascript mode creates a sdl file with json definitions 1`] = ` +exports[`without graphql documentations > in javascript mode > creates a sdl file with json definitions 1`] = ` "export const schema = gql\` type Photo { id: Int! @@ -1736,7 +1736,7 @@ exports[`without graphql documentations in javascript mode creates a sdl file wi " `; -exports[`without graphql documentations in javascript mode creates a single word sdl file 1`] = ` +exports[`without graphql documentations > in javascript mode > creates a single word sdl file 1`] = ` "export const schema = gql\` type User { id: Int! 
@@ -1765,7 +1765,7 @@ exports[`without graphql documentations in javascript mode creates a single word " `; -exports[`without graphql documentations in javascript mode creates a single word sdl file with CRUD actions 1`] = ` +exports[`without graphql documentations > in javascript mode > creates a single word sdl file with CRUD actions 1`] = ` "export const schema = gql\` type Post { id: Int! @@ -1809,7 +1809,7 @@ exports[`without graphql documentations in javascript mode creates a single word " `; -exports[`without graphql documentations in typescript mode creates a multi word sdl file 1`] = ` +exports[`without graphql documentations > in typescript mode > creates a multi word sdl file 1`] = ` "export const schema = gql\` type UserProfile { id: Int! @@ -1843,7 +1843,7 @@ exports[`without graphql documentations in typescript mode creates a multi word " `; -exports[`without graphql documentations in typescript mode creates a multi word sdl file with CRUD actions 1`] = ` +exports[`without graphql documentations > in typescript mode > creates a multi word sdl file with CRUD actions 1`] = ` "export const schema = gql\` type UserProfile { id: Int! @@ -1877,7 +1877,7 @@ exports[`without graphql documentations in typescript mode creates a multi word " `; -exports[`without graphql documentations in typescript mode creates a sdl file with Byte definitions 1`] = ` +exports[`without graphql documentations > in typescript mode > creates a sdl file with Byte definitions 1`] = ` "export const schema = gql\` type Key { id: Int! @@ -1906,7 +1906,7 @@ exports[`without graphql documentations in typescript mode creates a sdl file wi " `; -exports[`without graphql documentations in typescript mode creates a sdl file with enum definitions 1`] = ` +exports[`without graphql documentations > in typescript mode > creates a sdl file with enum definitions 1`] = ` "export const schema = gql\` type Shoe { id: Int! @@ -1941,7 +1941,7 @@ exports[`without graphql documentations in typescript mode creates a sdl file wi " `; -exports[`without graphql documentations in typescript mode creates a sdl file with json definitions 1`] = ` +exports[`without graphql documentations > in typescript mode > creates a sdl file with json definitions 1`] = ` "export const schema = gql\` type Photo { id: Int! @@ -1973,7 +1973,7 @@ exports[`without graphql documentations in typescript mode creates a sdl file wi " `; -exports[`without graphql documentations in typescript mode creates a single word sdl file 1`] = ` +exports[`without graphql documentations > in typescript mode > creates a single word sdl file 1`] = ` "export const schema = gql\` type User { id: Int! @@ -2002,7 +2002,7 @@ exports[`without graphql documentations in typescript mode creates a single word " `; -exports[`without graphql documentations in typescript mode creates a single word sdl file with CRUD actions 1`] = ` +exports[`without graphql documentations > in typescript mode > creates a single word sdl file with CRUD actions 1`] = ` "export const schema = gql\` type Post { id: Int! 
diff --git a/packages/cli/src/commands/generate/sdl/__tests__/sdl.test.js b/packages/cli/src/commands/generate/sdl/__tests__/sdl.test.js index 77ec6be07efc..fccd76153bb4 100644 --- a/packages/cli/src/commands/generate/sdl/__tests__/sdl.test.js +++ b/packages/cli/src/commands/generate/sdl/__tests__/sdl.test.js @@ -1,25 +1,42 @@ globalThis.__dirname = __dirname globalThis.mockFs = false +const mockFiles = {} -jest.mock('fs', () => { - const actual = jest.requireActual('fs') - +vi.mock('fs-extra', async (importOriginal) => { + const originalFsExtra = await importOriginal() return { - ...actual, - mkdirSync: (...args) => { - if (globalThis.mockFs) { - return - } + default: { + ...originalFsExtra, + existsSync: (...args) => { + if (!globalThis.mockFs) { + return originalFsExtra.existsSync.apply(null, args) + } + return false + }, + mkdirSync: (...args) => { + if (globalThis.mockFs) { + return + } - return actual.mkdirSync.apply(null, args) - }, - writeFileSync: (target, contents) => { - if (globalThis.mockFs) { - return - } + return originalFsExtra.mkdirSync.apply(null, args) + }, + writeFileSync: (target, contents) => { + if (globalThis.mockFs) { + return + } + + return originalFsExtra.writeFileSync.call(null, target, contents) + }, + readFileSync: (path) => { + if (!globalThis.mockFs) { + return originalFsExtra.readFileSync.call(null, path) + } + + const mockedContent = mockFiles[path] - return actual.writeFileSync.call(null, target, contents) + return mockedContent || originalFsExtra.readFileSync.call(null, path) + }, }, } }) @@ -28,6 +45,7 @@ import path from 'path' import fs from 'fs-extra' import prompts from 'prompts' +import { vi, afterEach, test, expect, describe } from 'vitest' // Load mocks import '../../../../lib/test' @@ -38,7 +56,7 @@ import { getDefaultArgs } from '../../../../lib' import * as sdl from '../sdl' afterEach(() => { - jest.clearAllMocks() + vi.clearAllMocks() }) const extensionForBaseArgs = (baseArgs) => @@ -294,7 +312,7 @@ describe('with graphql documentations', () => { describe('handler', () => { const canBeCalledWithGivenModelName = (letterCase, model) => { test(`can be called with ${letterCase} model name`, async () => { - const spy = jest.spyOn(fs, 'writeFileSync') + const spy = vi.spyOn(fs, 'writeFileSync') globalThis.mockFs = true diff --git a/packages/cli/src/commands/generate/secret/__tests__/secret.test.js b/packages/cli/src/commands/generate/secret/__tests__/secret.test.js index 417f52f9cadd..faa6c6b445ee 100644 --- a/packages/cli/src/commands/generate/secret/__tests__/secret.test.js +++ b/packages/cli/src/commands/generate/secret/__tests__/secret.test.js @@ -1,4 +1,5 @@ -import yargs from 'yargs' +import { describe, it, expect } from 'vitest' +import yargs from 'yargs/yargs' import { DEFAULT_LENGTH, @@ -35,7 +36,7 @@ describe('generateSecret', () => { console.info = (...args) => (output += args.join(' ') + '\n') process.stdout.write = (str) => (output += str) - const { raw } = yargs + const { raw } = yargs() .command('secret', false, builder, handler) .parse('secret --raw') diff --git a/packages/cli/src/commands/generate/service/__tests__/__snapshots__/service.test.js.snap b/packages/cli/src/commands/generate/service/__tests__/__snapshots__/service.test.js.snap index 4ffa78e04fc4..33c5bfeaa721 100644 --- a/packages/cli/src/commands/generate/service/__tests__/__snapshots__/service.test.js.snap +++ b/packages/cli/src/commands/generate/service/__tests__/__snapshots__/service.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest 
Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`in javascript mode creates a multi word service file 1`] = ` +exports[`in javascript mode > creates a multi word service file 1`] = ` "import { db } from 'src/lib/db' export const userProfiles = () => { @@ -9,7 +9,7 @@ export const userProfiles = () => { " `; -exports[`in javascript mode creates a multi word service test file 1`] = ` +exports[`in javascript mode > creates a multi word service test file 1`] = ` "import { userProfiles } from './userProfiles' // Generated boilerplate tests do not account for all circumstances @@ -28,7 +28,7 @@ describe('userProfiles', () => { " `; -exports[`in javascript mode creates a multi word service test file with crud actions and only foreign as mandatory field 1`] = ` +exports[`in javascript mode > creates a multi word service test file with crud actions and only foreign as mandatory field 1`] = ` "import { transactions, transaction, @@ -88,7 +88,7 @@ describe('transactions', () => { " `; -exports[`in javascript mode creates a multi word service test file with multiple scalar types 1`] = ` +exports[`in javascript mode > creates a multi word service test file with multiple scalar types 1`] = ` "import { scalarTypes, scalarType, @@ -158,7 +158,7 @@ describe('scalarTypes', () => { " `; -exports[`in javascript mode creates a single word service file 1`] = ` +exports[`in javascript mode > creates a single word service file 1`] = ` "import { db } from 'src/lib/db' export const users = () => { @@ -192,7 +192,7 @@ export const deleteUser = ({ id }) => { " `; -exports[`in javascript mode creates a single word service file with CRUD actions 1`] = ` +exports[`in javascript mode > creates a single word service file with CRUD actions 1`] = ` "import { db } from 'src/lib/db' export const posts = () => { @@ -226,7 +226,7 @@ export const deletePost = ({ id }) => { " `; -exports[`in javascript mode creates a single word service file with a belongsTo relation 1`] = ` +exports[`in javascript mode > creates a single word service file with a belongsTo relation 1`] = ` "import { db } from 'src/lib/db' export const users = () => { @@ -247,7 +247,7 @@ export const User = { " `; -exports[`in javascript mode creates a single word service file with a hasMany relation 1`] = ` +exports[`in javascript mode > creates a single word service file with a hasMany relation 1`] = ` "import { db } from 'src/lib/db' export const users = () => { @@ -268,7 +268,7 @@ export const User = { " `; -exports[`in javascript mode creates a single word service file with multiple relations 1`] = ` +exports[`in javascript mode > creates a single word service file with multiple relations 1`] = ` "import { db } from 'src/lib/db' export const users = () => { @@ -292,7 +292,7 @@ export const User = { " `; -exports[`in javascript mode creates a single word service scenario file 1`] = ` +exports[`in javascript mode > creates a single word service scenario file 1`] = ` "export const standard = defineScenario({ user: { one: { data: { email: 'String1234567' } }, @@ -302,7 +302,7 @@ exports[`in javascript mode creates a single word service scenario file 1`] = ` " `; -exports[`in javascript mode creates a single word service test file 1`] = ` +exports[`in javascript mode > creates a single word service test file 1`] = ` "import { users, user, createUser, updateUser, deleteUser } from './users' // Generated boilerplate tests do not account for all circumstances @@ -352,7 +352,7 @@ describe('users', () => { " `; -exports[`in typescript mode creates a multi 
word service file 1`] = ` +exports[`in typescript mode > creates a multi word service file 1`] = ` "import type { QueryResolvers } from 'types/graphql' import { db } from 'src/lib/db' @@ -363,7 +363,7 @@ export const userProfiles: QueryResolvers['userProfiles'] = () => { " `; -exports[`in typescript mode creates a multi word service test file 1`] = ` +exports[`in typescript mode > creates a multi word service test file 1`] = ` "import type { UserProfile } from '@prisma/client' import { userProfiles } from './userProfiles' @@ -385,7 +385,7 @@ describe('userProfiles', () => { " `; -exports[`in typescript mode creates a multi word service test file with crud actions and only foreign as mandatory field 1`] = ` +exports[`in typescript mode > creates a multi word service test file with crud actions and only foreign as mandatory field 1`] = ` "import type { Transaction } from '@prisma/client' import { @@ -451,7 +451,7 @@ describe('transactions', () => { " `; -exports[`in typescript mode creates a multi word service test file with multiple scalar types 1`] = ` +exports[`in typescript mode > creates a multi word service test file with multiple scalar types 1`] = ` "import type { ScalarType } from '@prisma/client' import { @@ -527,7 +527,7 @@ describe('scalarTypes', () => { " `; -exports[`in typescript mode creates a single word service file 1`] = ` +exports[`in typescript mode > creates a single word service file 1`] = ` "import type { QueryResolvers, MutationResolvers } from 'types/graphql' import { db } from 'src/lib/db' @@ -563,7 +563,7 @@ export const deleteUser: MutationResolvers['deleteUser'] = ({ id }) => { " `; -exports[`in typescript mode creates a single word service file with CRUD actions 1`] = ` +exports[`in typescript mode > creates a single word service file with CRUD actions 1`] = ` "import type { QueryResolvers, MutationResolvers } from 'types/graphql' import { db } from 'src/lib/db' @@ -599,7 +599,7 @@ export const deletePost: MutationResolvers['deletePost'] = ({ id }) => { " `; -exports[`in typescript mode creates a single word service file with a belongsTo relation 1`] = ` +exports[`in typescript mode > creates a single word service file with a belongsTo relation 1`] = ` "import type { QueryResolvers, UserRelationResolvers } from 'types/graphql' import { db } from 'src/lib/db' @@ -622,7 +622,7 @@ export const User: UserRelationResolvers = { " `; -exports[`in typescript mode creates a single word service file with a hasMany relation 1`] = ` +exports[`in typescript mode > creates a single word service file with a hasMany relation 1`] = ` "import type { QueryResolvers, UserRelationResolvers } from 'types/graphql' import { db } from 'src/lib/db' @@ -645,7 +645,7 @@ export const User: UserRelationResolvers = { " `; -exports[`in typescript mode creates a single word service file with multiple relations 1`] = ` +exports[`in typescript mode > creates a single word service file with multiple relations 1`] = ` "import type { QueryResolvers, UserRelationResolvers } from 'types/graphql' import { db } from 'src/lib/db' @@ -671,7 +671,7 @@ export const User: UserRelationResolvers = { " `; -exports[`in typescript mode creates a single word service scenario file 1`] = ` +exports[`in typescript mode > creates a single word service scenario file 1`] = ` "import type { Prisma, User } from '@prisma/client' import type { ScenarioData } from '@redwoodjs/testing/api' @@ -686,7 +686,7 @@ export type StandardScenario = ScenarioData " `; -exports[`in typescript mode creates a single word service test file 
1`] = ` +exports[`in typescript mode > creates a single word service test file 1`] = ` "import type { User } from '@prisma/client' import { users, user, createUser, updateUser, deleteUser } from './users' diff --git a/packages/cli/src/commands/generate/service/__tests__/scenario.test.js b/packages/cli/src/commands/generate/service/__tests__/scenario.test.js index a505c3be7aeb..46f94e6f0831 100644 --- a/packages/cli/src/commands/generate/service/__tests__/scenario.test.js +++ b/packages/cli/src/commands/generate/service/__tests__/scenario.test.js @@ -2,6 +2,8 @@ globalThis.__dirname = __dirname // Load mocks import '../../../../lib/test' +import { describe, test, expect } from 'vitest' + import * as service from '../service' describe('the scenario generator', () => { diff --git a/packages/cli/src/commands/generate/service/__tests__/service.test.js b/packages/cli/src/commands/generate/service/__tests__/service.test.js index b7de2380837f..87175e775bb9 100644 --- a/packages/cli/src/commands/generate/service/__tests__/service.test.js +++ b/packages/cli/src/commands/generate/service/__tests__/service.test.js @@ -1,7 +1,8 @@ globalThis.__dirname = __dirname import path from 'path' -import yargs from 'yargs' +import { vi, beforeAll, afterAll, test, expect, describe, it } from 'vitest' +import yargs from 'yargs/yargs' // Load mocks import '../../../../lib/test' @@ -10,12 +11,12 @@ import { getDefaultArgs } from '../../../../lib' import * as service from '../service' beforeAll(() => { - jest.useFakeTimers() - jest.setSystemTime(new Date('2022-09-30T09:50:00.000Z')) + vi.useFakeTimers() + vi.setSystemTime(new Date('2022-09-30T09:50:00.000Z')) }) afterAll(() => { - jest.useRealTimers() + vi.useRealTimers() }) const extensionForBaseArgs = (baseArgs) => @@ -293,7 +294,7 @@ const itCreatesAMultiWordServiceTestFileWithCRUDAndOnlyForeignKeyRequired = ( } test('keeps Service in name', () => { - const { name } = yargs + const { name } = yargs() .command('service <name>', false, service.builder) .parse('service BazingaService') diff --git a/packages/cli/src/commands/setup/deploy/__tests__/__snapshots__/netlify.test.js.snap b/packages/cli/src/commands/setup/deploy/__tests__/__snapshots__/netlify.test.js.snap index 10ceee2e1564..70a54481a262 100644 --- a/packages/cli/src/commands/setup/deploy/__tests__/__snapshots__/netlify.test.js.snap +++ b/packages/cli/src/commands/setup/deploy/__tests__/__snapshots__/netlify.test.js.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`netlify should add netlify.toml 1`] = ` +exports[`netlify > should add netlify.toml 1`] = ` "[build] command = "yarn rw deploy netlify" publish = "web/dist" @@ -30,7 +30,7 @@ exports[`netlify should add netlify.toml 1`] = ` " `; -exports[`netlify should call the handler without error 1`] = ` +exports[`netlify > should call the handler without error 1`] = ` "[build] command = "yarn rw deploy netlify" publish = "web/dist" diff --git a/packages/cli/src/commands/setup/deploy/__tests__/netlify.test.js b/packages/cli/src/commands/setup/deploy/__tests__/netlify.test.js index 98c45ed54fe1..10203da45d80 100644 --- a/packages/cli/src/commands/setup/deploy/__tests__/netlify.test.js +++ b/packages/cli/src/commands/setup/deploy/__tests__/netlify.test.js @@ -1,19 +1,18 @@ -// Automock fs using ../..../__mocks__/fs -jest.mock('fs') +vi.mock('fs-extra') import path from 'path' import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, describe, it, expect, beforeEach }
from 'vitest' +import '../../../../lib/test' import { getPaths } from '../../../../lib' import { updateApiURLTask } from '../helpers' // Mock telemetry and other things -import '../../../../lib/test' -jest.mock('../../../../lib', () => { - const path = jest.requireActual('path') - - const { printSetupNotes } = jest.requireActual('../../../../lib') +vi.mock('../../../../lib', async (importOriginal) => { + const { printSetupNotes } = await importOriginal() return { printSetupNotes, @@ -40,7 +39,9 @@ jest.mock('../../../../lib', () => { // interpreted as a new-line. And need to use double backslashes, so // that one "survives" into the regexp expect(keys[0]).toMatch(new RegExp(`\\${path.sep}netlify.toml$`)) - expect(fileNameToContentMap[keys[0]]).toMatchSnapshot() + for (const key of keys) { + fs.writeFileSync(key, fileNameToContentMap[key]) + } }, } }) @@ -48,7 +49,7 @@ jest.mock('../../../../lib', () => { const REDWOOD_TOML_PATH = path.join(getPaths().base, 'redwood.toml') beforeEach(() => { - fs.__setMockFiles({ + vol.fromJSON({ [REDWOOD_TOML_PATH]: `[web] title = "Redwood App" port = 8910 @@ -64,8 +65,21 @@ beforeEach(() => { describe('netlify', () => { it('should call the handler without error', async () => { - const netlify = require('../providers/netlify') - expect(async () => await netlify.handler({ force: true })).not.toThrow() + const netlify = await import('../providers/netlify') + + let error = undefined + try { + await netlify.handler({ force: true }) + } catch (err) { + error = err + } + expect(error).toBeUndefined() + const filesystem = vol.toJSON() + const netlifyTomlPath = Object.keys(filesystem).find((path) => + path.endsWith('netlify.toml') + ) + expect(netlifyTomlPath).toBeDefined() + expect(filesystem[netlifyTomlPath]).toMatchSnapshot() }) it('Should update redwood.toml apiUrl', () => { @@ -77,8 +91,14 @@ describe('netlify', () => { }) it('should add netlify.toml', async () => { - const netlify = require('../providers/netlify') + const netlify = await import('../providers/netlify') await netlify.handler({ force: true }) - // Will be verified by a snapshot up above in the mocked `writeFilesTask` + + const filesystem = vol.toJSON() + const netlifyTomlPath = Object.keys(filesystem).find((path) => + path.endsWith('netlify.toml') + ) + expect(netlifyTomlPath).toBeDefined() + expect(filesystem[netlifyTomlPath]).toMatchSnapshot() }) }) diff --git a/packages/cli/src/commands/setup/graphiql/__tests__/graphiqlHandler.test.js b/packages/cli/src/commands/setup/graphiql/__tests__/graphiqlHandler.test.js index 9a366643706f..d04caf8d012b 100644 --- a/packages/cli/src/commands/setup/graphiql/__tests__/graphiqlHandler.test.js +++ b/packages/cli/src/commands/setup/graphiql/__tests__/graphiqlHandler.test.js @@ -2,35 +2,34 @@ globalThis.__dirname = __dirname import '../../../../lib/mockTelemetry' -jest.mock('@redwoodjs/babel-config', () => { +vi.mock('@redwoodjs/babel-config', () => { return { registerApiSideBabelHook: () => null, } }) -jest.mock('../../../../lib', () => ({ +vi.mock('../../../../lib', () => ({ getPaths: () => ({ api: { lib: '', functions: '' }, }), existsAnyExtensionSync: () => false, })) -jest.mock('../../../../lib/project', () => ({ +vi.mock('../../../../lib/project', () => ({ isTypeScriptProject: () => false, })) -jest.mock('listr2') +vi.mock('listr2') import chalk from 'chalk' import { Listr } from 'listr2' +import { vi, describe, it, afterEach, expect } from 'vitest' import * as graphiqlHandler from '../graphiqlHandler' import * as graphiqlHelpers from 
'../graphiqlHelpers' describe('Graphiql generator tests', () => { - const processExitSpy = jest - .spyOn(process, 'exit') - .mockImplementation(() => {}) - const cSpy = jest.spyOn(console, 'error').mockImplementation(() => {}) + const processExitSpy = vi.spyOn(process, 'exit').mockImplementation(() => {}) + const cSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) - const mockListrRun = jest.fn() + const mockListrRun = vi.fn() Listr.mockImplementation(() => { return { run: mockListrRun, @@ -43,7 +42,7 @@ describe('Graphiql generator tests', () => { }) it('throws an error if source path does not exist when viewing headers', async () => { - jest.spyOn(graphiqlHelpers, 'getOutputPath').mockImplementation(() => '') + vi.spyOn(graphiqlHelpers, 'getOutputPath').mockImplementation(() => '') await graphiqlHandler.handler({ view: true, provider: 'dbAuth' }) expect(console.error).toHaveBeenCalledWith( chalk.bold.red( diff --git a/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appGqlConfigTransform.test.ts b/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appGqlConfigTransform.test.ts index 56397b430698..0c2e81deb83f 100644 --- a/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appGqlConfigTransform.test.ts +++ b/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appGqlConfigTransform.test.ts @@ -1,6 +1,8 @@ import fs from 'node:fs' import path from 'node:path' +import { describe, test } from 'vitest' + import { findUp } from '@redwoodjs/project-config' describe('fragments graphQLClientConfig', () => { diff --git a/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appImportTransform.test.ts b/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appImportTransform.test.ts index d5cff2bb93bb..e12a1d0b5604 100644 --- a/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appImportTransform.test.ts +++ b/packages/cli/src/commands/setup/graphql/features/fragments/__codemod_tests__/appImportTransform.test.ts @@ -1,3 +1,5 @@ +import { describe, test } from 'vitest' + describe('fragments possibleTypes import', () => { test('Default App.tsx', async () => { await matchFolderTransform('appImportTransform', 'import-simple', { diff --git a/packages/cli/src/commands/setup/graphql/features/fragments/__tests__/fragmentsHandler.test.ts b/packages/cli/src/commands/setup/graphql/features/fragments/__tests__/fragmentsHandler.test.ts index da1ec1b0b94a..1055456fcdbc 100644 --- a/packages/cli/src/commands/setup/graphql/features/fragments/__tests__/fragmentsHandler.test.ts +++ b/packages/cli/src/commands/setup/graphql/features/fragments/__tests__/fragmentsHandler.test.ts @@ -1,11 +1,27 @@ let mockExecutedTaskTitles: Array<string> = [] let mockSkippedTaskTitles: Array<string> = [] -jest.mock('fs', () => require('memfs').fs) -jest.mock('node:fs', () => require('memfs').fs) -jest.mock('execa') +vi.mock('fs', async () => { + const memfs = await import('memfs') + return { + ...memfs.fs, + default: { + ...memfs.fs, + }, + } +}) +vi.mock('node:fs', async () => { + const memfs = await import('memfs') + return { + ...memfs.fs, + default: { + ...memfs.fs, + }, + } +}) +vi.mock('execa') // The jscodeshift parts are tested by another test -jest.mock('../runTransform', () => { +vi.mock('../runTransform', () => { return { runTransform: () => { return {} @@ -13,10 +29,10 @@ } }) -jest.mock('listr2', () => {
+vi.mock('listr2', () => { return { // Return a constructor function, since we're calling `new` on Listr - Listr: jest.fn().mockImplementation((tasks: Array<any>) => { + Listr: vi.fn().mockImplementation((tasks: Array<any>) => { return { run: async () => { mockExecutedTaskTitles = [] @@ -40,6 +56,7 @@ jest.mock('listr2', () => { }) import { vol } from 'memfs' +import { vi, beforeAll, afterAll, test, expect } from 'vitest' import { handler } from '../fragmentsHandler' @@ -54,8 +71,8 @@ beforeAll(() => { afterAll(() => { process.env.RWJS_CWD = original_RWJS_CWD - jest.resetAllMocks() - jest.resetModules() + vi.resetAllMocks() + vi.resetModules() }) test('all tasks are being called', async () => { diff --git a/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__codemod_tests__/grapqlTransform.test.ts b/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__codemod_tests__/grapqlTransform.test.ts index a44dd63ff138..5fc47e1d8627 100644 --- a/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__codemod_tests__/grapqlTransform.test.ts +++ b/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__codemod_tests__/grapqlTransform.test.ts @@ -1,3 +1,5 @@ +import { describe, test } from 'vitest' + describe('trusted-documents graphql handler transform', () => { test('Default handler', async () => { await matchFolderTransform('graphqlTransform', 'graphql', { diff --git a/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/__snapshots__/trustedDocuments.test.ts.snap b/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/__snapshots__/trustedDocuments.test.ts.snap index 05ac34b420f2..46364b5cd147 100644 --- a/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/__snapshots__/trustedDocuments.test.ts.snap +++ b/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/__snapshots__/trustedDocuments.test.ts.snap @@ -1,6 +1,6 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP +// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html -exports[`Trusted documents setup Project toml configuration updates default toml where graphql fragments are already setup updates the toml file with graphql and trusted documents enabled and keeps fragments 1`] = ` +exports[`Trusted documents setup > Project toml configuration updates > default toml where graphql fragments are already setup > updates the toml file with graphql and trusted documents enabled and keeps fragments 1`] = ` "# This file contains the configuration settings for your Redwood app. # This file is also what makes your Redwood app a Redwood app. # If you remove it and try to run \`yarn rw dev\`, you'll get an error. @@ -28,7 +28,7 @@ exports[`Trusted documents setup Project toml configuration updates default toml " `; -exports[`Trusted documents setup Project toml configuration updates default toml where graphql fragments are already setup using no spaces updates the toml file with graphql and trusted documents enabled and keeps fragments 1`] = ` +exports[`Trusted documents setup > Project toml configuration updates > default toml where graphql fragments are already setup using no spaces > updates the toml file with graphql and trusted documents enabled and keeps fragments 1`] = ` "# This file contains the configuration settings for your Redwood app. # This file is also what makes your Redwood app a Redwood app. # If you remove it and try to run \`yarn rw dev\`, you'll get an error.
@@ -56,7 +56,7 @@ exports[`Trusted documents setup Project toml configuration updates default toml " `; -exports[`Trusted documents setup Project toml configuration updates default toml where no graphql or trusted documents is setup updates the toml file with graphql and trusted documents enabled 1`] = ` +exports[`Trusted documents setup > Project toml configuration updates > default toml where no graphql or trusted documents is setup > updates the toml file with graphql and trusted documents enabled 1`] = ` "# This file contains the configuration settings for your Redwood app. # This file is also what makes your Redwood app a Redwood app. # If you remove it and try to run \`yarn rw dev\`, you'll get an error. @@ -83,7 +83,7 @@ exports[`Trusted documents setup Project toml configuration updates default toml trustedDocuments = true" `; -exports[`Trusted documents setup Project toml configuration updates toml where graphql section is commented out adds a new section with \`trustedDocuments = true\` 1`] = ` +exports[`Trusted documents setup > Project toml configuration updates > toml where graphql section is commented out > adds a new section with \`trustedDocuments = true\` 1`] = ` "# This file contains the configuration settings for your Redwood app. # This file is also what makes your Redwood app a Redwood app. # If you remove it and try to run \`yarn rw dev\`, you'll get an error. diff --git a/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/trustedDocuments.test.ts b/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/trustedDocuments.test.ts index e54eed9b64d6..a472d6d7fc9b 100644 --- a/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/trustedDocuments.test.ts +++ b/packages/cli/src/commands/setup/graphql/features/trustedDocuments/__tests__/trustedDocuments.test.ts @@ -1,11 +1,27 @@ let mockExecutedTaskTitles: Array<string> = [] let mockSkippedTaskTitles: Array<string> = [] -jest.mock('fs', () => require('memfs').fs) -jest.mock('node:fs', () => require('memfs').fs) -jest.mock('execa') +vi.mock('fs', async () => { + const memfs = await import('memfs') + return { + ...memfs.fs, + default: { + ...memfs.fs, + }, + } +}) +vi.mock('node:fs', async () => { + const memfs = await import('memfs') + return { + ...memfs.fs, + default: { + ...memfs.fs, + }, + } +}) +vi.mock('execa') // The jscodeshift parts are tested by another test -jest.mock('../../fragments/runTransform', () => { +vi.mock('../../fragments/runTransform', () => { return { runTransform: () => { return {} @@ -13,10 +29,10 @@ } }) -jest.mock('listr2', () => { +vi.mock('listr2', () => { return { // Return a constructor function, since we're calling `new` on Listr - Listr: jest.fn().mockImplementation((tasks: Array<any>) => { + Listr: vi.fn().mockImplementation((tasks: Array<any>) => { return { run: async () => { mockExecutedTaskTitles = [] @@ -42,6 +58,7 @@ jest.mock('listr2', () => { import path from 'node:path' import { vol } from 'memfs' +import { vi, expect, it, describe, beforeAll, afterAll } from 'vitest' import { handler } from '../trustedDocumentsHandler' @@ -51,11 +68,12 @@ const APP_PATH = '/redwood-app' const tomlFixtures: Record<string, string> = {} -beforeAll(() => { +beforeAll(async () => { original_RWJS_CWD = process.env.RWJS_CWD process.env.RWJS_CWD = APP_PATH - const actualFs = jest.requireActual('fs') + // eslint-disable-next-line @typescript-eslint/consistent-type-imports + const actualFs = await vi.importActual<typeof import('fs')>('fs') const
tomlFixturesPath = path.join(__dirname, '__fixtures__', 'toml') tomlFixtures.default = actualFs.readFileSync( @@ -96,12 +114,12 @@ beforeAll(() => { afterAll(() => { process.env.RWJS_CWD = original_RWJS_CWD - jest.resetAllMocks() - jest.resetModules() + vi.resetAllMocks() + vi.resetModules() }) // Silence console.info -console.info = jest.fn() +console.info = vi.fn() describe('Trusted documents setup', () => { it('runs all tasks', async () => { diff --git a/packages/cli/src/commands/setup/package/__tests__/packageHandler.test.js b/packages/cli/src/commands/setup/package/__tests__/packageHandler.test.js index d29ff1d969f4..7112f736f6ba 100644 --- a/packages/cli/src/commands/setup/package/__tests__/packageHandler.test.js +++ b/packages/cli/src/commands/setup/package/__tests__/packageHandler.test.js @@ -1,4 +1,4 @@ -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', () => { return { getPaths: () => { const path = require('path') @@ -8,49 +8,52 @@ jest.mock('@redwoodjs/project-config', () => { }, } }) -jest.mock('@redwoodjs/cli-helpers', () => { +vi.mock('@redwoodjs/cli-helpers', () => { return { - getCompatibilityData: jest.fn(() => { + getCompatibilityData: vi.fn(() => { throw new Error('Mock Not Implemented') }), } }) -jest.mock('fs') -jest.mock('execa', () => - jest.fn((cmd, params) => ({ +vi.mock('fs-extra') +vi.mock('execa', () => ({ + default: vi.fn((cmd, params) => ({ cmd, params, - })) -) -jest.mock('enquirer', () => { + })), +})) + +vi.mock('enquirer', () => { return { - Select: jest.fn(() => { - return { - run: jest.fn(() => { - throw new Error('Mock Not Implemented') - }), - } - }), + default: { + Select: vi.fn(() => { + return { + run: vi.fn(() => { + throw new Error('Mock Not Implemented') + }), + } + }), + }, } }) import path from 'path' +import enq from 'enquirer' import execa from 'execa' -import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, describe, beforeEach, afterEach, test, expect } from 'vitest' import { getCompatibilityData } from '@redwoodjs/cli-helpers' import { handler } from '../packageHandler' -const { Select } = require('enquirer') - describe('packageHandler', () => { beforeEach(() => { - jest.spyOn(console, 'log').mockImplementation(() => {}) - jest.spyOn(console, 'error').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) + vi.spyOn(console, 'error').mockImplementation(() => {}) - fs.__setMockFiles({ + vol.fromJSON({ ['package.json']: JSON.stringify({ devDependencies: { '@redwoodjs/core': '1.0.0', @@ -60,8 +63,8 @@ describe('packageHandler', () => { }) afterEach(() => { - fs.__setMockFiles({}) - jest.clearAllMocks() + vol.reset() + vi.clearAllMocks() }) test('using force does not check compatibility', async () => { @@ -107,9 +110,9 @@ describe('packageHandler', () => { throw new Error('No compatible version found') }) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'cancel'), + run: vi.fn(() => 'cancel'), } }) await handler({ @@ -117,12 +120,12 @@ describe('packageHandler', () => { force: false, _: ['setup', 'package'], }) - expect(Select).toHaveBeenCalledTimes(1) + expect(enq.Select).toHaveBeenCalledTimes(1) expect(execa).not.toHaveBeenCalled() - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'continue'), + run: vi.fn(() => 'continue'), } }) await handler({ @@ -130,7 +133,7 @@ describe('packageHandler', () => { force: false, _: ['setup', 'package'], }) - 
expect(Select).toHaveBeenCalledTimes(2) + expect(enq.Select).toHaveBeenCalledTimes(2) expect(execa).toHaveBeenCalledWith('yarn', ['dlx', 'some-package@latest'], { stdio: 'inherit', cwd: path.join('mocked', 'project'), @@ -177,9 +180,9 @@ describe('packageHandler', () => { } }) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'useLatestCompatibleVersion'), + run: vi.fn(() => 'useLatestCompatibleVersion'), } }) await handler({ @@ -192,7 +195,7 @@ describe('packageHandler', () => { 'some-package', 'latest' ) - expect(Select).toHaveBeenCalledTimes(1) + expect(enq.Select).toHaveBeenCalledTimes(1) expect(execa).toHaveBeenNthCalledWith( 1, 'yarn', @@ -203,9 +206,9 @@ describe('packageHandler', () => { } ) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'usePreferredVersion'), + run: vi.fn(() => 'usePreferredVersion'), } }) await handler({ @@ -218,7 +221,7 @@ describe('packageHandler', () => { 'some-package', 'latest' ) - expect(Select).toHaveBeenCalledTimes(2) + expect(enq.Select).toHaveBeenCalledTimes(2) expect(execa).toHaveBeenNthCalledWith( 2, 'yarn', @@ -229,9 +232,9 @@ describe('packageHandler', () => { } ) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'cancel'), + run: vi.fn(() => 'cancel'), } }) await handler({ @@ -244,7 +247,7 @@ describe('packageHandler', () => { 'some-package', 'latest' ) - expect(Select).toHaveBeenCalledTimes(3) + expect(enq.Select).toHaveBeenCalledTimes(3) expect(execa).toBeCalledTimes(2) // Only called for the previous two select options }) @@ -289,9 +292,9 @@ describe('packageHandler', () => { } }) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'useLatestCompatibleVersion'), + run: vi.fn(() => 'useLatestCompatibleVersion'), } }) await handler({ @@ -304,7 +307,7 @@ describe('packageHandler', () => { 'some-package', 'stable' ) - expect(Select).toHaveBeenCalledTimes(1) + expect(enq.Select).toHaveBeenCalledTimes(1) expect(execa).toHaveBeenNthCalledWith( 1, 'yarn', @@ -315,9 +318,9 @@ describe('packageHandler', () => { } ) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'usePreferredVersion'), + run: vi.fn(() => 'usePreferredVersion'), } }) await handler({ @@ -330,7 +333,7 @@ describe('packageHandler', () => { 'some-package', 'stable' ) - expect(Select).toHaveBeenCalledTimes(2) + expect(enq.Select).toHaveBeenCalledTimes(2) expect(execa).toHaveBeenNthCalledWith( 2, 'yarn', @@ -341,9 +344,9 @@ describe('packageHandler', () => { } ) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'cancel'), + run: vi.fn(() => 'cancel'), } }) await handler({ @@ -356,7 +359,7 @@ describe('packageHandler', () => { 'some-package', 'stable' ) - expect(Select).toHaveBeenCalledTimes(3) + expect(enq.Select).toHaveBeenCalledTimes(3) expect(execa).toBeCalledTimes(2) // Only called for the previous two select options }) @@ -400,9 +403,9 @@ describe('packageHandler', () => { } }) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'useLatestCompatibleVersion'), + run: vi.fn(() => 'useLatestCompatibleVersion'), } }) await handler({ @@ -415,7 +418,7 @@ describe('packageHandler', () => { 'some-package', '1.0.0' ) - expect(Select).toHaveBeenCalledTimes(1) + expect(enq.Select).toHaveBeenCalledTimes(1) 
expect(execa).toHaveBeenNthCalledWith( 1, 'yarn', @@ -426,9 +429,9 @@ describe('packageHandler', () => { } ) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'usePreferredVersion'), + run: vi.fn(() => 'usePreferredVersion'), } }) await handler({ @@ -441,7 +444,7 @@ describe('packageHandler', () => { 'some-package', '1.0.0' ) - expect(Select).toHaveBeenCalledTimes(2) + expect(enq.Select).toHaveBeenCalledTimes(2) expect(execa).toHaveBeenNthCalledWith( 2, 'yarn', @@ -452,9 +455,9 @@ describe('packageHandler', () => { } ) - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'cancel'), + run: vi.fn(() => 'cancel'), } }) await handler({ @@ -467,7 +470,7 @@ describe('packageHandler', () => { 'some-package', '1.0.0' ) - expect(Select).toHaveBeenCalledTimes(3) + expect(enq.Select).toHaveBeenCalledTimes(3) expect(execa).toBeCalledTimes(2) // Only called for the previous two select options }) @@ -496,9 +499,9 @@ describe('packageHandler', () => { ) // No force should prompt - Select.mockImplementation(() => { + enq.Select.mockImplementation(() => { return { - run: jest.fn(() => 'useLatestCompatibleVersion'), + run: vi.fn(() => 'useLatestCompatibleVersion'), } }) await handler({ @@ -511,7 +514,7 @@ describe('packageHandler', () => { 'some-package', '0.0.1' ) - expect(Select).toHaveBeenCalledTimes(1) + expect(enq.Select).toHaveBeenCalledTimes(1) expect(execa).toHaveBeenNthCalledWith( 1, 'yarn', diff --git a/packages/cli/src/commands/setup/package/packageHandler.js b/packages/cli/src/commands/setup/package/packageHandler.js index 8118122c43ae..0b5e032cb9df 100644 --- a/packages/cli/src/commands/setup/package/packageHandler.js +++ b/packages/cli/src/commands/setup/package/packageHandler.js @@ -1,12 +1,10 @@ +import enq from 'enquirer' import execa from 'execa' import semver from 'semver' import { getCompatibilityData } from '@redwoodjs/cli-helpers' import { getPaths } from '@redwoodjs/project-config' -const { Select } = require('enquirer') - -// TODO: Yarn3 requirement? What do we do, just not run? I'm not sure about this one. export async function handler({ npmPackage, force, _: _args }) { // Extract package name and version which the user provided const isScoped = npmPackage.startsWith('@') @@ -157,7 +155,7 @@ async function runPackage(packageName, version, options = []) { async function promptWithChoices(message, choices) { try { - const prompt = new Select({ + const prompt = new enq.Select({ name: message.substring(0, 8).toLowerCase(), message, choices, diff --git a/packages/cli/src/commands/studioHandler.js b/packages/cli/src/commands/studioHandler.js index 50ca7df5c3ef..787b575f62ca 100644 --- a/packages/cli/src/commands/studioHandler.js +++ b/packages/cli/src/commands/studioHandler.js @@ -1,4 +1,9 @@ -import { setTomlSetting } from '@redwoodjs/cli-helpers' +import path from 'node:path' + +import fs from 'fs-extra' +import semver from 'semver' + +import { getPaths } from '@redwoodjs/project-config' import { isModuleInstalled, installModule } from '../lib/packages' @@ -6,14 +11,28 @@ export const handler = async (options) => { try { // Check the module is installed if (!isModuleInstalled('@redwoodjs/studio')) { + const minVersions = ['7.0.0-canary.874', '7.x', '8.0.0-0'] + assertRedwoodVersion(minVersions) + console.log( 'The studio package is not installed, installing it for you, this may take a moment...' 
) - await installModule('@redwoodjs/studio', '11.0.1') + await installModule('@redwoodjs/studio', '11') console.log('Studio package installed successfully.') - console.log('Adding config to redwood.toml...') - setTomlSetting('studio', 'enabled', true) + const installedRealtime = await installModule('@redwoodjs/realtime') + if (installedRealtime) { + console.log( + "Added @redwoodjs/realtime to your project, as it's used by Studio" + ) + } + + const installedApiServer = await installModule('@redwoodjs/api-server') + if (installedApiServer) { + console.log( + "Added @redwoodjs/api-server to your project, as it's used by Studio" + ) + } } // Import studio and start it @@ -25,3 +44,51 @@ export const handler = async (options) => { process.exit(1) } } + +// Exported for unit testing +export function assertRedwoodVersion(minVersions) { + const rwVersion = getProjectRedwoodVersion() + const coercedRwVersion = semver.coerce(rwVersion) + + if ( + minVersions.some((minVersion) => { + // Have to do this to handle pre-release versions until + // https://github.com/npm/node-semver/pull/671 is merged + const v = semver.valid(minVersion) || semver.coerce(minVersion) + + const coercedMin = semver.coerce(minVersion) + + // According to semver 1.0.0-rc.X > 1.0.0-canary.Y (for all values of X + // and Y) + // But for Redwood an RC release can be much older than a Canary release + // (and not contain features from Canary that whoever calls this need) + // Because RW doesn't 100% follow SemVer for pre-releases we have to + // have some custom logic here + return ( + semver.gte(rwVersion, v) && + (coercedRwVersion.major === coercedMin.major + ? semver.prerelease(rwVersion)?.[0] === semver.prerelease(v)?.[0] + : true) + ) + }) + ) { + // All good, the user's RW version meets at least one of the minimum + // version requirements + return + } + + console.error( + `The studio command requires Redwood version ${minVersions[0]} or ` + + `greater, you are using ${rwVersion}.` + ) + + process.exit(1) +} + +function getProjectRedwoodVersion() { + const { devDependencies } = fs.readJSONSync( + path.join(getPaths().base, 'package.json') + ) + + return devDependencies['@redwoodjs/core'] +} diff --git a/packages/cli/src/lib/__tests__/getDevNodeOptions.test.js b/packages/cli/src/lib/__tests__/getDevNodeOptions.test.js index d57b78488656..f180b370d7a0 100644 --- a/packages/cli/src/lib/__tests__/getDevNodeOptions.test.js +++ b/packages/cli/src/lib/__tests__/getDevNodeOptions.test.js @@ -1,3 +1,5 @@ +import { describe, it, expect } from 'vitest' + import { getDevNodeOptions } from '../../commands/devHandler' describe('getNodeOptions', () => { diff --git a/packages/cli/src/lib/__tests__/index.test.js b/packages/cli/src/lib/__tests__/index.test.js index ee2218118a6f..cb5181beffaf 100644 --- a/packages/cli/src/lib/__tests__/index.test.js +++ b/packages/cli/src/lib/__tests__/index.test.js @@ -1,8 +1,9 @@ global.__dirname = __dirname -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() const path = require('path') return { - ...jest.requireActual('@redwoodjs/project-config'), + ...originalProjectConfig, getPaths: () => { const BASE_PATH = path.join(globalThis.__dirname, 'fixtures') return { @@ -18,6 +19,7 @@ jest.mock('@redwoodjs/project-config', () => { import path from 'path' import fs from 'fs-extra' +import { vi, test, expect, describe } from 'vitest' import * as index from '../index' diff --git
a/packages/cli/src/lib/__tests__/locking.test.js b/packages/cli/src/lib/__tests__/locking.test.js index 8728adc51e71..911e88693665 100644 --- a/packages/cli/src/lib/__tests__/locking.test.js +++ b/packages/cli/src/lib/__tests__/locking.test.js @@ -1,7 +1,8 @@ global.__dirname = __dirname -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() return { - ...jest.requireActual('@redwoodjs/project-config'), + ...originalProjectConfig, getPaths: () => { return { generated: { @@ -11,18 +12,20 @@ jest.mock('@redwoodjs/project-config', () => { }, } }) -jest.mock('fs') +vi.mock('fs-extra') import path from 'path' import fs from 'fs-extra' +import { vol } from 'memfs' +import { vi, it, expect, beforeEach } from 'vitest' import { setLock, unsetLock, isLockSet, clearLocks } from '../locking' beforeEach(() => { // Start with no files - fs.__setMockFiles({}) - fs.statSync = jest.fn(() => { + vol.reset() + fs.statSync = vi.fn(() => { return { birthtimeMs: Date.now(), } @@ -80,7 +83,7 @@ it('Detects a stale lock', () => { birthtimeMs: Date.now() - 3600001, } }) - const spy = jest.spyOn(fs, 'rmSync') + const spy = vi.spyOn(fs, 'rmSync') setLock('TEST') diff --git a/packages/cli/src/lib/__tests__/mergeBasics.test.js b/packages/cli/src/lib/__tests__/mergeBasics.test.js index cac630c55e73..d7efa12795bb 100644 --- a/packages/cli/src/lib/__tests__/mergeBasics.test.js +++ b/packages/cli/src/lib/__tests__/mergeBasics.test.js @@ -1,3 +1,5 @@ +import { expect, it, describe } from 'vitest' + import { merge } from '../merge' import { concatUnique } from '../merge/strategy' diff --git a/packages/cli/src/lib/__tests__/mergeConfig.test.js b/packages/cli/src/lib/__tests__/mergeConfig.test.js index ad7bfe4070db..d7a082ab8866 100644 --- a/packages/cli/src/lib/__tests__/mergeConfig.test.js +++ b/packages/cli/src/lib/__tests__/mergeConfig.test.js @@ -1,6 +1,7 @@ import path from 'path' import fs from 'fs-extra' +import { expect, it, describe, test } from 'vitest' import { merge } from '../merge' import { diff --git a/packages/cli/src/lib/__tests__/mergeSemantics.test.js b/packages/cli/src/lib/__tests__/mergeSemantics.test.js index a5129491be5b..c9d1de49add9 100644 --- a/packages/cli/src/lib/__tests__/mergeSemantics.test.js +++ b/packages/cli/src/lib/__tests__/mergeSemantics.test.js @@ -1,4 +1,5 @@ import { parse, traverse } from '@babel/core' +import { describe, it, expect } from 'vitest' import { semanticIdentity } from '../merge/semanticIdentity' diff --git a/packages/cli/src/lib/__tests__/pluralHelpers.test.js b/packages/cli/src/lib/__tests__/pluralHelpers.test.js index df994f5d2a43..8dc2aa159ca7 100644 --- a/packages/cli/src/lib/__tests__/pluralHelpers.test.js +++ b/packages/cli/src/lib/__tests__/pluralHelpers.test.js @@ -1,4 +1,5 @@ import prompts from 'prompts' +import { test, expect } from 'vitest' import * as helpers from '../pluralHelpers' import { pluralize, singularize } from '../rwPluralize' diff --git a/packages/cli/src/lib/__tests__/rollback.test.js b/packages/cli/src/lib/__tests__/rollback.test.js index 3e25ecc1529e..6a85167adc6c 100644 --- a/packages/cli/src/lib/__tests__/rollback.test.js +++ b/packages/cli/src/lib/__tests__/rollback.test.js @@ -1,14 +1,20 @@ import path from 'path' +vi.mock('fs-extra') + import fs from 'fs-extra' import { Listr } from 'listr2' - -jest.mock('fs') +import { vol } from 'memfs' +import { vi, it, expect, beforeEach } from 'vitest' import * as rollback from 
'../rollback' +beforeEach(() => { + vol.reset() +}) + it('resets file contents', async () => { - fs.__setMockFiles({ + vol.fromJSON({ 'fake-file-1': 'fake-content-1', 'fake-file-2': 'fake-content-2', }) @@ -17,12 +23,12 @@ it('resets file contents', async () => { fs.writeFileSync('fake-file-1', 'fake-content-changed') await rollback.executeRollback() - expect(fs.readFileSync('fake-file-1')).toBe('fake-content-1') - expect(fs.readFileSync('fake-file-2')).toBe('fake-content-2') + expect(fs.readFileSync('fake-file-1', 'utf-8')).toBe('fake-content-1') + expect(fs.readFileSync('fake-file-2', 'utf-8')).toBe('fake-content-2') }) it('removes new files', async () => { - fs.__setMockFiles({ + vol.fromJSON({ 'fake-file-1': 'fake-content-1', }) rollback.addFileToRollback('fake-file-1') @@ -31,12 +37,12 @@ it('removes new files', async () => { fs.writeFileSync('fake-file-2', 'fake-content-new') await rollback.executeRollback() - expect(fs.readFileSync('fake-file-1')).toBe('fake-content-1') + expect(fs.readFileSync('fake-file-1', 'utf-8')).toBe('fake-content-1') expect(fs.existsSync('fake-file-2')).toBe(false) }) it('removes empty folders after removing files', async () => { - fs.__setMockFiles({ + vol.fromJSON({ [path.join('fake_dir', 'mock_dir', 'test_dir')]: undefined, }) rollback.addFileToRollback( @@ -55,30 +61,30 @@ it('removes empty folders after removing files', async () => { }) it('executes sync functions', async () => { - fs.__setMockFiles({}) + vol.fromJSON({}) rollback.addFunctionToRollback(() => { - fs.writeFileSync('fake-file', 'fake-content') + fs.writeFileSync('/fake-file', 'fake-content') }) await rollback.executeRollback() - expect(fs.readFileSync('fake-file')).toBe('fake-content') + expect(fs.readFileSync('/fake-file', 'utf-8')).toBe('fake-content') }) it('executes async functions', async () => { - fs.__setMockFiles({}) + vol.fromJSON({}) rollback.addFunctionToRollback(async () => { // make up some async process await new Promise((resolve, _) => { - fs.writeFileSync('fake-file', 'fake-content') + fs.writeFileSync('/fake-file', 'fake-content') resolve() }) }) await rollback.executeRollback() - expect(fs.readFileSync('fake-file')).toBe('fake-content') + expect(fs.readFileSync('/fake-file', 'utf-8')).toBe('fake-content') }) it('executes rollback in order', async () => { // default stack ordering LIFO - fs.__setMockFiles({ + vol.fromJSON({ 'fake-file': '0', }) rollback.addFunctionToRollback(() => { @@ -91,10 +97,10 @@ it('executes rollback in order', async () => { fs.writeFileSync('fake-file', '3') }) await rollback.executeRollback() - expect(fs.readFileSync('fake-file')).toBe('1') + expect(fs.readFileSync('fake-file', 'utf-8')).toBe('1') // handles the atEnd flag - fs.__setMockFiles({ + vol.fromJSON({ 'fake-file': '0', }) rollback.addFunctionToRollback(() => { @@ -107,10 +113,10 @@ it('executes rollback in order', async () => { fs.writeFileSync('fake-file', '3') }) await rollback.executeRollback() - expect(fs.readFileSync('fake-file')).toBe('2') + expect(fs.readFileSync('fake-file', 'utf-8')).toBe('2') // using files rather than functions - fs.__setMockFiles({ + vol.fromJSON({ 'fake-file': '0', }) rollback.addFileToRollback('fake-file') @@ -120,10 +126,10 @@ it('executes rollback in order', async () => { rollback.addFileToRollback('fake-file') fs.writeFileSync('fake-file', '3') await rollback.executeRollback() - expect(fs.readFileSync('fake-file')).toBe('0') + expect(fs.readFileSync('fake-file', 'utf-8')).toBe('0') // using files rather than functions and the atEnd flag - 
fs.__setMockFiles({ + vol.fromJSON({ 'fake-file': '0', }) rollback.addFileToRollback('fake-file') @@ -133,11 +139,11 @@ it('executes rollback in order', async () => { rollback.addFileToRollback('fake-file', true) fs.writeFileSync('fake-file', '3') await rollback.executeRollback() - expect(fs.readFileSync('fake-file')).toBe('2') + expect(fs.readFileSync('fake-file', 'utf-8')).toBe('2') }) it('reset clears the stack', async () => { - fs.__setMockFiles({}) + vol.fromJSON({}) rollback.addFunctionToRollback(() => { fs.writeFileSync('fake-file', 'fake-content') }) @@ -147,7 +153,7 @@ it('reset clears the stack', async () => { }) it('prepare clears the stack', async () => { - fs.__setMockFiles({}) + vol.fromJSON({}) rollback.addFunctionToRollback(() => { fs.writeFileSync('fake-file', 'fake-content') }) @@ -157,8 +163,8 @@ it('prepare clears the stack', async () => { }) it('prepare sets listr2 rollback functions and rollback executes correctly', async () => { - const fakeTaskFunction = jest.fn() - const fakeRollbackFunction = jest.fn() + const fakeTaskFunction = vi.fn() + const fakeRollbackFunction = vi.fn() const tasks = new Listr( [ { diff --git a/packages/cli/src/lib/__tests__/rwPluralize.test.js b/packages/cli/src/lib/__tests__/rwPluralize.test.js index ed3c6ab7ecbe..cd0deca77d42 100644 --- a/packages/cli/src/lib/__tests__/rwPluralize.test.js +++ b/packages/cli/src/lib/__tests__/rwPluralize.test.js @@ -1,3 +1,5 @@ +import { test, expect } from 'vitest' + import { pluralize, singularize, diff --git a/packages/cli/src/lib/__tests__/schemaHelpers.test.js b/packages/cli/src/lib/__tests__/schemaHelpers.test.js index cd4d171f8b68..e1827ad4a3e3 100644 --- a/packages/cli/src/lib/__tests__/schemaHelpers.test.js +++ b/packages/cli/src/lib/__tests__/schemaHelpers.test.js @@ -1,8 +1,9 @@ global.__dirname = __dirname -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() const path = require('path') return { - ...jest.requireActual('@redwoodjs/project-config'), + ...originalProjectConfig, getPaths: () => { const BASE_PATH = path.join(globalThis.__dirname, 'fixtures') return { @@ -17,6 +18,7 @@ jest.mock('@redwoodjs/project-config', () => { }) import prompts from 'prompts' +import { vi, test, expect, describe, it } from 'vitest' import { getSchema, verifyModelName } from '../schemaHelpers' diff --git a/packages/cli/src/lib/__tests__/updateCheck.test.js b/packages/cli/src/lib/__tests__/updateCheck.test.js index e7a1bcd5fe82..1871fba42be5 100644 --- a/packages/cli/src/lib/__tests__/updateCheck.test.js +++ b/packages/cli/src/lib/__tests__/updateCheck.test.js @@ -1,10 +1,12 @@ global.__dirname = __dirname -jest.mock('fs') -jest.mock('latest-version') +vi.mock('fs-extra') +vi.mock('latest-version') -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { + const originalProjectConfig = await importOriginal() return { + ...originalProjectConfig, getPaths: () => { return { generated: { @@ -13,12 +15,23 @@ jest.mock('@redwoodjs/project-config', () => { base: '', } }, - getConfig: jest.fn(), + getConfig: vi.fn(), } }) import fs from 'fs-extra' import latestVersion from 'latest-version' +import { vol } from 'memfs' +import { + vi, + describe, + beforeAll, + afterAll, + it, + expect, + beforeEach, + afterEach, +} from 'vitest' import { getConfig } from '@redwoodjs/project-config' @@ -30,27 +43,28 @@ const TESTING_CURRENT_DATETIME = 1640995200000 
describe('Update is not available (1.0.0 -> 1.0.0)', () => { beforeAll(() => { // Use fake datetime - jest.useFakeTimers() - jest.setSystemTime(new Date(TESTING_CURRENT_DATETIME)) + vi.useFakeTimers() + vi.setSystemTime(new Date(TESTING_CURRENT_DATETIME)) getConfig.mockReturnValue({ notifications: { versionUpdates: ['latest'], }, }) // Prevent the appearance of stale locks - fs.statSync = jest.fn(() => { + fs.statSync = vi.fn(() => { return { birthtimeMs: Date.now(), } }) // Prevent console output during tests - console.log = jest.fn() - console.time = jest.fn() + console.log = vi.fn() + console.time = vi.fn() + console.timeEnd = vi.fn() }) afterAll(() => { - jest.useRealTimers() + vi.useRealTimers() }) beforeEach(() => { @@ -59,7 +73,7 @@ describe('Update is not available (1.0.0 -> 1.0.0)', () => { return '1.0.0' }) - fs.__setMockFiles({ + vol.fromJSON({ // Users package.json containing the redwood version 'package.json': JSON.stringify({ devDependencies: { @@ -70,8 +84,8 @@ describe('Update is not available (1.0.0 -> 1.0.0)', () => { }) afterEach(() => { - fs.__setMockFiles({}) - jest.clearAllMocks() + vol.reset() + vi.clearAllMocks() }) it('Produces the correct updateData.json file', async () => { @@ -113,15 +127,15 @@ describe('Update is not available (1.0.0 -> 1.0.0)', () => { describe('Update is available (1.0.0 -> 2.0.0)', () => { beforeAll(() => { // Use fake datetime - jest.useFakeTimers() - jest.setSystemTime(new Date(TESTING_CURRENT_DATETIME)) + vi.useFakeTimers() + vi.setSystemTime(new Date(TESTING_CURRENT_DATETIME)) getConfig.mockReturnValue({ notifications: { versionUpdates: ['latest'], }, }) // Prevent the appearance of stale locks - fs.statSync = jest.fn(() => { + fs.statSync = vi.fn(() => { return { birthtimeMs: Date.now(), } @@ -129,7 +143,7 @@ describe('Update is available (1.0.0 -> 2.0.0)', () => { }) afterAll(() => { - jest.useRealTimers() + vi.useRealTimers() }) beforeEach(() => { @@ -138,7 +152,7 @@ describe('Update is available (1.0.0 -> 2.0.0)', () => { return '2.0.0' }) - fs.__setMockFiles({ + vol.fromJSON({ // Users package.json containing the redwood version 'package.json': JSON.stringify({ devDependencies: { @@ -149,8 +163,8 @@ describe('Update is available (1.0.0 -> 2.0.0)', () => { }) afterEach(() => { - fs.__setMockFiles({}) - jest.clearAllMocks() + vol.reset() + vi.clearAllMocks() }) it('Produces the correct updateData.json file', async () => { @@ -192,15 +206,15 @@ describe('Update is available (1.0.0 -> 2.0.0)', () => { describe('Update is available with rc tag (1.0.0-rc.1 -> 1.0.1-rc.58)', () => { beforeAll(() => { // Use fake datetime - jest.useFakeTimers() - jest.setSystemTime(new Date(TESTING_CURRENT_DATETIME)) + vi.useFakeTimers() + vi.setSystemTime(new Date(TESTING_CURRENT_DATETIME)) getConfig.mockReturnValue({ notifications: { versionUpdates: ['latest', 'rc'], }, }) // Prevent the appearance of stale locks - fs.statSync = jest.fn(() => { + fs.statSync = vi.fn(() => { return { birthtimeMs: Date.now(), } @@ -208,7 +222,7 @@ describe('Update is available with rc tag (1.0.0-rc.1 -> 1.0.1-rc.58)', () => { }) afterAll(() => { - jest.useRealTimers() + vi.useRealTimers() }) beforeEach(() => { @@ -217,7 +231,7 @@ describe('Update is available with rc tag (1.0.0-rc.1 -> 1.0.1-rc.58)', () => { return version === 'rc' ? 
'1.0.1-rc.58' : '1.0.0' }) - fs.__setMockFiles({ + vol.fromJSON({ // Users package.json containing the redwood version 'package.json': JSON.stringify({ devDependencies: { @@ -228,8 +242,8 @@ describe('Update is available with rc tag (1.0.0-rc.1 -> 1.0.1-rc.58)', () => { }) afterEach(() => { - fs.__setMockFiles({}) - jest.clearAllMocks() + vol.reset() + vi.clearAllMocks() }) it('Produces the correct updateData.json file', async () => { diff --git a/packages/cli/src/lib/index.js b/packages/cli/src/lib/index.js index 3aae217d4576..6f626bb17876 100644 --- a/packages/cli/src/lib/index.js +++ b/packages/cli/src/lib/index.js @@ -232,6 +232,26 @@ export const prettierOptions = () => { try { return require(path.join(getPaths().base, 'prettier.config.js')) } catch (e) { + // If we're in our vitest environment we want to return a consistent set of prettier options + // such that snapshots don't change unexpectedly. + if (process.env.VITEST_POOL_ID !== undefined) { + return { + trailingComma: 'es5', + bracketSpacing: true, + tabWidth: 2, + semi: false, + singleQuote: true, + arrowParens: 'always', + overrides: [ + { + files: 'Routes.*', + options: { + printWidth: 999, + }, + }, + ], + } + } return undefined } } diff --git a/packages/cli/src/lib/merge/index.js b/packages/cli/src/lib/merge/index.js index bc0637484540..27e17648040d 100644 --- a/packages/cli/src/lib/merge/index.js +++ b/packages/cli/src/lib/merge/index.js @@ -225,7 +225,7 @@ export function merge(base, extension, strategy) { // When testing, use prettier here to produce predictable outputs. // Otherwise, leave formatting to the caller. - return process.env.JEST_WORKER_ID + return process.env.VITEST_POOL_ID ? prettier.format(code, { parser: 'babel', bracketSpacing: true, diff --git a/packages/cli/src/lib/mockTelemetry.js b/packages/cli/src/lib/mockTelemetry.js index 9dfd7aee1f89..a88f9b7c7dbd 100644 --- a/packages/cli/src/lib/mockTelemetry.js +++ b/packages/cli/src/lib/mockTelemetry.js @@ -1,9 +1,11 @@ -/* eslint-env jest */ +/* eslint-env vitest */ + +import { vi } from 'vitest' // mock Telemetry for CLI commands so they don't try to spawn a process -jest.mock('@redwoodjs/telemetry', () => { +vi.mock('@redwoodjs/telemetry', () => { return { - errorTelemetry: () => jest.fn(), - timedTelemetry: () => jest.fn(), + errorTelemetry: () => vi.fn(), + timedTelemetry: () => vi.fn(), } }) diff --git a/packages/cli/src/lib/packages.js b/packages/cli/src/lib/packages.js index aaaf04240333..c65c7dc9d49c 100644 --- a/packages/cli/src/lib/packages.js +++ b/packages/cli/src/lib/packages.js @@ -5,10 +5,12 @@ import fs from 'fs-extra' import { getPaths } from './index' +// Note: Have to add backslash (\) before @ below for intellisense to display +// the doc comments properly /** - * Installs a module into a user's project. If the module is already installed, - * this function does nothing. If no version is specified, the version will be assumed - * to be the same as that of \@redwoodjs/cli. + * Installs a module into a user's project. If the module is already installed, + * this function does nothing. If no version is specified, the version will be + * assumed to be the same as that of \@redwoodjs/cli. 
* * @param {string} name The name of the module to install * @param {string} version The version of the module to install, otherwise the same as that of \@redwoodjs/cli @@ -19,21 +21,25 @@ export async function installModule(name, version = undefined) { if (isModuleInstalled(name)) { return false } + if (version === undefined) { - return await installRedwoodModule(name) + return installRedwoodModule(name) } else { await execa.command(`yarn add -D ${name}@${version}`, { stdio: 'inherit', cwd: getPaths().base, }) } + return true } /** - * Installs a Redwood module into a user's project keeping the version consistent with that of \@redwoodjs/cli. + * Installs a Redwood module into a user's project keeping the version + * consistent with that of \@redwoodjs/cli. * If the module is already installed, this function does nothing. - * If no remote version can not be found which matches the local cli version then the latest canary version will be used. + * If no remote version can not be found which matches the local cli version + * then the latest canary version will be used. * * @param {string} module A redwoodjs module, e.g. \@redwoodjs/web * @returns {boolean} Whether the module was installed or not diff --git a/packages/cli/src/lib/test.js b/packages/cli/src/lib/test.js index cf89be25d000..94d7fd2bf967 100644 --- a/packages/cli/src/lib/test.js +++ b/packages/cli/src/lib/test.js @@ -1,4 +1,4 @@ -/* eslint-env jest */ +/* eslint-env vitest */ // Include at the top of your tests. Automatically mocks out the file system // @@ -11,10 +11,11 @@ import path from 'path' import fs from 'fs-extra' +import { vi } from 'vitest' import './mockTelemetry' -jest.mock('@redwoodjs/internal/dist/generate/generate', () => { +vi.mock('@redwoodjs/internal/dist/generate/generate', () => { return { generate: () => { return { errors: [] } @@ -22,10 +23,11 @@ jest.mock('@redwoodjs/internal/dist/generate/generate', () => { } }) -jest.mock('@redwoodjs/project-config', () => { +vi.mock('@redwoodjs/project-config', async (importOriginal) => { const path = require('path') + const originalProjectConfig = await importOriginal() return { - ...jest.requireActual('@redwoodjs/project-config'), + ...originalProjectConfig, getPaths: () => { const BASE_PATH = '/path/to/project' return { @@ -69,7 +71,7 @@ jest.mock('@redwoodjs/project-config', () => { } }) -jest.mock('./project', () => ({ +vi.mock('./project', () => ({ isTypeScriptProject: () => false, sides: () => ['web', 'api'], })) @@ -79,24 +81,7 @@ globalThis.__prettierPath = path.resolve( './__tests__/fixtures/prettier.config.js' ) -jest.mock('path', () => { - const path = jest.requireActual('path') - return { - ...path, - join: (...paths) => { - if ( - paths && - paths[0] === '/path/to/project' && - paths[1] === 'prettier.config.js' - ) { - return globalThis.__prettierPath - } - return path.join(...paths) - }, - } -}) - -jest.spyOn(Math, 'random').mockReturnValue(0.123456789) +vi.spyOn(Math, 'random').mockReturnValue(0.123456789) export const generatorsRootPath = path.join( __dirname, diff --git a/packages/cli/src/plugin.js b/packages/cli/src/plugin.js index 6860507dd7e6..a3463e62e393 100644 --- a/packages/cli/src/plugin.js +++ b/packages/cli/src/plugin.js @@ -72,9 +72,9 @@ export async function loadPlugins(yargs) { } // Order alphabetically but with @redwoodjs namespace first, orders the help output - const namespaces = Array.from( - thirdPartyPackages.map((p) => p.split('/')[0]) - ).sort() + const namespaces = Array.from(thirdPartyPackages) + .map((p) => p.split('/')[0]) + 
.sort() if (redwoodPackages.size > 0) { namespaces.unshift('@redwoodjs') } diff --git a/packages/cli/src/testUtils/matchFolderTransform.ts b/packages/cli/src/testUtils/matchFolderTransform.ts index 178e6d721794..c17b14822db6 100644 --- a/packages/cli/src/testUtils/matchFolderTransform.ts +++ b/packages/cli/src/testUtils/matchFolderTransform.ts @@ -1,7 +1,9 @@ +import { createRequire } from 'node:module' import path from 'path' import fg from 'fast-glob' import fse from 'fs-extra' +import { expect } from 'vitest' import runTransform from '../testLib/runTransform' @@ -23,6 +25,8 @@ type MatchFolderTransformFunction = ( options?: Options ) => Promise<void> +const require = createRequire(import.meta.url) + export const matchFolderTransform: MatchFolderTransformFunction = async ( transformFunctionOrName, fixtureName, @@ -75,7 +79,7 @@ export const matchFolderTransform: MatchFolderTransformFunction = async ( } const transformName = transformFunctionOrName const transformPath = require.resolve( - path.join(testPath, '../../', transformName) + path.join(testPath, '../../', transformName + '.ts') ) const targetPaths = fg.sync(targetPathsGlob, { diff --git a/packages/cli/src/testUtils/matchInlineTransformSnapshot.ts b/packages/cli/src/testUtils/matchInlineTransformSnapshot.ts index fc302e947a05..95638a2abb41 100644 --- a/packages/cli/src/testUtils/matchInlineTransformSnapshot.ts +++ b/packages/cli/src/testUtils/matchInlineTransformSnapshot.ts @@ -1,12 +1,16 @@ import fs from 'fs' +import { createRequire } from 'node:module' import path from 'path' import tempy from 'tempy' +import { expect } from 'vitest' import runTransform from '../testLib/runTransform' import { formatCode } from './index' +const require = createRequire(import.meta.url) + export const matchInlineTransformSnapshot = async ( transformName: string, fixtureCode: string, @@ -23,7 +27,7 @@ } const transformPath = require.resolve( - path.join(testPath, '../../', transformName) + path.join(testPath, '../../', transformName + '.ts') ) // Step 1: Write passed in code to a temp file diff --git a/packages/cli/src/testUtils/matchTransformSnapshot.ts b/packages/cli/src/testUtils/matchTransformSnapshot.ts deleted file mode 100644 index a8384bea4411..000000000000 --- a/packages/cli/src/testUtils/matchTransformSnapshot.ts +++ /dev/null @@ -1,60 +0,0 @@ -import fs from 'fs' -import path from 'path' - -import tempy from 'tempy' - -import runTransform from '../testLib/runTransform' - -import { formatCode } from './index' - -export interface MatchTransformSnapshotFunction { - (transformName: string, fixtureName?: string, parser?: 'ts' | 'tsx'): void -} - -export const matchTransformSnapshot: MatchTransformSnapshotFunction = async ( - transformName, - fixtureName, - parser -) => { - const tempFilePath = tempy.file() - - // Looks up the path of the caller - const testPath = expect.getState().testPath - - if (!testPath) { - throw new Error('Could not find test path') - } - - // Use require.resolve, so we can pass in ts/js/tsx/jsx without specifying - const fixturePath = require.resolve( - path.join(testPath, '../../__testfixtures__', `${fixtureName}.input`) - ) - - const transformPath = require.resolve( - path.join(testPath, '../../', transformName) - ) - - // Step 1: Copy fixture to temp file - fs.copyFileSync(fixturePath, tempFilePath, fs.constants.COPYFILE_FICLONE) - - // Step 2: Run transform against temp file - await runTransform({ - transformPath, - targetPaths: [tempFilePath], - parser, - options: {
verbose: 1, - print: true, - }, - }) - - // Step 3: Read modified file and snapshot - const transformedContent = fs.readFileSync(tempFilePath, 'utf-8') - - const expectedOutput = fs.readFileSync( - fixturePath.replace('.input.', '.output.'), - 'utf-8' - ) - - expect(formatCode(transformedContent)).toEqual(formatCode(expectedOutput)) -} diff --git a/packages/cli/src/jest.codemods.setup.ts b/packages/cli/src/vitest.codemods.setup.ts similarity index 74% rename from packages/cli/src/jest.codemods.setup.ts rename to packages/cli/src/vitest.codemods.setup.ts index 9a77e6be7996..422caa5a830d 100644 --- a/packages/cli/src/jest.codemods.setup.ts +++ b/packages/cli/src/vitest.codemods.setup.ts @@ -1,19 +1,21 @@ -/* eslint-env node, jest */ - -import { formatCode } from './testUtils' +/* eslint-env node, vitest */ // Disable telemetry within framework tests process.env.REDWOOD_DISABLE_TELEMETRY = 1 -const fs = require('fs') -const path = require('path') +import fs from 'fs' +import path from 'path' + +import { expect } from 'vitest' + +import { formatCode } from './testUtils' -globalThis.matchTransformSnapshot = - require('./testUtils/matchTransformSnapshot').matchTransformSnapshot -globalThis.matchInlineTransformSnapshot = - require('./testUtils/matchInlineTransformSnapshot').matchInlineTransformSnapshot -globalThis.matchFolderTransform = - require('./testUtils/matchFolderTransform').matchFolderTransform +globalThis.matchInlineTransformSnapshot = ( + await import('./testUtils/matchInlineTransformSnapshot') +).matchInlineTransformSnapshot +globalThis.matchFolderTransform = ( + await import('./testUtils/matchFolderTransform') +).matchFolderTransform // Custom matcher for checking fixtures using paths // e.g. expect(transformedPath).toMatchFileContents(expectedPath) diff --git a/packages/cli/vitest.config.mts b/packages/cli/vitest.config.mts new file mode 100644 index 000000000000..13c4a28c923f --- /dev/null +++ b/packages/cli/vitest.config.mts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vitest/config' + +export default defineConfig({ + test: { + testTimeout: 20_000, + sequence: { + hooks: 'list', + }, + logHeapUsage: true, + workspace: './vitest.workspaces.ts', + }, +}) diff --git a/packages/cli/vitest.setup.mts b/packages/cli/vitest.setup.mts new file mode 100644 index 000000000000..eaef4289011d --- /dev/null +++ b/packages/cli/vitest.setup.mts @@ -0,0 +1,6 @@ +import { beforeAll } from "vitest" + +// Disable telemetry within framework tests +beforeAll(() => { + process.env.REDWOOD_DISABLE_TELEMETRY = '1' +}) diff --git a/packages/cli/vitest.workspaces.ts b/packages/cli/vitest.workspaces.ts new file mode 100644 index 000000000000..26c9feb729d4 --- /dev/null +++ b/packages/cli/vitest.workspaces.ts @@ -0,0 +1,43 @@ +import { defineWorkspace, configDefaults } from 'vitest/config' + +export default defineWorkspace([ + { + extends: './vitest.config.mts', + test: { + name: 'root', + include: ['**/__tests__/**/*.[jt]s?(x)', '**/*.test.[jt]s?(x)'], + exclude: [ + ...configDefaults.exclude, + '__fixtures__', + '__testfixtures__', + '**/__codemod_tests__', + '__tests__/utils/*', + '**/__tests__/fixtures/**/*', + '.d.ts', + 'dist', + ], + alias: { + '^src/(.*)': '/src/$1', + }, + setupFiles: ['./vitest.setup.mts'], + }, + }, + { + extends: './vitest.config.mts', + test: { + name: 'setup codemods', + include: ['**/commands/setup/**/__codemod_tests__/*.ts'], + exclude: [ + ...configDefaults.exclude, + '__fixtures__', + '__testfixtures__', + '__tests__/utils/*', + '__tests__/fixtures/*', + '.d.ts', + 'dist', 
+ ], + setupFiles: ['./src/vitest.codemods.setup.ts'], + pool: 'forks', + }, + }, +]) diff --git a/packages/context/.babelrc.js b/packages/context/.babelrc.js deleted file mode 100644 index 3b2c815712d9..000000000000 --- a/packages/context/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../babel.config.js' } diff --git a/packages/context/build.mjs b/packages/context/build.mjs index 95bdb1e83cfb..75e861b63346 100644 --- a/packages/context/build.mjs +++ b/packages/context/build.mjs @@ -1,27 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts'], { - ignore: ['**/*.test.ts'], -}) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node18'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/context/package.json b/packages/context/package.json index 7eaba9167f9a..1ddb8e2d1f4d 100644 --- a/packages/context/package.json +++ b/packages/context/package.json @@ -19,15 +19,7 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "devDependencies": { - "esbuild": "0.19.9", - "fast-glob": "3.3.2", - "jest": "29.7.0", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/create-redwood-app/package.json b/packages/create-redwood-app/package.json index 52db9b47ecc9..951c59d2ed98 100644 --- a/packages/create-redwood-app/package.json +++ b/packages/create-redwood-app/package.json @@ -37,7 +37,6 @@ "check-node-version": "4.2.1", "ci-info": "4.0.0", "envinfo": "7.11.0", - "esbuild": "0.19.9", "execa": "5.1.1", "fs-extra": "11.2.0", "jest": "29.7.0", diff --git a/packages/create-redwood-app/scripts/build.js b/packages/create-redwood-app/scripts/build.js index 6694591a9998..6e69287e22fa 100644 --- a/packages/create-redwood-app/scripts/build.js +++ b/packages/create-redwood-app/scripts/build.js @@ -1,7 +1,4 @@ -/* eslint-env node */ - -import * as esbuild from 'esbuild' -import fs from 'fs-extra' +import { build, defaultBuildOptions } from '../../../buildDefaults.mjs' const jsBanner = `\ #!/usr/bin/env node @@ -11,24 +8,15 @@ const __filename = (await import("node:url")).fileURLToPath(import.meta.url); const __dirname = (await import("node:path")).dirname(__filename); ` -const result = await esbuild.build({ - entryPoints: ['src/create-redwood-app.js'], - outdir: 'dist', - - platform: 'node', - target: ['node20'], - format: 'esm', - bundle: true, - banner: { - js: jsBanner, +await build({ + buildOptions: { + ...defaultBuildOptions, + banner: { + js: jsBanner, + }, + bundle: true, + entryPoints: ['src/create-redwood-app.js'], + format: 'esm', + minify: true, }, - - minify: true, - - logLevel: 'info', - metafile: true, -}) - -await fs.writeJSON(new URL('./meta.json', import.meta.url), result.metafile, { - spaces: 2, }) diff --git a/packages/create-redwood-app/src/create-redwood-app.js b/packages/create-redwood-app/src/create-redwood-app.js index bd3d75d9596d..8e9e535a7634 100644 --- 
a/packages/create-redwood-app/src/create-redwood-app.js +++ b/packages/create-redwood-app/src/create-redwood-app.js @@ -38,6 +38,20 @@ const { telemetry } = Parser(hideBin(process.argv), { const tui = new RedwoodTUI() +function isYarnBerryOrNewer() { + const { npm_config_user_agent: npmConfigUserAgent } = process.env + + if (npmConfigUserAgent) { + const match = npmConfigUserAgent.match(/yarn\/(\d+)/) + + if (match && match[1]) { + return parseInt(match[1], 10) >= 2 + } + } + + return false +} + const USE_GITPOD_TEXT = [ ` As an alternative solution, you can launch a Redwood project using GitPod instead. GitPod is a an online IDE.`, ` See: ${terminalLink( @@ -669,11 +683,6 @@ async function createRedwoodApp() { type: 'string', describe: 'Commit message for the initial commit', }) - .option('yarn-install', { - default: null, - type: 'boolean', - describe: 'Install node modules. Skip via --no-yarn-install.', - }) .option('telemetry', { default: true, type: 'boolean', @@ -681,6 +690,17 @@ async function createRedwoodApp() { 'Enables sending telemetry events for this create command and all Redwood CLI commands https://telemetry.redwoodjs.com', }) + const _isYarnBerryOrNewer = isYarnBerryOrNewer() + + // Only add the yarn-install flag if the yarn version is >= 2 + if (_isYarnBerryOrNewer) { + cli.option('yarn-install', { + default: null, + type: 'boolean', + describe: 'Install node modules. Skip via --no-yarn-install.', + }) + } + const parsedFlags = cli.parse() tui.drawText( @@ -696,7 +716,9 @@ async function createRedwoodApp() { // Extract the args as provided by the user in the command line // TODO: Make all flags have the 'flag' suffix const args = parsedFlags._ - const yarnInstallFlag = parsedFlags['yarn-install'] ?? parsedFlags.yes + const yarnInstallFlag = + parsedFlags['yarn-install'] ?? + (_isYarnBerryOrNewer ? parsedFlags.yes : null) const typescriptFlag = parsedFlags.typescript ?? parsedFlags.yes const overwrite = parsedFlags.overwrite const gitInitFlag = parsedFlags['git-init'] ?? parsedFlags.yes @@ -734,7 +756,11 @@ async function createRedwoodApp() { commitMessage = await handleCommitMessagePreference(commitMessageFlag) } - const yarnInstall = await handleYarnInstallPreference(yarnInstallFlag) + let yarnInstall = false + + if (_isYarnBerryOrNewer) { + yarnInstall = await handleYarnInstallPreference(yarnInstallFlag) + } let newAppDir = path.resolve(process.cwd(), targetDir) @@ -750,7 +776,9 @@ async function createRedwoodApp() { .getActiveSpan() ?.setAttribute('yarn-install-time', Date.now() - yarnInstallStart) } else { - tui.drawText(`${RedwoodStyling.info('ℹ')} Skipped yarn install step`) + if (_isYarnBerryOrNewer) { + tui.drawText(`${RedwoodStyling.info('ℹ')} Skipped yarn install step`) + } } // Generate types diff --git a/packages/create-redwood-app/tests/e2e.test.ts b/packages/create-redwood-app/tests/e2e.test.ts index b0343c02446e..875d4f3ec834 100644 --- a/packages/create-redwood-app/tests/e2e.test.ts +++ b/packages/create-redwood-app/tests/e2e.test.ts @@ -28,12 +28,12 @@ describe('create-redwood-app', () => { --git-init, --git Initialize a git repository [boolean] [default: null] -m, --commit-message Commit message for the initial commit [string] [default: null] - --yarn-install Install node modules. Skip via --no-yarn-install. - [boolean] [default: null] --telemetry Enables sending telemetry events for this create command and all Redwood CLI commands https://telemetry.redwoodjs.com [boolean] [default: true] + --yarn-install Install node modules. Skip via --no-yarn-install. 
+ [boolean] [default: null] Examples: create-redwood-app my-redwood-app diff --git a/packages/eslint-plugin/build.mjs b/packages/eslint-plugin/build.mjs index 8fac87acabc5..75e861b63346 100644 --- a/packages/eslint-plugin/build.mjs +++ b/packages/eslint-plugin/build.mjs @@ -1,23 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -const sourceFiles = fg.sync(['./src/**/*.ts'], { ignore: ['./src/__tests__'] }) - -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/eslint-plugin/package.json b/packages/eslint-plugin/package.json index 9e1d5075d155..1f65a702e6fd 100644 --- a/packages/eslint-plugin/package.json +++ b/packages/eslint-plugin/package.json @@ -29,8 +29,6 @@ "@types/eslint": "8", "@types/estree": "1.0.5", "@typescript-eslint/parser": "5.62.0", - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "glob": "10.3.10", "tsx": "4.6.2", "typescript": "5.3.3" diff --git a/packages/fastify/build.mjs b/packages/fastify/build.mjs index 74a3ecf2f5e0..75e861b63346 100644 --- a/packages/fastify/build.mjs +++ b/packages/fastify/build.mjs @@ -1,19 +1,3 @@ -import * as esbuild from 'esbuild' +import { build } from '../../buildDefaults.mjs' -await esbuild.build({ - entryPoints: [ - 'src/api.ts', - 'src/config.ts', - 'src/index.ts', - 'src/types.ts', - 'src/web.ts', - 'src/lambda/index.ts', - ], - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', -}) +await build() diff --git a/packages/fastify/package.json b/packages/fastify/package.json index 0eb6d1619598..d180343d8405 100644 --- a/packages/fastify/package.json +++ b/packages/fastify/package.json @@ -35,7 +35,6 @@ "@types/aws-lambda": "8.10.126", "@types/lodash": "4.14.201", "@types/qs": "6.9.11", - "esbuild": "0.19.9", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/graphql-server/src/functions/__tests__/normalizeRequest.test.ts b/packages/graphql-server/src/functions/__tests__/normalizeRequest.test.ts deleted file mode 100644 index 3f9590dbae46..000000000000 --- a/packages/graphql-server/src/functions/__tests__/normalizeRequest.test.ts +++ /dev/null @@ -1,99 +0,0 @@ -import { Headers } from '@whatwg-node/fetch' -import type { APIGatewayProxyEvent } from 'aws-lambda' - -import { normalizeRequest } from '@redwoodjs/api' - -export const createMockedEvent = ( - httpMethod = 'POST', - body: any = undefined, - isBase64Encoded = false -): APIGatewayProxyEvent => { - return { - body, - headers: {}, - multiValueHeaders: {}, - httpMethod, - isBase64Encoded, - path: '/MOCK_PATH', - pathParameters: null, - queryStringParameters: null, - multiValueQueryStringParameters: null, - stageVariables: null, - requestContext: { - accountId: 'MOCKED_ACCOUNT', - apiId: 'MOCKED_API_ID', - authorizer: { name: 'MOCKED_AUTHORIZER' }, - protocol: 'HTTP', - identity: { - accessKey: null, - accountId: null, - apiKey: null, - apiKeyId: null, - caller: null, - clientCert: null, - cognitoAuthenticationProvider: null, - cognitoAuthenticationType: null, - cognitoIdentityId: null, - cognitoIdentityPoolId: null, - principalOrgId: null, 
- sourceIp: '123.123.123.123', - user: null, - userAgent: null, - userArn: null, - }, - httpMethod: 'POST', - path: '/MOCK_PATH', - stage: 'MOCK_STAGE', - requestId: 'MOCKED_REQUEST_ID', - requestTimeEpoch: 1, - resourceId: 'MOCKED_RESOURCE_ID', - resourcePath: 'MOCKED_RESOURCE_PATH', - }, - resource: 'MOCKED_RESOURCE', - } -} - -test('Normalizes an aws event with base64', () => { - const corsEventB64 = createMockedEvent( - 'POST', - Buffer.from(JSON.stringify({ bazinga: 'hello_world' }), 'utf8').toString( - 'base64' - ), - true - ) - - const normalizedRequest = normalizeRequest(corsEventB64) - const expectedRequest = { - headers: new Headers(corsEventB64.headers), - method: 'POST', - query: null, - body: { - bazinga: 'hello_world', - }, - } - - expect(normalizedRequest.method).toEqual(expectedRequest.method) - expect(normalizedRequest.query).toEqual(expectedRequest.query) - expect(normalizedRequest.body).toEqual(expectedRequest.body) - expectedRequest.headers.forEach((value, key) => { - expect(normalizedRequest.headers.get(key)).toEqual(value) - }) -}) - -test('Handles CORS requests with and without b64 encoded', () => { - const corsEventB64 = createMockedEvent('OPTIONS', undefined, true) - - const normalizedRequest = normalizeRequest(corsEventB64) - const expectedRequest = { - headers: new Headers(corsEventB64.headers), - method: 'OPTIONS', - query: null, - body: undefined, - } - expect(normalizedRequest.method).toEqual(expectedRequest.method) - expect(normalizedRequest.query).toEqual(expectedRequest.query) - expect(normalizedRequest.body).toEqual(expectedRequest.body) - expectedRequest.headers.forEach((value, key) => { - expect(normalizedRequest.headers.get(key)).toEqual(value) - }) -}) diff --git a/packages/mailer/core/.babelrc.js b/packages/mailer/core/.babelrc.js deleted file mode 100644 index cdc48920e1c2..000000000000 --- a/packages/mailer/core/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../../babel.config.js' } diff --git a/packages/mailer/core/build.mjs b/packages/mailer/core/build.mjs index dd87290560db..14b2d70d4a73 100644 --- a/packages/mailer/core/build.mjs +++ b/packages/mailer/core/build.mjs @@ -1,27 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts'], { - ignore: ['**/*.test.ts'], -}) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. 
- metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/mailer/core/package.json b/packages/mailer/core/package.json index 8d14ba239da4..58a9a4975b15 100644 --- a/packages/mailer/core/package.json +++ b/packages/mailer/core/package.json @@ -21,15 +21,8 @@ "test": "vitest run src", "test:watch": "vitest watch src" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "devDependencies": { "@redwoodjs/api": "6.0.7", - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "typescript": "5.3.3", "vitest": "1.2.1" }, diff --git a/packages/mailer/handlers/in-memory/build.mjs b/packages/mailer/handlers/in-memory/build.mjs index f173e7ab9024..2a6302021b2a 100644 --- a/packages/mailer/handlers/in-memory/build.mjs +++ b/packages/mailer/handlers/in-memory/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts']) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/mailer/handlers/in-memory/package.json b/packages/mailer/handlers/in-memory/package.json index a43f2f6e16c4..06494f4c6585 100644 --- a/packages/mailer/handlers/in-memory/package.json +++ b/packages/mailer/handlers/in-memory/package.json @@ -19,17 +19,10 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@redwoodjs/mailer-core": "6.0.7" }, "devDependencies": { - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/mailer/handlers/nodemailer/build.mjs b/packages/mailer/handlers/nodemailer/build.mjs index f173e7ab9024..2a6302021b2a 100644 --- a/packages/mailer/handlers/nodemailer/build.mjs +++ b/packages/mailer/handlers/nodemailer/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts']) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. 
- metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/mailer/handlers/nodemailer/package.json b/packages/mailer/handlers/nodemailer/package.json index fb8e6ef0fd04..935881edb622 100644 --- a/packages/mailer/handlers/nodemailer/package.json +++ b/packages/mailer/handlers/nodemailer/package.json @@ -19,19 +19,12 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@redwoodjs/mailer-core": "6.0.7", "nodemailer": "6.9.7" }, "devDependencies": { "@types/nodemailer": "^6", - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/mailer/handlers/resend/build.mjs b/packages/mailer/handlers/resend/build.mjs index f173e7ab9024..2a6302021b2a 100644 --- a/packages/mailer/handlers/resend/build.mjs +++ b/packages/mailer/handlers/resend/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts']) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/mailer/handlers/resend/package.json b/packages/mailer/handlers/resend/package.json index 7102aa3c8fc9..d77df3465cf6 100644 --- a/packages/mailer/handlers/resend/package.json +++ b/packages/mailer/handlers/resend/package.json @@ -19,18 +19,11 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@redwoodjs/mailer-core": "6.0.7", "resend": "1.1.0" }, "devDependencies": { - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/mailer/handlers/studio/build.mjs b/packages/mailer/handlers/studio/build.mjs index f173e7ab9024..2a6302021b2a 100644 --- a/packages/mailer/handlers/studio/build.mjs +++ b/packages/mailer/handlers/studio/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts']) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. 
- metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/mailer/handlers/studio/package.json b/packages/mailer/handlers/studio/package.json index 5054b248e467..149e32d024d1 100644 --- a/packages/mailer/handlers/studio/package.json +++ b/packages/mailer/handlers/studio/package.json @@ -19,19 +19,12 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@redwoodjs/mailer-core": "6.0.7", "@redwoodjs/mailer-handler-nodemailer": "6.0.7" }, "devDependencies": { "@types/nodemailer": "^6", - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/mailer/renderers/mjml-react/build.mjs b/packages/mailer/renderers/mjml-react/build.mjs index f173e7ab9024..2a6302021b2a 100644 --- a/packages/mailer/renderers/mjml-react/build.mjs +++ b/packages/mailer/renderers/mjml-react/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts']) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/mailer/renderers/mjml-react/package.json b/packages/mailer/renderers/mjml-react/package.json index 00c28ffd4f87..278f48e088f7 100644 --- a/packages/mailer/renderers/mjml-react/package.json +++ b/packages/mailer/renderers/mjml-react/package.json @@ -19,11 +19,6 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@faire/mjml-react": "3.3.0", "@redwoodjs/mailer-core": "6.0.7", @@ -31,8 +26,6 @@ }, "devDependencies": { "@types/mjml": "4", - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/mailer/renderers/react-email/build.mjs b/packages/mailer/renderers/react-email/build.mjs index f173e7ab9024..2a6302021b2a 100644 --- a/packages/mailer/renderers/react-email/build.mjs +++ b/packages/mailer/renderers/react-email/build.mjs @@ -1,25 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -// Get source files -const sourceFiles = fg.sync(['./src/**/*.ts']) - -// Build general source files -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. 
- metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/mailer/renderers/react-email/package.json b/packages/mailer/renderers/react-email/package.json index a5a07b4f68cd..252724127dee 100644 --- a/packages/mailer/renderers/react-email/package.json +++ b/packages/mailer/renderers/react-email/package.json @@ -19,18 +19,11 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@react-email/render": "0.0.10", "@redwoodjs/mailer-core": "6.0.7" }, "devDependencies": { - "esbuild": "0.19.9", - "fast-glob": "3.3.2", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/project-config/build.js b/packages/project-config/build.js index 4560f1d78590..6db5ba7e0e40 100644 --- a/packages/project-config/build.js +++ b/packages/project-config/build.js @@ -1,28 +1,25 @@ -/* eslint-disable import/no-extraneous-dependencies */ - -import * as esbuild from 'esbuild' +import { build, defaultBuildOptions } from '../../buildDefaults.mjs' const options = { - entryPoints: ['./src/index.ts'], - outdir: 'dist', - - platform: 'node', - target: ['node20'], + ...defaultBuildOptions, bundle: true, + entryPoints: ['./src/index.ts'], packages: 'external', - - logLevel: 'info', - metafile: true, } -await esbuild.build({ - ...options, - format: 'esm', - outExtension: { '.js': '.mjs' }, +// ESM build. +await build({ + buildOptions: { + ...options, + format: 'esm', + outExtension: { '.js': '.mjs' }, + }, }) -await esbuild.build({ - ...options, - format: 'cjs', - outExtension: { '.js': '.cjs' }, +// CJS build. +await build({ + buildOptions: { + ...options, + outExtension: { '.js': '.cjs' }, + }, }) diff --git a/packages/project-config/package.json b/packages/project-config/package.json index 2c1e5f34767c..e6d3d8748126 100644 --- a/packages/project-config/package.json +++ b/packages/project-config/package.json @@ -33,7 +33,6 @@ "string-env-interpolation": "1.0.1" }, "devDependencies": { - "esbuild": "0.19.9", "rimraf": "5.0.5", "typescript": "5.3.3", "vitest": "1.2.1" diff --git a/packages/project-config/tsconfig.json b/packages/project-config/tsconfig.json index 78a31c197ddd..23e3bf19df5b 100644 --- a/packages/project-config/tsconfig.json +++ b/packages/project-config/tsconfig.json @@ -7,5 +7,5 @@ "rootDir": "src", "outDir": "dist", }, - "include": ["src/**/*"], + "include": ["src"], } diff --git a/packages/realtime/build.mjs b/packages/realtime/build.mjs index 2cec19a1a453..ff512ad9698a 100644 --- a/packages/realtime/build.mjs +++ b/packages/realtime/build.mjs @@ -1,22 +1,10 @@ -import fs from 'node:fs' - -import * as esbuild from 'esbuild' - -const result = await esbuild.build({ - entryPoints: ['src/index.ts'], - outdir: 'dist', - - bundle: true, - - platform: 'node', - target: ['node20'], - packages: 'external', - - logLevel: 'info', - - // For visualizing the bundle. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. 
- metafile: true, +import { build, defaultBuildOptions } from '../../buildDefaults.mjs' + +await build({ + buildOptions: { + ...defaultBuildOptions, + bundle: true, + entryPoints: ['src/index.ts'], + packages: 'external', + }, }) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile)) diff --git a/packages/realtime/package.json b/packages/realtime/package.json index faad07152bd7..71727572f266 100644 --- a/packages/realtime/package.json +++ b/packages/realtime/package.json @@ -40,7 +40,6 @@ "@envelop/core": "5.0.0", "@envelop/testing": "6.0.3", "@envelop/types": "4.0.1", - "esbuild": "0.19.9", "jest": "29.7.0", "nodemon": "3.0.2", "typescript": "5.3.3" diff --git a/packages/record/package.json b/packages/record/package.json index ff135d97ff9d..69efd8aaaaed 100644 --- a/packages/record/package.json +++ b/packages/record/package.json @@ -22,11 +22,6 @@ "test": "vitest run src", "test:watch": "vitest watch src" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@babel/runtime-corejs3": "7.23.6", "@prisma/client": "5.7.0", diff --git a/packages/structure/package.json b/packages/structure/package.json index 71a0fc937236..6d9b2d23c1bd 100644 --- a/packages/structure/package.json +++ b/packages/structure/package.json @@ -21,14 +21,8 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build", "prettier": "prettier --write './src/**/*.{ts,tsx}'", - "test": "jest src", - "test:watch": "yarn test --watch" - }, - "jest": { - "testPathIgnorePatterns": [ - "/fixtures/", - "/dist/" - ] + "test": "vitest run", + "test:watch": "vitest watch" }, "dependencies": { "@babel/runtime-corejs3": "7.23.6", @@ -63,8 +57,8 @@ "@types/lru-cache": "7.10.10", "@types/node": "20.10.4", "@types/vscode": "1.79.1", - "jest": "29.7.0", - "typescript": "5.3.3" + "typescript": "5.3.3", + "vitest": "1.2.1" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" } diff --git a/packages/structure/src/model/__tests__/model.test.ts b/packages/structure/src/model/__tests__/model.test.ts index a59848dbbdcc..47b7bef47220 100644 --- a/packages/structure/src/model/__tests__/model.test.ts +++ b/packages/structure/src/model/__tests__/model.test.ts @@ -1,5 +1,7 @@ import { basename, resolve } from 'path' +import { describe, it, expect } from 'vitest' + import { DefaultHost } from '../../hosts' import { URL_file } from '../../x/URL' import { RWProject } from '../RWProject' diff --git a/packages/structure/src/model/util/__tests__/advanced_path_parser.test.ts b/packages/structure/src/model/util/__tests__/advanced_path_parser.test.ts index c7e3557c5b33..00f5760e6f15 100644 --- a/packages/structure/src/model/util/__tests__/advanced_path_parser.test.ts +++ b/packages/structure/src/model/util/__tests__/advanced_path_parser.test.ts @@ -1,4 +1,7 @@ +import { describe, test } from 'vitest' + import { advanced_path_parser } from '../advanced_path_parser' + describe('advanced_path_parser', () => { test('it works', () => { const route = '/foo/{param1}/bar/{baz:Int}/x' diff --git a/packages/structure/src/model/util/__tests__/process_env_diagnostics.test.ts b/packages/structure/src/model/util/__tests__/process_env_diagnostics.test.ts index 4000b7fcc0e9..4c0f7bb069da 100644 --- a/packages/structure/src/model/util/__tests__/process_env_diagnostics.test.ts +++ b/packages/structure/src/model/util/__tests__/process_env_diagnostics.test.ts @@ -1,5 +1,7 @@ import { resolve, join } from 'path' +import { describe, test, expect } 
from 'vitest' + import { process_env_findInFile, process_env_findAll } from '../process_env' describe('process_env_findInFile', () => { diff --git a/packages/structure/src/outline/__tests__/outline.test.ts b/packages/structure/src/outline/__tests__/outline.test.ts index 9fd60e83c2b3..4b37c63960c6 100644 --- a/packages/structure/src/outline/__tests__/outline.test.ts +++ b/packages/structure/src/outline/__tests__/outline.test.ts @@ -1,5 +1,7 @@ import { resolve } from 'path' +import { describe, it } from 'vitest' + import { DefaultHost } from '../../hosts' import { RWProject } from '../../model' import { getOutline } from '../outline' diff --git a/packages/structure/src/x/__tests__/URL.test.ts b/packages/structure/src/x/__tests__/URL.test.ts index e3ed431060b1..79eb8c90efcd 100644 --- a/packages/structure/src/x/__tests__/URL.test.ts +++ b/packages/structure/src/x/__tests__/URL.test.ts @@ -1,5 +1,7 @@ import { sep } from 'path' +import { describe, it, expect } from 'vitest' + import { URL_file, URL_toFile } from '../URL' describe('URL_fromFile', () => { diff --git a/packages/structure/src/x/__tests__/prisma.test.ts b/packages/structure/src/x/__tests__/prisma.test.ts index 7fa27b97de39..68818eb7366d 100644 --- a/packages/structure/src/x/__tests__/prisma.test.ts +++ b/packages/structure/src/x/__tests__/prisma.test.ts @@ -1,3 +1,4 @@ +import { describe, it, expect } from 'vitest' import { Range } from 'vscode-languageserver' import { prisma_parseEnvExpressions } from '../prisma' diff --git a/packages/structure/src/x/__tests__/vscode-languageserver-types-x.test.ts b/packages/structure/src/x/__tests__/vscode-languageserver-types-x.test.ts index 16ae77296f78..b5b795465d9a 100644 --- a/packages/structure/src/x/__tests__/vscode-languageserver-types-x.test.ts +++ b/packages/structure/src/x/__tests__/vscode-languageserver-types-x.test.ts @@ -1,3 +1,4 @@ +import { describe, it, expect, test } from 'vitest' import { DiagnosticSeverity, Position, diff --git a/packages/telemetry/package.json b/packages/telemetry/package.json index 6cb322e60c79..a834e77395a5 100644 --- a/packages/telemetry/package.json +++ b/packages/telemetry/package.json @@ -21,11 +21,6 @@ "test": "vitest run src", "test:watch": "vitest watch src" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "@babel/runtime-corejs3": "7.23.6", "@redwoodjs/project-config": "6.0.7", diff --git a/packages/tui/build.mjs b/packages/tui/build.mjs index f572f24d1308..75e861b63346 100644 --- a/packages/tui/build.mjs +++ b/packages/tui/build.mjs @@ -1,19 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../buildDefaults.mjs' -import * as esbuild from 'esbuild' - -// Since this is a library, there's no bundling going on here by design. -// Instead we plan for this library to be bundled by leaf packages so-to-speak like create-redwood-app. -const result = await esbuild.build({ - entryPoints: ['src/index.ts'], - format: 'cjs', - platform: 'node', - target: ['node20'], - outfile: 'dist/index.js', - - // For visualizing the bundle. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. 
- metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile)) +await build() diff --git a/packages/tui/package.json b/packages/tui/package.json index f5be200fe3a1..9c47deab1214 100644 --- a/packages/tui/package.json +++ b/packages/tui/package.json @@ -19,11 +19,6 @@ "build:watch": "nodemon --watch src --ext \"js,jsx,ts,tsx\" --ignore dist --exec \"yarn build\"", "prepublishOnly": "NODE_ENV=production yarn build" }, - "jest": { - "testPathIgnorePatterns": [ - "/dist/" - ] - }, "dependencies": { "boxen": "5.1.2", "chalk": "4.1.2", @@ -31,7 +26,6 @@ "stdout-update": "1.6.8" }, "devDependencies": { - "esbuild": "0.19.9", "typescript": "5.3.3" }, "gitHead": "3905ed045508b861b495f8d5630d76c7a157d8f1" diff --git a/packages/web-server/.babelrc.js b/packages/web-server/.babelrc.js deleted file mode 100644 index 3b2c815712d9..000000000000 --- a/packages/web-server/.babelrc.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = { extends: '../../babel.config.js' } diff --git a/packages/web-server/build.mjs b/packages/web-server/build.mjs index 8fac87acabc5..75e861b63346 100644 --- a/packages/web-server/build.mjs +++ b/packages/web-server/build.mjs @@ -1,23 +1,3 @@ -import fs from 'node:fs' +import { build } from '../../buildDefaults.mjs' -import * as esbuild from 'esbuild' -import fg from 'fast-glob' - -const sourceFiles = fg.sync(['./src/**/*.ts'], { ignore: ['./src/__tests__'] }) - -const result = await esbuild.build({ - entryPoints: sourceFiles, - outdir: 'dist', - - format: 'cjs', - platform: 'node', - target: ['node20'], - - logLevel: 'info', - - // For visualizing dist. - // See https://esbuild.github.io/api/#metafile and https://esbuild.github.io/analyze/. - metafile: true, -}) - -fs.writeFileSync('meta.json', JSON.stringify(result.metafile, null, 2)) +await build() diff --git a/packages/web/src/components/ServerInject.tsx b/packages/web/src/components/ServerInject.tsx index ae5e5b214c71..225614db6dbf 100644 --- a/packages/web/src/components/ServerInject.tsx +++ b/packages/web/src/components/ServerInject.tsx @@ -70,14 +70,3 @@ export function useServerInsertedHTML(callback: () => React.ReactNode): void { addInsertedServerHTMLCallback(callback) } } - -// @TODO use this in streamHelpers final block -export const AppendToHead = ({ tagsToAppend }: { tagsToAppend: string }) => { - return ( -