diff --git a/.eslintrc.js b/.eslintrc.js
index 692ec549f898d..59dae5ce57ff3 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -292,6 +292,7 @@ module.exports = {
                 ],
             },
         ],
+        'no-else-return': 'warn'
     },
     overrides: [
         {
diff --git a/cypress/e2e/surveys.cy.ts b/cypress/e2e/surveys.cy.ts
index fd3a60ca6bc1a..55a87ddbffe60 100644
--- a/cypress/e2e/surveys.cy.ts
+++ b/cypress/e2e/surveys.cy.ts
@@ -281,4 +281,23 @@ describe('Surveys', () => {
             .contains('DRAFT')
             .should('exist')
     })
+
+    it('can set responses limit', () => {
+        cy.get('h1').should('contain', 'Surveys')
+        cy.get('[data-attr=new-survey]').click()
+        cy.get('[data-attr=new-blank-survey]').click()
+
+        cy.get('[data-attr=survey-name]').focus().type(name)
+
+        // Set responses limit
+        cy.get('.LemonCollapsePanel').contains('Completion conditions').click()
+        cy.get('[data-attr=survey-responses-limit-input]').focus().type('228').click()
+
+        // Save the survey
+        cy.get('[data-attr=save-survey]').first().click()
+        cy.get('button[data-attr="launch-survey"]').should('have.text', 'Launch')
+
+        cy.reload()
+        cy.contains('The survey will be stopped once 228 responses are received.').should('be.visible')
+    })
 })
diff --git a/frontend/__snapshots__/scenes-app-dashboards--erroring--dark.png b/frontend/__snapshots__/scenes-app-dashboards--erroring--dark.png
new file mode 100644
index 0000000000000..f1a9183eb6846
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-dashboards--erroring--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-dashboards--erroring--light.png b/frontend/__snapshots__/scenes-app-dashboards--erroring--light.png
new file mode 100644
index 0000000000000..6575373d99bca
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-dashboards--erroring--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-dashboards--not-found--dark.png b/frontend/__snapshots__/scenes-app-dashboards--not-found--dark.png
new file mode 100644
index 0000000000000..5c446ee87eee6
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-dashboards--not-found--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-dashboards--not-found--light.png b/frontend/__snapshots__/scenes-app-dashboards--not-found--light.png
new file mode 100644
index 0000000000000..62d735187a94a
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-dashboards--not-found--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png
index 3bef2b27e6bd8..885f2a5702c0a 100644
Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png
index 52ffe7a338bca..169ad1e1093be 100644
Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--estimated-query-execution-time-too-long--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png
index f575e7615f929..fcc49da4ac9f8 100644
Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png
index d60541174dfda..a899188e78de2 100644
Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--server-error--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png
index 9c94b509734fc..2628564c68138 100644
Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png
index 11c5cd31a9701..fe530f50f66ff 100644
Binary files a/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png and b/frontend/__snapshots__/scenes-app-insights-error-empty-states--validation-error--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png
index c17dad0b17e61..6b35aa0c9aa40 100644
Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--light.png
index 474c85c22515e..7c438d20e10f8 100644
Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-support-no-email--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png
index 92e985fd14840..c891cc11c1495 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png
index e08a8d54ebfa3..03107f4105ab8 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--dark.png
index 8107500bf02b9..73fcf46c2537a 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--light.png
index 5ee862cb8a7c2..3720fb3b2f2ce 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-appearance-section--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png
index 9a52778e02f1d..bbb49aee27975 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png
index b1c98bd5be96c..35d4ffbea5e2a 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-customisation-section--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--dark.png
index 85e8e5939649e..2979dd4f45bff 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--light.png
index b4f3e8c8f057b..098a7d1a35d6d 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-presentation-section--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png
index db9932e1f2320..49f611b6e5f06 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--light.png b/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--light.png
index bbbe76e720788..25c9dc6e9f764 100644
Binary files a/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--light.png and b/frontend/__snapshots__/scenes-app-surveys--new-survey-targeting-section--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png
index 02b1fdf3d65c3..c3a204e9d821d 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png
index ae4d45535e16d..9035f956108e3 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png
index ca9cb5bc87861..164509ab972b4 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png
index 1cefef9bc27f8..b506ac15c6f5f 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png
index dc8141139bfb4..37ba76926c200 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png
index a02e8534103ef..186af373d26d8 100644
Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png differ
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
index ec21983e256ba..7cea4611511e9 100644
--- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
+++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelSupport.tsx
@@ -1,5 +1,4 @@
 import {
-    IconBug,
     IconChevronDown,
     IconFeatures,
     IconFlask,
@@ -76,8 +75,8 @@ const SupportFormBlock = ({ onCancel }: { onCancel: () => void }): JSX.Element =
 
     // TODO(@zach): remove after updated plans w/ support levels are shipped
     const supportResponseTimes = {
-        [AvailableFeature.EMAIL_SUPPORT]: '2-3 days',
-        [AvailableFeature.PRIORITY_SUPPORT]: '4-6 hours',
+        [AvailableFeature.EMAIL_SUPPORT]: '24 hours',
+        [AvailableFeature.PRIORITY_SUPPORT]: '12 hours',
     }
 
     return (
@@ -144,7 +143,6 @@ const SupportFormBlock = ({ onCancel }: { onCancel: () => void }): JSX.Element =
 
 export const SidePanelSupport = (): JSX.Element => {
     const { openSidePanel, closeSidePanel } = useActions(sidePanelStateLogic)
-    const { hasAvailableFeature } = useValues(userLogic)
     const { openEmailForm, closeEmailForm } = useActions(supportLogic)
     const { isEmailFormOpen } = useValues(supportLogic)
     const { preflight } = useValues(preflightLogic)
@@ -214,64 +212,29 @@ export const SidePanelSupport = (): JSX.Element => {
                     ) : null}
-

Can't find what you need in the docs?

- openEmailForm()} - targetBlank - className="mt-2" - > - Email an engineer - -
-
-

- Questions about features, how to's, or use cases? There are thousands of - discussions in our community forums.{' '} - Ask a question -

-
- - ) : ( -
-

- Questions about features, how to's, or use cases? There are thousands of - discussions in our community forums. -

- - Ask a question - -
- )} +
+

Can't find what you need in the docs?

+ openEmailForm()} + targetBlank + className="mt-2" + > + Email an engineer + +
+
+

+ Questions about features, how to's, or use cases? There are thousands of discussions + in our community forums.{' '} + Ask a question +

+
- - {hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT) || - window.location.href.includes(urls.organizationBilling()) ? null : ( -
-

- Due to our large userbase, we're unable to offer email support to organizations - on the free plan. But we still want to help! -

- -
    -
  1. - Search our docs -

    - We're constantly updating our docs and tutorials to provide the latest - information about installing, using, and troubleshooting. -

    -
  2. -
  3. - Ask a community question -

    - Many common (and niche) questions have already been resolved by users - just like you. (Our own engineers also keep an eye on the questions as - they have time!){' '} - - Search community questions or ask your own. - -

    -
  4. -
  5. - - Explore PostHog partners - -

    - Third-party providers can help with installation and debugging of data - issues. -

    -
  6. -
  7. - Upgrade to a paid plan -

    - Our paid plans offer email support.{' '} - - Explore options. - -

    -
  8. -
-
-
                        )}
                    )}
diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts
index 22d997bd673f5..180461c465996 100644
--- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts
+++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/activityForSceneLogic.ts
@@ -50,9 +50,8 @@ export const activityForSceneLogic = kea([
                         state,
                         activeLoadedScene?.paramsToProps?.(activeLoadedScene?.sceneParams) || props
                     )
-                } else {
-                    return activityFiltersForScene(sceneConfig)
                 }
+                return activityFiltersForScene(sceneConfig)
             },
         ],
         (filters): ActivityFilters | null => filters,
diff --git a/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts b/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts
index a65a0d6947997..2a2432b8d940c 100644
--- a/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts
+++ b/frontend/src/lib/components/AnnotationsOverlay/useAnnotationsPositioning.ts
@@ -31,11 +31,10 @@ export function useAnnotationsPositioning(
                 tickIntervalPx: (lastTickLeftPx - firstTickLeftPx) / (tickCount - 1),
                 firstTickLeftPx,
             }
-        } else {
-            return {
-                tickIntervalPx: 0,
-                firstTickLeftPx: 0,
-            }
+        }
+        return {
+            tickIntervalPx: 0,
+            firstTickLeftPx: 0,
         }
    }, [chart, chartWidth, chartHeight])
 }
diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx
index 084371830fa99..96619d094f892 100644
--- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx
+++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.tsx
@@ -215,13 +215,13 @@ export function FilterBasedCardContent({
             {tooFewFunnelSteps ? (
             ) : validationError ? (
-
+
             ) : empty ? (
             ) : !loading && timedOut ? (
             ) : apiErrored && !loading ? (
-
+
             ) : (
                 !apiErrored &&
             )}
@@ -348,6 +348,7 @@ function InsightCardInternal(
                 ) : (
diff --git a/frontend/src/lib/components/CommandBar/utils.ts b/frontend/src/lib/components/CommandBar/utils.ts
index cb7a621c6d423..00de3bd6b5f12 100644
--- a/frontend/src/lib/components/CommandBar/utils.ts
+++ b/frontend/src/lib/components/CommandBar/utils.ts
@@ -3,7 +3,6 @@ import { actionScopeToName } from './constants'
 export const getNameFromActionScope = (scope: string): string => {
     if (scope in actionScopeToName) {
         return actionScopeToName[scope]
-    } else {
-        return scope
     }
+    return scope
 }
diff --git a/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx b/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx
index 21cce7c1c9974..a4320dd9842d1 100644
--- a/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx
+++ b/frontend/src/lib/components/DatabaseTableTree/TreeRow.tsx
@@ -19,7 +19,7 @@ export interface TreeRowProps {
 export function TreeRow({ item, onClick, selected }: TreeRowProps): JSX.Element {
     const _onClick = useCallback(() => {
         onClick && onClick(item.table)
-    }, [])
+    }, [onClick, item])
 
     return (
diff --git a/frontend/src/lib/components/JSBookmarklet.tsx b/frontend/src/lib/components/JSBookmarklet.tsx
index c0cee95aed608..19827c037a73d 100644
--- a/frontend/src/lib/components/JSBookmarklet.tsx
+++ b/frontend/src/lib/components/JSBookmarklet.tsx
@@ -1,12 +1,15 @@
 import { useActions } from 'kea'
 import { IconBookmarkBorder } from 'lib/lemon-ui/icons'
+import { apiHostOrigin } from 'lib/utils/apiHost'
 import { eventUsageLogic } from 'lib/utils/eventUsageLogic'
 import { useEffect, useRef } from 'react'
 
 import { TeamBasicType } from '~/types'
 
 export function JSBookmarklet({ team }: { team: TeamBasicType }): JSX.Element {
-    const initCall = `posthog.init('${team?.api_token}',{api_host:'${location.origin}', loaded: () => alert('PostHog is now tracking events!')})`
+    const initCall = `posthog.init('${
+        team?.api_token
+    }',{api_host:'${apiHostOrigin()}', loaded: () => alert('PostHog is now tracking events!')})`
     const href = `javascript:(function()%7Bif%20(window.posthog)%20%7Balert(%22Error%3A%20PostHog%20already%20is%20installed%20on%20this%20site%22)%7D%20else%20%7B!function(t%2Ce)%7Bvar%20o%2Cn%2Cp%2Cr%3Be.__SV%7C%7C(window.posthog%3De%2Ce._i%3D%5B%5D%2Ce.init%3Dfunction(i%2Cs%2Ca)%7Bfunction%20g(t%2Ce)%7Bvar%20o%3De.split(%22.%22)%3B2%3D%3Do.length%26%26(t%3Dt%5Bo%5B0%5D%5D%2Ce%3Do%5B1%5D)%2Ct%5Be%5D%3Dfunction()%7Bt.push(%5Be%5D.concat(Array.prototype.slice.call(arguments%2C0)))%7D%7D(p%3Dt.createElement(%22script%22)).type%3D%22text%2Fjavascript%22%2Cp.async%3D!0%2Cp.src%3Ds.api_host%2B%22%2Fstatic%2Farray.js%22%2C(r%3Dt.getElementsByTagName(%22script%22)%5B0%5D).parentNode.insertBefore(p%2Cr)%3Bvar%20u%3De%3Bfor(void%200!%3D%3Da%3Fu%3De%5Ba%5D%3D%5B%5D%3Aa%3D%22posthog%22%2Cu.people%3Du.people%7C%7C%5B%5D%2Cu.toString%3Dfunction(t)%7Bvar%20e%3D%22posthog%22%3Breturn%22posthog%22!%3D%3Da%26%26(e%2B%3D%22.%22%2Ba)%2Ct%7C%7C(e%2B%3D%22%20(stub)%22)%2Ce%7D%2Cu.people.toString%3Dfunction()%7Breturn%20u.toString(1)%2B%22.people%20(stub)%22%7D%2Co%3D%22capture%20identify%20alias%20people.set%20people.set_once%20set_config%20register%20register_once%20unregister%20opt_out_capturing%20has_opted_out_capturing%20opt_in_capturing%20reset%20isFeatureEnabled%20onFeatureFlags%22.split(%22%20%22)%2Cn%3D0%3Bn%3Co.length%3Bn%2B%2B)g(u%2Co%5Bn%5D)%3Be._i.push(%5Bi%2Cs%2Ca%5D)%7D%2Ce.__SV%3D1)%7D(document%2Cwindow.posthog%7C%7C%5B%5D)%3B${encodeURIComponent(
        initCall
    )}%7D%7D)()`
diff --git a/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts b/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts
index 08fd06bdeedbd..9e96aa3599339 100644
--- a/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts
+++ b/frontend/src/lib/components/PropertyFilters/propertyFilterLogic.ts
@@ -63,9 +63,8 @@ export const propertyFilterLogic = kea([
             (filters) => {
                 if (filters.length === 0 || isValidPropertyFilter(filters[filters.length - 1])) {
                     return [...filters, {} as AnyPropertyFilter]
-                } else {
-                    return filters
                 }
+                return filters
             },
         ],
     }),
diff --git a/frontend/src/lib/components/Support/SupportForm.tsx b/frontend/src/lib/components/Support/SupportForm.tsx
index fe67dc43db6e5..32d6f72ff5675 100644
--- a/frontend/src/lib/components/Support/SupportForm.tsx
+++ b/frontend/src/lib/components/Support/SupportForm.tsx
@@ -19,8 +19,6 @@ import { useEffect, useRef } from 'react'
 import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic'
 import { userLogic } from 'scenes/userLogic'
 
-import { AvailableFeature } from '~/types'
-
 import {
     SEVERITY_LEVEL_TO_NAME,
     SUPPORT_TICKET_TEMPLATES,
@@ -58,7 +56,7 @@ export function SupportForm(): JSX.Element | null {
     const { setSendSupportRequestValue } = useActions(supportLogic)
     const { objectStorageAvailable } = useValues(preflightLogic)
     // the support model can be shown when logged out, file upload is not offered to anonymous users
-    const { user, hasAvailableFeature } = useValues(userLogic)
+    const { user } = useValues(userLogic)
     // only allow authentication issues for logged out users
     const dropRef = useRef(null)
@@ -131,8 +129,6 @@ export function SupportForm(): JSX.Element | null {
                 disabledReason={
                     !user
                         ? 'Please login to your account before opening a ticket unrelated to authentication issues.'
-                        : !hasAvailableFeature(AvailableFeature.EMAIL_SUPPORT)
-                        ? 'You can only create billing related issues while viewing the billing page.'
                         : null
                 }
                 fullWidth
diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts
index 9adb17a94a5e2..1e58e31236bbd 100644
--- a/frontend/src/lib/components/Support/supportLogic.ts
+++ b/frontend/src/lib/components/Support/supportLogic.ts
@@ -8,7 +8,6 @@ import { uuid } from 'lib/utils'
 import posthog from 'posthog-js'
 import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic'
 import { teamLogic } from 'scenes/teamLogic'
-import { urls } from 'scenes/urls'
 import { userLogic } from 'scenes/userLogic'
 
 import { sidePanelStateLogic } from '~/layout/navigation-3000/sidepanel/sidePanelStateLogic'
@@ -167,10 +166,10 @@ export const TARGET_AREA_TO_NAME = [
 ]
 
 export const SEVERITY_LEVEL_TO_NAME = {
-    critical: 'Product outage / data loss / data breach',
-    high: 'Specific feature not working at all',
-    medium: 'Feature functioning but not as expected',
-    low: 'General question or feature request',
+    critical: 'Outage, data loss, or data breach',
+    high: 'Feature is not working at all',
+    medium: 'Feature not working as expected',
+    low: 'Question or feature request',
 }
 
 export const SUPPORT_KIND_TO_SUBJECT = {
@@ -338,11 +337,6 @@ export const supportLogic = kea([
             actions.setSidePanelOptions(panelOptions)
         }
     },
-    openEmailForm: async () => {
-        if (window.location.href.includes(urls.organizationBilling())) {
-            actions.setSendSupportRequestValue('target_area', 'billing')
-        }
-    },
     openSupportForm: async ({ name, email, kind, target_area, severity_level, message }) => {
         let area = target_area ?? getURLPathToTargetArea(window.location.pathname)
         if (!userLogic.values.user) {
diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx
index ca029c4102896..5c1269eaf76ce 100644
--- a/frontend/src/lib/constants.tsx
+++ b/frontend/src/lib/constants.tsx
@@ -209,7 +209,6 @@ export const FEATURE_FLAGS = {
     SESSION_REPLAY_MOBILE_ONBOARDING: 'session-replay-mobile-onboarding', // owner: #team-replay
     IP_ALLOWLIST_SETTING: 'ip-allowlist-setting', // owner: @benjackwhite
     EMAIL_VERIFICATION_TICKET_SUBMISSION: 'email-verification-ticket-submission', // owner: #team-growth
-    TOOLBAR_HEATMAPS: 'toolbar-heatmaps', // owner: #team-replay
     THEME: 'theme', // owner: @aprilfools
     SESSION_TABLE_PROPERTY_FILTERS: 'session-table-property-filters', // owner: @robbie-c
 } as const
diff --git a/frontend/src/lib/hooks/useOutsideClickHandler.ts b/frontend/src/lib/hooks/useOutsideClickHandler.ts
index c69acfd21fb20..c7342f659842d 100644
--- a/frontend/src/lib/hooks/useOutsideClickHandler.ts
+++ b/frontend/src/lib/hooks/useOutsideClickHandler.ts
@@ -21,10 +21,9 @@ export function useOutsideClickHandler(
                 allRefs.some((maybeRef) => {
                     if (typeof maybeRef === 'string') {
                         return event.composedPath?.()?.find((e) => (e as HTMLElement)?.matches?.(maybeRef))
-                    } else {
-                        const ref = maybeRef.current
-                        return event.target && ref && `contains` in ref && ref.contains(event.target as Element)
                     }
+                    const ref = maybeRef.current
+                    return event.target && ref && `contains` in ref && ref.contains(event.target as Element)
                 })
             ) {
                 return
diff --git a/frontend/src/lib/introductions/groupsAccessLogic.ts b/frontend/src/lib/introductions/groupsAccessLogic.ts
index 37bcb2e97972c..e00ca51dc0bee 100644
--- a/frontend/src/lib/introductions/groupsAccessLogic.ts
+++ b/frontend/src/lib/introductions/groupsAccessLogic.ts
@@ -37,9 +37,8 @@ export const groupsAccessLogic = kea([
                 return GroupsAccessStatus.HasAccess
             } else if (hasGroups) {
                 return GroupsAccessStatus.HasGroupTypes
-            } else {
-                return GroupsAccessStatus.NoAccess
             }
+            return GroupsAccessStatus.NoAccess
         },
     ],
     needsUpgradeForGroups: [
diff --git a/frontend/src/lib/lemon-ui/hooks.ts b/frontend/src/lib/lemon-ui/hooks.ts
index da53cbe7538eb..d9d5aa8675f77 100644
--- a/frontend/src/lib/lemon-ui/hooks.ts
+++ b/frontend/src/lib/lemon-ui/hooks.ts
@@ -29,9 +29,8 @@ export function useSliderPositioning
                 setTransitioning(false), transitionMs)
             return () => clearTimeout(transitioningTimeout)
-        } else {
-            hasRenderedInitiallyRef.current = true
         }
+        hasRenderedInitiallyRef.current = true
     }
 }, [currentValue, containerWidth])
diff --git a/frontend/src/lib/logic/featureFlagLogic.ts b/frontend/src/lib/logic/featureFlagLogic.ts
index 4fa049bacc0db..db646d26d7656 100644
--- a/frontend/src/lib/logic/featureFlagLogic.ts
+++ b/frontend/src/lib/logic/featureFlagLogic.ts
@@ -54,22 +54,21 @@ function spyOnFeatureFlags(featureFlags: FeatureFlagsSet): FeatureFlagsSet {
                 },
             }
         )
-    } else {
-        // Fallback for IE11. Won't track "false" results. ¯\_(ツ)_/¯
-        const flags: FeatureFlagsSet = {}
-        for (const flag of Object.keys(availableFlags)) {
-            Object.defineProperty(flags, flag, {
-                get: function () {
-                    if (flag === 'toJSON') {
-                        return () => availableFlags
-                    }
-                    notifyFlagIfNeeded(flag, true)
-                    return true
-                },
-            })
-        }
-        return flags
     }
+    // Fallback for IE11. Won't track "false" results. ¯\_(ツ)_/¯
+    const flags: FeatureFlagsSet = {}
+    for (const flag of Object.keys(availableFlags)) {
+        Object.defineProperty(flags, flag, {
+            get: function () {
+                if (flag === 'toJSON') {
+                    return () => availableFlags
+                }
+                notifyFlagIfNeeded(flag, true)
+                return true
+            },
+        })
+    }
+    return flags
 }
 
 export const featureFlagLogic = kea([
diff --git a/frontend/src/lib/sortable.ts b/frontend/src/lib/sortable.ts
index 3b62cb4ea7db9..edad9ff60ecd6 100644
--- a/frontend/src/lib/sortable.ts
+++ b/frontend/src/lib/sortable.ts
@@ -8,9 +8,8 @@ import { CollisionDetection, DroppableContainer, UniqueIdentifier } from '@dnd-k
 export const verticalSortableListCollisionDetection: CollisionDetection = (args) => {
     if (args.collisionRect.top < (args.active.rect.current?.initial?.top ?? 0)) {
         return highestDroppableContainerMajorityCovered(args)
-    } else {
-        return lowestDroppableContainerMajorityCovered(args)
     }
+    return lowestDroppableContainerMajorityCovered(args)
 }
 
 // Look for the first (/ furthest up / highest) droppable container that is at least
diff --git a/frontend/src/mocks/utils.ts b/frontend/src/mocks/utils.ts
index a1d23f28969fb..e6bcda39be06a 100644
--- a/frontend/src/mocks/utils.ts
+++ b/frontend/src/mocks/utils.ts
@@ -32,12 +32,10 @@ export const mocksToHandlers = (mocks: Mocks): ReturnType<(typeof rest)['get']>[
                 return res(...responseArray)
             } else if (!response) {
                 return res()
-            } else {
-                return response
             }
-        } else {
-            return res(ctx.json(handler ?? null))
+            return response
         }
+        return res(ctx.json(handler ?? null))
     })
 )
 })
diff --git a/frontend/src/models/groupsModel.ts b/frontend/src/models/groupsModel.ts
index eb8babc316dc2..83a76fa89c1ab 100644
--- a/frontend/src/models/groupsModel.ts
+++ b/frontend/src/models/groupsModel.ts
@@ -83,11 +83,10 @@ export const groupsModel = kea([
                     singular: groupType.name_plural || groupType.group_type,
                     plural: groupType.name_plural || `${groupType.group_type}(s)`,
                 }
-            } else {
-                return {
-                    singular: 'unknown group',
-                    plural: 'unknown groups',
-                }
+            }
+            return {
+                singular: 'unknown group',
+                plural: 'unknown groups',
             }
         }
         return deferToUserWording
diff --git a/frontend/src/models/propertyDefinitionsModel.ts b/frontend/src/models/propertyDefinitionsModel.ts
index 0bc2dfae5c11b..497a218b3eaaf 100644
--- a/frontend/src/models/propertyDefinitionsModel.ts
+++ b/frontend/src/models/propertyDefinitionsModel.ts
@@ -65,9 +65,8 @@ const getPropertyKey = (
 ): string => {
     if (type === PropertyDefinitionType.Group) {
         return `${type}/${groupTypeIndex}/${propertyName}`
-    } else {
-        return `${type}/${propertyName}`
     }
+    return `${type}/${propertyName}`
 }
 
 /** Schedules an immediate background task, that fetches property definitions after a 10ms debounce. Returns the property sync if already found. */
diff --git a/frontend/src/queries/QueryEditor/QueryEditor.tsx b/frontend/src/queries/QueryEditor/QueryEditor.tsx
index dc7b44eb22e12..88026a6c7262c 100644
--- a/frontend/src/queries/QueryEditor/QueryEditor.tsx
+++ b/frontend/src/queries/QueryEditor/QueryEditor.tsx
@@ -16,6 +16,7 @@ export interface QueryEditorProps {
     query: string
     setQuery?: (query: string) => void
     className?: string
+    aboveButton?: JSX.Element
     context?: QueryContext
 }
 
@@ -78,6 +79,7 @@ export function QueryEditor(props: QueryEditorProps): JSX.Element {
                         Error parsing JSON: {error}
                     ) : null}
+            {props.aboveButton}
                                 {time.toFixed(3)}s
                             ))}
-                {timings.length > 0 ? (
+                {elapsedTime !== undefined && timings.length > 0 ? (
+                        HTTP overhead
                         {(elapsedTime / 1000 - timings[timings.length - 1].t).toFixed(3)}s
diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts
index 744b1aa784aa1..56d11af1665b5 100644
--- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts
+++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts
@@ -76,9 +76,8 @@ const concurrencyController = new ConcurrencyController(Infinity)
 const queryEqual = (a: DataNode, b: DataNode): boolean => {
     if (isInsightQueryNode(a) && isInsightQueryNode(b)) {
         return compareInsightQuery(a, b, true)
-    } else {
-        return objectsEqual(a, b)
     }
+    return objectsEqual(a, b)
 }
 
 /** Tests wether a query is valid to prevent unnecessary requests. */
@@ -86,9 +85,8 @@ const queryValid = (q: DataNode): boolean => {
     if (isFunnelsQuery(q)) {
         // funnels require at least two steps
         return q.series.length >= 2
-    } else {
-        return true
     }
+    return true
 }
 
 export const dataNodeLogic = kea([
@@ -406,9 +404,8 @@ export const dataNodeLogic = kea([
                 if (firstTimestamp) {
                     const nextQuery: EventsQuery = { ...query, after: firstTimestamp }
                     return nextQuery
-                } else {
-                    return query
                 }
+                return query
             }
         }
     }
diff --git a/frontend/src/queries/nodes/DataTable/DataTable.tsx b/frontend/src/queries/nodes/DataTable/DataTable.tsx
index f178e451f8994..f0174514a1a47 100644
--- a/frontend/src/queries/nodes/DataTable/DataTable.tsx
+++ b/frontend/src/queries/nodes/DataTable/DataTable.tsx
@@ -510,6 +510,7 @@ export function DataTable({
                 responseError ? (
                     sourceFeatures.has(QueryFeature.displayResponseError) ? (
                     ) : (
-
+
                     )
                 ) : (
([
                 lastResult = result
             }
             return newResults
-        } else {
-            return results.map((result) => ({ result }))
         }
+        return results.map((result) => ({ result }))
     }
 }
diff --git a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts
index 3505204aed08a..6b3a38c01e93e 100644
--- a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts
+++ b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts
@@ -46,10 +46,9 @@ export const cleanGlobalProperties = (
             values: [properties],
         }
         return cleanPropertyGroupFilter(properties)
-    } else {
-        // property group filter
-        return cleanPropertyGroupFilter(properties)
     }
+    // property group filter
+    return cleanPropertyGroupFilter(properties)
 }
 
 /** Cleans properties of entities i.e. event and action nodes. These are a simple list of property filters.
 */
@@ -75,9 +74,8 @@ export const cleanEntityProperties = (
     ) {
         // property group filter value
         return properties.values.map(cleanProperty)
-    } else {
-        throw new Error('Unexpected format of entity properties.')
     }
+    throw new Error('Unexpected format of entity properties.')
 }
 
 const cleanPropertyGroupFilter = (properties: Record): PropertyGroupFilter => {
@@ -98,10 +96,9 @@ const cleanPropertyGroupFilterValue = (
         // property group filter value
         property['values'] = cleanPropertyGroupFilterValues(property['values'] as PropertyGroupFilterValue[])
         return property
-    } else {
-        // property filter
-        return cleanProperty(property)
     }
+    // property filter
+    return cleanProperty(property)
 }
 
 const cleanProperty = (property: Record): AnyPropertyFilter => {
diff --git a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts
index 46565f648bd34..ee5d2af61f5ce 100644
--- a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts
+++ b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts
@@ -143,13 +143,12 @@ export const legacyEntityToNode = (
             id: entity.id,
             ...shared,
         }) as any
-    } else {
-        return objectCleanWithEmpty({
-            kind: NodeKind.EventsNode,
-            event: entity.id,
-            ...shared,
-        }) as any
     }
+    return objectCleanWithEmpty({
+        kind: NodeKind.EventsNode,
+        event: entity.id,
+        ...shared,
+    }) as any
 }
 
 export const exlusionEntityToNode = (
@@ -219,17 +218,15 @@ const processBool = (value: string | boolean | null | undefined): boolean | unde
         return value
     } else if (typeof value == 'string') {
         return strToBool(value)
-    } else {
-        return false
     }
+    return false
 }
 
 const strToBool = (value: any): boolean | undefined => {
     if (value == null) {
         return undefined
-    } else {
-        return ['y', 'yes', 't', 'true', 'on', '1'].includes(String(value).toLowerCase())
     }
+    return ['y', 'yes', 't', 'true', 'on', '1'].includes(String(value).toLowerCase())
 }
 
 export const filtersToQueryNode = (filters: Partial): InsightQueryNode => {
diff --git a/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx b/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx
index dd3cfbb43fb90..52bab0544ea96 100644
--- a/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx
+++ b/frontend/src/queries/nodes/InsightViz/InsightVizDisplay.tsx
@@ -75,6 +75,7 @@ export function InsightVizDisplay({
         erroredQueryId,
         timedOutQueryId,
         vizSpecificOptions,
+        query,
     } = useValues(insightVizDataLogic(insightProps))
     const { exportContext } = useValues(insightDataLogic(insightProps))
 
@@ -92,7 +93,7 @@ export function InsightVizDisplay({
     }
 
     if (validationError) {
-        return
+        return
     }
 
     // Insight specific empty states - note order is important here
@@ -107,7 +108,7 @@ export function InsightVizDisplay({
 
     // Insight agnostic empty states
     if (erroredQueryId) {
-        return
+        return
     }
     if (timedOutQueryId) {
         return (
diff --git a/frontend/src/queries/nodes/InsightViz/utils.ts b/frontend/src/queries/nodes/InsightViz/utils.ts
index 6bb24f64cdad2..60d7aa143c0d2 100644
--- a/frontend/src/queries/nodes/InsightViz/utils.ts
+++ b/frontend/src/queries/nodes/InsightViz/utils.ts
@@ -43,9 +43,8 @@ export const getDisplay = (query: InsightQueryNode): ChartDisplayType | undefine
         return query.stickinessFilter?.display
     } else if (isTrendsQuery(query)) {
         return query.trendsFilter?.display
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getCompare = (query: InsightQueryNode): boolean | undefined => {
@@ -53,41 +52,36 @@ export const getCompare = (query: InsightQueryNode): boolean | undefined => {
         return query.stickinessFilter?.compare
     } else if (isTrendsQuery(query)) {
         return query.trendsFilter?.compare
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getFormula = (query: InsightQueryNode): string | undefined => {
     if (isTrendsQuery(query)) {
         return query.trendsFilter?.formula
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getSeries = (query: InsightQueryNode): (EventsNode | ActionsNode | DataWarehouseNode)[] | undefined => {
     if (isInsightQueryWithSeries(query)) {
         return query.series
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getInterval = (query: InsightQueryNode): IntervalType | undefined => {
     if (isInsightQueryWithSeries(query)) {
         return query.interval
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getBreakdown = (query: InsightQueryNode): BreakdownFilter | undefined => {
     if (isInsightQueryWithBreakdown(query)) {
         return query.breakdownFilter
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getShowLegend = (query: InsightQueryNode): boolean | undefined => {
@@ -95,9 +89,8 @@ export const getShowLegend = (query: InsightQueryNode): boolean | undefined => {
         return query.stickinessFilter?.showLegend
     } else if (isTrendsQuery(query)) {
         return query.trendsFilter?.showLegend
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getShowValueOnSeries = (query: InsightQueryNode): boolean | undefined => {
@@ -107,25 +100,22 @@ export const getShowValueOnSeries = (query: InsightQueryNode): boolean | undefin
         return query.stickinessFilter?.showValuesOnSeries
     } else if (isTrendsQuery(query)) {
         return query.trendsFilter?.showValuesOnSeries
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getShowLabelsOnSeries = (query: InsightQueryNode): boolean | undefined => {
     if (isTrendsQuery(query)) {
         return query.trendsFilter?.showLabelsOnSeries
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getShowPercentStackView = (query: InsightQueryNode): boolean | undefined => {
     if (isTrendsQuery(query)) {
         return query.trendsFilter?.showPercentStackView
-    } else {
-        return undefined
     }
+    return undefined
 }
 
 export const getCachedResults = (
diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts
index b5e74046cffca..e6f9cf712271d 100644
--- a/frontend/src/queries/query.ts
+++ b/frontend/src/queries/query.ts
@@ -89,9 +89,8 @@ export function queryExportContext(
                 session_end: query.source.sessionEnd ?? now().toISOString(),
             },
         }
-    } else {
-        return { source: query }
     }
+    return { source: query }
 }
 
 const SYNC_ONLY_QUERY_KINDS = [
@@ -416,9 +415,8 @@ export function legacyInsightQueryURL({ filters, currentTeamId, refresh }: Legac
         return `api/projects/${currentTeamId}/insights/funnel/${refresh ? '?refresh=true' : ''}`
     } else if (isPathsFilter(filters)) {
         return `api/projects/${currentTeamId}/insights/path${refresh ? '?refresh=true' : ''}`
-    } else {
-        throw new Error(`Unsupported insight type: ${filters.insight}`)
     }
+    throw new Error(`Unsupported insight type: ${filters.insight}`)
 }
 
 export function legacyInsightQueryData({
@@ -469,9 +467,8 @@ export function legacyInsightQueryExportContext({
             method: 'POST',
             body: filters,
         }
-    } else {
-        throw new Error(`Unsupported insight type: ${filters.insight}`)
     }
+    throw new Error(`Unsupported insight type: ${filters.insight}`)
 }
 
 export async function legacyInsightQuery({
diff --git a/frontend/src/scenes/apps/appMetricsSceneLogic.ts b/frontend/src/scenes/apps/appMetricsSceneLogic.ts
index afcfec42c346e..f878f824da7b7 100644
--- a/frontend/src/scenes/apps/appMetricsSceneLogic.ts
+++ b/frontend/src/scenes/apps/appMetricsSceneLogic.ts
@@ -213,9 +213,8 @@ export const appMetricsSceneLogic = kea([
                 return '-24h'
             } else if (daysSinceInstall <= 7) {
                 return '-7d'
-            } else {
-                return DEFAULT_DATE_FROM
             }
+            return DEFAULT_DATE_FROM
         },
     ],
 
@@ -258,9 +257,8 @@ export const appMetricsSceneLogic = kea([
                         capabilities.scheduled_tasks?.includes(method)
                 )
             )
-        } else {
-            return !!capabilities.methods?.includes(tab)
         }
+        return !!capabilities.methods?.includes(tab)
     },
 ],
 
diff --git a/frontend/src/scenes/apps/frontendAppRequire.ts b/frontend/src/scenes/apps/frontendAppRequire.ts
index 2714d4e0c20c0..2bbc74a38bb65 100644
--- a/frontend/src/scenes/apps/frontendAppRequire.ts
+++ b/frontend/src/scenes/apps/frontendAppRequire.ts
@@ -24,7 +24,6 @@ const packages = {
 export function frontendAppRequire(module: string): any {
     if (module in packages) {
         return packages[module]
-    } else {
-        throw new Error(`Cannot import from unknown module "${module}"`)
     }
+    throw new Error(`Cannot import from unknown module "${module}"`)
 }
diff --git a/frontend/src/scenes/billing/billing-utils.ts b/frontend/src/scenes/billing/billing-utils.ts
index bf690d0174a0c..1ad1a6140b0a1 100644
--- a/frontend/src/scenes/billing/billing-utils.ts
+++ b/frontend/src/scenes/billing/billing-utils.ts
@@ -9,9 +9,8 @@ export const summarizeUsage = (usage: number | null): string => {
         return `${usage}`
     } else if (Math.round(usage / 1000) < 1000) {
         return `${Math.round(usage / 1000)} thousand`
-    } else {
-        return `${Math.round(usage / 1000000)} million`
     }
+    return `${Math.round(usage / 1000000)} million`
 }
 
 export const projectUsage = (
diff --git a/frontend/src/scenes/dashboard/Dashboard.tsx b/frontend/src/scenes/dashboard/Dashboard.tsx
index 26f869a6cbe6c..8a4ef3c846b8c 100644
--- a/frontend/src/scenes/dashboard/Dashboard.tsx
+++ b/frontend/src/scenes/dashboard/Dashboard.tsx
@@ -53,7 +53,7 @@ function DashboardScene(): JSX.Element {
         itemsLoading,
         filters: dashboardFilters,
         dashboardMode,
-        receivedErrorsFromAPI,
+        dashboardFailedToLoad,
     } = useValues(dashboardLogic)
     const { setDashboardMode, setDates, reportDashboardViewed, setProperties, abortAnyRunningQuery } =
         useActions(dashboardLogic)
@@ -96,7 +96,7 @@ function DashboardScene(): JSX.Element {
         [setDashboardMode, dashboardMode, placement]
     )
 
-    if (!dashboard && !itemsLoading && !receivedErrorsFromAPI) {
+    if (!dashboard && !itemsLoading && !dashboardFailedToLoad) {
         return
     }
 
@@ -104,7 +104,7 @@ function DashboardScene(): JSX.Element {
             {placement == DashboardPlacement.Dashboard && }
-            {receivedErrorsFromAPI ? (
+            {dashboardFailedToLoad ? (
             ) : !tiles || tiles.length === 0 ? (
diff --git a/frontend/src/scenes/dashboard/Dashboards.stories.tsx b/frontend/src/scenes/dashboard/Dashboards.stories.tsx
index e77eca82b315c..1e25ad376ba95 100644
--- a/frontend/src/scenes/dashboard/Dashboards.stories.tsx
+++ b/frontend/src/scenes/dashboard/Dashboards.stories.tsx
@@ -21,6 +21,7 @@ const meta: Meta = {
                 '/api/projects/:team_id/dashboards/': require('./__mocks__/dashboards.json'),
                 '/api/projects/:team_id/dashboards/1/': require('./__mocks__/dashboard1.json'),
                 '/api/projects/:team_id/dashboards/1/collaborators/': [],
+                '/api/projects/:team_id/dashboards/2/': [500, { detail: 'Server error' }],
                 '/api/projects/:team_id/dashboard_templates/': require('./__mocks__/dashboard_templates.json'),
                 '/api/projects/:team_id/dashboard_templates/json_schema/': require('./__mocks__/dashboard_template_schema.json'),
                 '/api/projects/:team_id/dashboards/:dash_id/sharing/': {
@@ -129,3 +130,17 @@ export const Edit = (): JSX.Element => {
     }, [])
     return
 }
+
+export const NotFound = (): JSX.Element => {
+    useEffect(() => {
+        router.actions.push(urls.dashboard(1000))
+    }, [])
+    return
+}
+
+export const Erroring = (): JSX.Element => {
+    useEffect(() => {
+        router.actions.push(urls.dashboard(2))
+    })
+    return
+}
diff --git a/frontend/src/scenes/dashboard/dashboardLogic.queryCancellation.test.ts b/frontend/src/scenes/dashboard/dashboardLogic.queryCancellation.test.ts
index ef026526841fa..371562295aaba 100644
--- a/frontend/src/scenes/dashboard/dashboardLogic.queryCancellation.test.ts
+++ b/frontend/src/scenes/dashboard/dashboardLogic.queryCancellation.test.ts
@@ -4,6 +4,7 @@ import { MOCK_TEAM_ID } from 'lib/api.mock'
 import { now } from 'lib/dayjs'
 import { eventUsageLogic } from 'lib/utils/eventUsageLogic'
 import { dashboardLogic } from 'scenes/dashboard/dashboardLogic'
+// FIXME: Importing a .test.ts file causes all tests within it to be run again as part of THIS file
 import { boxToString, dashboardResult, insightOnDashboard, tileFromInsight } from 'scenes/dashboard/dashboardLogic.test'
 
 import { useMocks } from '~/mocks/jest'
diff --git a/frontend/src/scenes/dashboard/dashboardLogic.test.ts b/frontend/src/scenes/dashboard/dashboardLogic.test.ts
index cc2b95aa10379..0adeec47e3ffd 100644
--- a/frontend/src/scenes/dashboard/dashboardLogic.test.ts
+++ b/frontend/src/scenes/dashboard/dashboardLogic.test.ts
@@ -79,9 +79,8 @@ export const boxToString = (param: string | readonly string[]): string => {
     //path params from msw can be a string or an array
     if (typeof param === 'string') {
         return param
-    } else {
-        throw new Error("this shouldn't be an array")
     }
+    throw new Error("this shouldn't be an array")
 }
 
 const insight800 = (): InsightModel => ({
@@ -442,7 +441,7 @@ describe('dashboardLogic', () => {
 
         it('allows consumers to respond', async () => {
             await expectLogic(logic).toFinishAllListeners().toMatchValues({
-                receivedErrorsFromAPI: true,
+                dashboardFailedToLoad: true,
             })
         })
     })
@@ -479,20 +478,20 @@ describe('dashboardLogic', () => {
 
         it('fetches dashboard items on mount', async () => {
             await expectLogic(logic)
-                .toDispatchActions(['loadDashboardItems'])
+                .toDispatchActions(['loadDashboard'])
                 .toMatchValues({
                     dashboard: null,
                     tiles: [],
                     insightTiles: [],
                     textTiles: [],
                 })
-                .toDispatchActions(['loadDashboardItemsSuccess'])
+                .toDispatchActions(['loadDashboardSuccess'])
                 .toMatchValues({
                     dashboard: expect.objectContaining(dashboards['5']),
                    tiles: truth((tiles) => tiles.length === 3),
                    insightTiles: truth((insightTiles) => insightTiles.length === 2),
                    textTiles: truth((textTiles) => textTiles.length === 1),
-                    receivedErrorsFromAPI: false,
+                    dashboardFailedToLoad: false,
                 })
         })
     })
@@ -676,7 +675,7 @@ describe('dashboardLogic', () => {
                     } as InsightModel)
             })
                 .toFinishAllListeners()
-                .toDispatchActions(['loadDashboardItems'])
+                .toDispatchActions(['loadDashboard'])
         })
     })
 
@@ -689,7 +688,7 @@ describe('dashboardLogic', () => {
         it('fetches dashboard items on mount', async () => {
             await expectLogic(logic)
                 .toFinishAllListeners()
-                .toDispatchActions(['loadDashboardItemsSuccess'])
+                .toDispatchActions(['loadDashboardSuccess'])
                 .toMatchValues({
                     dashboard: truth(
                         ({ tiles }) => tiles.filter((i: DashboardTile) => i.insight?.result === null).length === 2
@@ -748,7 +747,7 @@ describe('dashboardLogic', () => {
             logic = dashboardLogic({ id: 9 })
             logic.mount()
             await expectLogic(logic)
-                .toDispatchActions(['loadDashboardItemsSuccess'])
+                .toDispatchActions(['loadDashboardSuccess'])
                 .toNotHaveDispatchedActions(['refreshAllDashboardItems'])
                 .toFinishListeners()
         })
@@ -819,6 +818,6 @@ describe('dashboardLogic', () => {
             }))
         ).toEqual([])
         // Ensuring we do go back to the API for 800, which was added to dashboard 5
-        expectLogic(fiveLogic).toDispatchActions(['loadDashboardItems']).toFinishAllListeners()
+        expectLogic(fiveLogic).toDispatchActions(['loadDashboard']).toFinishAllListeners()
     })
 })
diff --git a/frontend/src/scenes/dashboard/dashboardLogic.tsx b/frontend/src/scenes/dashboard/dashboardLogic.tsx
index c21233b7b9a52..24c9e3dd36141 100644
--- a/frontend/src/scenes/dashboard/dashboardLogic.tsx
+++ b/frontend/src/scenes/dashboard/dashboardLogic.tsx
@@ -142,7 +142,7 @@ export const dashboardLogic = kea([
     }),
 
     actions({
-        loadDashboardItems: (payload: { refresh?: boolean; action: string }) => payload,
+        loadDashboard: (payload: { refresh?: boolean; action: string }) => payload,
         triggerDashboardUpdate: (payload) => ({ payload }),
         /** The current state in which the dashboard is being viewed, see DashboardMode. */
         setDashboardMode: (mode: DashboardMode | null, source: DashboardEventSource | null) => ({ mode, source }),
@@ -201,12 +201,11 @@ export const dashboardLogic = kea([
         dashboard: [
             null as DashboardType | null,
             {
-                loadDashboardItems: async ({ refresh, action }) => {
+                loadDashboard: async ({ refresh, action }) => {
                     const dashboardQueryId = uuid()
                     actions.loadingDashboardItemsStarted(action, dashboardQueryId)
 
                     try {
-                        // :TODO: Send dashboardQueryId forward as well if refreshing
                         const apiUrl = values.apiUrl(refresh)
                         const dashboardResponse: Response = await api.getResponse(apiUrl)
                         const dashboard: DashboardType = await getJSONOrNull(dashboardResponse)
@@ -298,12 +297,11 @@ export const dashboardLogic = kea([
         ],
     })),
     reducers(({ props }) => ({
-        receivedErrorsFromAPI: [
+        dashboardFailedToLoad: [
            false,
            {
-                loadDashboardItemsSuccess: () => false,
-                loadDashboardItemsFailure: () => true,
-                abortQuery: () => false,
+                loadDashboardSuccess: () => false,
+                loadDashboardFailure: () => true,
            },
        ],
        filters: [
@@ -318,12 +316,15 @@ export const dashboardLogic = kea([
                 ...state,
                 properties: properties || null,
             }),
-            loadDashboardItemsSuccess: (state, { dashboard }) => ({
-                ...state,
-                date_from: dashboard?.filters.date_from || null,
-                date_to: dashboard?.filters.date_to || null,
-                properties: dashboard?.filters.properties || [],
-            }),
+            loadDashboardSuccess: (state, { dashboard }) =>
+                dashboard
+                    ? {
+                          ...state,
+                          date_from: dashboard?.filters.date_from || null,
+                          date_to: dashboard?.filters.date_to || null,
+                          properties: dashboard?.filters.properties || [],
+                      }
+                    : state,
         },
     ],
     dashboard: [
@@ -433,7 +434,7 @@ export const dashboardLogic = kea([
             },
         },
     ],
-        loadTimer: [null as Date | null, { loadDashboardItems: () => new Date() }],
+        loadTimer: [null as Date | null, { loadDashboard: () => new Date() }],
         dashboardLoadTimerData: [
             { dashboardQueryId: '', action: '', startTime: 0, responseBytes: 0 },
             {
@@ -513,7 +514,7 @@ export const dashboardLogic = kea([
         shouldReportOnAPILoad: [
             /* Whether to report viewed/analyzed events after the API is loaded (and this logic is mounted).
             We need this because the DashboardView component might be mounted (and subsequent `useEffect`) before the API request
-            to `loadDashboardItems` is completed (e.g. if you open PH directly to a dashboard)
+            to `loadDashboard` is completed (e.g. if you open PH directly to a dashboard)
             */
             false,
             {
@@ -686,8 +687,8 @@ export const dashboardLogic = kea([
             },
         ],
         breadcrumbs: [
-            (s) => [s.dashboard],
-            (dashboard): Breadcrumb[] => [
+            (s) => [s.dashboard, s.dashboardLoading, s.dashboardFailedToLoad],
+            (dashboard, dashboardLoading, dashboardFailedToLoad): Breadcrumb[] => [
                 {
                     key: Scene.Dashboards,
                     name: 'Dashboards',
@@ -695,7 +696,13 @@ export const dashboardLogic = kea([
                 },
                 {
                     key: [Scene.Dashboard, dashboard?.id || 'new'],
-                    name: dashboard?.id ? dashboard.name || 'Unnamed' : null,
+                    name: dashboard?.id
+                        ? dashboard.name
+                        : dashboardFailedToLoad
+                        ? 'Could not load'
+                        : !dashboardLoading
+                        ? 'Not found'
+                        : null,
                     onRename: async (name) => {
                         if (dashboard) {
                             await dashboardsModel.asyncActions.updateDashboard({
@@ -732,10 +739,10 @@ export const dashboardLogic = kea([
         if (props.id) {
             if (props.dashboard) {
                 // If we already have dashboard data, use it. Should the data turn out to be stale,
-                // the loadDashboardItemsSuccess listener will initiate a refresh
-                actions.loadDashboardItemsSuccess(props.dashboard)
+                // the loadDashboardSuccess listener will initiate a refresh
+                actions.loadDashboardSuccess(props.dashboard)
             } else {
-                actions.loadDashboardItems({
+                actions.loadDashboard({
                     refresh: false,
                     action: 'initial_load',
                 })
@@ -772,17 +779,17 @@ export const dashboardLogic = kea([
         setRefreshError: sharedListeners.reportRefreshTiming,
         setRefreshStatuses: sharedListeners.reportRefreshTiming,
         setRefreshStatus: sharedListeners.reportRefreshTiming,
-        loadDashboardItemsFailure: sharedListeners.reportLoadTiming,
+        loadDashboardFailure: sharedListeners.reportLoadTiming,
         [insightsModel.actionTypes.duplicateInsightSuccess]: () => {
             // TODO this is a bit hacky, but we need to reload the dashboard to get the new insight
             // TODO when duplicated from a dashboard we should carry the context so only one logic needs to reload
             // TODO or we should duplicate the tile (and implicitly the insight)
-            actions.loadDashboardItems({ action: 'update' })
+            actions.loadDashboard({ action: 'update' })
         },
         [dashboardsModel.actionTypes.tileAddedToDashboard]: ({ dashboardId }) => {
             // when adding an insight to a dashboard, we need to reload the dashboard to get the new insight
             if (dashboardId === props.id) {
-                actions.loadDashboardItems({ action: 'update' })
+                actions.loadDashboard({ action: 'update' })
             }
         },
         [dashboardsModel.actionTypes.updateDashboardInsight]: ({ insight, extraDashboardIds }) => {
@@ -796,7 +803,7 @@ export const dashboardLogic = kea([
 
             if (tileIndex === -1) {
                 // this is a new tile created from an insight context we need to reload the dashboard
-                actions.loadDashboardItems({ action: 'update' })
+                actions.loadDashboard({ action: 'update' })
             }
         },
         updateLayouts: () => {
@@ -1026,10 +1033,14 @@ export const dashboardLogic = kea([
                 }, values.autoRefresh.interval * 1000)
             }
         },
-        loadDashboardItemsSuccess: function (...args) {
+        loadDashboardSuccess: function (...args) {
             void sharedListeners.reportLoadTiming(...args)
 
-            const dashboard = values.dashboard as DashboardType
+            if (!values.dashboard) {
+                return // We hit a 404
+            }
+
+            const dashboard = values.dashboard
             const { action, dashboardQueryId, startTime, responseBytes } = values.dashboardLoadTimerData
 
             const lastRefresh = sortDates(dashboard.tiles.map((tile) => tile.last_refresh))
diff --git a/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts b/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts
index 60a039058f4dc..583f1ae7c9a25 100644
--- a/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts
+++ b/frontend/src/scenes/dashboard/dashboardTemplateEditorLogic.ts
@@ -44,9 +44,8 @@ export const dashboardTemplateEditorLogic = kea
             {
                 if (!markers || markers.length === 0) {
                     return []
-                } else {
-                    return markers.map((marker: MonacoMarker) => marker.message)
                 }
+                return markers.map((marker: MonacoMarker) => marker.message)
             },
             clear: () => [],
         },
diff --git a/frontend/src/scenes/dashboard/tileLayouts.ts b/frontend/src/scenes/dashboard/tileLayouts.ts
index 1b1e89a67886b..c0e7e58cdd2b9 100644
--- a/frontend/src/scenes/dashboard/tileLayouts.ts
+++ b/frontend/src/scenes/dashboard/tileLayouts.ts
@@ -15,9 +15,8 @@ export const sortTilesByLayout = (tiles: Array, col: DashboardLay
             return -1
         } else if (ay > by || (ay == by && ax > bx)) {
             return 1
-        } else {
-            return 0
         }
+        return 0
     })
 }
 
 export const calculateLayouts = (tiles: DashboardTile[]): Partial> => {
diff --git a/frontend/src/scenes/data-warehouse/new/DataWarehouseTableForm.tsx b/frontend/src/scenes/data-warehouse/new/DataWarehouseTableForm.tsx
index a6a3a2a40bcfc..faf94fc15df1b 100644
--- a/frontend/src/scenes/data-warehouse/new/DataWarehouseTableForm.tsx
+++ b/frontend/src/scenes/data-warehouse/new/DataWarehouseTableForm.tsx
@@ -6,13 +6,18 @@ import { dataWarehouseTableLogic } from './dataWarehouseTableLogic'
 export function DatawarehouseTableForm(): JSX.Element {
     return (
-
+
        {
         if (currentStep === 1) {
@@ -50,7 +52,7 @@ export function NewSourceWizard(): JSX.Element {
                 )}
             )
-    }, [currentStep, isLoading, canGoNext, canGoBack, nextButtonText, showSkipButton])
+    }, [currentStep, isLoading, manualLinkIsLoading, canGoNext, canGoBack, nextButtonText, showSkipButton])
 
     return (
         <>
diff --git a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx
index 7ff85f473dd8f..07646fbd77d22 100644
--- a/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx
+++ b/frontend/src/scenes/data-warehouse/new/sourceWizardLogic.tsx
@@ -249,6 +249,7 @@ export const sourceWizardLogic = kea([
         ],
     }),
     selectors({
+        isManualLinkingSelected: [(s) => [s.selectedConnector], (selectedConnector): boolean => !selectedConnector],
         canGoBack: [
             (s) => [s.currentStep],
             (currentStep): boolean => {
@@ -274,8 +275,12 @@ export const sourceWizardLogic = kea([
             },
         ],
         nextButtonText: [
-            (s) => [s.currentStep],
-            (currentStep): string => {
+            (s) => [s.currentStep, s.isManualLinkingSelected],
+            (currentStep, isManualLinkingSelected): string => {
+                if (currentStep === 2 && isManualLinkingSelected) {
+                    return 'Link'
+                }
+
                 if (currentStep === 3) {
                     return 'Import'
                 }
diff --git a/frontend/src/scenes/debug/DebugScene.tsx b/frontend/src/scenes/debug/DebugScene.tsx
index fffbe0dc4e521..91d189f97ead4 100644
--- a/frontend/src/scenes/debug/DebugScene.tsx
+++ b/frontend/src/scenes/debug/DebugScene.tsx
@@ -1,20 +1,17 @@
 import { useActions, useValues } from 'kea'
-import { CodeEditor } from 'lib/components/CodeEditors'
 import { PageHeader } from 'lib/components/PageHeader'
 import { LemonButton } from 'lib/lemon-ui/LemonButton'
-import { LemonDivider } from 'lib/lemon-ui/LemonDivider'
 import { LemonLabel } from 'lib/lemon-ui/LemonLabel/LemonLabel'
 import { LemonSelect } from 'lib/lemon-ui/LemonSelect'
 import { HogQLDebug } from 'scenes/debug/HogQLDebug'
 import { Modifiers } from 'scenes/debug/Modifiers'
+import { QueryTabs } from 'scenes/debug/QueryTabs'
 import { SceneExport } from 'scenes/sceneTypes'
 
 import { stringifiedExamples } from '~/queries/examples'
 import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic'
-import { Query } from '~/queries/Query/Query'
 import { QueryEditor } from '~/queries/QueryEditor/QueryEditor'
 import { DataNode, HogQLQuery, Node } from '~/queries/schema'
-import { isDataTableNode, isInsightVizNode } from '~/queries/utils'
 
 import { debugSceneLogic } from './debugSceneLogic'
 
@@ -48,28 +45,27 @@ function QueryDebug({ query, setQuery, queryKey }: QueryDebugProps): JSX.Element
         />
     ) : (
    - - setQuery(JSON.stringify({ ...parsed, source: query }, null, 2)) - : (query) => setQuery(JSON.stringify(query, null, 2)) - } - query={parsed?.source ?? parsed} - response={response} - /> - - setQuery(JSON.stringify(query, null, 2))} + setQuery={setQuery} + aboveButton={ + setQuery(JSON.stringify({ ...parsed, source: query }, null, 2)) + : (query) => setQuery(JSON.stringify(query, null, 2)) + } + query={parsed?.source ?? parsed} + response={response} + /> + } /> - {response && parsed && (isDataTableNode(parsed as Node) || isInsightVizNode(parsed as Node)) ? ( - setQuery(JSON.stringify(query, null, 2))} /> ) : null}
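Reviewer note: the debug scene keeps the query in state as a JSON string, so child editors that emit structured nodes round-trip through JSON.parse/JSON.stringify, and wrapper nodes (e.g. one with a `source`) only have their inner query swapped. A minimal sketch of that round-trip, with hypothetical helper names (`parseQuery`, `replaceSource`) that are not part of this diff:

    // Hypothetical helpers sketching the string <-> node round-trip used in QueryDebug.
    // `QueryNode` stands in for the Node type from '~/queries/schema'.
    type QueryNode = { kind?: string; source?: QueryNode; [key: string]: unknown }

    function parseQuery(queryString: string): QueryNode | null {
        try {
            return JSON.parse(queryString) as QueryNode
        } catch {
            return null // the string may be mid-edit and temporarily invalid JSON
        }
    }

    function replaceSource(parsed: QueryNode, source: QueryNode): string {
        // Wrapper nodes keep their outer shape; only the inner `source` is swapped,
        // mirroring `setQuery(JSON.stringify({ ...parsed, source: query }, null, 2))` above.
        return JSON.stringify({ ...parsed, source }, null, 2)
    }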
    diff --git a/frontend/src/scenes/debug/HogQLDebug.tsx b/frontend/src/scenes/debug/HogQLDebug.tsx index d42f4af5d5664..dab62f85b68cb 100644 --- a/frontend/src/scenes/debug/HogQLDebug.tsx +++ b/frontend/src/scenes/debug/HogQLDebug.tsx @@ -1,157 +1,50 @@ import { BindLogic, useValues } from 'kea' -import { CodeEditor } from 'lib/components/CodeEditors' -import { CodeSnippet, Language } from 'lib/components/CodeSnippet' -import { LemonTable } from 'lib/lemon-ui/LemonTable' +import { LemonDivider } from 'lib/lemon-ui/LemonDivider' import { Modifiers } from 'scenes/debug/Modifiers' import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' import { DateRange } from '~/queries/nodes/DataNode/DateRange' -import { ElapsedTime, Timings } from '~/queries/nodes/DataNode/ElapsedTime' +import { ElapsedTime } from '~/queries/nodes/DataNode/ElapsedTime' import { Reload } from '~/queries/nodes/DataNode/Reload' import { EventPropertyFilters } from '~/queries/nodes/EventsNode/EventPropertyFilters' import { HogQLQueryEditor } from '~/queries/nodes/HogQLQuery/HogQLQueryEditor' import { DataNode, HogQLQuery, HogQLQueryResponse } from '~/queries/schema' +import { QueryTabs } from './QueryTabs' + interface HogQLDebugProps { queryKey: string query: HogQLQuery setQuery: (query: DataNode) => void } -function toLineColumn(hogql: string, position: number): { line: number; column: number } { - const lines = hogql.split('\n') - let line = 0 - let column = 0 - for (let i = 0; i < lines.length; i++) { - if (position < lines[i].length) { - line = i + 1 - column = position + 1 - break - } - position -= lines[i].length + 1 - } - return { line, column } -} - -function toLine(hogql: string, position: number): number { - return toLineColumn(hogql, position).line -} - -function toColumn(hogql: string, position: number): number { - return toLineColumn(hogql, position).column -} - export function HogQLDebug({ query, setQuery, queryKey }: HogQLDebugProps): JSX.Element { const dataNodeLogicProps: DataNodeLogicProps = { query, key: queryKey, dataNodeCollectionId: queryKey } - const { - dataLoading, - response: _response, - responseErrorObject, - elapsedTime, - } = useValues(dataNodeLogic(dataNodeLogicProps)) + const { dataLoading, response: _response } = useValues(dataNodeLogic(dataNodeLogicProps)) const response = _response as HogQLQueryResponse | null - const clickHouseTime = response?.timings?.find(({ k }) => k === './clickhouse_execute')?.t return (
    + +
    - {dataLoading ? ( <>

    Running query...

    - Time elapsed: + Time elapsed:  +
    ) : ( <> - {response?.error ? ( - <> -

    Error Running Query!

    - - {response.error} - - - ) : null} - {response?.hogql ? ( - <> -

    Executed HogQL

    - - {response.hogql} - - - ) : null} - {response?.clickhouse ? ( - <> -

    - Executed ClickHouse SQL - {clickHouseTime !== undefined - ? ` (${Math.floor(clickHouseTime * 1000) / 1000}s)` - : ''} -

    - - {response.clickhouse} - - - ) : null} - {response?.metadata ? ( - <> -

    Metadata

    - ({ - type: 'error', - line: toLine(response.hogql ?? '', error.start ?? 0), - column: toColumn(response.hogql ?? '', error.start ?? 0), - ...error, - })), - ...response.metadata.warnings.map((warn) => ({ - type: 'warning', - line: toLine(response.hogql ?? '', warn.start ?? 0), - column: toColumn(response.hogql ?? '', warn.start ?? 0), - ...warn, - })), - ...response.metadata.notices.map((notice) => ({ - type: 'notice', - line: toLine(response.hogql ?? '', notice.start ?? 0), - column: toColumn(response.hogql ?? '', notice.start ?? 0), - ...notice, - })), - ].sort((a, b) => (a.start ?? 0) - (b.start ?? 0))} - columns={[ - { title: 'Line', dataIndex: 'line', key: 'line', width: '40px' }, - { title: 'Column', dataIndex: 'column', key: 'column', width: '40px' }, - { title: 'Type', dataIndex: 'type', key: 'type', width: '80px' }, - { title: 'Message', dataIndex: 'message', key: 'message' }, - ]} - /> - - ) : null} - {response?.explain ? ( - <> -

    Explained ClickHouse SQL

    - {response.explain.join('\n')} - - ) : null} - {response?.timings && elapsedTime !== null ? ( - <> -

    Time spent

    - - - ) : null} -

    Raw response

    - + )}
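Reviewer note: the offset-to-position helpers deleted from HogQLDebug.tsx are not lost — QueryTabs.tsx below applies the same `toLine`/`toColumn` conversion when placing metadata errors and warnings (presumably now imported from a shared module). For reference, a self-contained copy of the removed conversion, logic unchanged, with comments added:

    // Converts a 0-based character offset into a 1-based line/column pair by
    // walking the string line by line; each '\n' consumes one extra character.
    function toLineColumn(hogql: string, position: number): { line: number; column: number } {
        const lines = hogql.split('\n')
        let line = 0
        let column = 0
        for (let i = 0; i < lines.length; i++) {
            if (position < lines[i].length) {
                line = i + 1
                column = position + 1
                break
            }
            position -= lines[i].length + 1
        }
        return { line, column }
    }

    // Example: offset 9 in 'select 1\nfrom events' is the 'f' starting line 2,
    // so toLineColumn('select 1\nfrom events', 9) returns { line: 2, column: 1 }.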
    diff --git a/frontend/src/scenes/debug/Modifiers.tsx b/frontend/src/scenes/debug/Modifiers.tsx index ea6551ef65f6c..4b1e2792c9171 100644 --- a/frontend/src/scenes/debug/Modifiers.tsx +++ b/frontend/src/scenes/debug/Modifiers.tsx @@ -13,10 +13,11 @@ export function Modifiers({ setQuery, query, response = null }: ModifiersProps): if (query === null) { return null } + const labelClassName = 'flex flex-col gap-1 items-start' return (
    - - POE: + +
    POE:
    - - Persons ArgMax: + +
    Persons ArgMax:
    - - In Cohort Via: + +
    In Cohort Via:
    - - Materialization Mode: + +
    Materialization Mode:
    | null + setQuery: (query: DataNode) => void +} +export function QueryTabs({ query, queryKey, setQuery, response }: QueryTabsProps): JSX.Element { + const [tab, setTab] = useState(null) + const clickHouseTime = (response?.timings as QueryTiming[])?.find(({ k }) => k === './clickhouse_execute')?.t ?? 0 + const explainTime = (response?.timings as QueryTiming[])?.find(({ k }) => k === './explain')?.t ?? 0 + const totalTime = (response?.timings as QueryTiming[])?.find(({ k }) => k === '.')?.t ?? 0 + const hogQLTime = totalTime - explainTime - clickHouseTime + const tabs: LemonTabsProps['tabs'] = query + ? [ + response?.error && { + key: 'error', + label: 'Error', + content: ( + <> +

    Error Running Query!

    + + {response.error} + + + ), + }, + isInsightVizNode(query) && { + key: 'viz', + label: 'Visualization', + content: setQuery(query)} />, + }, + isInsightQueryNode(query) && { + key: 'insight', + label: 'Insight', + content: ( + setQuery(query)} + /> + ), + }, + isDataTableNode(query) && { + key: 'table', + label: 'Data Table', + content: setQuery(query)} />, + }, + + (response?.result || response?.results) && { + key: 'result', + label: 'Result JSON', + content: ( + + ), + }, + response?.hogql && { + key: 'hogql', + label: ( + <> + HogQL + {hogQLTime && {Math.floor(hogQLTime * 10) / 10}s} + + ), + content: ( + + ), + }, + response?.clickhouse && { + key: 'clickhouse', + label: ( + <> + Clickhouse + {clickHouseTime && ( + {Math.floor(clickHouseTime * 10) / 10}s + )} + + ), + content: ( + + ), + }, + response?.explain && { + key: 'explain', + label: 'Explain', + content: {response.explain.join('\n')}, + }, + response?.timings && { + key: 'timings', + label: 'Timings', + content: , + }, + response && { + key: 'response', + label: 'Full response', + content: ( + + ), + }, + response?.metadata && { + key: 'metadata', + label: 'Metadata', + content: ( + ({ + type: 'error', + line: toLine(response.hogql ?? '', error.start ?? 0), + column: toColumn(response.hogql ?? '', error.start ?? 0), + ...error, + })), + ...(response.metadata as HogQLMetadataResponse).warnings.map((warn) => ({ + type: 'warning', + line: toLine(response.hogql ?? '', warn.start ?? 0), + column: toColumn(response.hogql ?? '', warn.start ?? 0), + ...warn, + })), + ...(response.metadata as HogQLMetadataResponse).notices.map((notice) => ({ + type: 'notice', + line: toLine(response.hogql ?? '', notice.start ?? 0), + column: toColumn(response.hogql ?? '', notice.start ?? 0), + ...notice, + })), + ].sort((a, b) => (a.start ?? 0) - (b.start ?? 0))} + columns={[ + { title: 'Line', dataIndex: 'line', key: 'line', width: '40px' }, + { title: 'Column', dataIndex: 'column', key: 'column', width: '40px' }, + { title: 'Type', dataIndex: 'type', key: 'type', width: '80px' }, + { title: 'Message', dataIndex: 'message', key: 'message' }, + ]} + /> + ), + }, + ] + .filter(Boolean) + .map((tab) => ({ ...tab, content: {tab.content} })) + : [] + + return ( + + t && t.key === tab) ? 
tab : (tabs[0] && tabs[0].key) || 'response'} + onChange={(t) => setTab(t)} + tabs={tabs} + /> + + ) +} diff --git a/frontend/src/scenes/experiments/utils.ts b/frontend/src/scenes/experiments/utils.ts index fe25dc456323a..9d6a1fdc88dcc 100644 --- a/frontend/src/scenes/experiments/utils.ts +++ b/frontend/src/scenes/experiments/utils.ts @@ -83,7 +83,6 @@ export function getMinimumDetectableEffect( return 100 } else if (baselineCount <= 1000) { return 20 - } else { - return 5 } + return 5 } diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index c6affd75911ee..f202471bbb5ad 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -618,13 +618,12 @@ function UsageTab({ featureFlag }: { id: string; featureFlag: FeatureFlagType }) const { generateUsageDashboard, enrichUsageDashboard } = useActions(featureFlagLogic) const { featureFlagLoading } = useValues(featureFlagLogic) let dashboard: DashboardType | null = null - let connectedDashboardExists = false if (dashboardId) { + // FIXME: Refactor out into , as React hooks under conditional branches are no good const dashboardLogicValues = useValues( dashboardLogic({ id: dashboardId, placement: DashboardPlacement.FeatureFlag }) ) dashboard = dashboardLogicValues.dashboard - connectedDashboardExists = !dashboardLogicValues.receivedErrorsFromAPI } const { closeEnrichAnalyticsNotice } = useActions(featureFlagsLogic) @@ -632,7 +631,6 @@ function UsageTab({ featureFlag }: { id: string; featureFlag: FeatureFlagType }) useEffect(() => { if ( - connectedDashboardExists && dashboard && hasEnrichedAnalytics && !(dashboard.tiles?.find((tile) => (tile.insight?.name?.indexOf('Feature Viewed') ?? -1) > -1) !== undefined) @@ -652,7 +650,7 @@ function UsageTab({ featureFlag }: { id: string; featureFlag: FeatureFlagType }) return (
    - {connectedDashboardExists ? ( + {dashboard ? ( <> {!hasEnrichedAnalytics && !enrichAnalyticsNoticeAcknowledged && ( closeEnrichAnalyticsNotice()}> diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 32c029e760e43..291656e9f4bc9 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -870,9 +870,8 @@ export const featureFlagLogic = kea([ }, ], } - } else { - return defaultEntityFilterOnFlag } + return defaultEntityFilterOnFlag }, ], hasEarlyAccessFeatures: [ diff --git a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts index 6773878fd071b..709c08f803f41 100644 --- a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts @@ -67,9 +67,8 @@ export const featureFlagsLogic = kea([ updateFlag: (state, { flag }) => { if (state.find(({ id }) => id === flag.id)) { return state.map((stateFlag) => (stateFlag.id === flag.id ? flag : stateFlag)) - } else { - return [flag, ...state] } + return [flag, ...state] }, deleteFlag: (state, { id }) => state.filter((flag) => flag.id !== id), }, diff --git a/frontend/src/scenes/funnels/funnelCorrelationLogic.ts b/frontend/src/scenes/funnels/funnelCorrelationLogic.ts index d76e569d1f2d6..0f8441a5733ea 100644 --- a/frontend/src/scenes/funnels/funnelCorrelationLogic.ts +++ b/frontend/src/scenes/funnels/funnelCorrelationLogic.ts @@ -64,21 +64,20 @@ export const funnelCorrelationLogic = kea([ result_type: FunnelCorrelationResultsType.Events, })) as FunnelCorrelation[], } - } else { - const results: Omit[] = ( - await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { - ...values.apiParams, - funnel_correlation_type: 'events', - funnel_correlation_exclude_event_names: values.excludedEventNames, - }) - ).result?.events + } + const results: Omit[] = ( + await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { + ...values.apiParams, + funnel_correlation_type: 'events', + funnel_correlation_exclude_event_names: values.excludedEventNames, + }) + ).result?.events - return { - events: results.map((result) => ({ - ...result, - result_type: FunnelCorrelationResultsType.Events, - })), - } + return { + events: results.map((result) => ({ + ...result, + result_type: FunnelCorrelationResultsType.Events, + })), } } catch (error) { lemonToast.error('Failed to load correlation results', { toastId: 'funnel-correlation-error' }) @@ -110,22 +109,21 @@ export const funnelCorrelationLogic = kea([ result_type: FunnelCorrelationResultsType.EventWithProperties, })) as FunnelCorrelation[], } - } else { - const results: Omit[] = ( - await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { - ...values.apiParams, - funnel_correlation_type: 'event_with_properties', - funnel_correlation_event_names: [eventName], - funnel_correlation_event_exclude_property_names: values.excludedEventPropertyNames, - }) - ).result?.events + } + const results: Omit[] = ( + await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { + ...values.apiParams, + funnel_correlation_type: 'event_with_properties', + funnel_correlation_event_names: [eventName], + funnel_correlation_event_exclude_property_names: values.excludedEventPropertyNames, + }) + ).result?.events - return { - [eventName]: results.map((result) => ({ - ...result, - 
result_type: FunnelCorrelationResultsType.EventWithProperties, - })), - } + return { + [eventName]: results.map((result) => ({ + ...result, + result_type: FunnelCorrelationResultsType.EventWithProperties, + })), } }, }, diff --git a/frontend/src/scenes/funnels/funnelDataLogic.ts b/frontend/src/scenes/funnels/funnelDataLogic.ts index 3080f0db0560f..555cfd116775f 100644 --- a/frontend/src/scenes/funnels/funnelDataLogic.ts +++ b/frontend/src/scenes/funnels/funnelDataLogic.ts @@ -158,9 +158,8 @@ export const funnelDataLogic = kea([ ) } return insightData.result - } else { - return [] } + return [] }, ], steps: [ @@ -176,9 +175,8 @@ export const funnelDataLogic = kea([ return aggregateBreakdownResult(results, breakdownProperty).sort((a, b) => a.order - b.order) } return results.sort((a, b) => a.order - b.order) - } else { - return [] } + return [] }, ], stepsWithConversionMetrics: [ @@ -270,9 +268,8 @@ export const funnelDataLogic = kea([ return (histogramGraphData?.length ?? 0) > 0 } else if (funnelsFilter.funnelVizType === FunnelVizType.Trends) { return (steps?.length ?? 0) > 0 && !!steps?.[0]?.labels - } else { - return false } + return false }, ], numericBinCount: [ @@ -352,9 +349,8 @@ export const funnelDataLogic = kea([ if (startIndex !== undefined && startIndex !== -1) { return startIndex - steps[0].days.length - } else { - return 0 } + return 0 }, ], diff --git a/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts b/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts index a12aa0ca2ec08..7a9be00e15782 100644 --- a/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts +++ b/frontend/src/scenes/funnels/funnelPropertyCorrelationLogic.ts @@ -94,22 +94,21 @@ export const funnelPropertyCorrelationLogic = kea[] = ( - await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { - ...values.apiParams, - funnel_correlation_type: 'properties', - funnel_correlation_names: targetProperties, - funnel_correlation_exclude_names: values.excludedPropertyNames, - }) - ).result?.events + } + const results: Omit[] = ( + await api.create(`api/projects/${values.currentTeamId}/insights/funnel/correlation`, { + ...values.apiParams, + funnel_correlation_type: 'properties', + funnel_correlation_names: targetProperties, + funnel_correlation_exclude_names: values.excludedPropertyNames, + }) + ).result?.events - return { - events: results.map((result) => ({ - ...result, - result_type: FunnelCorrelationResultsType.Properties, - })), - } + return { + events: results.map((result) => ({ + ...result, + result_type: FunnelCorrelationResultsType.Properties, + })), } } catch (error) { lemonToast.error('Failed to load correlation results', { toastId: 'funnel-correlation-error' }) diff --git a/frontend/src/scenes/funnels/funnelUtils.ts b/frontend/src/scenes/funnels/funnelUtils.ts index fc3c5cad3fc8a..1bcd3569983a0 100644 --- a/frontend/src/scenes/funnels/funnelUtils.ts +++ b/frontend/src/scenes/funnels/funnelUtils.ts @@ -468,11 +468,10 @@ export const parseBreakdownValue = ( const components = item.split('::') if (components.length === 1) { return { breakdown: components[0], breakdown_value: '' } - } else { - return { - breakdown: components[0], - breakdown_value: components[1], - } + } + return { + breakdown: components[0], + breakdown_value: components[1], } } @@ -502,18 +501,17 @@ export const parseEventAndProperty = ( value: [propertyValue as string], })), } - } else { - return { - name: components[0], - properties: [ - { - key: components[1], - operator: 
PropertyOperator.Exact, - value: components[2], - type: PropertyFilterType.Event, - }, - ], - } + } + return { + name: components[0], + properties: [ + { + key: components[1], + operator: PropertyOperator.Exact, + value: components[2], + type: PropertyFilterType.Event, + }, + ], } } @@ -538,11 +536,10 @@ export const parseDisplayNameForCorrelation = ( event: '$autocapture', }) return { first_value, second_value } - } else { - // FunnelCorrelationResultsType.EventWithProperties - // Events here come in the form of event::property::value - return { first_value: values[1], second_value: values[2] } } + // FunnelCorrelationResultsType.EventWithProperties + // Events here come in the form of event::property::value + return { first_value: values[1], second_value: values[2] } } export const appendToCorrelationConfig = ( diff --git a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx index 533a0a87fa1f8..2c24bf32f016e 100644 --- a/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx +++ b/frontend/src/scenes/insights/EmptyStates/EmptyStates.tsx @@ -21,7 +21,7 @@ import { urls } from 'scenes/urls' import { actionsAndEventsToSeries } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' import { seriesToActionsAndEvents } from '~/queries/nodes/InsightQuery/utils/queryNodeToFilter' -import { FunnelsQuery } from '~/queries/schema' +import { FunnelsQuery, Node } from '~/queries/schema' import { FilterType, InsightLogicProps, SavedInsightsTabs } from '~/types' import { samplingFilterLogic } from '../EditorFilters/samplingFilterLogic' @@ -139,10 +139,11 @@ export function InsightTimeoutState({ export interface InsightErrorStateProps { excludeDetail?: boolean title?: string + query?: Record | Node | null queryId?: string | null } -export function InsightErrorState({ excludeDetail, title, queryId }: InsightErrorStateProps): JSX.Element { +export function InsightErrorState({ excludeDetail, title, query, queryId }: InsightErrorStateProps): JSX.Element { const { preflight } = useValues(preflightLogic) const { openSupportForm } = useActions(supportLogic) @@ -181,6 +182,18 @@ export function InsightErrorState({ excludeDetail, title, queryId }: InsightErro
    )} {queryId ?
    Query ID: {queryId}
    : null} + {query && ( + + Open in query debugger + + )}
    ) @@ -243,7 +256,13 @@ export function FunnelSingleStepState({ actionable = true }: FunnelSingleStepSta ) } -export function InsightValidationError({ detail }: { detail: string }): JSX.Element { +export function InsightValidationError({ + detail, + query, +}: { + detail: string + query?: Record | null +}): JSX.Element { return (
    @@ -256,6 +275,19 @@ export function InsightValidationError({ detail }: { detail: string }): JSX.Elem {/* but rather that it's something with the definition of the query itself */}

    {detail}

    + {query ? ( +

    + + Open in query debugger + +

    + ) : null} {detail.includes('Exclusion') && (
    { diff --git a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts index 510c572983499..65864c06b8054 100644 --- a/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts +++ b/frontend/src/scenes/insights/filters/ActionFilter/entityFilterLogic.ts @@ -221,18 +221,17 @@ export const entityFilterLogic = kea([ : distinct_id_field, table_name: typeof table_name === 'undefined' ? filter.table_name : table_name, } - } else { - delete filter.id_field - delete filter.timestamp_field - delete filter.distinct_id_field - delete filter.table_name - return { - ...filter, - id: typeof id === 'undefined' ? filter.id : id, - name: typeof name === 'undefined' ? filter.name : name, - type: typeof type === 'undefined' ? filter.type : type, - custom_name: typeof custom_name === 'undefined' ? filter.custom_name : custom_name, - } + } + delete filter.id_field + delete filter.timestamp_field + delete filter.distinct_id_field + delete filter.table_name + return { + ...filter, + id: typeof id === 'undefined' ? filter.id : id, + name: typeof name === 'undefined' ? filter.name : name, + type: typeof type === 'undefined' ? filter.type : type, + custom_name: typeof custom_name === 'undefined' ? filter.custom_name : custom_name, } } diff --git a/frontend/src/scenes/insights/insightLogic.test.ts b/frontend/src/scenes/insights/insightLogic.test.ts index 6bf04a1aedd95..b4662f1724873 100644 --- a/frontend/src/scenes/insights/insightLogic.test.ts +++ b/frontend/src/scenes/insights/insightLogic.test.ts @@ -633,7 +633,7 @@ describe('insightLogic', () => { // 1. the dashboard is mounted const dashLogic = dashboardLogic({ id: 33 }) dashLogic.mount() - await expectLogic(dashLogic).toDispatchActions(['loadDashboardItemsSuccess']) + await expectLogic(dashLogic).toDispatchActions(['loadDashboardSuccess']) // 2. mount the insight logic = insightLogic({ dashboardItemId: Insight42, dashboardId: 33 }) @@ -651,7 +651,7 @@ describe('insightLogic', () => { // 1. the dashboard is mounted const dashLogic = dashboardLogic({ id: 33 }) dashLogic.mount() - await expectLogic(dashLogic).toDispatchActions(['loadDashboardItemsSuccess']) + await expectLogic(dashLogic).toDispatchActions(['loadDashboardSuccess']) // 2. 
mount the insight logic = insightLogic({ dashboardItemId: Insight42, dashboardId: 1 }) diff --git a/frontend/src/scenes/insights/insightLogic.ts b/frontend/src/scenes/insights/insightLogic.ts index 9b3b87114a0ef..cd6b4b6ac88e5 100644 --- a/frontend/src/scenes/insights/insightLogic.ts +++ b/frontend/src/scenes/insights/insightLogic.ts @@ -290,9 +290,8 @@ export const insightLogic = kea([ targetDashboards.includes(props.dashboardId) if (updateIsForThisDashboard) { return { ...state, ...item } - } else { - return state } + return state }, [insightsModel.actionTypes.renameInsightSuccess]: (state, { item }) => { if (item.id === state.id) { @@ -303,9 +302,8 @@ export const insightLogic = kea([ [insightsModel.actionTypes.insightsAddedToDashboard]: (state, { dashboardId, insightIds }) => { if (insightIds.includes(state.id)) { return { ...state, dashboards: [...(state.dashboards || []), dashboardId] } - } else { - return state } + return state }, [dashboardsModel.actionTypes.tileRemovedFromDashboard]: (state, { tile, dashboardId }) => { if (tile.insight?.id === state.id) { @@ -480,37 +478,35 @@ export const insightLogic = kea([ if (insight.query) { return { ...queryExportContext(insight.query, undefined, undefined, false), filename } - } else { - if (isTrendsFilter(filters) || isStickinessFilter(filters) || isLifecycleFilter(filters)) { - return { - path: `api/projects/${currentTeamId}/insights/trend/?${toParams( - filterTrendsClientSideParams(params) - )}`, - filename, - } - } else if (isRetentionFilter(filters)) { - return { - filename, - path: `api/projects/${currentTeamId}/insights/retention/?${toParams(params)}`, - } - } else if (isFunnelsFilter(filters)) { - return { - filename, - method: 'POST', - path: `api/projects/${currentTeamId}/insights/funnel`, - body: params, - } - } else if (isPathsFilter(filters)) { - return { - filename, - method: 'POST', - path: `api/projects/${currentTeamId}/insights/path`, - body: params, - } - } else { - return null + } + if (isTrendsFilter(filters) || isStickinessFilter(filters) || isLifecycleFilter(filters)) { + return { + path: `api/projects/${currentTeamId}/insights/trend/?${toParams( + filterTrendsClientSideParams(params) + )}`, + filename, + } + } else if (isRetentionFilter(filters)) { + return { + filename, + path: `api/projects/${currentTeamId}/insights/retention/?${toParams(params)}`, + } + } else if (isFunnelsFilter(filters)) { + return { + filename, + method: 'POST', + path: `api/projects/${currentTeamId}/insights/funnel`, + body: params, + } + } else if (isPathsFilter(filters)) { + return { + filename, + method: 'POST', + path: `api/projects/${currentTeamId}/insights/path`, + body: params, } } + return null }, ], isUsingSessionAnalysis: [ diff --git a/frontend/src/scenes/insights/insightVizDataLogic.ts b/frontend/src/scenes/insights/insightVizDataLogic.ts index 250ac40172add..4a4d94938e305 100644 --- a/frontend/src/scenes/insights/insightVizDataLogic.ts +++ b/frontend/src/scenes/insights/insightVizDataLogic.ts @@ -151,9 +151,8 @@ export const insightVizDataLogic = kea([ return !NON_VALUES_ON_SERIES_DISPLAY_TYPES.includes(display || ChartDisplayType.ActionsLineGraph) } else if (isLifecycle) { return true - } else { - return false } + return false }, ], diff --git a/frontend/src/scenes/insights/summarizeInsight.ts b/frontend/src/scenes/insights/summarizeInsight.ts index 31a498815b2a0..f14ec09dfd36a 100644 --- a/frontend/src/scenes/insights/summarizeInsight.ts +++ b/frontend/src/scenes/insights/summarizeInsight.ts @@ -60,22 +60,21 @@ function 
summarizeBreakdown(filters: Partial | BreakdownFilter, cont : `ID ${cohortId}`) ) .join(', ')}` - } else { - const noun = - breakdown_type !== 'group' - ? breakdown_type - : context.aggregationLabel(breakdown_group_type_index, true).singular - const propertyLabel = - typeof breakdown === 'string' && - breakdown_type && - breakdown_type in PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE - ? getCoreFilterDefinition( - breakdown, - PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE[breakdown_type] - )?.label || breakdown - : breakdown - return `${noun}'s ${propertyLabel}` } + const noun = + breakdown_type !== 'group' + ? breakdown_type + : context.aggregationLabel(breakdown_group_type_index, true).singular + const propertyLabel = + typeof breakdown === 'string' && + breakdown_type && + breakdown_type in PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE + ? getCoreFilterDefinition( + breakdown, + PROPERTY_FILTER_TYPE_TO_TAXONOMIC_FILTER_GROUP_TYPE[breakdown_type] + )?.label || breakdown + : breakdown + return `${noun}'s ${propertyLabel}` } return null } @@ -295,9 +294,8 @@ export function summarizeInsightQuery(query: InsightQueryNode, context: SummaryC ) } else if (isLifecycleQuery(query)) { return `User lifecycle based on ${getDisplayNameFromEntityNode(query.series[0])}` - } else { - return '' } + return '' } function summarizeQuery(query: Node): string { diff --git a/frontend/src/scenes/insights/utils/cleanFilters.ts b/frontend/src/scenes/insights/utils/cleanFilters.ts index 879b7851c5a1a..3712017321ea5 100644 --- a/frontend/src/scenes/insights/utils/cleanFilters.ts +++ b/frontend/src/scenes/insights/utils/cleanFilters.ts @@ -259,9 +259,8 @@ export function autocorrectInterval(filters: Partial): IntervalTy return 'hour' } else if (hour_disabled) { return 'day' - } else { - return filters.interval } + return filters.interval } export function cleanFilters( diff --git a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts index 492b00fc3cdbb..5b9396ecdc94b 100644 --- a/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts +++ b/frontend/src/scenes/notebooks/Notebook/notebookLogic.ts @@ -181,11 +181,10 @@ export const notebookLogic = kea([ registerNodeLogic: (state, { nodeId, nodeLogic }) => { if (nodeId === null) { return state - } else { - return { - ...state, - [nodeId]: nodeLogic, - } + } + return { + ...state, + [nodeId]: nodeLogic, } }, unregisterNodeLogic: (state, { nodeId }) => { @@ -290,9 +289,8 @@ export const notebookLogic = kea([ if (error.code === 'conflict') { actions.showConflictWarning() return null - } else { - throw error } + throw error } }, renameNotebook: async ({ title }) => { diff --git a/frontend/src/scenes/onboarding/Onboarding.tsx b/frontend/src/scenes/onboarding/Onboarding.tsx index aac4f4f81efd3..c166c71ac113a 100644 --- a/frontend/src/scenes/onboarding/Onboarding.tsx +++ b/frontend/src/scenes/onboarding/Onboarding.tsx @@ -1,6 +1,5 @@ import { useActions, useValues } from 'kea' import { FEATURE_FLAGS, SESSION_REPLAY_MINIMUM_DURATION_OPTIONS } from 'lib/constants' -import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { useEffect, useState } from 'react' import { AndroidInstructions } from 'scenes/onboarding/sdks/session-replay' @@ -82,8 +81,6 @@ const OnboardingWrapper = ({ children }: { children: React.ReactNode }): JSX.Ele const ProductAnalyticsOnboarding = (): JSX.Element => { const { currentTeam } = useValues(teamLogic) - 
const heatmapsEnabled = useFeatureFlag('TOOLBAR_HEATMAPS') - return ( { inverseToggle: true, }, - heatmapsEnabled - ? { - title: 'Enable heatmaps', - description: `If you use our JavaScript libraries, we can capture general clicks, mouse movements, + { + title: 'Enable heatmaps', + description: `If you use our JavaScript libraries, we can capture general clicks, mouse movements, and scrolling to create heatmaps. No additional events are created, and you can disable this at any time.`, - teamProperty: 'heatmaps_opt_in', - value: currentTeam?.heatmaps_opt_in ?? true, - type: 'toggle', - } - : undefined, + teamProperty: 'heatmaps_opt_in', + value: currentTeam?.heatmaps_opt_in ?? true, + type: 'toggle', + }, ]} /> diff --git a/frontend/src/scenes/paths/Paths.tsx b/frontend/src/scenes/paths/Paths.tsx index 933f2825567f9..bb40f2c1701de 100644 --- a/frontend/src/scenes/paths/Paths.tsx +++ b/frontend/src/scenes/paths/Paths.tsx @@ -25,7 +25,7 @@ export function Paths(): JSX.Element { const [nodeCards, setNodeCards] = useState([]) const { insight, insightProps } = useValues(insightLogic) - const { paths, pathsFilter, funnelPathsFilter, insightDataLoading, insightDataError } = useValues( + const { insightQuery, paths, pathsFilter, funnelPathsFilter, insightDataLoading, insightDataError } = useValues( pathsDataLogic(insightProps) ) @@ -51,7 +51,7 @@ export function Paths(): JSX.Element { }, [paths, !insightDataLoading, canvasWidth, canvasHeight]) if (insightDataError) { - return + return } return ( diff --git a/frontend/src/scenes/persons/PersonPreview.tsx b/frontend/src/scenes/persons/PersonPreview.tsx index 2c874726304c7..f0fe0d838179e 100644 --- a/frontend/src/scenes/persons/PersonPreview.tsx +++ b/frontend/src/scenes/persons/PersonPreview.tsx @@ -28,9 +28,18 @@ export function PersonPreview(props: PersonPreviewProps): JSX.Element | null { return } - // NOTE: This should pretty much never happen, but it's here just in case + // NOTE: This can happen if the Person was deleted or the events associated with the distinct_id had person processing disabled if (!person) { - return <>Not found + return ( +
    +

    Person profile not found

    +

    + The Person may have been deleted. +
    + Alternatively, the events for this user may have had Person Profiles disabled. +

    +
    + ) } const display = asDisplay(person) diff --git a/frontend/src/scenes/pipeline/configUtils.ts b/frontend/src/scenes/pipeline/configUtils.ts index 9835e0cc3a63f..255c1e8dffd5a 100644 --- a/frontend/src/scenes/pipeline/configUtils.ts +++ b/frontend/src/scenes/pipeline/configUtils.ts @@ -10,11 +10,10 @@ export function getConfigSchemaArray( ): PluginConfigSchema[] { if (Array.isArray(configSchema)) { return configSchema - } else { - return Object.entries(configSchema) - .map(([key, value]) => ({ key, ...value })) - .sort((a, b) => (a.order || 999999) - (b.order || 999999)) } + return Object.entries(configSchema) + .map(([key, value]) => ({ key, ...value })) + .sort((a, b) => (a.order || 999999) - (b.order || 999999)) } export function getConfigSchemaObject( @@ -28,9 +27,8 @@ export function getConfigSchemaObject( } }) return newSchema - } else { - return configSchema } + return configSchema } export function defaultConfigForPlugin(plugin: PluginType): Record { diff --git a/frontend/src/scenes/retention/retentionLineGraphLogic.ts b/frontend/src/scenes/retention/retentionLineGraphLogic.ts index ec90368adf15a..2bd1ff9663e2c 100644 --- a/frontend/src/scenes/retention/retentionLineGraphLogic.ts +++ b/frontend/src/scenes/retention/retentionLineGraphLogic.ts @@ -109,9 +109,8 @@ export const retentionLineGraphLogic = kea([ if (startIndex !== undefined && startIndex !== -1) { return startIndex - trendSeries[0].days.length - } else { - return 0 } + return 0 }, ], diff --git a/frontend/src/scenes/retention/retentionTableLogic.ts b/frontend/src/scenes/retention/retentionTableLogic.ts index 177782fbafea4..3fd153edebcc6 100644 --- a/frontend/src/scenes/retention/retentionTableLogic.ts +++ b/frontend/src/scenes/retention/retentionTableLogic.ts @@ -24,9 +24,8 @@ const periodIsLatest = (date_to: string | null, period: string | null): boolean (period == 'Month' && curr.isSame(dayjs(), 'month')) ) { return true - } else { - return false } + return false } export const retentionTableLogic = kea([ diff --git a/frontend/src/scenes/sceneLogic.ts b/frontend/src/scenes/sceneLogic.ts index 7efd7e127e261..6a4e49d6651c8 100644 --- a/frontend/src/scenes/sceneLogic.ts +++ b/frontend/src/scenes/sceneLogic.ts @@ -298,9 +298,8 @@ export const sceneLogic = kea([ actions.reloadBrowserDueToImportError() } return - } else { - throw error } + throw error } finally { window.clearTimeout(timeout) } diff --git a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts index 8cc99954081fe..118614cfa6cb2 100644 --- a/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts +++ b/frontend/src/scenes/session-recordings/file-playback/sessionRecordingFilePlaybackSceneLogic.ts @@ -45,9 +45,8 @@ export const parseExportedSessionRecording = (fileData: string): ExportedSession .sort((a, b) => a.timestamp - b.timestamp), }, } - } else { - throw new Error('File version is not supported') } + throw new Error('File version is not supported') } /** diff --git a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx index 56c69f0df5d03..2e13371ce3245 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerMeta.tsx @@ -257,7 +257,7 @@ const MenuActions = (): JSX.Element => { const items: LemonMenuItems = [ { label: 'Export to 
file', - onClick: exportRecordingToFile, + onClick: () => exportRecordingToFile(false), icon: , tooltip: 'Export recording to a file. This can be loaded later into PostHog for playback.', }, diff --git a/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx b/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx index eea860fcab26e..2b079dba9583d 100644 --- a/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx +++ b/frontend/src/scenes/session-recordings/player/controller/PlayerSeekbarPreview.tsx @@ -1,7 +1,7 @@ import { BindLogic, useActions, useValues } from 'kea' import useIsHovering from 'lib/hooks/useIsHovering' import { colonDelimitedDuration } from 'lib/utils' -import { MutableRefObject, useEffect, useRef, useState } from 'react' +import { memo, MutableRefObject, useEffect, useRef, useState } from 'react' import { useDebouncedCallback } from 'use-debounce' import { PlayerFrame } from '../PlayerFrame' @@ -28,7 +28,7 @@ const PlayerSeekbarPreviewFrame = ({ }: { percentage: number; isVisible: boolean } & Omit< PlayerSeekbarPreviewProps, 'seekBarRef' | 'activeMs' ->): JSX.Element => { +>): JSX.Element | null => { const { sessionRecordingId, logicProps } = useValues(sessionRecordingPlayerLogic) const seekPlayerLogicProps: SessionRecordingPlayerLogicProps = { @@ -66,7 +66,7 @@ const PlayerSeekbarPreviewFrame = ({ ) } -export function PlayerSeekbarPreview({ minMs, maxMs, seekBarRef, activeMs }: PlayerSeekbarPreviewProps): JSX.Element { +function _PlayerSeekbarPreview({ minMs, maxMs, seekBarRef, activeMs }: PlayerSeekbarPreviewProps): JSX.Element { const [percentage, setPercentage] = useState(0) const ref = useRef(null) const fixedUnits = maxMs / 1000 > 3600 ? 3 : 2 @@ -125,3 +125,5 @@ export function PlayerSeekbarPreview({ minMs, maxMs, seekBarRef, activeMs }: Pla
    ) } + +export const PlayerSeekbarPreview = memo(_PlayerSeekbarPreview) diff --git a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts index 84a349cbbdcc8..65829d1257afd 100644 --- a/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts +++ b/frontend/src/scenes/session-recordings/player/playerSettingsLogic.ts @@ -299,9 +299,8 @@ export const playerSettingsLogic = kea([ if (enabled) { if (selectedFilter.alone) { return false - } else { - return filterInTab.alone ? false : true } + return filterInTab.alone ? false : true } if (existingSelected !== key) { diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index 0416fa79032ed..b628c57847528 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -162,10 +162,9 @@ export const deduplicateSnapshots = (snapshots: RecordingSnapshot[] | null): Rec if (seenHashes.has(key)) { return false - } else { - seenHashes.add(key) - return true } + seenHashes.add(key) + return true }) .sort((a, b) => a.timestamp - b.timestamp) } diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts index d1bbf94be6fa6..2eacc5ccf5392 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingPlayerLogic.ts @@ -352,6 +352,7 @@ export const sessionRecordingPlayerLogic = kea( logicProps: [() => [(_, props) => props], (props): SessionRecordingPlayerLogicProps => props], playlistLogic: [() => [(_, props) => props], (props) => props.playlistLogic], + roughAnimationFPS: [(s) => [s.playerSpeed], (playerSpeed) => playerSpeed * (1000 / 60)], currentPlayerState: [ (s) => [ s.playingState, @@ -444,9 +445,8 @@ export const sessionRecordingPlayerLogic = kea( if (isSkippingInactivity) { const secondsToSkip = ((currentSegment?.endTimestamp ?? 0) - (currentTimestamp ?? 0)) / 1000 return Math.max(50, secondsToSkip) - } else { - return speed } + return speed }, ], segmentForTimestamp: [ @@ -814,47 +814,33 @@ export const sessionRecordingPlayerLogic = kea( const rrwebPlayerTime = values.player?.replayer?.getCurrentTime() let newTimestamp = values.fromRRWebPlayerTime(rrwebPlayerTime) - const skip = values.playerSpeed * (1000 / 60) // rough animation fps - if ( - rrwebPlayerTime !== undefined && - newTimestamp !== undefined && - (values.currentPlayerState === SessionPlayerState.PLAY || - values.currentPlayerState === SessionPlayerState.SKIP) && - values.timestampChangeTracking.timestampMatchesPrevious > 10 - ) { - // NOTE: We should investigate if this is still happening - logging to posthog recording so we can find this in the future - posthog.sessionRecording?.log( - 'stuck session player detected - this indicates an issue with the segmenter', - 'warn' - ) - cache.debug?.('stuck session player detected', { - timestampChangeTracking: values.timestampChangeTracking, - currentSegment: values.currentSegment, - snapshots: values.sessionPlayerData.snapshotsByWindowId[values.currentSegment?.windowId ?? 
''], - player: values.player, - meta: values.player?.replayer.getMetaData(), - rrwebPlayerTime, - segments: values.sessionPlayerData.segments, - segmentIndex: - values.currentSegment && values.sessionPlayerData.segments.indexOf(values.currentSegment), - }) - - actions.skipPlayerForward(rrwebPlayerTime, skip) - newTimestamp = newTimestamp + skip - } - if (newTimestamp == undefined && values.currentTimestamp) { // This can happen if the player is not loaded due to us being in a "gap" segment // In this case, we should progress time forward manually - if (values.currentSegment?.kind === 'gap') { cache.debug?.('gap segment: skipping forward') - newTimestamp = values.currentTimestamp + skip + newTimestamp = values.currentTimestamp + values.roughAnimationFPS } } + // If we're beyond buffered position, set to buffering + if (values.currentSegment?.kind === 'buffer') { + // Pause only the animation, not our player, so it will restart + // when the buffering progresses + values.player?.replayer?.pause() + actions.startBuffer() + actions.setErrorPlayerState(false) + cache.debug('buffering') + return + } + + if (newTimestamp == undefined) { + // no newTimestamp is unexpected, bail out + return + } + // If we are beyond the current segment then move to the next one - if (newTimestamp && values.currentSegment && newTimestamp > values.currentSegment.endTimestamp) { + if (values.currentSegment && newTimestamp > values.currentSegment.endTimestamp) { const nextSegment = values.segmentForTimestamp(newTimestamp) if (nextSegment) { @@ -874,22 +860,9 @@ export const sessionRecordingPlayerLogic = kea( return } - // If we're beyond buffered position, set to buffering - if (values.currentSegment?.kind === 'buffer') { - // Pause only the animation, not our player, so it will restart - // when the buffering progresses - values.player?.replayer?.pause() - actions.startBuffer() - actions.setErrorPlayerState(false) - cache.debug('buffering') - return - } - - if (newTimestamp !== undefined) { - // The normal loop. Progress the player position and continue the loop - actions.setCurrentTimestamp(newTimestamp) - cache.timer = requestAnimationFrame(actions.updateAnimation) - } + // The normal loop. 
Progress the player position and continue the loop + actions.setCurrentTimestamp(newTimestamp) + cache.timer = requestAnimationFrame(actions.updateAnimation) }, stopAnimation: () => { if (cache.timer) { @@ -1003,7 +976,7 @@ export const sessionRecordingPlayerLogic = kea( }, })), - subscriptions(({ actions }) => ({ + subscriptions(({ actions, values }) => ({ sessionPlayerData: (next, prev) => { const hasSnapshotChanges = next?.snapshotsByWindowId !== prev?.snapshotsByWindowId @@ -1013,6 +986,17 @@ export const sessionRecordingPlayerLogic = kea( actions.syncSnapshotsWithPlayer() } }, + timestampChangeTracking: (next) => { + if (next.timestampMatchesPrevious < 10) { + return + } + + const rrwebPlayerTime = values.player?.replayer?.getCurrentTime() + + if (rrwebPlayerTime !== undefined && values.currentPlayerState === SessionPlayerState.PLAY) { + actions.skipPlayerForward(rrwebPlayerTime, values.roughAnimationFPS) + } + }, })), beforeUnmount(({ values, actions, cache, props }) => { diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts index b54aa53aa311b..41792b8d7678d 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts @@ -389,9 +389,8 @@ export const sessionRecordingsPlaylistLogic = kea { @@ -402,9 +401,8 @@ export const sessionRecordingsPlaylistLogic = kea, - flag: 'TOOLBAR_HEATMAPS', }, { id: 'exception-autocapture', diff --git a/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts b/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts index 0616394c5ef24..e6ea096cdf263 100644 --- a/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts +++ b/frontend/src/scenes/settings/project/filterTestAccountDefaultsLogic.ts @@ -31,9 +31,8 @@ export const filterTestAccountsDefaultsLogic = kea([ const response = await api.create('api/user/test_slack_webhook', { webhook }) if (response.success) { return webhook - } else { - actions.testWebhookFailure(response.error) } + actions.testWebhookFailure(response.error) } catch (error: any) { actions.testWebhookFailure(error.message) } diff --git a/frontend/src/scenes/surveys/SurveyEdit.tsx b/frontend/src/scenes/surveys/SurveyEdit.tsx index 1121d79be7f3a..1905cb29fd5c9 100644 --- a/frontend/src/scenes/surveys/SurveyEdit.tsx +++ b/frontend/src/scenes/surveys/SurveyEdit.tsx @@ -2,6 +2,7 @@ import './EditSurvey.scss' import { DndContext } from '@dnd-kit/core' import { SortableContext, verticalListSortingStrategy } from '@dnd-kit/sortable' +import { IconInfo } from '@posthog/icons' import { IconLock, IconPlus, IconTrash } from '@posthog/icons' import { LemonButton, @@ -19,6 +20,7 @@ import { FlagSelector } from 'lib/components/FlagSelector' import { FEATURE_FLAGS } from 'lib/constants' import { IconCancel } from 'lib/lemon-ui/icons' import { LemonField } from 'lib/lemon-ui/LemonField' +import { Tooltip } from 'lib/lemon-ui/Tooltip' import { featureFlagLogic as enabledFeaturesLogic } from 'lib/logic/featureFlagLogic' import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic' import { FeatureFlagReleaseConditions } from 'scenes/feature-flags/FeatureFlagReleaseConditions' @@ -516,13 +518,18 @@ export default function SurveyEdit(): JSX.Element { type="number" size="small" min={0} - value={value?.seenSurveyWaitPeriodInDays} + 
value={value?.seenSurveyWaitPeriodInDays || NaN} onChange={(val) => { if (val !== undefined && val > 0) { onChange({ ...value, seenSurveyWaitPeriodInDays: val, }) + } else { + onChange({ + ...value, + seenSurveyWaitPeriodInDays: null, + }) } }} className="w-16" @@ -597,6 +604,47 @@ export default function SurveyEdit(): JSX.Element { ), }, + { + key: SurveyEditSection.CompletionConditions, + header: 'Completion conditions', + content: ( + + {({ onChange, value }) => { + return ( +
    + { + const newResponsesLimit = checked ? 100 : null + onChange(newResponsesLimit) + }} + /> + Stop the survey once + { + if (newValue && newValue > 0) { + onChange(newValue) + } else { + onChange(null) + } + }} + className="w-16" + />{' '} + responses are received. + + + +
    + ) + }} +
    + ), + }, ]} />
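Reviewer note: the completion-conditions UI above collapses any falsy or non-positive input to null, so a survey either has a positive responses_limit or no limit at all, and the toggle seeds a default of 100 when switched on. A condensed sketch of that normalization — the helper names are illustrative, not part of this diff:

    // A survey either has a positive responses_limit or none (null) — never 0 or negative.
    function normalizeResponsesLimit(value: number | undefined): number | null {
        return value && value > 0 ? value : null
    }

    // Switching the condition on seeds a default limit; switching it off clears it,
    // matching `const newResponsesLimit = checked ? 100 : null` above.
    const DEFAULT_RESPONSES_LIMIT = 100
    function toggleResponsesLimit(checked: boolean): number | null {
        return checked ? DEFAULT_RESPONSES_LIMIT : null
    }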
    diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx index cb04b053d5a6d..e168762d8bd0a 100644 --- a/frontend/src/scenes/surveys/SurveyView.tsx +++ b/frontend/src/scenes/surveys/SurveyView.tsx @@ -217,6 +217,15 @@ export function SurveyView({ id }: { id: string }): JSX.Element {
    )}
    + {survey.responses_limit && ( + <> + Completion conditions + + The survey will be stopped once {survey.responses_limit}{' '} + responses are received. + + + )} { id: 'new' linked_flag_id: number | null @@ -145,6 +146,7 @@ export const NEW_SURVEY: NewSurvey = { conditions: null, archived: false, appearance: defaultSurveyAppearance, + responses_limit: null, } export enum SurveyTemplateType { diff --git a/frontend/src/scenes/surveys/surveyLogic.test.ts b/frontend/src/scenes/surveys/surveyLogic.test.ts index 6661f3d359760..c507ab5b02265 100644 --- a/frontend/src/scenes/surveys/surveyLogic.test.ts +++ b/frontend/src/scenes/surveys/surveyLogic.test.ts @@ -48,6 +48,7 @@ const MULTIPLE_CHOICE_SURVEY: Survey = { end_date: null, archived: false, targeting_flag_filters: undefined, + responses_limit: null, } const SINGLE_CHOICE_SURVEY: Survey = { @@ -93,6 +94,7 @@ const SINGLE_CHOICE_SURVEY: Survey = { end_date: null, archived: false, targeting_flag_filters: undefined, + responses_limit: null, } const MULTIPLE_CHOICE_SURVEY_WITH_OPEN_CHOICE: Survey = { @@ -139,6 +141,7 @@ const MULTIPLE_CHOICE_SURVEY_WITH_OPEN_CHOICE: Survey = { end_date: null, archived: false, targeting_flag_filters: undefined, + responses_limit: null, } const SINGLE_CHOICE_SURVEY_WITH_OPEN_CHOICE: Survey = { @@ -185,6 +188,7 @@ const SINGLE_CHOICE_SURVEY_WITH_OPEN_CHOICE: Survey = { end_date: null, archived: false, targeting_flag_filters: undefined, + responses_limit: null, } describe('multiple choice survey logic', () => { diff --git a/frontend/src/scenes/surveys/surveyLogic.tsx b/frontend/src/scenes/surveys/surveyLogic.tsx index cbe549fff5a6c..22661cc0993eb 100644 --- a/frontend/src/scenes/surveys/surveyLogic.tsx +++ b/frontend/src/scenes/surveys/surveyLogic.tsx @@ -36,6 +36,7 @@ export enum SurveyEditSection { Appearance = 'appearance', Customization = 'customization', Targeting = 'targeting', + CompletionConditions = 'CompletionConditions', } export interface SurveyLogicProps { /** Either a UUID or 'new'. */ @@ -475,6 +476,7 @@ export const surveyLogic = kea([ actions.setSurveyValue('targeting_flag', NEW_SURVEY.targeting_flag) actions.setSurveyValue('conditions', NEW_SURVEY.conditions) actions.setSurveyValue('remove_targeting_flag', true) + actions.setSurveyValue('responses_limit', NEW_SURVEY.responses_limit) }, submitSurveyFailure: async () => { // When errors occur, scroll to the error, but wait for errors to be set in the DOM first diff --git a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx index 0fd8a8b8cc931..9ee01c3e8c988 100644 --- a/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx +++ b/frontend/src/scenes/trends/persons-modal/PersonsModal.tsx @@ -201,9 +201,9 @@ export function PersonsModal({
    {errorObject ? ( validationError ? ( - + ) : ( - + ) ) : actors && actors.length > 0 ? ( <> diff --git a/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts b/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts index dbe81266fb7db..26bac72e45cab 100644 --- a/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts +++ b/frontend/src/scenes/trends/persons-modal/personsModalLogic.ts @@ -136,25 +136,24 @@ export const personsModalLogic = kea([ group[field] = result[additionalFieldIndices[index]] }) return group - } else { - const person: PersonActorType = { - type: 'person', - id: result[0].id, - uuid: result[0].id, - distinct_ids: result[0].distinct_ids, - is_identified: result[0].is_identified, - properties: result[0].properties, - created_at: result[0].created_at, - matched_recordings: [], - value_at_data_point: null, - } + } + const person: PersonActorType = { + type: 'person', + id: result[0].id, + uuid: result[0].id, + distinct_ids: result[0].distinct_ids, + is_identified: result[0].is_identified, + properties: result[0].properties, + created_at: result[0].created_at, + matched_recordings: [], + value_at_data_point: null, + } - Object.keys(props.additionalSelect || {}).forEach((field, index) => { - person[field] = result[additionalFieldIndices[index]] - }) + Object.keys(props.additionalSelect || {}).forEach((field, index) => { + person[field] = result[additionalFieldIndices[index]] + }) - return person - } + return person }), }, ], diff --git a/frontend/src/scenes/trends/trendsDataLogic.ts b/frontend/src/scenes/trends/trendsDataLogic.ts index 49e4a05f022cb..cd85bbcb204a1 100644 --- a/frontend/src/scenes/trends/trendsDataLogic.ts +++ b/frontend/src/scenes/trends/trendsDataLogic.ts @@ -91,9 +91,8 @@ export const trendsDataLogic = kea([ (insightData: TrendAPIResponse | null): TrendResult[] => { if (insightData?.result && Array.isArray(insightData.result)) { return insightData.result - } else { - return [] } + return [] }, ], @@ -175,9 +174,8 @@ export const trendsDataLogic = kea([ if (startIndex !== undefined && startIndex !== -1) { return startIndex - results[0].days.length - } else { - return 0 } + return 0 }, ], diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts index f8056c88ea228..1b386ac2e2d0c 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts @@ -271,17 +271,16 @@ export const webAnalyticsLogic = kea([ } as const }) .filter(isNotNil) - } else { - // no matching property, so add one - const newFilter: WebAnalyticsPropertyFilter = { - type, - key, - value, - operator: PropertyOperator.Exact, - } - - return [...oldPropertyFilters, newFilter] } + // no matching property, so add one + const newFilter: WebAnalyticsPropertyFilter = { + type, + key, + value, + operator: PropertyOperator.Exact, + } + + return [...oldPropertyFilters, newFilter] }, setStateFromUrl: (_, { state }) => state.filters, }, @@ -1012,9 +1011,8 @@ export const webAnalyticsLogic = kea([ limit: 50, }, } - } else { - return query } + return query } if (tabId) { @@ -1040,22 +1038,21 @@ export const webAnalyticsLogic = kea([ query: extendQuery(tab.query), canOpenInsight: tab.canOpenInsight, } - } else { - if ('tabs' in tile) { - throw new Error('Developer Error, tabId not provided for tab tile') - } - return { - tileId, - title: tile.title, - showIntervalSelect: tile.showIntervalSelect, - showPathCleaningControls: tile.showPathCleaningControls, - 
insightProps: { - dashboardItemId: getDashboardItemId(tileId, undefined, true), - loadPriority: 0, - dataNodeCollectionId: WEB_ANALYTICS_DATA_COLLECTION_NODE_ID, - }, - query: extendQuery(tile.query), - } + } + if ('tabs' in tile) { + throw new Error('Developer Error, tabId not provided for tab tile') + } + return { + tileId, + title: tile.title, + showIntervalSelect: tile.showIntervalSelect, + showPathCleaningControls: tile.showPathCleaningControls, + insightProps: { + dashboardItemId: getDashboardItemId(tileId, undefined, true), + loadPriority: 0, + dataNodeCollectionId: WEB_ANALYTICS_DATA_COLLECTION_NODE_ID, + }, + query: extendQuery(tile.query), } }, ], @@ -1115,16 +1112,15 @@ export const webAnalyticsLogic = kea([ null, formatQueryForNewInsight(tab.query) ) - } else { - if ('tabs' in tile) { - throw new Error('Developer Error, tabId not provided for tab tile') - } - return urls.insightNew( - { properties: webAnalyticsFilters, date_from: dateFrom, date_to: dateTo }, - null, - formatQueryForNewInsight(tile.query) - ) } + if ('tabs' in tile) { + throw new Error('Developer Error, tabId not provided for tab tile') + } + return urls.insightNew( + { properties: webAnalyticsFilters, date_from: dateFrom, date_to: dateTo }, + null, + formatQueryForNewInsight(tile.query) + ) } }, ], diff --git a/frontend/src/toolbar/debug/eventDebugMenuLogic.ts b/frontend/src/toolbar/debug/eventDebugMenuLogic.ts index 1b490687c3ad2..93a1f7587ee10 100644 --- a/frontend/src/toolbar/debug/eventDebugMenuLogic.ts +++ b/frontend/src/toolbar/debug/eventDebugMenuLogic.ts @@ -59,9 +59,8 @@ export const eventDebugMenuLogic = kea([ return events.filter((e) => { if (showRecordingSnapshots) { return true - } else { - return e.event !== '$snapshot' } + return e.event !== '$snapshot' }) }, ], diff --git a/frontend/src/toolbar/elements/Heatmap.tsx b/frontend/src/toolbar/elements/Heatmap.tsx index ce7a58f02fd4c..75785d3f3a768 100644 --- a/frontend/src/toolbar/elements/Heatmap.tsx +++ b/frontend/src/toolbar/elements/Heatmap.tsx @@ -1,6 +1,6 @@ import heatmapsJs, { Heatmap as HeatmapJS } from 'heatmap.js' import { useValues } from 'kea' -import { MutableRefObject, useCallback, useEffect, useRef } from 'react' +import { MutableRefObject, useCallback, useEffect, useMemo, useRef } from 'react' import { heatmapLogic } from '~/toolbar/elements/heatmapLogic' @@ -49,10 +49,26 @@ function HeatmapMouseInfo({ } export function Heatmap(): JSX.Element | null { - const { heatmapJsData, heatmapEnabled, heatmapFilters, windowWidth, windowHeight } = useValues(heatmapLogic) + const { heatmapJsData, heatmapEnabled, heatmapFilters, windowWidth, windowHeight, heatmapColorPalette } = + useValues(heatmapLogic) const heatmapsJsRef = useRef>() const heatmapsJsContainerRef = useRef() + const heatmapJSColorGradient = useMemo((): Record => { + switch (heatmapColorPalette) { + case 'blue': + return { '.0': 'rgba(0, 0, 255, 0)', '.100': 'rgba(0, 0, 255, 1)' } + case 'green': + return { '.0': 'rgba(0, 255, 0, 0)', '.100': 'rgba(0, 255, 0, 1)' } + case 'red': + return { '.0': 'rgba(255, 0, 0, 0)', '.100': 'rgba(255, 0, 0, 1)' } + + default: + // Defaults taken from heatmap.js + return { '.25': 'rgb(0,0,255)', '0.55': 'rgb(0,255,0)', '0.85': 'yellow', '1.0': 'rgb(255,0,0)' } + } + }, [heatmapColorPalette]) + const updateHeatmapData = useCallback((): void => { try { heatmapsJsRef.current?.setData(heatmapJsData) @@ -69,6 +85,7 @@ export function Heatmap(): JSX.Element | null { heatmapsJsRef.current = heatmapsJs.create({ container, + gradient: 
heatmapJSColorGradient, }) updateHeatmapData() @@ -78,6 +95,17 @@ export function Heatmap(): JSX.Element | null { updateHeatmapData() }, [heatmapJsData]) + useEffect(() => { + if (!heatmapsJsContainerRef.current) { + return + } + + heatmapsJsRef.current?.configure({ + container: heatmapsJsContainerRef.current, + gradient: heatmapJSColorGradient, + }) + }, [heatmapJSColorGradient]) + if (!heatmapEnabled || !heatmapFilters.enabled || heatmapFilters.type === 'scrolldepth') { return null } diff --git a/frontend/src/toolbar/elements/ScrollDepth.tsx b/frontend/src/toolbar/elements/ScrollDepth.tsx index d81e6478aea17..9e929de52c745 100644 --- a/frontend/src/toolbar/elements/ScrollDepth.tsx +++ b/frontend/src/toolbar/elements/ScrollDepth.tsx @@ -1,3 +1,4 @@ +import clsx from 'clsx' import { useValues } from 'kea' import { heatmapLogic } from '~/toolbar/elements/heatmapLogic' @@ -7,7 +8,7 @@ import { useMousePosition } from './useMousePosition' function ScrollDepthMouseInfo(): JSX.Element | null { const { posthog } = useValues(toolbarConfigLogic) - const { heatmapElements, rawHeatmapLoading } = useValues(heatmapLogic) + const { heatmapElements, rawHeatmapLoading, shiftPressed } = useValues(heatmapLogic) const { y: mouseY } = useMousePosition() @@ -35,8 +36,13 @@ function ScrollDepthMouseInfo(): JSX.Element | null { transform: 'translateY(-50%)', }} > -
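The `useMemo` above reduces the colour palette to a table of heatmap.js gradient stops. Roughly the same data expressed as a function (Python, purely illustrative; the stop keys are copied verbatim from the hunk):

```python
def heatmap_gradient(palette: str) -> dict[str, str]:
    # Monocolour palettes fade a single RGB channel from transparent to opaque.
    mono = {"blue": "0, 0, 255", "green": "0, 255, 0", "red": "255, 0, 0"}
    if palette in mono:
        rgb = mono[palette]
        return {".0": f"rgba({rgb}, 0)", ".100": f"rgba({rgb}, 1)"}
    # Fallback: the default multicolour ramp shipped with heatmap.js.
    return {".25": "rgb(0,0,255)", "0.55": "rgb(0,255,0)", "0.85": "yellow", "1.0": "rgb(255,0,0)"}
```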
    -
    +
    +
    {rawHeatmapLoading ? ( <>Loading... ) : heatmapElements.length ? ( @@ -46,7 +52,7 @@ function ScrollDepthMouseInfo(): JSX.Element | null { )}
    -
    +
    ) } @@ -54,7 +60,8 @@ function ScrollDepthMouseInfo(): JSX.Element | null { export function ScrollDepth(): JSX.Element | null { const { posthog } = useValues(toolbarConfigLogic) - const { heatmapEnabled, heatmapFilters, heatmapElements, scrollDepthPosthogJsError } = useValues(heatmapLogic) + const { heatmapEnabled, heatmapFilters, heatmapElements, scrollDepthPosthogJsError, heatmapColorPalette } = + useValues(heatmapLogic) if (!heatmapEnabled || !heatmapFilters.enabled || heatmapFilters.type !== 'scrolldepth') { return null @@ -71,11 +78,32 @@ export function ScrollDepth(): JSX.Element | null { function color(count: number): string { const value = 1 - count / maxCount - const safeValue = Math.max(0, Math.min(1, value)) - const hue = Math.round(260 * safeValue) - // Return hsl color. You can adjust saturation and lightness to your liking - return `hsl(${hue}, 100%, 50%)` + if (heatmapColorPalette === 'default') { + const safeValue = Math.max(0, Math.min(1, value)) + const hue = Math.round(260 * safeValue) + + // Return hsl color. You can adjust saturation and lightness to your liking + return `hsl(${hue}, 100%, 50%)` + } + + const rgba = [0, 0, 0, count / maxCount] + + switch (heatmapColorPalette) { + case 'red': + rgba[0] = 255 + break + case 'green': + rgba[1] = 255 + break + case 'blue': + rgba[2] = 255 + break + default: + break + } + + return `rgba(${rgba.join(', ')})` } return ( diff --git a/frontend/src/toolbar/elements/heatmapLogic.ts b/frontend/src/toolbar/elements/heatmapLogic.ts index b3e586949f3f3..f6256a91844be 100644 --- a/frontend/src/toolbar/elements/heatmapLogic.ts +++ b/frontend/src/toolbar/elements/heatmapLogic.ts @@ -1,3 +1,4 @@ +import { LemonSelectOption } from '@posthog/lemon-ui' import { actions, afterMount, beforeUnmount, connect, kea, listeners, path, reducers, selectors } from 'kea' import { loaders } from 'kea-loaders' import { encodeParams } from 'kea-router' @@ -57,6 +58,13 @@ export type HeatmapJsData = { } export type HeatmapFixedPositionMode = 'fixed' | 'relative' | 'hidden' +export const HEATMAP_COLOR_PALETTE_OPTIONS: LemonSelectOption[] = [ + { value: 'default', label: 'Default (multicolor)' }, + { value: 'red', label: 'Red (monocolor)' }, + { value: 'green', label: 'Green (monocolor)' }, + { value: 'blue', label: 'Blue (monocolor)' }, +] + export const heatmapLogic = kea([ path(['toolbar', 'elements', 'heatmapLogic']), connect({ @@ -86,6 +94,7 @@ export const heatmapLogic = kea([ fetchHeatmapApi: (params: HeatmapRequestType) => ({ params }), setHeatmapScrollY: (scrollY: number) => ({ scrollY }), setHeatmapFixedPositionMode: (mode: HeatmapFixedPositionMode) => ({ mode }), + setHeatmapColorPalette: (Palette: string | null) => ({ Palette }), }), windowValues(() => ({ windowWidth: (window: Window) => window.innerWidth, @@ -153,6 +162,14 @@ export const heatmapLogic = kea([ setHeatmapFixedPositionMode: (_, { mode }) => mode, }, ], + + heatmapColorPalette: [ + 'default' as string | null, + { persist: true }, + { + setHeatmapColorPalette: (_, { Palette }) => Palette, + }, + ], }), loaders(({ values }) => ({ diff --git a/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx b/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx index 273796a853a9e..823a4096572ba 100644 --- a/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx +++ b/frontend/src/toolbar/stats/HeatmapToolbarMenu.tsx @@ -1,5 +1,5 @@ -import { IconMagicWand } from '@posthog/icons' -import { LemonLabel, LemonSegmentedButton } from '@posthog/lemon-ui' +import { IconInfo, IconMagicWand } from 
'@posthog/icons' +import { LemonLabel, LemonSegmentedButton, LemonSelect, LemonTag } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { CUSTOM_OPTION_KEY } from 'lib/components/DateFilter/types' import { IconSync } from 'lib/lemon-ui/icons' @@ -11,13 +11,14 @@ import { LemonSwitch } from 'lib/lemon-ui/LemonSwitch' import { Spinner } from 'lib/lemon-ui/Spinner' import { Tooltip } from 'lib/lemon-ui/Tooltip' import { dateFilterToText, dateMapping } from 'lib/utils' +import React, { useState } from 'react' import { ToolbarMenu } from '~/toolbar/bar/ToolbarMenu' import { elementsLogic } from '~/toolbar/elements/elementsLogic' -import { heatmapLogic } from '~/toolbar/elements/heatmapLogic' +import { HEATMAP_COLOR_PALETTE_OPTIONS, heatmapLogic } from '~/toolbar/elements/heatmapLogic' import { currentPageLogic } from '~/toolbar/stats/currentPageLogic' -import { useToolbarFeatureFlag } from '../toolbarPosthogJS' +import { toolbarConfigLogic } from '../toolbarConfigLogic' const ScrollDepthJSWarning = (): JSX.Element | null => { const { scrollDepthPosthogJsError } = useValues(heatmapLogic) @@ -40,6 +41,91 @@ const ScrollDepthJSWarning = (): JSX.Element | null => { ) } +const HeatmapsJSWarning = (): JSX.Element | null => { + const { posthog } = useValues(toolbarConfigLogic) + + if (!posthog || posthog?.heatmaps?.isEnabled) { + return null + } + + return ( +

+ {!posthog.heatmaps ? ( + <>The version of posthog-js you are using does not support collecting heatmap data. + ) : !posthog.heatmaps.isEnabled ? ( + <> + Heatmap collection is disabled in your posthog-js configuration. If you do not see heatmap data, + this is likely why. + + ) : null} +

    + ) +} + +const SectionButton = ({ + children, + checked, + onChange, + loading, +}: { + children: React.ReactNode + checked: boolean + onChange: (checked: boolean) => void + loading?: boolean +}): JSX.Element => { + return ( +
    + onChange(!checked)} + sideIcon={} + > + + {children} + + {loading ? : null} + + +
    + ) +} + +const SectionSetting = ({ + children, + title, + info, +}: { + children: React.ReactNode + title: React.ReactNode + info?: React.ReactNode +}): JSX.Element => { + const [showInfo, setShowInfo] = useState(false) + return ( +
    +
    + + {title} + + {info && ( + } + size="xsmall" + active={showInfo} + onClick={() => setShowInfo(!showInfo)} + noPadding + /> + )} + +
    + + {showInfo ?
    {info}
    : null} + + {children} +
    + ) +} + export const HeatmapToolbarMenu = (): JSX.Element => { const { wildcardHref } = useValues(currentPageLogic) const { setWildcardHref, autoWildcardHref } = useActions(currentPageLogic) @@ -56,6 +142,7 @@ export const HeatmapToolbarMenu = (): JSX.Element => { elementStatsLoading, clickmapsEnabled, heatmapFixedPositionMode, + heatmapColorPalette, } = useValues(heatmapLogic) const { setCommonFilters, @@ -64,6 +151,7 @@ export const HeatmapToolbarMenu = (): JSX.Element => { setMatchLinksByHref, toggleClickmapsEnabled, setHeatmapFixedPositionMode, + setHeatmapColorPalette, } = useActions(heatmapLogic) const { setHighlightElement, setSelectedElement } = useActions(elementsLogic) @@ -74,8 +162,6 @@ export const HeatmapToolbarMenu = (): JSX.Element => { onClick: () => setCommonFilters({ date_from: dateOption.values[0], date_to: dateOption.values[1] }), })) - const showNewHeatmaps = useToolbarFeatureFlag('toolbar-heatmaps') - return ( @@ -107,166 +193,199 @@ export const HeatmapToolbarMenu = (): JSX.Element => {
    - {showNewHeatmaps ? ( -
    - Heatmaps {rawHeatmapLoading ? : null}} - onChange={(e) => - patchHeatmapFilters({ - enabled: e, - }) - } - /> +
    + + patchHeatmapFilters({ + enabled: e, + }) + } + loading={rawHeatmapLoading} + checked={!!heatmapFilters.enabled} + > + Heatmaps NEW{' '} + - {heatmapFilters.enabled && ( - <> -

    - Heatmaps are calculated using additional data sent along with standard events. They - are based off of general pointer interactions and might not be 100% accurate to the - page you are viewing. -

    -
    - Heatmap type -
    - patchHeatmapFilters({ type: e })} - value={heatmapFilters.type ?? undefined} - options={[ - { - value: 'click', - label: 'Clicks', - }, - { - value: 'rageclick', - label: 'Rageclicks', - }, - { - value: 'mousemove', - label: 'Mouse moves', - }, - { - value: 'scrolldepth', - label: 'Scroll depth', - }, - ]} - size="small" - /> -
    + {heatmapFilters.enabled && ( + <> + +

+ Heatmaps are calculated using additional data sent along with standard events. They are + based on general pointer interactions and might not be 100% accurate to the page you + are viewing. +

    - {heatmapFilters.type === 'scrolldepth' && ( - <> -

    - Scroll depth uses additional information from Pageview and Pageleave - events to indicate how far down the page users have scrolled. -

    - - - )} + + Select the kind of heatmap you want to view. Clicks, rageclicks, and mouse moves + options will show different "heat" based on the number of interactions at that + area of the page. Scroll depth will show how far down the page users have + reached. +
    + Scroll depth uses additional information from Pageview and Pageleave events to + indicate how far down the page users have scrolled. + + } + > +
    + patchHeatmapFilters({ type: e })} + value={heatmapFilters.type ?? undefined} + options={[ + { + value: 'click', + label: 'Clicks', + }, + { + value: 'rageclick', + label: 'Rageclicks', + }, + { + value: 'mousemove', + label: 'Mouse moves', + }, + { + value: 'scrolldepth', + label: 'Scroll depth', + }, + ]} + size="small" + /> - Aggregation -
    - patchHeatmapFilters({ aggregation: e })} - value={heatmapFilters.aggregation ?? 'total_count'} - options={[ - { - value: 'total_count', - label: 'Total count', - }, - { - value: 'unique_visitors', - label: 'Unique visitors', - }, - ]} - size="small" - /> -
    + {heatmapFilters.type === 'scrolldepth' && } +
    +
    - Viewport accuracy -
    - patchHeatmapFilters({ viewportAccuracy: value })} - /> - + Heatmaps can be aggregated by total count or unique visitors. Total count will + show the total number of interactions on the page, while unique visitors will + only count each visitor once. + + } + > +
    + +
    + - `} - > - - {`${Math.round((heatmapFilters.viewportAccuracy ?? 1) * 100)}% (${ - viewportRange.min - }px - ${viewportRange.max}px)`} - -
    -
+ + The viewport accuracy setting determines how closely the loaded data + matches your current viewport.
+ For example, if you set this to 100%, only visitors whose viewport width is + identical to yours will be included in the heatmap.
    + At 90% you will see data from viewports that are 10% smaller or larger than + yours. + + } + > +
    + patchHeatmapFilters({ viewportAccuracy: value })} + /> + + {`${Math.round((heatmapFilters.viewportAccuracy ?? 1) * 100)}% (${ + viewportRange.min + }px - ${viewportRange.max}px)`} + +
    +
    - {heatmapFilters.type !== 'scrolldepth' ? ( - <> - Fixed positioning calculation -

    - PostHog JS will attempt to detect fixed elements such as headers or - modals and will therefore show those heatmap areas, ignoring the scroll - value. -
    - You can choose to show these areas as fixed, include them with scrolled - data or hide them altogether. -

    + + + - + {heatmapFilters.type !== 'scrolldepth' && ( + + PostHog JS will attempt to detect fixed elements such as headers or modals + and will therefore show those heatmap areas, ignoring the scroll value. +
    + You can choose to show these areas as fixed, include them with scrolled data + or hide them altogether. - ) : null} -
    - - )} -
    - ) : null} + } + > + + + )} + + )} +
    - {showNewHeatmaps ? ( - Clickmaps (autocapture) {elementStatsLoading ? : null}} - onChange={(e) => toggleClickmapsEnabled(e)} - /> - ) : null} + toggleClickmapsEnabled(e)} + loading={elementStatsLoading} + checked={!!clickmapsEnabled} + > + Clickmaps (autocapture) + - {(clickmapsEnabled || !showNewHeatmaps) && ( + {clickmapsEnabled && ( <> - {showNewHeatmaps ? ( -
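The scroll-depth colouring added in ScrollDepth.tsx earlier comes down to a little arithmetic over the interaction counts. A sketch of that math (Python, illustrative only; the original is the TypeScript `color(count)` helper):

```python
def scroll_depth_color(count: int, max_count: int, palette: str = "default") -> str:
    value = 1 - count / max_count
    if palette == "default":
        # Multicolour: sweep the hue from 0 to 260 degrees as counts drop.
        safe_value = max(0.0, min(1.0, value))
        return f"hsl({round(260 * safe_value)}, 100%, 50%)"
    # Monocolour: fix one channel at 255 and scale opacity with the count.
    rgba = [0, 0, 0, count / max_count]
    channel = {"red": 0, "green": 1, "blue": 2}.get(palette)
    if channel is not None:
        rgba[channel] = 255
    return f"rgba({', '.join(str(v) for v in rgba)})"
```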

    - Clickmaps are built using Autocapture events. They are more accurate than heatmaps - if the event can be mapped to a specific element found on the page you are viewing - but less data is usually captured. -

    - ) : null} +

+ Clickmaps are built using Autocapture events. They are more accurate than heatmaps if + the event can be mapped to a specific element found on the page you are viewing, but less + data is usually captured. +

    } diff --git a/frontend/src/toolbar/utils.ts b/frontend/src/toolbar/utils.ts index b10a2d80b6bcd..53f281f7e1c74 100644 --- a/frontend/src/toolbar/utils.ts +++ b/frontend/src/toolbar/utils.ts @@ -293,14 +293,13 @@ export function actionStepToActionStepFormItem(step: ActionStepType, isNew = fal href_selected: false, url_selected: false, } - } else { - return { - ...step, - selector_selected: hasSelector, - text_selected: false, - url_selected: false, - href_selected: false, - } + } + return { + ...step, + selector_selected: hasSelector, + text_selected: false, + url_selected: false, + href_selected: false, } } diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 3a49374e1f4fe..b5b420ebf1ada 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -2434,6 +2434,7 @@ export interface Survey { end_date: string | null archived: boolean remove_targeting_flag?: boolean + responses_limit: number | null } export enum SurveyUrlMatchType { diff --git a/latest_migrations.manifest b/latest_migrations.manifest index 9aab6a612f77b..6bb4be7adc644 100644 --- a/latest_migrations.manifest +++ b/latest_migrations.manifest @@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name ee: 0016_rolemembership_organization_member otp_static: 0002_throttling otp_totp: 0002_auto_20190420_0723 -posthog: 0405_team_heatmaps_opt_in +posthog: 0406_survey_responses_limit sessions: 0001_initial social_django: 0010_uid_db_index two_factor: 0007_auto_20201201_1019 diff --git a/mypy-baseline.txt b/mypy-baseline.txt index b1e56d812c2f0..0d27df2286796 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -367,7 +367,6 @@ posthog/session_recordings/queries/session_recording_list_from_replay_summary.py posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: note: If the method is meant to be abstract, use @abc.abstractmethod posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: error: Missing return statement [empty-body] posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: note: If the method is meant to be abstract, use @abc.abstractmethod -posthog/migrations/0404_remove_propertydefinition_property_type_is_valid_and_more.py:0: error: Module "django.contrib.postgres.operations" has no attribute "AddConstraintNotValid" [attr-defined] posthog/hogql_queries/test/test_query_runner.py:0: error: Variable "TestQueryRunner" is not valid as a type [valid-type] posthog/hogql_queries/test/test_query_runner.py:0: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases posthog/hogql_queries/test/test_query_runner.py:0: error: Invalid base class "TestQueryRunner" [misc] @@ -503,6 +502,8 @@ posthog/api/organization_member.py:0: error: Metaclass conflict: the metaclass o posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] ee/api/role.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] ee/clickhouse/views/insights.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc] +posthog/warehouse/data_load/validate_schema.py:0: error: Item "None" of "DataWarehouseTable | None" has no attribute "get_columns" [union-attr] +posthog/warehouse/data_load/validate_schema.py:0: error: Item "None" of "DataWarehouseTable | None" has no attribute "columns" [union-attr] 
posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Argument 6 has incompatible type "ExternalDataSchema"; expected "str" [arg-type] posthog/temporal/data_imports/workflow_activities/create_job_model.py:0: error: Unused "type: ignore" comment [unused-ignore] posthog/temporal/data_imports/pipelines/zendesk/helpers.py:0: error: Argument 1 to "ensure_pendulum_datetime" has incompatible type "DateTime | Date | datetime | date | str | float | int | None"; expected "DateTime | Date | datetime | date | str | float | int" [arg-type] @@ -547,6 +548,7 @@ posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "HttpResponse"; expected type "str | bytes" [index] posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined] posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "HttpResponse"; expected type "str | bytes" [index] +posthog/migrations/0404_remove_propertydefinition_property_type_is_valid_and_more.py:0: error: Module "django.contrib.postgres.operations" has no attribute "AddConstraintNotValid" [attr-defined] posthog/management/commands/migrate_team.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "BatchExport") [assignment] posthog/hogql/test/test_query.py:0: error: Argument 1 to "len" has incompatible type "list[Any] | None"; expected "Sized" [arg-type] posthog/hogql/test/test_query.py:0: error: Value of type "list[QueryTiming] | None" is not indexable [index] diff --git a/package.json b/package.json index 7970123848a28..3b478bf21f1e4 100644 --- a/package.json +++ b/package.json @@ -146,7 +146,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.129.0", + "posthog-js": "1.130.0", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/patches/rrweb@2.0.0-alpha.13.patch b/patches/rrweb@2.0.0-alpha.13.patch index c415c192d26c7..7e21e3c9f16d0 100644 --- a/patches/rrweb@2.0.0-alpha.13.patch +++ b/patches/rrweb@2.0.0-alpha.13.patch @@ -488,6 +488,46 @@ index 0d49411b1f6d31103bed898c0e81d1d74ab51234..0b2160ef08aa3ae5310f63c295abc0a5 } } applyMutation(d, isSync) { +diff --git a/es/rrweb/packages/rrweb/src/replay/media/index.js b/es/rrweb/packages/rrweb/src/replay/media/index.js +index 22fee601e786c1d8dfb5c01d2e359c8bcbac7c42..20c3e14adfde860563e8dd902041bd14c860f462 100644 +--- a/es/rrweb/packages/rrweb/src/replay/media/index.js ++++ b/es/rrweb/packages/rrweb/src/replay/media/index.js +@@ -21,7 +21,7 @@ class MediaManager { + this.mediaMap.forEach((mediaState, target) => { + this.syncTargetWithState(target); + if (options.pause) { +- target.pause(); ++ target?.pause(); + } + }); + } +@@ -49,7 +49,7 @@ class MediaManager { + target.currentTime = seekToTime; + } + else { +- target.pause(); ++ target?.pause(); + target.currentTime = mediaState.currentTimeAtLastInteraction; + } + } +@@ -117,7 +117,7 @@ class MediaManager { + void target.play(); + } + else { +- target.pause(); ++ target?.pause(); + } + } + catch (error) { +@@ -141,7 +141,7 @@ class MediaManager { + isPlaying = target.getAttribute('autoplay') !== null; + } + if (isPlaying && playerIsPaused) +- target.pause(); ++ target?.pause(); + let playbackRate = 1; + if (typeof mediaAttributes.rr_mediaPlaybackRate === 'number') { + playbackRate = mediaAttributes.rr_mediaPlaybackRate; diff --git 
a/es/rrweb/packages/rrweb-snapshot/es/rrweb-snapshot.js b/es/rrweb/packages/rrweb-snapshot/es/rrweb-snapshot.js index 38a23aaae8d683fa584329eced277dd8de55d1ff..278e06bc6c8c964581d461405a0f0a4544344fa1 100644 --- a/es/rrweb/packages/rrweb-snapshot/es/rrweb-snapshot.js diff --git a/plugin-server/src/worker/ingestion/person-state.ts b/plugin-server/src/worker/ingestion/person-state.ts index ca250aa3d52da..6c94de6f0ea2a 100644 --- a/plugin-server/src/worker/ingestion/person-state.ts +++ b/plugin-server/src/worker/ingestion/person-state.ts @@ -4,7 +4,6 @@ import { ProducerRecord } from 'kafkajs' import { DateTime } from 'luxon' import { Counter } from 'prom-client' import { KafkaProducerWrapper } from 'utils/db/kafka-producer-wrapper' -import { parse as parseUuid, v5 as uuidv5 } from 'uuid' import { KAFKA_PERSON_OVERRIDE } from '../../config/kafka-topics' import { InternalPerson, Person, PropertyUpdateOperation, TimestampFormat } from '../../types' @@ -15,6 +14,7 @@ import { PeriodicTask } from '../../utils/periodic-task' import { promiseRetry } from '../../utils/retries' import { status } from '../../utils/status' import { castTimestampOrNow } from '../../utils/utils' +import { uuidFromDistinctId } from './person-uuid' import { captureIngestionWarning } from './utils' export const mergeFinalFailuresCounter = new Counter({ @@ -34,15 +34,6 @@ export const mergeTxnSuccessCounter = new Counter({ labelNames: ['call', 'oldPersonIdentified', 'newPersonIdentified', 'poEEmbraceJoin'], }) -// UUIDv5 requires a namespace, which is itself a UUID. This was a randomly generated UUIDv4 -// that must be used to deterministrically generate UUIDv5s for Person rows. -const PERSON_UUIDV5_NAMESPACE = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22') - -function uuidFromDistinctId(teamId: number, distinctId: string): string { - // Deterministcally create a UUIDv5 based on the (team_id, distinct_id) pair. - return uuidv5(`${teamId}:${distinctId}`, PERSON_UUIDV5_NAMESPACE) -} - // used to prevent identify from being used with generic IDs // that we can safely assume stem from a bug or mistake // used to prevent identify from being used with generic IDs @@ -121,8 +112,15 @@ export class PersonState { // Ensure person properties don't propagate elsewhere, such as onto the event itself. person.properties = {} - // See documentation on the field. - person.force_upgrade = true + if (this.timestamp > person.created_at.plus({ minutes: 1 })) { + // See documentation on the field. + // + // Note that we account for timestamp vs person creation time (with a little + // padding for good measure) to account for ingestion lag. It's possible for + // events to be processed after person creation even if they were sent prior + // to person creation, and the user did nothing wrong in that case. + person.force_upgrade = true + } return person } diff --git a/plugin-server/src/worker/ingestion/person-uuid.ts b/plugin-server/src/worker/ingestion/person-uuid.ts new file mode 100644 index 0000000000000..78ff92eaca8ef --- /dev/null +++ b/plugin-server/src/worker/ingestion/person-uuid.ts @@ -0,0 +1,10 @@ +import { parse as parseUuid, v5 as uuidv5 } from 'uuid' + +// UUIDv5 requires a namespace, which is itself a UUID. This was a randomly generated UUIDv4 +// that must be used to deterministically generate UUIDv5s for Person rows.
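The person-state.ts hunk above stops setting force_upgrade unconditionally: the flag is now applied only when the event is comfortably newer than the person row, so events that merely raced person creation through ingestion lag are spared. The guard as a sketch (illustrative Python; the original is TypeScript using luxon):

```python
from datetime import datetime, timedelta

def should_force_upgrade(event_ts: datetime, person_created_at: datetime) -> bool:
    # A one-minute pad absorbs ingestion lag: an event sent before the person
    # row existed can legitimately be processed after it was created.
    return event_ts > person_created_at + timedelta(minutes=1)
```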
+const PERSON_UUIDV5_NAMESPACE = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22') + +export function uuidFromDistinctId(teamId: number, distinctId: string): string { + // Deterministically create a UUIDv5 based on the (team_id, distinct_id) pair. + return uuidv5(`${teamId}:${distinctId}`, PERSON_UUIDV5_NAMESPACE) +} diff --git a/plugin-server/tests/worker/ingestion/person-state.test.ts b/plugin-server/tests/worker/ingestion/person-state.test.ts index c465fb675ead0..d3a04018e0d96 100644 --- a/plugin-server/tests/worker/ingestion/person-state.test.ts +++ b/plugin-server/tests/worker/ingestion/person-state.test.ts @@ -1,6 +1,5 @@ import { PluginEvent } from '@posthog/plugin-scaffold' import { DateTime } from 'luxon' -import { parse as parseUuid, v5 as uuidv5 } from 'uuid' import { waitForExpect } from '../../../functional_tests/expectations' import { Database, Hub, InternalPerson } from '../../../src/types' @@ -15,6 +14,7 @@ import { FlatPersonOverrideWriter, PersonState, } from '../../../src/worker/ingestion/person-state' +import { uuidFromDistinctId } from '../../../src/worker/ingestion/person-uuid' import { delayUntilEventIngested } from '../../helpers/clickhouse' import { WaitEvent } from '../../helpers/promises' import { createOrganization, createTeam, fetchPostgresPersons, insertRow } from '../../helpers/sql' @@ -34,17 +34,6 @@ interface PersonOverridesMode { ): Promise> } -function uuidFromDistinctId(teamId: number, distinctId: string): string { - // The UUID generation code here is deliberately copied from `person-state` rather than imported, - // so that someone can't accidentally change how `person-state` UUID generation works and still - // have the tests pass. - // - // It is very important that Person UUIDs are deterministically generated and that this format - // doesn't change without a lot of thought and planning about side effects! - const namespace = parseUuid('932979b4-65c3-4424-8467-0b66ec27bc22') - return uuidv5(`${teamId}:${distinctId}`, namespace) -} - const PersonOverridesModes: Record = { disabled: undefined, 'deferred, without mappings (flat)': { @@ -122,7 +111,8 @@ describe('PersonState.update()', () => { event: Partial, customHub?: Hub, processPerson = true, - lazyPersonCreation = false + lazyPersonCreation = false, + timestampParam = timestamp ) { const fullEvent = { team_id: teamId, @@ -134,7 +124,7 @@ describe('PersonState.update()', () => { fullEvent as any, teamId, event.distinct_id!, - timestamp, + timestampParam, processPerson, customHub ?
customHub.db : hub.db, lazyPersonCreation, @@ -272,6 +262,7 @@ describe('PersonState.update()', () => { // `force_upgrade=true` and real Person `uuid` and `created_at` processPerson = false const event_uuid = new UUIDT().toString() + const timestampParam = timestamp.plus({ minutes: 5 }) // Event needs to happen after Person creation const fakePerson = await personState( { event: '$pageview', @@ -281,7 +272,8 @@ describe('PersonState.update()', () => { }, hubParam, processPerson, - lazyPersonCreation + lazyPersonCreation, + timestampParam ).update() await hub.db.kafkaProducer.flush() diff --git a/plugin-server/tests/worker/ingestion/person-uuid.test.ts b/plugin-server/tests/worker/ingestion/person-uuid.test.ts new file mode 100644 index 0000000000000..c27106dc4e1f2 --- /dev/null +++ b/plugin-server/tests/worker/ingestion/person-uuid.test.ts @@ -0,0 +1,11 @@ +import { uuidFromDistinctId } from '../../../src/worker/ingestion/person-uuid' + +jest.setTimeout(5000) // 5 sec timeout + +describe('uuidFromDistinctId', () => { + it('generates deterministic UUIDs', () => { + expect(uuidFromDistinctId(1, 'test')).toMatchInlineSnapshot(`"246f7a43-5507-564f-b687-793ee3c2dd79"`) + expect(uuidFromDistinctId(2, 'test')).toMatchInlineSnapshot(`"00ce873a-549c-548e-bbec-cc804a385dd8"`) + expect(uuidFromDistinctId(1, 'test2')).toMatchInlineSnapshot(`"45c17302-ee44-5596-916a-0eba21f4b638"`) + }) +}) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b82ee3c9a0063..bb4715a6d5e60 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,4 +1,4 @@ -lockfileVersion: '6.1' +lockfileVersion: '6.0' settings: autoInstallPeers: true @@ -12,7 +12,7 @@ patchedDependencies: hash: gydrxrztd4ruyhouu6tu7zh43e path: patches/heatmap.js@2.0.5.patch rrweb@2.0.0-alpha.13: - hash: awo7hmdoglvq4pl5b43lreupza + hash: g7jnlxuwyxkja3n62yb6xpim6q path: patches/rrweb@2.0.0-alpha.13.patch dependencies: @@ -260,8 +260,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.129.0 - version: 1.129.0 + specifier: 1.130.0 + version: 1.130.0 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -333,7 +333,7 @@ dependencies: version: 1.5.1 rrweb: specifier: 2.0.0-alpha.13 - version: 2.0.0-alpha.13(patch_hash=awo7hmdoglvq4pl5b43lreupza) + version: 2.0.0-alpha.13(patch_hash=g7jnlxuwyxkja3n62yb6xpim6q) sass: specifier: ^1.26.2 version: 1.56.0 @@ -356,7 +356,7 @@ dependencies: optionalDependencies: fsevents: specifier: ^2.3.2 - version: 2.3.2 + version: 2.3.3 devDependencies: '@babel/core': @@ -6708,8 +6708,8 @@ packages: type-fest: 2.19.0 dev: true - /@storybook/csf@0.1.4: - resolution: {integrity: sha512-B9UI/lsQMjF+oEfZCI6YXNoeuBcGZoOP5x8yKbe2tIEmsMjSztFKkpPzi5nLCnBk/MBtl6QJeI3ksJnbsWPkOw==} + /@storybook/csf@0.1.5: + resolution: {integrity: sha512-pW7Dtk/bE2JGrAe/KuBY4Io02NBe/2CLP2DkgVgWlSwvEVdm/rbQyiwy8RaL0lQlJCv9CsGBY+n9HQG8d4bZjQ==} dependencies: type-fest: 2.19.0 dev: true @@ -6743,7 +6743,7 @@ packages: '@storybook/channels': 7.6.18 '@storybook/client-logger': 7.6.18 '@storybook/core-events': 7.6.18 - '@storybook/csf': 0.1.4 + '@storybook/csf': 0.1.5 '@storybook/global': 5.0.0 '@storybook/router': 7.6.18 '@storybook/theming': 7.6.18(react-dom@18.2.0)(react@18.2.0) @@ -6858,7 +6858,7 @@ packages: '@storybook/channels': 7.6.18 '@storybook/client-logger': 7.6.18 '@storybook/core-events': 7.6.18 - '@storybook/csf': 0.1.4 + '@storybook/csf': 0.1.5 '@storybook/global': 5.0.0 '@storybook/types': 7.6.18 '@types/qs': 6.9.15 @@ -12914,6 +12914,7 @@ packages: engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: 
[darwin] requiresBuild: true + dev: true optional: true /fsevents@2.3.3: @@ -17566,8 +17567,8 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.129.0: - resolution: {integrity: sha512-Vddg6Shbum/SxFFUYrEUMu6PmFk7tNHIxTXQwtqrYtar6YKUXDUslW8kf8vwsK09AOglX7ISynisSJ8u/MzicA==} + /posthog-js@1.130.0: + resolution: {integrity: sha512-bCrw5HunoXLybO20Q1bYEg68i5WCZWKxhStYJK4OR/9jrm7GwZ53GDrN78p8apFi0EH5ay4YZGbLFSkg+SsZWQ==} dependencies: fflate: 0.4.8 preact: 10.20.2 @@ -19276,7 +19277,7 @@ packages: resolution: {integrity: sha512-slbhNBCYjxLGCeH95a67ECCy5a22nloXp1F5wF7DCzUNw80FN7tF9Lef1sRGLNo32g3mNqTc2sWLATlKejMxYw==} dev: false - /rrweb@2.0.0-alpha.13(patch_hash=awo7hmdoglvq4pl5b43lreupza): + /rrweb@2.0.0-alpha.13(patch_hash=g7jnlxuwyxkja3n62yb6xpim6q): resolution: {integrity: sha512-a8GXOCnzWHNaVZPa7hsrLZtNZ3CGjiL+YrkpLo0TfmxGLhjNZbWY2r7pE06p+FcjFNlgUVTmFrSJbK3kO7yxvw==} dependencies: '@rrweb/types': 2.0.0-alpha.13 diff --git a/posthog/api/person.py b/posthog/api/person.py index e242c7ffffe14..bedec209168d9 100644 --- a/posthog/api/person.py +++ b/posthog/api/person.py @@ -1,5 +1,6 @@ import json import posthoganalytics +from posthog.models.person.missing_person import MissingPerson from posthog.renderers import SafeJSONRenderer from datetime import datetime from typing import ( # noqa: UP035 @@ -7,6 +8,7 @@ List, Optional, TypeVar, + Union, cast, ) from collections.abc import Callable @@ -173,10 +175,20 @@ def get_name(self, person: Person) -> str: team = self.context["get_team"]() return get_person_name(team, person) - def to_representation(self, instance: Person) -> dict[str, Any]: - representation = super().to_representation(instance) - representation["distinct_ids"] = sorted(representation["distinct_ids"], key=is_anonymous_id) - return representation + def to_representation(self, instance: Union[Person, MissingPerson]) -> dict[str, Any]: + if isinstance(instance, Person): + representation = super().to_representation(instance) + representation["distinct_ids"] = sorted(representation["distinct_ids"], key=is_anonymous_id) + return representation + elif isinstance(instance, MissingPerson): + return { + "id": None, + "name": None, + "distinct_ids": [instance.distinct_id], + "properties": instance.properties, + "created_at": None, + "uuid": instance.uuid, + } # person distinct ids can grow to be a very large list diff --git a/posthog/api/survey.py b/posthog/api/survey.py index 3ffce982b8981..6f4ee68c71fb1 100644 --- a/posthog/api/survey.py +++ b/posthog/api/survey.py @@ -57,6 +57,7 @@ class Meta: "start_date", "end_date", "archived", + "responses_limit", ] read_only_fields = ["id", "created_at", "created_by"] @@ -88,6 +89,7 @@ class Meta: "start_date", "end_date", "archived", + "responses_limit", ] read_only_fields = ["id", "linked_flag", "targeting_flag", "created_at"] diff --git a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr index 345b64664cdd9..3fce9cc24961c 100644 --- a/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_organization_feature_flag.ambr @@ -1536,7 +1536,8 @@ "posthog_survey"."start_date", "posthog_survey"."end_date", "posthog_survey"."updated_at", - "posthog_survey"."archived" + "posthog_survey"."archived", + "posthog_survey"."responses_limit" FROM "posthog_survey" WHERE "posthog_survey"."linked_flag_id" = 2 ''' diff --git 
a/posthog/api/test/__snapshots__/test_survey.ambr b/posthog/api/test/__snapshots__/test_survey.ambr index 22dc514280297..37e6e31e52976 100644 --- a/posthog/api/test/__snapshots__/test_survey.ambr +++ b/posthog/api/test/__snapshots__/test_survey.ambr @@ -181,6 +181,7 @@ "posthog_survey"."end_date", "posthog_survey"."updated_at", "posthog_survey"."archived", + "posthog_survey"."responses_limit", "posthog_featureflag"."id", "posthog_featureflag"."key", "posthog_featureflag"."name", diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py index 2184261eccd59..07404b07db16f 100644 --- a/posthog/api/test/test_insight.py +++ b/posthog/api/test/test_insight.py @@ -1160,9 +1160,9 @@ def test_insight_refreshing_query(self, spy_execute_hogql_query) -> None: series=[ EventsNode( event="$pageview", - properties=[EventPropertyFilter(key="another", value="never_return_this", operator="is_not")], ) - ] + ], + properties=[EventPropertyFilter(key="another", value="never_return_this", operator="is_not")], ).model_dump() with freeze_time("2012-01-15T04:01:34.000Z"): @@ -1246,12 +1246,13 @@ def test_insight_refreshing_query(self, spy_execute_hogql_query) -> None: # Test property filter - dashboard = Dashboard.objects.get(pk=dashboard_id) - dashboard.filters = { - "properties": [{"key": "prop", "value": "val"}], - "date_from": "-14d", - } - dashboard.save() + Dashboard.objects.filter(id=dashboard_id).update( + filters={ + "properties": [{"key": "prop", "value": "val"}], + "date_from": "-14d", + }, + ) with freeze_time("2012-01-16T05:01:34.000Z"): response = self.client.get( f"/api/projects/{self.team.id}/insights/{insight_id}/?refresh=true&from_dashboard={dashboard_id}" ) diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index 6e6fd5490b61b..434ad2ca20058 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -351,7 +351,9 @@ def test_safe_clickhouse_error_passed_through(self): ), ) - @patch("sqlparse.format", return_value="SELECT 1&&&") # Erroneously constructed SQL + @patch( + "posthog.clickhouse.client.execute._annotate_tagged_query", return_value=("SELECT 1&&&", {}) + ) # Erroneously constructed SQL def test_unsafe_clickhouse_error_is_swallowed(self, sqlparse_format_mock): query = {"kind": "EventsQuery", "select": ["timestamp"]} diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index 9ef7622f344f9..d37dc40e33889 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -853,6 +853,7 @@ def test_can_list_surveys(self): "archived": False, "start_date": None, "end_date": None, + "responses_limit": None, } ], } diff --git a/posthog/api/test/test_user.py b/posthog/api/test/test_user.py index 4b682b4095e7f..f88fd860c7e53 100644 --- a/posthog/api/test/test_user.py +++ b/posthog/api/test/test_user.py @@ -887,6 +887,25 @@ def assert_forbidden_url(url): assert_allowed_url("https://subdomain.otherexample.com") assert_allowed_url("https://sub.subdomain.otherexample.com") + def test_user_cannot_update_protected_fields(self): + self.user.is_staff = False + self.user.save() + fields = { + "date_joined": "2021-01-01T00:00:00Z", + "uuid": str(uuid.uuid4()), + "distinct_id": "distinct_id", + "pending_email": "changed@example.com", + "is_email_verified": True, + } + + initial_user = self.client.get("/api/users/@me/").json() + + for field, value in fields.items(): + response = self.client.patch("/api/users/@me/", {field: value}) + assert ( + response.json()[field] == initial_user[field] + ),
f"Updating field '{field}' to '{value}' worked when it shouldn't! Was {initial_user[field]} and is now {response.json()[field]}" + class TestUserSlackWebhook(APIBaseTest): ENDPOINT: str = "/api/user/test_slack_webhook/" diff --git a/posthog/api/user.py b/posthog/api/user.py index 28b4a42b8620a..2c69250278ebb 100644 --- a/posthog/api/user.py +++ b/posthog/api/user.py @@ -86,7 +86,6 @@ class Meta: "pending_email", "email_opt_in", "is_email_verified", - "pending_email", "notification_settings", "anonymize_data", "toolbar_mode", @@ -107,8 +106,22 @@ class Meta: "scene_personalisation", "theme_mode", ] + + read_only_fields = [ + "date_joined", + "uuid", + "distinct_id", + "pending_email", + "is_email_verified", + "has_password", + "is_impersonated", + "team", + "organization", + "organizations", + "has_social_auth", + ] + extra_kwargs = { - "date_joined": {"read_only": True}, "password": {"write_only": True}, } diff --git a/posthog/asgi.py b/posthog/asgi.py index 22912a0c7b76e..38b9b00ec5c51 100644 --- a/posthog/asgi.py +++ b/posthog/asgi.py @@ -1,8 +1,22 @@ import os from django.core.asgi import get_asgi_application +from django.http.response import HttpResponse os.environ.setdefault("DJANGO_SETTINGS_MODULE", "posthog.settings") os.environ.setdefault("SERVER_GATEWAY_INTERFACE", "ASGI") -application = get_asgi_application() + +# Django doesn't support lifetime requests and raises an exception +# when it receives them. This creates a lot of noise in sentry so +# intercept these requests and return a 501 error without raising an exception +def lifetime_wrapper(func): + async def inner(scope, receive, send): + if scope["type"] != "http": + return HttpResponse(status=501) + return await func(scope, receive, send) + + return inner + + +application = lifetime_wrapper(get_asgi_application()) diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py index 17af5683a6f19..9af75370f7723 100644 --- a/posthog/clickhouse/client/execute.py +++ b/posthog/clickhouse/client/execute.py @@ -202,7 +202,11 @@ def _prepare_query( rendered_sql = substitute_params(query, args) prepared_args = None - formatted_sql = sqlparse.format(rendered_sql, strip_comments=True) + if "--" in rendered_sql or "/*" in rendered_sql: + # This can take a very long time with e.g. 
large funnel queries + formatted_sql = sqlparse.format(rendered_sql, strip_comments=True) + else: + formatted_sql = rendered_sql annotated_sql, tags = _annotate_tagged_query(formatted_sql, workload) if app_settings.SHELL_PLUS_PRINT_SQL: diff --git a/posthog/hogql/transforms/lazy_tables.py b/posthog/hogql/transforms/lazy_tables.py index c010fc13ce408..e9cb9db14988f 100644 --- a/posthog/hogql/transforms/lazy_tables.py +++ b/posthog/hogql/transforms/lazy_tables.py @@ -55,7 +55,7 @@ def visit_field(self, node: ast.Field): class LazyFinder(TraversingVisitor): found_lazy: bool = False - max_type_visits: int = 3 + max_type_visits: int = 1 def __init__(self) -> None: self.visited_field_type_counts: dict[int, int] = {} @@ -67,7 +67,7 @@ def visit_lazy_table_type(self, node: ast.TableType): self.found_lazy = True def visit_field_type(self, node: ast.FieldType): - node_ref = id(node) + node_ref = id(node.table_type) visited_count = self.visited_field_type_counts.get(node_ref, 0) if visited_count < self.max_type_visits: self.visited_field_type_counts[node_ref] = visited_count + 1 diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 0e49ca849637b..c9209794aaa7d 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -25,6 +25,7 @@ FunnelCorrelationQuery, FunnelsActorsQuery, PropertyGroupFilter, + PropertyGroupFilterValue, TrendsQuery, FunnelsQuery, RetentionQuery, @@ -429,7 +430,13 @@ def apply_dashboard_filters(self, dashboard_filter: DashboardFilter) -> Q: if dashboard_filter.properties: if self.query.properties: query_update["properties"] = PropertyGroupFilter( - type=FilterLogicalOperator.AND, values=[self.query.properties, dashboard_filter.properties] + type=FilterLogicalOperator.AND, + values=[ + PropertyGroupFilterValue(type=FilterLogicalOperator.AND, values=self.query.properties), + PropertyGroupFilterValue( + type=FilterLogicalOperator.AND, values=dashboard_filter.properties + ), + ], ) else: query_update["properties"] = dashboard_filter.properties diff --git a/posthog/middleware.py b/posthog/middleware.py index 87ee128a7268e..9b2bcb7dfd582 100644 --- a/posthog/middleware.py +++ b/posthog/middleware.py @@ -161,6 +161,13 @@ def __call__(self, request: HttpRequest): user = cast(User, request.user) if len(path_parts) >= 2 and path_parts[0] == "project" and path_parts[1].startswith("phc_"): + + def do_redirect(): + new_path = "/".join(path_parts) + search_params = request.GET.urlencode() + + return redirect(f"/{new_path}?{search_params}" if search_params else f"/{new_path}") + try: new_team = Team.objects.get(api_token=path_parts[1]) @@ -168,12 +175,12 @@ def __call__(self, request: HttpRequest): raise Team.DoesNotExist path_parts[1] = str(new_team.pk) - return redirect("/" + "/".join(path_parts)) + return do_redirect() except Team.DoesNotExist: if user.team: path_parts[1] = str(user.team.pk) - return redirect("/" + "/".join(path_parts)) + return do_redirect() if len(path_parts) >= 2 and path_parts[0] == "project" and path_parts[1].isdigit(): project_id_in_url = int(path_parts[1]) diff --git a/posthog/migrations/0406_survey_responses_limit.py b/posthog/migrations/0406_survey_responses_limit.py new file mode 100644 index 0000000000000..9ca28d5e1bd5c --- /dev/null +++ b/posthog/migrations/0406_survey_responses_limit.py @@ -0,0 +1,17 @@ +# Generated by Django 4.1.13 on 2024-04-14 13:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", 
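The execute.py change above puts the expensive sqlparse.format call behind a cheap substring test, so comment stripping only runs when a comment marker can actually be present. The fast path in isolation (Python, assuming sqlparse is installed):

```python
import sqlparse

def strip_comments_fast(rendered_sql: str) -> str:
    # sqlparse.format can take a very long time on large queries, so only
    # invoke it when the SQL can actually contain a comment.
    if "--" in rendered_sql or "/*" in rendered_sql:
        return sqlparse.format(rendered_sql, strip_comments=True)
    return rendered_sql
```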
"0405_team_heatmaps_opt_in"), + ] + + operations = [ + migrations.AddField( + model_name="survey", + name="responses_limit", + field=models.PositiveIntegerField(null=True), + ), + ] diff --git a/posthog/models/feedback/survey.py b/posthog/models/feedback/survey.py index d1914d5d2b00b..33507ba50bbcd 100644 --- a/posthog/models/feedback/survey.py +++ b/posthog/models/feedback/survey.py @@ -56,6 +56,8 @@ class Meta: end_date: models.DateTimeField = models.DateTimeField(null=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) archived: models.BooleanField = models.BooleanField(default=False) + # It's not a strict limit as it's enforced in a periodic task + responses_limit = models.PositiveIntegerField(null=True) @mutable_receiver([post_save, post_delete], sender=Survey) diff --git a/posthog/models/person/missing_person.py b/posthog/models/person/missing_person.py new file mode 100644 index 0000000000000..428e8f4c9d3b2 --- /dev/null +++ b/posthog/models/person/missing_person.py @@ -0,0 +1,27 @@ +from uuid import uuid5, UUID + + +PERSON_UUIDV5_NAMESPACE = UUID("932979b4-65c3-4424-8467-0b66ec27bc22") + + +def uuidFromDistinctId(team_id: int, distinct_id: str) -> UUID: + """ + Deterministically create a UUIDv5 based on the (team_id, distinct_id) pair. + """ + return uuid5(PERSON_UUIDV5_NAMESPACE, f"{team_id}:{distinct_id}") + + +class MissingPerson: + uuid: UUID + properties: dict = {} + + def __init__(self, team_id: int, distinct_id: str): + """ + This is loosely based on the plugin-server `person-state.ts` file and is meant to represent a person that is "missing" + """ + self.team_id = team_id + self.distinct_id = distinct_id + self.uuid = uuidFromDistinctId(team_id, distinct_id) + + def __str__(self): + return f"MissingPerson({self.team_id}, {self.distinct_id})" diff --git a/posthog/models/test/test_missing_person_model.py b/posthog/models/test/test_missing_person_model.py new file mode 100644 index 0000000000000..2692b76b15652 --- /dev/null +++ b/posthog/models/test/test_missing_person_model.py @@ -0,0 +1,10 @@ +from uuid import UUID +from posthog.models.person.missing_person import MissingPerson +from posthog.test.base import BaseTest + + +class TestMissingPersonModel(BaseTest): + def test_generates_deterministic_uuid(self): + assert MissingPerson(1, "test").uuid == UUID("246f7a43-5507-564f-b687-793ee3c2dd79") + assert MissingPerson(2, "test").uuid == UUID("00ce873a-549c-548e-bbec-cc804a385dd8") + assert MissingPerson(1, "test2").uuid == UUID("45c17302-ee44-5596-916a-0eba21f4b638") diff --git a/posthog/session_recordings/models/session_recording.py b/posthog/session_recordings/models/session_recording.py index d5ac114f8a216..359df2faf94e7 100644 --- a/posthog/session_recordings/models/session_recording.py +++ b/posthog/session_recordings/models/session_recording.py @@ -1,8 +1,9 @@ -from typing import Any, Literal, Optional +from typing import Any, Literal, Optional, Union from django.conf import settings from django.db import models +from posthog.models.person.missing_person import MissingPerson from posthog.models.person.person import Person from posthog.models.signals import mutable_receiver from posthog.models.team.team import Team @@ -57,7 +58,7 @@ class Meta: # DYNAMIC FIELDS viewed: Optional[bool] = False - person: Optional[Person] = None + _person: Optional[Person] = None matching_events: Optional[RecordingMatchingEvents] = None # Metadata can be loaded from Clickhouse or S3 @@ -111,9 +112,20 @@ def storage(self): def snapshot_source(self) -> Optional[str]: 
return self._metadata.get("snapshot_source", "web") if self._metadata else "web" - def load_person(self) -> Optional[Person]: - if self.person: - return self.person + @property + def person(self) -> Union[Person, MissingPerson]: + if self._person: + return self._person + + return MissingPerson(team_id=self.team_id, distinct_id=self.distinct_id) + + @person.setter + def person(self, value: Person): + self._person = value + + def load_person(self): + if self._person: + return try: self.person = Person.objects.get( @@ -121,9 +133,8 @@ def load_person(self) -> Optional[Person]: persondistinctid__team_id=self.team, team=self.team, ) - return self.person except Person.DoesNotExist: - return None + pass def check_viewed_for_user(self, user: Any, save_viewed=False) -> None: if not save_viewed: diff --git a/posthog/session_recordings/queries/session_replay_events.py b/posthog/session_recordings/queries/session_replay_events.py index 226d27154fd85..1607aad167176 100644 --- a/posthog/session_recordings/queries/session_replay_events.py +++ b/posthog/session_recordings/queries/session_replay_events.py @@ -1,7 +1,9 @@ from datetime import datetime, timedelta from typing import Optional +import pytz from django.conf import settings +from django.core.cache import cache from posthog.clickhouse.client import sync_execute from posthog.cloud_utils import is_cloud @@ -15,26 +17,47 @@ ) +def seconds_until_midnight(): + now = datetime.now(pytz.timezone("UTC")) + midnight = (now + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0) + difference = midnight - now + return difference.seconds + + class SessionReplayEvents: def exists(self, session_id: str, team: Team) -> bool: - # TODO we could cache this result when its result is True. + cache_key = f"summarize_recording_existence_team_{team.pk}_id_{session_id}" + cached_response = cache.get(cache_key) + if isinstance(cached_response, bool): + return cached_response + # Once we know that session exists we don't need to check again (until the end of the day since TTL might apply) + existence = self._check_exists_within_days(ttl_days(team), session_id, team) or self._check_exists_within_days( + 370, session_id, team + ) + + if existence: + # let's be cautious and not cache non-existence + # in case we manage to check existence just before the first event hits ClickHouse + # that should be impossible but cache invalidation is hard etc etc + cache.set(cache_key, existence, timeout=seconds_until_midnight()) + return existence + + @staticmethod + def _check_exists_within_days(days: int, session_id: str, team: Team) -> bool: result = sync_execute( """ - SELECT count(1) + SELECT count() FROM session_replay_events - WHERE team_id = %(team_id)s + PREWHERE team_id = %(team_id)s AND session_id = %(session_id)s - -- we should check for the `ttl_days(team)` TTL here, - -- but for a shared/pinned recording - -- the TTL effectively becomes 1 year - -- and we don't know which we're dealing with - AND min_first_timestamp >= now() - INTERVAL 370 DAY + AND min_first_timestamp >= now() - INTERVAL %(days)s DAY + AND min_first_timestamp <= now() """, { "team_id": team.pk, "session_id": session_id, - "recording_ttl_days": ttl_days(team), + "days": days, }, ) return result[0][0] > 0 @@ -144,7 +167,6 @@ def get_events( def ttl_days(team: Team) -> int: - ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7 if is_cloud(): # NOTE: We use Playlists as a proxy to see if they are subbed to Recordings is_paid = 
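The existence check above caches positive answers only, and only until UTC midnight, so a cached "exists" entry can never outlive the day its TTL window was computed in. The policy condensed (Python sketch; `check` stands in for the two ClickHouse lookups):

```python
from datetime import datetime, timedelta, timezone
from django.core.cache import cache

def seconds_until_utc_midnight() -> int:
    now = datetime.now(timezone.utc)
    midnight = (now + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
    return (midnight - now).seconds

def recording_exists_cached(team_id: int, session_id: str, check) -> bool:
    key = f"summarize_recording_existence_team_{team_id}_id_{session_id}"
    hit = cache.get(key)
    if isinstance(hit, bool):
        return hit
    exists = check()
    if exists:
        # Non-existence is deliberately not cached: the session's first event
        # could land in ClickHouse a moment after we looked.
        cache.set(key, exists, timeout=seconds_until_utc_midnight())
    return exists
```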
team.organization.is_feature_available(AvailableFeature.RECORDINGS_PLAYLISTS) @@ -155,5 +177,6 @@ def ttl_days(team: Team) -> int: if days_since_blob_ingestion < ttl_days: ttl_days = days_since_blob_ingestion - + else: + ttl_days = (get_instance_setting("RECORDINGS_TTL_WEEKS") or 3) * 7 return ttl_days diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index d9a9fc303d1bb..6d4a638b4d051 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -1,7 +1,7 @@ import os import time from datetime import datetime, timedelta, timezone - +from prometheus_client import Histogram import json from typing import Any, cast @@ -50,9 +50,7 @@ from ee.session_recordings.session_summary.summarize_session import summarize_recording from ee.session_recordings.ai.similar_recordings import similar_recordings from ee.session_recordings.ai.error_clustering import error_clustering -from posthog.session_recordings.snapshots.convert_legacy_snapshots import ( - convert_original_version_lts_recording, -) +from posthog.session_recordings.snapshots.convert_legacy_snapshots import convert_original_version_lts_recording from posthog.storage import object_storage from prometheus_client import Counter @@ -63,6 +61,21 @@ labelnames=["source"], ) +GENERATE_PRE_SIGNED_URL_HISTOGRAM = Histogram( + "session_snapshots_generate_pre_signed_url_histogram", + "Time taken to generate a pre-signed URL for a session snapshot", +) + +GET_REALTIME_SNAPSHOTS_FROM_REDIS = Histogram( + "session_snapshots_get_realtime_snapshots_from_redis_histogram", + "Time taken to get realtime snapshots from Redis", +) + +STREAM_RESPONSE_TO_CLIENT_HISTOGRAM = Histogram( + "session_snapshots_stream_response_to_client_histogram", + "Time taken to stream a session snapshot to the client", +) + class SurrogatePairSafeJSONEncoder(JSONEncoder): def encode(self, o): @@ -407,7 +420,8 @@ def snapshots(self, request: request.Request, **kwargs): response_data["sources"] = sources elif source == "realtime": - snapshots = get_realtime_snapshots(team_id=self.team.pk, session_id=str(recording.session_id)) or [] + with GET_REALTIME_SNAPSHOTS_FROM_REDIS.time(): + snapshots = get_realtime_snapshots(team_id=self.team.pk, session_id=str(recording.session_id)) or [] event_properties["source"] = "realtime" event_properties["snapshots_length"] = len(snapshots) @@ -420,36 +434,7 @@ def snapshots(self, request: request.Request, **kwargs): response_data["snapshots"] = snapshots elif source == "blob": - blob_key = request.GET.get("blob_key", "") - self._validate_blob_key(blob_key) - - # very short-lived pre-signed URL - if recording.object_storage_path: - if recording.storage_version == "2023-08-01": - file_key = f"{recording.object_storage_path}/{blob_key}" - else: - # this is a legacy recording, we need to load the file from the old path - file_key = convert_original_version_lts_recording(recording) - else: - blob_prefix = settings.OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER - file_key = f"{blob_prefix}/team_id/{self.team.pk}/session_id/{recording.session_id}/data/{blob_key}" - url = object_storage.get_presigned_url(file_key, expiration=60) - if not url: - raise exceptions.NotFound("Snapshot file not found") - - event_properties["source"] = "blob" - event_properties["blob_key"] = blob_key - posthoganalytics.capture( - self._distinct_id_from_request(request), - "session recording snapshots v2 loaded", - event_properties, - ) - - with 
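The three Histogram metrics added to session_recording_api.py are used as context managers: Histogram.time() measures the wall-clock duration of the wrapped block and records it on exit. A minimal sketch with a hypothetical metric name:

```python
from prometheus_client import Histogram

FETCH_SECONDS = Histogram(
    "example_snapshot_fetch_seconds",  # hypothetical metric, for illustration
    "Time taken to fetch a snapshot",
)

def fetch_snapshot(load):
    # .time() also works as a decorator; here it times just the load call.
    with FETCH_SECONDS.time():
        return load()
```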
requests.get(url=url, stream=True) as r: - r.raise_for_status() - response = HttpResponse(content=r.raw, content_type="application/json") - response["Content-Disposition"] = "inline" - return response + return self._stream_blob_to_client(recording, request, event_properties) else: raise exceptions.ValidationError("Invalid source must be one of [realtime, blob]") @@ -601,6 +586,42 @@ def error_clusters(self, request: request.Request, **kwargs): r = Response(clusters, headers={"Cache-Control": "max-age=15"}) return r + def _stream_blob_to_client( + self, recording: SessionRecording, request: request.Request, event_properties: dict + ) -> HttpResponse: + blob_key = request.GET.get("blob_key", "") + self._validate_blob_key(blob_key) + + # very short-lived pre-signed URL + with GENERATE_PRE_SIGNED_URL_HISTOGRAM.time(): + if recording.object_storage_path: + if recording.storage_version == "2023-08-01": + file_key = f"{recording.object_storage_path}/{blob_key}" + else: + # this is a legacy recording, we need to load the file from the old path + file_key = convert_original_version_lts_recording(recording) + else: + blob_prefix = settings.OBJECT_STORAGE_SESSION_RECORDING_BLOB_INGESTION_FOLDER + file_key = f"{blob_prefix}/team_id/{self.team.pk}/session_id/{recording.session_id}/data/{blob_key}" + url = object_storage.get_presigned_url(file_key, expiration=60) + if not url: + raise exceptions.NotFound("Snapshot file not found") + + event_properties["source"] = "blob" + event_properties["blob_key"] = blob_key + posthoganalytics.capture( + self._distinct_id_from_request(request), + "session recording snapshots v2 loaded", + event_properties, + ) + + with STREAM_RESPONSE_TO_CLIENT_HISTOGRAM.time(): + with requests.get(url=url, stream=True) as r: + r.raise_for_status() + response = HttpResponse(content=r.raw, content_type="application/json") + response["Content-Disposition"] = "inline" + return response + def list_recordings( filter: SessionRecordingsFilter, request: request.Request, context: dict[str, Any] @@ -683,7 +704,9 @@ def list_recordings( for recording in recordings: recording.viewed = recording.session_id in viewed_session_recordings - recording.person = distinct_id_to_person.get(recording.distinct_id) + person = distinct_id_to_person.get(recording.distinct_id) + if person: + recording.person = person session_recording_serializer = SessionRecordingSerializer(recordings, context=context, many=True) results = session_recording_serializer.data diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index 6b0721d673083..2038b39cacb2b 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -41,7 +41,7 @@ def setUp(self): # TODO this is pretty slow, we should change assertions so that we don't need it self.team = Team.objects.create(organization=self.organization, name="New Team") - def create_snapshot( + def produce_replay_summary( self, distinct_id, session_id, @@ -79,11 +79,11 @@ def test_get_session_recordings(self): base_time = (now() - relativedelta(days=1)).replace(microsecond=0) session_id_one = f"test_get_session_recordings-1" - self.create_snapshot("user_one_0", session_id_one, base_time) - self.create_snapshot("user_one_0", session_id_one, base_time + relativedelta(seconds=10)) - self.create_snapshot("user_one_0", session_id_one, base_time + relativedelta(seconds=30)) + self.produce_replay_summary("user_one_0", session_id_one, 
base_time) + self.produce_replay_summary("user_one_0", session_id_one, base_time + relativedelta(seconds=10)) + self.produce_replay_summary("user_one_0", session_id_one, base_time + relativedelta(seconds=30)) session_id_two = f"test_get_session_recordings-2" - self.create_snapshot("user2", session_id_two, base_time + relativedelta(seconds=20)) + self.produce_replay_summary("user2", session_id_two, base_time + relativedelta(seconds=20)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -146,11 +146,11 @@ def test_can_list_recordings_even_when_the_person_has_multiple_distinct_ids(self base_time = (now() - relativedelta(days=1)).replace(microsecond=0) session_id_one = f"test_get_session_recordings-1" - self.create_snapshot("user_one_0", session_id_one, base_time) - self.create_snapshot("user_one_1", session_id_one, base_time + relativedelta(seconds=10)) - self.create_snapshot("user_one_2", session_id_one, base_time + relativedelta(seconds=30)) + self.produce_replay_summary("user_one_0", session_id_one, base_time) + self.produce_replay_summary("user_one_1", session_id_one, base_time + relativedelta(seconds=10)) + self.produce_replay_summary("user_one_2", session_id_one, base_time + relativedelta(seconds=30)) session_id_two = f"test_get_session_recordings-2" - self.create_snapshot("user2", session_id_two, base_time + relativedelta(seconds=20)) + self.produce_replay_summary("user2", session_id_two, base_time + relativedelta(seconds=20)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -200,8 +200,8 @@ def _person_with_snapshots(self, base_time: datetime, distinct_id: str = "user", distinct_ids=[distinct_id], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot(distinct_id, session_id, base_time) - self.create_snapshot(distinct_id, session_id, base_time + relativedelta(seconds=10)) + self.produce_replay_summary(distinct_id, session_id, base_time) + self.produce_replay_summary(distinct_id, session_id, base_time + relativedelta(seconds=10)) flush_persons_and_events() def test_session_recordings_dont_leak_teams(self) -> None: @@ -218,8 +218,8 @@ def test_session_recordings_dont_leak_teams(self) -> None: ) base_time = (now() - relativedelta(days=1)).replace(microsecond=0) - self.create_snapshot("user", "1", base_time, team_id=another_team.pk) - self.create_snapshot("user", "2", base_time) + self.produce_replay_summary("user", "1", base_time, team_id=another_team.pk) + self.produce_replay_summary("user", "2", base_time) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -234,8 +234,8 @@ def test_session_recording_for_user_with_multiple_distinct_ids(self) -> None: distinct_ids=["d1", "d2"], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot("d1", "1", base_time) - self.create_snapshot("d2", "2", base_time + relativedelta(seconds=30)) + self.produce_replay_summary("d1", "1", base_time) + self.produce_replay_summary("d2", "2", base_time + relativedelta(seconds=30)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") response_data = response.json() @@ -250,8 +250,8 @@ def test_viewed_state_of_session_recording_version_1(self): ) base_time = (now() - timedelta(days=1)).replace(microsecond=0) 
SessionRecordingViewed.objects.create(team=self.team, user=self.user, session_id="1") - self.create_snapshot("u1", "1", base_time) - self.create_snapshot("u1", "2", base_time + relativedelta(seconds=30)) + self.produce_replay_summary("u1", "1", base_time) + self.produce_replay_summary("u1", "2", base_time + relativedelta(seconds=30)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") response_data = response.json() self.assertEqual(len(response_data["results"]), 2) @@ -271,8 +271,8 @@ def test_viewed_state_of_session_recording_version_3(self): session_id_two = "2" SessionRecordingViewed.objects.create(team=self.team, user=self.user, session_id=session_id_one) - self.create_snapshot("u1", session_id_one, base_time) - self.create_snapshot("u1", session_id_two, base_time + relativedelta(seconds=30)) + self.produce_replay_summary("u1", session_id_one, base_time) + self.produce_replay_summary("u1", session_id_two, base_time + relativedelta(seconds=30)) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings") response_data = response.json() @@ -387,7 +387,7 @@ def test_get_single_session_recording_metadata(self): def test_single_session_recording_doesnt_leak_teams(self): another_team = Team.objects.create(organization=self.organization) - self.create_snapshot( + self.produce_replay_summary( "user", "id_no_team_leaking", now() - relativedelta(days=1), @@ -410,7 +410,18 @@ def test_session_recording_with_no_person(self): response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/id_no_person") response_data = response.json() - self.assertEqual(response_data["person"], None) + + self.assertEqual( + response_data["person"], + { + "id": None, + "name": None, + "distinct_ids": ["d1"], + "properties": {}, + "created_at": None, + "uuid": response_data["person"]["uuid"], + }, + ) def test_session_recording_doesnt_exist(self): response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/non_existent_id") @@ -422,7 +433,7 @@ def test_session_recording_doesnt_exist(self): def test_request_to_another_teams_endpoint_returns_401(self): org = Organization.objects.create(name="Separate Org") another_team = Team.objects.create(organization=org) - self.create_snapshot( + self.produce_replay_summary( "user", "id_no_team_leaking", now() - relativedelta(days=1), @@ -444,17 +455,17 @@ def test_session_ids_filter(self, use_recording_events: bool, api_version: int): distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot( + self.produce_replay_summary( "user", "1", now() - relativedelta(days=1), ) - self.create_snapshot( + self.produce_replay_summary( "user", "2", now() - relativedelta(days=2), ) - self.create_snapshot( + self.produce_replay_summary( "user", "3", now() - relativedelta(days=3), @@ -478,9 +489,9 @@ def test_empty_list_session_ids_filter_returns_no_recordings(self): distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"}, ) - self.create_snapshot("user", "1", now() - relativedelta(days=1)) - self.create_snapshot("user", "2", now() - relativedelta(days=2)) - self.create_snapshot("user", "3", now() - relativedelta(days=3)) + self.produce_replay_summary("user", "1", now() - relativedelta(days=1)) + self.produce_replay_summary("user", "2", now() - relativedelta(days=2)) + self.produce_replay_summary("user", "3", now() - relativedelta(days=3)) # Fetch playlist params_string = urlencode({"session_ids": "[]"}) @@ -491,7 +502,7 @@ def 
test_empty_list_session_ids_filter_returns_no_recordings(self): self.assertEqual(len(response_data["results"]), 0) def test_delete_session_recording(self): - self.create_snapshot("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) + self.produce_replay_summary("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) response = self.client.delete(f"/api/projects/{self.team.id}/session_recordings/1") self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) # Trying to delete same recording again returns 404 @@ -503,7 +514,7 @@ def test_delete_session_recording(self): return_value=2, ) def test_persist_session_recording(self, _mock_copy_objects: MagicMock) -> None: - self.create_snapshot("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) + self.produce_replay_summary("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/1") assert response.status_code == status.HTTP_200_OK @@ -827,7 +838,7 @@ def test_get_via_sharing_token(self, mock_copy_objects: MagicMock) -> None: session_id = str(uuid.uuid4()) with freeze_time("2023-01-01T12:00:00Z"): - self.create_snapshot( + self.produce_replay_summary( "user", session_id, now() - relativedelta(days=1), @@ -866,10 +877,12 @@ def test_get_via_sharing_token(self, mock_copy_objects: MagicMock) -> None: } # now create a snapshot record that doesn't have a fixed date, as it needs to be within TTL for the request below to complete - self.create_snapshot( + self.produce_replay_summary( "user", session_id, - now(), + # a little before now, since the DB checks if the snapshot is within TTL and before now + # if the test runs too quickly it looks like the snapshot is not there + now() - relativedelta(seconds=1), team_id=self.team.pk, ) @@ -936,7 +949,7 @@ def test_get_matching_events(self) -> None: # the matching session session_id = f"test_get_matching_events-1-{uuid.uuid4()}" - self.create_snapshot("user", session_id, base_time) + self.produce_replay_summary("user", session_id, base_time) event_id = _create_event( event="$pageview", properties={"$session_id": session_id}, @@ -946,7 +959,7 @@ def test_get_matching_events(self) -> None: # a non-matching session non_matching_session_id = f"test_get_matching_events-2-{uuid.uuid4()}" - self.create_snapshot("user", non_matching_session_id, base_time) + self.produce_replay_summary("user", non_matching_session_id, base_time) _create_event( event="$pageview", properties={"$session_id": non_matching_session_id}, diff --git a/posthog/settings/access.py b/posthog/settings/access.py index 1a339e6a54c39..0d76473d71e60 100644 --- a/posthog/settings/access.py +++ b/posthog/settings/access.py @@ -63,11 +63,9 @@ if not DEBUG and not TEST and SECRET_KEY == DEFAULT_SECRET_KEY: logger.critical( """ - You are using the default SECRET_KEY in a production environment! - For the safety of your instance, you must generate and set a unique key., - More information on - https://posthog.com/docs/self-host/configure/securing-posthog, - """ +You are using the default SECRET_KEY in a production environment! +For the safety of your instance, you must generate and set a unique key. +""" ) sys.exit("[ERROR] Default SECRET_KEY in production. 
Stopping Django server…\n") diff --git a/posthog/settings/overrides.py b/posthog/settings/overrides.py index 5e5191a2a9158..1b4f8298ef74b 100644 --- a/posthog/settings/overrides.py +++ b/posthog/settings/overrides.py @@ -32,7 +32,7 @@ if runner: cmd = sys.argv[1] if len(sys.argv) >= 2 else None - if cmd == "test" or runner.endswith("pytest") or runner.endswith("mypy"): + if cmd == "test" or runner.endswith("pytest") or runner.endswith("mypy") or "/mypy/" in runner: print("Running in test mode. Setting DEBUG and TEST environment variables.") os.environ["DEBUG"] = "1" os.environ["TEST"] = "1" diff --git a/posthog/tasks/scheduled.py b/posthog/tasks/scheduled.py index 537a1a49bbe20..71e910dfb3140 100644 --- a/posthog/tasks/scheduled.py +++ b/posthog/tasks/scheduled.py @@ -46,6 +46,7 @@ update_event_partitions, update_quota_limiting, verify_persons_data_in_sync, + stop_surveys_reached_target, ) from posthog.utils import get_crontab @@ -238,6 +239,12 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None: name="clickhouse clear deleted person data", ) + sender.add_periodic_task( + crontab(hour="*/12"), + stop_surveys_reached_target.s(), + name="stop surveys that reached responses limits", + ) + if settings.EE_AVAILABLE: # every interval seconds, we calculate N replay embeddings # the goal is to process _enough_ every 24 hours that diff --git a/posthog/tasks/stop_surveys_reached_target.py b/posthog/tasks/stop_surveys_reached_target.py new file mode 100644 index 0000000000000..5432a45d84b19 --- /dev/null +++ b/posthog/tasks/stop_surveys_reached_target.py @@ -0,0 +1,71 @@ +from itertools import groupby +from django.db.models import Q +from django.utils import timezone +from uuid import UUID +from datetime import datetime + +from posthog.clickhouse.client.connection import Workload +from posthog.client import sync_execute +from posthog.models import Survey + + +def _get_surveys_response_counts( + surveys_ids: list[UUID], team_id: int, earliest_survey_creation_date: datetime +) -> dict[str, int]: + data = sync_execute( + """ + SELECT JSONExtractString(properties, '$survey_id') as survey_id, count() + FROM events + WHERE event = 'survey sent' + AND team_id = %(team_id)s + AND timestamp >= %(earliest_survey_creation_date)s + AND survey_id in %(surveys_ids)s + GROUP BY survey_id + """, + { + "surveys_ids": surveys_ids, + "team_id": team_id, + "earliest_survey_creation_date": earliest_survey_creation_date, + }, + workload=Workload.OFFLINE, + ) + + counts = {} + for survey_id, count in data: + counts[survey_id] = count + return counts + + +def _stop_survey_if_reached_limit(survey: Survey, responses_count: int) -> None: + # Since the job might take a long time, the survey configuration could've been changed by the user + # after we've queried it. 
+ survey.refresh_from_db() + if survey.responses_limit is None or survey.end_date is not None: + return + + if responses_count < survey.responses_limit: + return + + survey.end_date = timezone.now() + survey.responses_limit = None + survey.save(update_fields=["end_date", "responses_limit"]) + + +def stop_surveys_reached_target() -> None: + all_surveys = Survey.objects.exclude(Q(responses_limit__isnull=True) | Q(end_date__isnull=False)).only( + "id", "responses_limit", "team_id", "created_at" + ) + + all_surveys_sorted = sorted(all_surveys, key=lambda survey: survey.team_id) + for team_id, team_surveys in groupby(all_surveys_sorted, lambda survey: survey.team_id): + team_surveys_list = list(team_surveys) + surveys_ids = [survey.id for survey in team_surveys_list] + earliest_survey_creation_date = min([survey.created_at for survey in team_surveys_list]) + + response_counts = _get_surveys_response_counts(surveys_ids, team_id, earliest_survey_creation_date) + for survey in team_surveys_list: + survey_id = str(survey.id) + if survey_id not in response_counts: + continue + + _stop_survey_if_reached_limit(survey, response_counts[survey_id]) diff --git a/posthog/tasks/tasks.py b/posthog/tasks/tasks.py index 0a52b02d35590..56c4909e79db3 100644 --- a/posthog/tasks/tasks.py +++ b/posthog/tasks/tasks.py @@ -631,6 +631,13 @@ def verify_persons_data_in_sync() -> None: verify() +@shared_task(ignore_result=True) +def stop_surveys_reached_target() -> None: + from posthog.tasks.stop_surveys_reached_target import stop_surveys_reached_target + + stop_surveys_reached_target() + + def recompute_materialized_columns_enabled() -> bool: from posthog.models.instance_setting import get_instance_setting diff --git a/posthog/tasks/test/__snapshots__/test_stop_surveys_reached_target.ambr b/posthog/tasks/test/__snapshots__/test_stop_surveys_reached_target.ambr new file mode 100644 index 0000000000000..e5c72ba285bdb --- /dev/null +++ b/posthog/tasks/test/__snapshots__/test_stop_surveys_reached_target.ambr @@ -0,0 +1,27 @@ +# serializer version: 1 +# name: TestStopSurveysReachedTarget.test_stop_surveys_with_enough_responses + ''' + + SELECT JSONExtractString(properties, '$survey_id') as survey_id, + count() + FROM events + WHERE event = 'survey sent' + AND team_id = 2 + AND timestamp >= '2021-12-29 20:00:00' + AND survey_id in ['00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000001' /* ... */] + GROUP BY survey_id + ''' +# --- +# name: TestStopSurveysReachedTarget.test_stop_surveys_with_enough_responses.1 + ''' + + SELECT JSONExtractString(properties, '$survey_id') as survey_id, + count() + FROM events + WHERE event = 'survey sent' + AND team_id = 2 + AND timestamp >= '2022-01-01 00:00:00' + AND survey_id in ['00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000001' /* ... 
*/] + GROUP BY survey_id + ''' +# --- diff --git a/posthog/tasks/test/test_stop_surveys_reached_target.py b/posthog/tasks/test/test_stop_surveys_reached_target.py new file mode 100644 index 0000000000000..6860405ddcd7c --- /dev/null +++ b/posthog/tasks/test/test_stop_surveys_reached_target.py @@ -0,0 +1,176 @@ +from freezegun import freeze_time +from posthog.models import Survey, Organization, Team, User, FeatureFlag +from django.test import TestCase +from typing import Optional +from dateutil.relativedelta import relativedelta +from datetime import timedelta, datetime +from django.utils.timezone import now +from posthog.test.base import _create_event, flush_persons_and_events, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.tasks.stop_surveys_reached_target import stop_surveys_reached_target + + +class TestStopSurveysReachedTarget(TestCase, ClickhouseTestMixin): + def setUp(self) -> None: + super().setUp() + + self.org = Organization.objects.create(name="Org 1") + self.team1 = Team.objects.create(organization=self.org, name="Team 1") + self.team2 = Team.objects.create(organization=self.org, name="Team 2") + self.user = User.objects.create_and_join(self.org, "a@b.c", password=None) + self.flag = FeatureFlag.objects.create( + team=self.team1, + created_by=self.user, + key="flag_name", + filters={}, + rollout_percentage=100, + ) + + def _create_event_for_survey( + self, survey: Survey, event: str = "survey sent", custom_timestamp: Optional[datetime] = None + ) -> None: + timestamp = custom_timestamp or now() + _create_event( + distinct_id="0", + event=event, + properties={ + "$survey_id": str(survey.id), + }, + timestamp=timestamp, + team=survey.team, + ) + + @freeze_time("2022-01-01") + @snapshot_clickhouse_queries + def test_stop_surveys_with_enough_responses(self) -> None: + surveys = [ + Survey.objects.create( + name="1", + team=self.team1, + created_by=self.user, + linked_flag=self.flag, + responses_limit=1, + ), + Survey.objects.create( + name="2", + team=self.team1, + created_by=self.user, + linked_flag=self.flag, + responses_limit=1, + ), + Survey.objects.create( + name="3", + team=self.team2, + created_by=self.user, + linked_flag=self.flag, + responses_limit=1, + ), + ] + + # TRICKY: We can't override created_at in the model create because of auto_now_add, so we need to update it manually + surveys[1].created_at = now() - relativedelta(days=2, hours=4) + surveys[1].save() + + for survey in surveys: + self._create_event_for_survey(survey) + + # Check that having more responses than the limit will also stop the survey + self._create_event_for_survey(surveys[0]) + + flush_persons_and_events() + + stop_surveys_reached_target() + + for survey in surveys: + survey.refresh_from_db() + assert now() - survey.end_date < timedelta(seconds=1) + assert not survey.responses_limit + + def test_do_not_stop_survey_with_not_enough_responses(self) -> None: + survey = Survey.objects.create( + name="1", + team=self.team1, + created_by=self.user, + linked_flag=self.flag, + responses_limit=3, + created_at=now(), + ) + self._create_event_for_survey(survey) + flush_persons_and_events() + + stop_surveys_reached_target() + + survey.refresh_from_db() + assert not survey.end_date + assert survey.responses_limit == 3 + + def test_do_not_stop_survey_without_limit(self) -> None: + survey = Survey.objects.create( + name="1", + team=self.team1, + created_by=self.user, + linked_flag=self.flag, + created_at=now(), + ) + self._create_event_for_survey(survey) + flush_persons_and_events() + + 
stop_surveys_reached_target() + + survey.refresh_from_db() + assert not survey.end_date + + def test_do_not_stop_survey_with_other_events(self) -> None: + survey = Survey.objects.create( + name="1", + team=self.team1, + created_by=self.user, + linked_flag=self.flag, + responses_limit=1, + created_at=now(), + ) + self._create_event_for_survey(survey, event="survey dismissed") + self._create_event_for_survey(survey, event="survey shown") + flush_persons_and_events() + + stop_surveys_reached_target() + + survey.refresh_from_db() + assert not survey.end_date + + def test_do_not_stop_survey_with_events_before_creation_date(self) -> None: + survey = Survey.objects.create( + name="1", + team=self.team1, + created_by=self.user, + linked_flag=self.flag, + responses_limit=1, + created_at=now(), + ) + self._create_event_for_survey(survey, event="survey sent", custom_timestamp=now() - relativedelta(hours=12)) + flush_persons_and_events() + + stop_surveys_reached_target() + + survey.refresh_from_db() + assert not survey.end_date + + def test_do_not_stop_already_stopped_survey_with_responses_limit(self) -> None: + survey = Survey.objects.create( + name="1", + team=self.team1, + created_by=self.user, + linked_flag=self.flag, + responses_limit=1, + end_date=now() - relativedelta(hours=1), + ) + survey.created_at = now() - relativedelta(hours=1) + survey.save() + + self._create_event_for_survey(survey, event="survey sent") + flush_persons_and_events() + + stop_surveys_reached_target() + + survey.refresh_from_db() + assert now() - relativedelta(hours=1) - survey.end_date < timedelta(seconds=1) + assert survey.responses_limit == 1 diff --git a/posthog/temporal/data_imports/__init__.py b/posthog/temporal/data_imports/__init__.py index 3259e91f002cf..2b162efa4c538 100644 --- a/posthog/temporal/data_imports/__init__.py +++ b/posthog/temporal/data_imports/__init__.py @@ -4,7 +4,6 @@ create_source_templates, import_data_activity, update_external_data_job_model, - validate_schema_activity, check_schedule_activity, ) @@ -14,7 +13,6 @@ create_external_data_job_model_activity, update_external_data_job_model, import_data_activity, - validate_schema_activity, create_source_templates, check_schedule_activity, ] diff --git a/posthog/temporal/data_imports/external_data_job.py b/posthog/temporal/data_imports/external_data_job.py index dc111fb4b834d..9529113928238 100644 --- a/posthog/temporal/data_imports/external_data_job.py +++ b/posthog/temporal/data_imports/external_data_job.py @@ -4,7 +4,6 @@ import uuid from asgiref.sync import sync_to_async -from dlt.common.schema.typing import TSchemaTables from temporalio import activity, exceptions, workflow from temporalio.common import RetryPolicy @@ -24,7 +23,6 @@ ) from posthog.warehouse.data_load.source_templates import create_warehouse_templates_for_source -from posthog.warehouse.data_load.validate_schema import validate_schema_and_update_table from posthog.warehouse.external_data_source.jobs import ( update_external_job_status, ) @@ -60,31 +58,6 @@ async def update_external_data_job_model(inputs: UpdateExternalDataJobStatusInpu ) -@dataclasses.dataclass -class ValidateSchemaInputs: - run_id: str - team_id: int - schema_id: uuid.UUID - table_schema: TSchemaTables - table_row_counts: dict[str, int] - - -@activity.defn -async def validate_schema_activity(inputs: ValidateSchemaInputs) -> None: - await validate_schema_and_update_table( - run_id=inputs.run_id, - team_id=inputs.team_id, - schema_id=inputs.schema_id, - table_schema=inputs.table_schema, - 
table_row_counts=inputs.table_row_counts, - ) - - logger = await bind_temporal_worker_logger(team_id=inputs.team_id) - logger.info( - f"Validated schema for external data job {inputs.run_id}", - ) - - @dataclasses.dataclass class CreateSourceTemplateInputs: team_id: int @@ -193,22 +166,6 @@ async def run(self, inputs: ExternalDataWorkflowInputs): **timeout_params, ) # type: ignore - # check schema first - validate_inputs = ValidateSchemaInputs( - run_id=run_id, - team_id=inputs.team_id, - schema_id=inputs.external_data_schema_id, - table_schema=table_schemas, - table_row_counts=table_row_counts, - ) - - await workflow.execute_activity( - validate_schema_activity, - validate_inputs, - start_to_close_timeout=dt.timedelta(minutes=10), - retry_policy=RetryPolicy(maximum_attempts=2), - ) - # Create source templates await workflow.execute_activity( create_source_templates, diff --git a/posthog/temporal/data_imports/pipelines/pipeline.py b/posthog/temporal/data_imports/pipelines/pipeline.py index 0ac469c214d04..25842753dfda7 100644 --- a/posthog/temporal/data_imports/pipelines/pipeline.py +++ b/posthog/temporal/data_imports/pipelines/pipeline.py @@ -6,6 +6,7 @@ from django.conf import settings from dlt.pipeline.exceptions import PipelineStepFailed +from asgiref.sync import async_to_sync import asyncio import os from posthog.settings.base_variables import TEST @@ -13,6 +14,8 @@ from dlt.sources import DltSource from collections import Counter +from posthog.warehouse.data_load.validate_schema import validate_schema_and_update_table + @dataclass class PipelineInputs: @@ -98,27 +101,43 @@ def _create_pipeline(self): def _run(self) -> dict[str, int]: pipeline = self._create_pipeline() - total_counts: Counter = Counter({}) + total_counts: Counter[str] = Counter({}) if self._incremental: # will get overwritten - counts: Counter = Counter({"start": 1}) + counts: Counter[str] = Counter({"start": 1}) while counts: pipeline.run(self.source, loader_file_format=self.loader_file_format) row_counts = pipeline.last_trace.last_normalize_info.row_counts # Remove any DLT tables from the counts - filtered_rows = filter(lambda pair: not pair[0].startswith("_dlt"), row_counts.items()) - counts = Counter(dict(filtered_rows)) + filtered_rows = dict(filter(lambda pair: not pair[0].startswith("_dlt"), row_counts.items())) + counts = Counter(filtered_rows) total_counts = counts + total_counts + + async_to_sync(validate_schema_and_update_table)( + run_id=self.inputs.run_id, + team_id=self.inputs.team_id, + schema_id=self.inputs.schema_id, + table_schema=self.source.schema.tables, + table_row_counts=filtered_rows, + ) else: pipeline.run(self.source, loader_file_format=self.loader_file_format) row_counts = pipeline.last_trace.last_normalize_info.row_counts - filtered_rows = filter(lambda pair: not pair[0].startswith("_dlt"), row_counts.items()) - counts = Counter(dict(filtered_rows)) + filtered_rows = dict(filter(lambda pair: not pair[0].startswith("_dlt"), row_counts.items())) + counts = Counter(filtered_rows) total_counts = total_counts + counts + async_to_sync(validate_schema_and_update_table)( + run_id=self.inputs.run_id, + team_id=self.inputs.team_id, + schema_id=self.inputs.schema_id, + table_schema=self.source.schema.tables, + table_row_counts=filtered_rows, + ) + return dict(total_counts) async def run(self) -> dict[str, int]: diff --git a/posthog/temporal/data_imports/pipelines/test/test_pipeline.py b/posthog/temporal/data_imports/pipelines/test/test_pipeline.py new file mode 100644 index 0000000000000..e83ae21d78d55 
--- /dev/null +++ b/posthog/temporal/data_imports/pipelines/test/test_pipeline.py @@ -0,0 +1,106 @@ +from typing import Any +from unittest.mock import MagicMock, PropertyMock, patch +import uuid + +import pytest +import structlog +from asgiref.sync import sync_to_async +from posthog.temporal.data_imports.pipelines.pipeline import DataImportPipeline, PipelineInputs +from posthog.temporal.data_imports.pipelines.stripe.helpers import stripe_source +from posthog.test.base import APIBaseTest +from posthog.warehouse.models.external_data_job import ExternalDataJob +from posthog.warehouse.models.external_data_schema import ExternalDataSchema +from posthog.warehouse.models.external_data_source import ExternalDataSource + + +class TestDataImportPipeline(APIBaseTest): + async def _create_pipeline(self, schema_name: str, incremental: bool): + source = await sync_to_async(ExternalDataSource.objects.create)( + source_id=str(uuid.uuid4()), + connection_id=str(uuid.uuid4()), + destination_id=str(uuid.uuid4()), + team=self.team, + status="running", + source_type="Stripe", + ) + schema = await sync_to_async(ExternalDataSchema.objects.create)( + name=schema_name, + team_id=self.team.pk, + source_id=source.pk, + source=source, + ) + job = await sync_to_async(ExternalDataJob.objects.create)( + team_id=self.team.pk, + pipeline_id=source.pk, + pipeline=source, + schema_id=schema.pk, + schema=schema, + status=ExternalDataJob.Status.RUNNING, + rows_synced=0, + workflow_id=str(uuid.uuid4()), + ) + + pipeline = DataImportPipeline( + inputs=PipelineInputs( + source_id=source.pk, + run_id=job.pk, + schema_id=schema.pk, + dataset_name=job.folder_path, + job_type="Stripe", + team_id=self.team.pk, + ), + source=stripe_source( + api_key="", + account_id="", + endpoints=(schema_name,), + team_id=self.team.pk, + job_id=job.pk, + schema_id=schema.pk, + start_date=None, + end_date=None, + ), + logger=structlog.get_logger(), + incremental=incremental, + ) + + return pipeline + + @pytest.mark.django_db(transaction=True) + @pytest.mark.asyncio + async def test_pipeline_non_incremental(self): + def mock_create_pipeline(local_self: Any): + mock = MagicMock() + mock.last_trace.last_normalize_info.row_counts = {"customer": 1} + return mock + + with ( + patch.object(DataImportPipeline, "_create_pipeline", mock_create_pipeline), + patch( + "posthog.temporal.data_imports.pipelines.pipeline.validate_schema_and_update_table" + ) as mock_validate_schema_and_update_table, + ): + pipeline = await self._create_pipeline("Customer", False) + res = await pipeline.run() + + assert res.get("customer") == 1 + assert mock_validate_schema_and_update_table.call_count == 1 + + @pytest.mark.django_db(transaction=True) + @pytest.mark.asyncio + async def test_pipeline_incremental(self): + def mock_create_pipeline(local_self: Any): + mock = MagicMock() + type(mock.last_trace.last_normalize_info).row_counts = PropertyMock(side_effect=[{"customer": 1}, {}]) + return mock + + with ( + patch.object(DataImportPipeline, "_create_pipeline", mock_create_pipeline), + patch( + "posthog.temporal.data_imports.pipelines.pipeline.validate_schema_and_update_table" + ) as mock_validate_schema_and_update_table, + ): + pipeline = await self._create_pipeline("Customer", True) + res = await pipeline.run() + + assert res.get("customer") == 1 + assert mock_validate_schema_and_update_table.call_count == 2 diff --git a/posthog/temporal/tests/external_data/test_external_data_job.py b/posthog/temporal/tests/external_data/test_external_data_job.py index 
37431206fe331..a8b499a500500 100644 --- a/posthog/temporal/tests/external_data/test_external_data_job.py +++ b/posthog/temporal/tests/external_data/test_external_data_job.py @@ -7,11 +7,9 @@ from posthog.temporal.data_imports.external_data_job import ( UpdateExternalDataJobStatusInputs, - ValidateSchemaInputs, check_schedule_activity, create_source_templates, update_external_data_job_model, - validate_schema_activity, ) from posthog.temporal.data_imports.external_data_job import ( ExternalDataJobWorkflow, @@ -25,11 +23,9 @@ from posthog.warehouse.external_data_source.jobs import create_external_data_job from posthog.warehouse.models import ( get_latest_run_if_exists, - DataWarehouseTable, ExternalDataJob, ExternalDataSource, ExternalDataSchema, - DataWarehouseCredential, ) from posthog.temporal.data_imports.pipelines.schemas import ( @@ -512,239 +508,6 @@ async def setup_job_1(): assert job_1.rows_synced == 1 -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_validate_schema_and_update_table_activity(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - test_1_schema = await _create_schema("test-1", new_source, team) - - with ( - mock.patch("posthog.warehouse.models.table.DataWarehouseTable.get_columns") as mock_get_columns, - override_settings(**AWS_BUCKET_MOCK_SETTINGS), - ): - mock_get_columns.return_value = {"id": "string"} - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=test_1_schema.id, - table_schema={ - "test-1": {"name": "test-1", "resource": "test-1", "columns": {"id": {"data_type": "text"}}}, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 2 - assert ( - await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 1 - ) - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_validate_schema_and_update_table_activity_with_existing(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - prefix="stripe_", - ) - - old_job: ExternalDataJob = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.COMPLETED, - rows_synced=0, - ) - - old_credential = await sync_to_async(DataWarehouseCredential.objects.create)( - team=team, - access_key=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - access_secret=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - ) - - url_pattern = await sync_to_async(old_job.url_pattern_by_schema)("test-1") - - existing_table = await sync_to_async(DataWarehouseTable.objects.create)( - credential=old_credential, - name="stripe_test-1", - format="Parquet", - url_pattern=url_pattern, - team_id=team.pk, - external_data_source_id=new_source.pk, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - 
team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - test_1_schema = await _create_schema("test-1", new_source, team, table_id=existing_table.id) - - with ( - mock.patch("posthog.warehouse.models.table.DataWarehouseTable.get_columns") as mock_get_columns, - override_settings(**AWS_BUCKET_MOCK_SETTINGS), - ): - mock_get_columns.return_value = {"id": "string"} - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=test_1_schema.id, - table_schema={ - "test-1": {"name": "test-1", "resource": "test-1", "columns": {"id": {"data_type": "text"}}}, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 2 - assert ( - await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 1 - ) - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_validate_schema_and_update_table_activity_half_run(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - with ( - mock.patch("posthog.warehouse.models.table.DataWarehouseTable.get_columns") as mock_get_columns, - mock.patch( - "posthog.warehouse.data_load.validate_schema.validate_schema", - ) as mock_validate, - override_settings(**AWS_BUCKET_MOCK_SETTINGS), - ): - mock_get_columns.return_value = {"id": "string"} - credential = await sync_to_async(DataWarehouseCredential.objects.create)( - team=team, - access_key=settings.OBJECT_STORAGE_ACCESS_KEY_ID, - access_secret=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - ) - - mock_validate.side_effect = [ - Exception, - { - "credential": credential, - "format": "Parquet", - "name": "test_schema", - "url_pattern": "test_url_pattern", - "team_id": team.pk, - }, - ] - - broken_schema = await _create_schema("broken_schema", new_source, team) - - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=broken_schema.id, - table_schema={ - "broken_schema": { - "name": "broken_schema", - "resource": "broken_schema", - "columns": {"id": {"data_type": "text"}}, - }, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 0 - assert ( - await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 0 - ) - - -@pytest.mark.django_db(transaction=True) -@pytest.mark.asyncio -async def test_create_schema_activity(activity_environment, team, **kwargs): - new_source = await sync_to_async(ExternalDataSource.objects.create)( - source_id=uuid.uuid4(), - connection_id=uuid.uuid4(), - destination_id=uuid.uuid4(), - team=team, - status="running", - source_type="Stripe", - job_inputs={"stripe_secret_key": "test-key"}, - ) - - new_job = await sync_to_async(ExternalDataJob.objects.create)( - team_id=team.id, - pipeline_id=new_source.pk, - status=ExternalDataJob.Status.RUNNING, - rows_synced=0, - ) - - test_1_schema = await _create_schema("test-1", new_source, team) - - with ( - 
mock.patch("posthog.warehouse.models.table.DataWarehouseTable.get_columns") as mock_get_columns, - override_settings(**AWS_BUCKET_MOCK_SETTINGS), - ): - mock_get_columns.return_value = {"id": "string"} - await activity_environment.run( - validate_schema_activity, - ValidateSchemaInputs( - run_id=new_job.pk, - team_id=team.id, - schema_id=test_1_schema.id, - table_schema={ - "test-1": {"name": "test-1", "resource": "test-1", "columns": {"id": {"data_type": "text"}}}, - }, - table_row_counts={}, - ), - ) - - assert mock_get_columns.call_count == 2 - all_tables = DataWarehouseTable.objects.all() - table_length = await sync_to_async(len)(all_tables) - assert table_length == 1 - - @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio async def test_external_data_job_workflow_with_schema(team, **kwargs): @@ -792,7 +555,6 @@ async def mock_async_func(inputs): create_external_data_job_model_activity, update_external_data_job_model, import_data_activity, - validate_schema_activity, create_source_templates, ], workflow_runner=UnsandboxedWorkflowRunner(), @@ -810,8 +572,6 @@ async def mock_async_func(inputs): assert run is not None assert run.status == ExternalDataJob.Status.COMPLETED - assert await sync_to_async(DataWarehouseTable.objects.filter(external_data_source_id=new_source.pk).count)() == 1 - @pytest.mark.django_db(transaction=True) @pytest.mark.asyncio diff --git a/posthog/test/base.py b/posthog/test/base.py index 2ebfa6178e259..3b45da1a2b463 100644 --- a/posthog/test/base.py +++ b/posthog/test/base.py @@ -515,6 +515,14 @@ def assertQueryMatchesSnapshot(self, query, params=None, replace_all_numbers=Fal query, ) + # replace survey uuids + # replace arrays like "survey_id in ['017e12ef-9c00-0000-59bf-43ddb0bddea6', '017e12ef-9c00-0001-6df6-2cf1f217757f']" + query = re.sub( + r"survey_id in \['[0-9a-f-]{36}'(, '[0-9a-f-]{36}')*\]", + r"survey_id in ['00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000001' /* ... 
*/]", + query, + ) + #### Cohort replacements # replace cohort id lists in queries too query = re.sub( diff --git a/posthog/test/test_middleware.py b/posthog/test/test_middleware.py index e0f5283dd3cae..728ce4936a645 100644 --- a/posthog/test/test_middleware.py +++ b/posthog/test/test_middleware.py @@ -325,6 +325,15 @@ def test_project_redirects_to_current_team_when_accessing_inaccessible_project_b assert res.status_code == 302 assert res.headers["Location"] == f"/project/{self.team.pk}/home" + def test_project_redirects_including_query_params(self): + res = self.client.get("/project/phc_123?t=1") + assert res.status_code == 302 + assert res.headers["Location"] == f"/project/{self.team.pk}?t=1" + + res = self.client.get("/project/phc_123/home?t=1") + assert res.status_code == 302 + assert res.headers["Location"] == f"/project/{self.team.pk}/home?t=1" + @override_settings(CLOUD_DEPLOYMENT="US") # As PostHog Cloud class TestPostHogTokenCookieMiddleware(APIBaseTest): diff --git a/posthog/warehouse/data_load/validate_schema.py b/posthog/warehouse/data_load/validate_schema.py index f3755442d3cea..bda0f6ebf3368 100644 --- a/posthog/warehouse/data_load/validate_schema.py +++ b/posthog/warehouse/data_load/validate_schema.py @@ -144,21 +144,14 @@ async def validate_schema_and_update_table( ) # create or update - table_created = None - if last_successful_job: - try: - table_created = await get_table_by_schema_id(_schema_id, team_id) - if not table_created: - raise DataWarehouseTable.DoesNotExist - except Exception: - table_created = None + table_created: DataWarehouseTable | None = await get_table_by_schema_id(_schema_id, team_id) + if table_created: + table_created.url_pattern = new_url_pattern + if incremental: + table_created.row_count = await sync_to_async(table_created.get_count)() else: - table_created.url_pattern = new_url_pattern - if incremental: - table_created.row_count = await sync_to_async(table_created.get_count)() - else: - table_created.row_count = row_count - await asave_datawarehousetable(table_created) + table_created.row_count = row_count + await asave_datawarehousetable(table_created) if not table_created: table_created = await acreate_datawarehousetable(external_data_source_id=job.pipeline.id, **data) diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py index f2f0832f308ff..d6bd80a6b259c 100644 --- a/posthog/warehouse/models/external_data_schema.py +++ b/posthog/warehouse/models/external_data_schema.py @@ -4,7 +4,7 @@ from posthog.models.team import Team from posthog.models.utils import CreatedMetaFields, UUIDModel, sane_repr import uuid -import psycopg +import psycopg2 from django.conf import settings from posthog.warehouse.util import database_sync_to_async @@ -81,7 +81,7 @@ def sync_old_schemas_with_new_schemas(new_schemas: list, source_id: uuid.UUID, t def get_postgres_schemas(host: str, port: str, database: str, user: str, password: str, schema: str) -> list[Any]: - connection = psycopg.Connection.connect( + connection = psycopg2.connect( host=host, port=int(port), dbname=database, diff --git a/posthog/warehouse/models/table.py b/posthog/warehouse/models/table.py index 229c81168a8d3..383c5c98399cf 100644 --- a/posthog/warehouse/models/table.py +++ b/posthog/warehouse/models/table.py @@ -69,6 +69,17 @@ ExtractErrors = { "The AWS Access Key Id you provided does not exist": "The Access Key you provided does not exist", + "Access Denied: while reading key:": "Access was denied when reading the provided file", + 
"Could not list objects in bucket": "Access was denied to the provided bucket", + "file is empty": "The provided file contains no data", + "The specified key does not exist": "The provided file doesn't exist in the bucket", + "Cannot extract table structure from CSV format file, because there are no files with provided path in S3 or all files are empty": "The provided file doesn't exist in the bucket", + "Cannot extract table structure from Parquet format file, because there are no files with provided path in S3 or all files are empty": "The provided file doesn't exist in the bucket", + "Cannot extract table structure from JSONEachRow format file, because there are no files with provided path in S3 or all files are empty": "The provided file doesn't exist in the bucket", + "Bucket or key name are invalid in S3 URI": "The provided file or bucket doesn't exist", + "S3 exception: `NoSuchBucket`, message: 'The specified bucket does not exist.'": "The provided bucket doesn't exist", + "Either the file is corrupted or this is not a parquet file": "The provided file is not in Parquet format", + "Rows have different amount of values": "The provided file has rows with differing numbers of values", }