diff --git a/app/packages/components/src/components/PanelCTA/index.tsx b/app/packages/components/src/components/PanelCTA/index.tsx
index 45065585268..c19d66b02c0 100644
--- a/app/packages/components/src/components/PanelCTA/index.tsx
+++ b/app/packages/components/src/components/PanelCTA/index.tsx
@@ -158,7 +158,11 @@ function TypographyOrNode(props: TypographyProps) {
const { children, ...otherProps } = props;
if (typeof children === "string") {
- return <Typography {...otherProps}>{children}</Typography>;
+ return (
+   <Typography {...otherProps}>
+     {children}
+   </Typography>
+ );
}
if (React.isValidElement(children)) {
diff --git a/app/packages/core/src/plugins/SchemaIO/components/NativeModelEvaluationView/index.tsx b/app/packages/core/src/plugins/SchemaIO/components/NativeModelEvaluationView/index.tsx
index 13b8fc6215e..a150d5fbec9 100644
--- a/app/packages/core/src/plugins/SchemaIO/components/NativeModelEvaluationView/index.tsx
+++ b/app/packages/core/src/plugins/SchemaIO/components/NativeModelEvaluationView/index.tsx
@@ -87,12 +87,12 @@ export default function NativeModelEvaluationView(props) {
{page === "overview" &&
(showEmptyOverview || showCTA ? (
{
return (
diff --git a/app/packages/core/src/plugins/SchemaIO/components/TreeSelectionView.tsx b/app/packages/core/src/plugins/SchemaIO/components/TreeSelectionView.tsx
index 74a187d764c..e8c20164a18 100644
--- a/app/packages/core/src/plugins/SchemaIO/components/TreeSelectionView.tsx
+++ b/app/packages/core/src/plugins/SchemaIO/components/TreeSelectionView.tsx
@@ -39,12 +39,7 @@ export default function TreeSelectionView(props: ViewPropsType) {
const { view = {} } = schema;
if (data == undefined) {
- const sampleIds = view?.data.flatMap(([parentId, children]) => {
- return children.map((childId) =>
- typeof childId === "string" ? childId : childId[0]
- );
- });
- onChange(path, sampleIds);
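+ // default to an empty selection; previously every sample was selected on init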
+ onChange(path, []);
}
const structure = view?.data || [];
@@ -78,7 +73,7 @@ export default function TreeSelectionView(props: ViewPropsType) {
const initialCollapsedState: CollapsedState = React.useMemo(() => {
const state: CollapsedState = {};
structure.forEach(([parentId]) => {
- state[parentId] = false; // start as expanded
+ state[parentId] = true; // start as collapsed
});
return state;
}, [structure]);
@@ -87,7 +82,7 @@ export default function TreeSelectionView(props: ViewPropsType) {
initialCollapsedState
);
- const [allCollapsed, setAllCollapsed] = React.useState(false);
+ const [allCollapsed, setAllCollapsed] = React.useState(true);
const handleExpandCollapseAll = () => {
setCollapsedState((prevState) => {
@@ -97,7 +92,7 @@ export default function TreeSelectionView(props: ViewPropsType) {
});
return newState;
});
- setAllCollapsed(!allCollapsed); // Toggle the expand/collapse state
+ setAllCollapsed(!allCollapsed);
};
const handleCheckboxChange = (id: string, isChecked: boolean) => {
@@ -183,7 +178,7 @@ export default function TreeSelectionView(props: ViewPropsType) {
const isSample =
!structure.some(([parentId]) => parentId === key) &&
key !== "selectAll";
- return isSample && updatedState[key].checked; // Only checked samples
+ return isSample && updatedState[key].checked;
});
// We update the actual output value (ctx.params.value / data) here.
@@ -193,7 +188,6 @@ export default function TreeSelectionView(props: ViewPropsType) {
});
};
- // Function to handle expand/collapse toggle
const handleToggleCollapse = (id: string) => {
setCollapsedState((prevState) => ({
...prevState,
@@ -209,17 +203,6 @@ export default function TreeSelectionView(props: ViewPropsType) {
return idx === -1 ? 0 : idx + 1;
};
- // On init, all samples are selected by default
- useEffect(() => {
- const sampleIds = view?.data.flatMap(([parentId, children]) => {
- return children.map((childId) =>
- typeof childId === "string" ? childId : childId[0]
- );
- });
- onChange(path, sampleIds);
- // eslint-disable-next-line react-hooks/exhaustive-deps
- }, []);
-
// this only runs when data and checkboxstate are different
// meaning the user selected samples from the grid
// we will handle the state change of checkedState here
diff --git a/app/packages/embeddings/src/Embeddings.tsx b/app/packages/embeddings/src/Embeddings.tsx
index 6cabb976381..3b99eef8395 100644
--- a/app/packages/embeddings/src/Embeddings.tsx
+++ b/app/packages/embeddings/src/Embeddings.tsx
@@ -92,14 +92,18 @@ export default function Embeddings({ containerHeight, dimensions }) {
containerStyle={selectorStyle}
/>
)}
- {computeViz.isAvailable && (
- computeViz.prompt()}
- title={"Compute visualization"}
- >
-
-
- )}
+ {
+ if (constants.IS_APP_MODE_FIFTYONE) {
+ setShowCTA(true);
+ } else {
+ computeViz.prompt();
+ }
+ }}
+ title={"Compute visualization"}
+ >
+
+
{!plotSelection.selectionIsExternal && (
- }
- onClick={() => {
- if (constants.IS_APP_MODE_FIFTYONE) {
- setShowCTA(true);
- } else {
- computeViz.prompt();
- }
- }}
- variant="contained"
- >
- Compute Embeddings
-
{showPlot && (
extends BaseElement {
}
}
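+ // once the error has cleared, remove any stale error element from the DOM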
+ if (!error && this.errorElement) {
+ this.errorElement.remove();
+ this.errorElement = null;
+ }
+
return this.errorElement;
}
}
-
-const onClick = (href) => {
- let openExternal;
-
- return null;
-};
diff --git a/app/packages/looker/src/elements/image.ts b/app/packages/looker/src/elements/image.ts
index 2df92eb5fd4..5d5d7ecdfbf 100644
--- a/app/packages/looker/src/elements/image.ts
+++ b/app/packages/looker/src/elements/image.ts
@@ -6,22 +6,48 @@ import type { ImageState } from "../state";
import type { Events } from "./base";
import { BaseElement } from "./base";
+const MAX_IMAGE_LOAD_RETRIES = 10;
+
export class ImageElement extends BaseElement<ImageState, HTMLImageElement> {
private src = "";
- private imageSource: HTMLImageElement;
+ protected imageSource: HTMLImageElement;
+
+ private retryCount = 0;
+ private timeoutId: number | null = null;
getEvents(): Events<ImageState> {
return {
load: ({ update }) => {
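+ // a successful load cancels any pending retry and resets the retry counter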
+ if (this.timeoutId !== null) {
+ window.clearTimeout(this.timeoutId);
+ this.timeoutId = null;
+ }
+ this.retryCount = 0;
+
this.imageSource = this.element;
update({
loaded: true,
+ error: false,
dimensions: [this.element.naturalWidth, this.element.naturalHeight],
});
},
error: ({ update }) => {
update({ error: true, dimensions: [512, 512], loaded: true });
+ // sometimes image loading fails because of insufficient resources
+ // we'll want to try again in those cases
+ if (this.retryCount < MAX_IMAGE_LOAD_RETRIES) {
+ // schedule a retry after a delay
+ if (this.timeoutId !== null) {
+ window.clearTimeout(this.timeoutId);
+ }
+ this.timeoutId = window.setTimeout(() => {
+ this.retryCount += 1;
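+ // re-assign src to prompt the browser to re-request the image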
+ const retrySrc = `${this.src}`;
+ this.element.setAttribute("src", retrySrc);
+ // linear backoff: 1s, 2s, 3s, ...
+ }, 1000 * (this.retryCount + 1));
+ }
},
};
}
@@ -36,10 +62,13 @@ export class ImageElement extends BaseElement {
renderSelf({ config: { src } }: Readonly<ImageState>) {
if (this.src !== src) {
this.src = src;
-
+ this.retryCount = 0;
+ if (this.timeoutId !== null) {
+ window.clearTimeout(this.timeoutId);
+ this.timeoutId = null;
+ }
this.element.setAttribute("src", src);
}
-
return null;
}
}
diff --git a/app/packages/looker/src/worker/decorated-fetch.test.ts b/app/packages/looker/src/worker/decorated-fetch.test.ts
new file mode 100644
index 00000000000..67ed8532009
--- /dev/null
+++ b/app/packages/looker/src/worker/decorated-fetch.test.ts
@@ -0,0 +1,91 @@
+import { beforeEach, describe, expect, it, vi } from "vitest";
+import { fetchWithLinearBackoff } from "./decorated-fetch";
+
+describe("fetchWithLinearBackoff", () => {
+ beforeEach(() => {
+ vi.clearAllMocks();
+ vi.useRealTimers();
+ });
+
+ it("should return response when fetch succeeds on first try", async () => {
+ const mockResponse = new Response("Success", { status: 200 });
+ global.fetch = vi.fn().mockResolvedValue(mockResponse);
+
+ const response = await fetchWithLinearBackoff("http://fiftyone.ai");
+
+ expect(response).toBe(mockResponse);
+ expect(global.fetch).toHaveBeenCalledTimes(1);
+ expect(global.fetch).toHaveBeenCalledWith("http://fiftyone.ai");
+ });
+
+ it("should retry when fetch fails and eventually succeed", async () => {
+ const mockResponse = new Response("Success", { status: 200 });
+ global.fetch = vi
+ .fn()
+ .mockRejectedValueOnce(new Error("Network Error"))
+ .mockResolvedValue(mockResponse);
+
+ const response = await fetchWithLinearBackoff("http://fiftyone.ai");
+
+ expect(response).toBe(mockResponse);
+ expect(global.fetch).toHaveBeenCalledTimes(2);
+ });
+
+ it("should throw an error after max retries when fetch fails every time", async () => {
+ global.fetch = vi.fn().mockRejectedValue(new Error("Network Error"));
+
+ await expect(
+ fetchWithLinearBackoff("http://fiftyone.ai", 3, 10)
+ ).rejects.toThrowError(new RegExp("Max retries for fetch reached"));
+
+ expect(global.fetch).toHaveBeenCalledTimes(3);
+ });
+
+ it("should throw an error when response is not ok", async () => {
+ const mockResponse = new Response("Not Found", { status: 500 });
+ global.fetch = vi.fn().mockResolvedValue(mockResponse);
+
+ await expect(
+ fetchWithLinearBackoff("http://fiftyone.ai", 5, 10)
+ ).rejects.toThrow("HTTP error: 500");
+
+ expect(global.fetch).toHaveBeenCalledTimes(5);
+ });
+
+ it("should throw an error when response is a 4xx, like 404", async () => {
+ const mockResponse = new Response("Not Found", { status: 404 });
+ global.fetch = vi.fn().mockResolvedValue(mockResponse);
+
+ await expect(
+ fetchWithLinearBackoff("http://fiftyone.ai", 5, 10)
+ ).rejects.toThrow("Non-retryable HTTP error: 404");
+
+ expect(global.fetch).toHaveBeenCalledTimes(1);
+ });
+
+ it("should apply linear backoff between retries", async () => {
+ const mockResponse = new Response("Success", { status: 200 });
+ global.fetch = vi
+ .fn()
+ .mockRejectedValueOnce(new Error("Network Error"))
+ .mockRejectedValueOnce(new Error("Network Error"))
+ .mockResolvedValue(mockResponse);
+
+ vi.useFakeTimers();
+
+ const fetchPromise = fetchWithLinearBackoff("http://fiftyone.ai", 5, 10);
+
+ // advance timers to simulate the backoff delays
+ // past the first delay (10ms with the given base delay)
+ await vi.advanceTimersByTimeAsync(100);
+ // past the second delay (20ms)
+ await vi.advanceTimersByTimeAsync(200);
+
+ const response = await fetchPromise;
+
+ expect(response).toBe(mockResponse);
+ expect(global.fetch).toHaveBeenCalledTimes(3);
+
+ vi.useRealTimers();
+ });
+});
diff --git a/app/packages/looker/src/worker/decorated-fetch.ts b/app/packages/looker/src/worker/decorated-fetch.ts
new file mode 100644
index 00000000000..c77059d551c
--- /dev/null
+++ b/app/packages/looker/src/worker/decorated-fetch.ts
@@ -0,0 +1,49 @@
+const DEFAULT_MAX_RETRIES = 10;
+const DEFAULT_BASE_DELAY = 200;
+// list of HTTP status codes that are client errors (4xx) and should not be retried
+const NON_RETRYABLE_STATUS_CODES = [400, 401, 403, 404, 405, 422];
+
+class NonRetryableError extends Error {
+ constructor(message: string) {
+ super(message);
+ this.name = "NonRetryableError";
+ }
+}
+
+export const fetchWithLinearBackoff = async (
+ url: string,
+ retries = DEFAULT_MAX_RETRIES,
+ delay = DEFAULT_BASE_DELAY
+) => {
+ for (let i = 0; i < retries; i++) {
+ try {
+ const response = await fetch(url);
+ if (response.ok) {
+ return response;
+ } else {
+ if (NON_RETRYABLE_STATUS_CODES.includes(response.status)) {
+ throw new NonRetryableError(
+ `Non-retryable HTTP error: ${response.status}`
+ );
+ } else {
+ // retry on other HTTP errors (e.g., 500 Internal Server Error)
+ throw new Error(`HTTP error: ${response.status}`);
+ }
+ }
+ } catch (e) {
+ if (e instanceof NonRetryableError) {
+ // immediately throw
+ throw e;
+ }
+ if (i < retries - 1) {
+ await new Promise((resolve) => setTimeout(resolve, delay * (i + 1)));
+ } else {
+ // max retries reached
+ throw new Error(
+ "Max retries for fetch reached (linear backoff), error: " + e
+ );
+ }
+ }
+ }
+ return null;
+};
diff --git a/app/packages/looker/src/worker/index.ts b/app/packages/looker/src/worker/index.ts
index d10d0625fd0..21859407e2d 100644
--- a/app/packages/looker/src/worker/index.ts
+++ b/app/packages/looker/src/worker/index.ts
@@ -30,6 +30,7 @@ import {
Sample,
} from "../state";
import { decodeWithCanvas } from "./canvas-decoder";
+import { fetchWithLinearBackoff } from "./decorated-fetch";
import { DeserializerFactory } from "./deserializer";
import { PainterFactory } from "./painter";
import { mapId } from "./shared";
@@ -107,11 +108,12 @@ const imputeOverlayFromPath = async (
colorscale: Colorscale,
buffers: ArrayBuffer[],
sources: { [path: string]: string },
- cls: string
+ cls: string,
+ maskPathDecodingPromises: Promise<void>[] = []
) => {
// handle all list types here
if (cls === DETECTIONS) {
- const promises = [];
+ const promises: Promise<void>[] = [];
for (const detection of label.detections) {
promises.push(
imputeOverlayFromPath(
@@ -126,10 +128,7 @@ const imputeOverlayFromPath = async (
)
);
}
- // if some paths fail to load, it's okay, we can still proceed
- // hence we use `allSettled` instead of `all`
- await Promise.allSettled(promises);
- return;
+ maskPathDecodingPromises.push(...promises);
}
// overlay path is in `map_path` property for heatmap, or else, it's in `mask_path` property (for segmentation or detection)
@@ -157,14 +156,17 @@ const imputeOverlayFromPath = async (
baseUrl = overlayImageUrl.split("?")[0];
}
- const overlayImageBuffer: Blob = await getFetchFunction()(
- "GET",
- overlayImageUrl,
- null,
- "blob"
- );
+ let overlayImageBlob: Blob;
+ try {
+ const overlayImageFetchResponse = await fetchWithLinearBackoff(baseUrl);
+ overlayImageBlob = await overlayImageFetchResponse.blob();
+ } catch (e) {
+ console.error(e);
+ // skip decoding if fetch fails altogether
+ return;
+ }
- const overlayMask = await decodeWithCanvas(overlayImageBuffer);
+ const overlayMask = await decodeWithCanvas(overlayImageBlob);
const [overlayHeight, overlayWidth] = overlayMask.shape;
// set the `mask` property for this label
@@ -190,8 +192,11 @@ const processLabels = async (
schema: Schema
): Promise<ArrayBuffer[]> => {
const buffers: ArrayBuffer[] = [];
- const promises = [];
+ const painterPromises = [];
+
+ const maskPathDecodingPromises = [];
+ // mask deserialization / mask_path decoding loop
for (const field in sample) {
let labels = sample[field];
if (!Array.isArray(labels)) {
@@ -205,8 +210,8 @@ const processLabels = async (
}
if (DENSE_LABELS.has(cls)) {
- try {
- await imputeOverlayFromPath(
+ maskPathDecodingPromises.push(
+ imputeOverlayFromPath(
`${prefix || ""}${field}`,
label,
coloring,
@@ -214,11 +219,10 @@ const processLabels = async (
colorscale,
buffers,
sources,
- cls
- );
- } catch (e) {
- console.error("Couldn't decode overlay image from disk: ", e);
- }
+ cls,
+ maskPathDecodingPromises
+ )
+ );
}
if (cls in DeserializerFactory) {
@@ -249,9 +253,25 @@ const processLabels = async (
mapId(label);
}
}
+ }
+ }
+
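+ // wait for every mask path to decode before painting; allSettled tolerates individual failures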
+ await Promise.allSettled(maskPathDecodingPromises);
+ // overlay painting loop
+ for (const field in sample) {
+ let labels = sample[field];
+ if (!Array.isArray(labels)) {
+ labels = [labels];
+ }
+ const cls = getCls(`${prefix ? prefix : ""}${field}`, schema);
+
+ for (const label of labels) {
+ if (!label) {
+ continue;
+ }
if (painterFactory[cls]) {
- promises.push(
+ painterPromises.push(
painterFactory[cls](
prefix ? prefix + field : field,
label,
@@ -266,7 +286,7 @@ const processLabels = async (
}
}
- return Promise.all(promises).then(() => buffers);
+ return Promise.all(painterPromises).then(() => buffers);
};
/** GLOBALS */
diff --git a/app/packages/operators/src/SplitButton.tsx b/app/packages/operators/src/SplitButton.tsx
index 73195636609..fbb4b1266db 100644
--- a/app/packages/operators/src/SplitButton.tsx
+++ b/app/packages/operators/src/SplitButton.tsx
@@ -11,6 +11,7 @@ import {
ListItemText,
Tooltip,
ButtonProps,
+ Box,
} from "@mui/material";
import ArrowDropDownIcon from "@mui/icons-material/ArrowDropDown";
import { onEnter } from "./utils";
@@ -128,6 +129,9 @@ export default function SplitButton({
key={option.id}
disabled={option.disabled}
selected={option.selected}
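+ // options without an onClick (e.g. the disabled "Schedule" choice) keep the default cursor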
+ sx={{
+ cursor: option.onClick ? "pointer" : "default",
+ }}
onClick={() => handleSelect(option)}
>
}
- secondary={option.description}
+ secondary={
+   <Box>
+     {option.description}
+   </Box>
+ }
/>
))}
@@ -158,13 +172,13 @@ export default function SplitButton({
);
}
-function PrimaryWithTag({ label, tag }) {
+function PrimaryWithTag({ label, tag, disabled }) {
const theme = useTheme();
const tagEl = tag ? (
diff --git a/app/packages/operators/src/built-in-operators.ts b/app/packages/operators/src/built-in-operators.ts
index 3edea7378c3..67e7ff6b096 100644
--- a/app/packages/operators/src/built-in-operators.ts
+++ b/app/packages/operators/src/built-in-operators.ts
@@ -1361,6 +1361,54 @@ export class EnableQueryPerformance extends Operator {
}
}
+class OpenSample extends Operator {
+ _builtIn = true;
+ get config(): OperatorConfig {
+ return new OperatorConfig({
+ name: "open_sample",
+ label: "Open Sample",
+ unlisted: true,
+ });
+ }
+ async resolveInput(): Promise<types.Property> {
+ const inputs = new types.Object();
+ inputs.str("id", { label: "Sample ID" });
+ inputs.str("group_id", { label: "Group ID" });
+
+ return new types.Property(inputs);
+ }
+ useHooks(): object {
+ return {
+ setExpanded: fos.useSetExpandedSample(),
+ };
+ }
+ async execute({ hooks, params }: ExecutionContext) {
+ hooks.setExpanded({
+ id: params.id,
+ group_id: params.group_id,
+ });
+ }
+}
+
+class CloseSample extends Operator {
+ _builtIn = true;
+ get config(): OperatorConfig {
+ return new OperatorConfig({
+ name: "close_sample",
+ label: "Close Sample",
+ unlisted: true,
+ });
+ }
+ useHooks(): object {
+ return {
+ close: fos.useClearModal(),
+ };
+ }
+ async execute({ hooks, params }: ExecutionContext) {
+ hooks.close();
+ }
+}
+
export function registerBuiltInOperators() {
try {
_registerBuiltInOperator(CopyViewAsJSON);
@@ -1412,6 +1460,8 @@ export function registerBuiltInOperators() {
_registerBuiltInOperator(SetFrameNumber);
_registerBuiltInOperator(DisableQueryPerformance);
_registerBuiltInOperator(EnableQueryPerformance);
+ _registerBuiltInOperator(OpenSample);
+ _registerBuiltInOperator(CloseSample);
} catch (e) {
console.error("Error registering built-in operators");
console.error(e);
diff --git a/app/packages/operators/src/state.ts b/app/packages/operators/src/state.ts
index 59ccc6c7411..6d4a87e92c0 100644
--- a/app/packages/operators/src/state.ts
+++ b/app/packages/operators/src/state.ts
@@ -1,7 +1,13 @@
import { useAnalyticsInfo } from "@fiftyone/analytics";
import * as fos from "@fiftyone/state";
import { debounce } from "lodash";
-import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import React, {
+ useCallback,
+ useEffect,
+ useMemo,
+ useRef,
+ useState,
+} from "react";
import {
atom,
selector,
@@ -32,6 +38,7 @@ import {
import { OperatorPromptType, Places } from "./types";
import { OperatorExecutorOptions } from "./types-internal";
import { ValidationContext } from "./validation";
+import { Markdown } from "@fiftyone/components";
export const promptingOperatorState = atom({
key: "promptingOperator",
@@ -231,8 +238,8 @@ function useExecutionOptions(operatorURI, ctx, isRemote) {
export type OperatorExecutionOption = {
label: string;
id: string;
- description: string;
- onClick: () => void;
+ description: string | React.ReactNode;
+ onClick?: () => void;
isDelegated: boolean;
choiceLabel?: string;
tag?: string;
@@ -251,7 +258,7 @@ const useOperatorPromptSubmitOptions = (
const persistUnderKey = `operator-prompt-${operatorURI}`;
const availableOrchestrators =
execDetails.executionOptions?.availableOrchestrators || [];
- const hasAvailableOrchestators = availableOrchestrators.length > 0;
+ const hasAvailableOrchestrators = availableOrchestrators.length > 0;
const executionOptions = execDetails.executionOptions || {};
const defaultToExecute = executionOptions.allowDelegatedExecution
? !executionOptions.defaultChoiceToDelegated
@@ -287,7 +294,7 @@ const useOperatorPromptSubmitOptions = (
label: "Schedule",
id: "schedule",
default: defaultToSchedule,
- description: "Run this operation on your compute cluster",
+ description: "Run this operation in the background",
onSelect() {
setSelectedID("schedule");
},
@@ -300,7 +307,7 @@ const useOperatorPromptSubmitOptions = (
if (
executionOptions.allowDelegatedExecution &&
- hasAvailableOrchestators &&
+ hasAvailableOrchestrators &&
executionOptions.orchestratorRegistrationEnabled
) {
for (let orc of execDetails.executionOptions.availableOrchestrators) {
@@ -321,6 +328,25 @@ const useOperatorPromptSubmitOptions = (
isDelegated: true,
});
}
+ } else if (
+ executionOptions.allowDelegatedExecution &&
+ executionOptions.allowImmediateExecution &&
+ executionOptions.orchestratorRegistrationEnabled &&
+ !hasAvailableOrchestrators
+ ) {
+ const markdownDesc = React.createElement(
+ Markdown,
+ null,
+ "[Learn how](https://docs.voxel51.com/plugins/using_plugins.html#delegated-operations) to run this operation in the background"
+ );
+ options.push({
+ label: "Schedule",
+ choiceLabel: `Schedule`,
+ tag: "NOT AVAILABLE",
+ id: "disabled-schedule",
+ description: markdownDesc,
+ isDelegated: true,
+ });
}
// sort options so that the default is always the first in the list
@@ -366,10 +392,11 @@ const useOperatorPromptSubmitOptions = (
if (selectedOption) selectedOption.selected = true;
const showWarning =
executionOptions.orchestratorRegistrationEnabled &&
- !hasAvailableOrchestators &&
+ !hasAvailableOrchestrators &&
!executionOptions.allowImmediateExecution;
- const warningMessage =
- "There are no available orchestrators to schedule this operation. Please contact your administrator to add an orchestrator.";
+ const warningStr =
+ "This operation requires [delegated execution](https://docs.voxel51.com/plugins/using_plugins.html#delegated-operations)";
+ const warningMessage = React.createElement(Markdown, null, warningStr);
return {
showWarning,
diff --git a/app/packages/relay/src/queries/__generated__/lightningQuery.graphql.ts b/app/packages/relay/src/queries/__generated__/lightningQuery.graphql.ts
index 4749ee62996..e4510efa610 100644
--- a/app/packages/relay/src/queries/__generated__/lightningQuery.graphql.ts
+++ b/app/packages/relay/src/queries/__generated__/lightningQuery.graphql.ts
@@ -1,5 +1,5 @@
/**
- * @generated SignedSource<>
+ * @generated SignedSource<>
* @lightSyntaxTransform
* @nogrep
*/
@@ -12,6 +12,7 @@ import { ConcreteRequest, Query } from 'relay-runtime';
export type LightningInput = {
dataset: string;
paths: ReadonlyArray<LightningPathInput>;
+ slice?: string | null;
};
export type LightningPathInput = {
exclude?: ReadonlyArray<string> | null;
diff --git a/app/packages/state/src/hooks/useSetModalState.ts b/app/packages/state/src/hooks/useSetModalState.ts
index 891b5faff7a..80466b34e5e 100644
--- a/app/packages/state/src/hooks/useSetModalState.ts
+++ b/app/packages/state/src/hooks/useSetModalState.ts
@@ -2,7 +2,6 @@ import type { CallbackInterface, RecoilState } from "recoil";
import { useRelayEnvironment } from "react-relay";
import { useRecoilCallback } from "recoil";
-import { dynamicGroupsViewMode } from "../recoil";
import * as atoms from "../recoil/atoms";
import * as filterAtoms from "../recoil/filters";
import * as groupAtoms from "../recoil/groups";
@@ -45,8 +44,6 @@ export default () => {
];
}),
- [dynamicGroupsViewMode(true), dynamicGroupsViewMode(false)],
-
[atoms.cropToContent(true), atoms.cropToContent(false)],
[atoms.sortFilterResults(true), atoms.sortFilterResults(false)],
[groupAtoms.groupStatistics(true), groupAtoms.groupStatistics(false)],
diff --git a/app/packages/state/src/recoil/options.ts b/app/packages/state/src/recoil/options.ts
index 7cbf44b7aff..09811d6ec2b 100644
--- a/app/packages/state/src/recoil/options.ts
+++ b/app/packages/state/src/recoil/options.ts
@@ -4,7 +4,7 @@ import {
mediaFieldsFragment,
mediaFieldsFragment$key,
} from "@fiftyone/relay";
-import { atomFamily, selector, selectorFamily } from "recoil";
+import { DefaultValue, atomFamily, selector, selectorFamily } from "recoil";
import { getBrowserStorageEffectForKey } from "./customEffects";
import { datasetSampleCount } from "./dataset";
import { fieldPaths } from "./schema";
@@ -55,17 +55,43 @@ export const selectedMediaField = selectorFamily({
set(selectedMediaFieldAtomFamily(modal), value),
});
-export const dynamicGroupsViewMode = atomFamily<
- "carousel" | "pagination" | "video",
+export const dynamicGroupsViewModeStore = atomFamily<
+ "carousel" | "pagination" | "video" | null,
boolean
>({
- key: "dynamicGroupsViewMode",
- default: "pagination",
+ key: "dynamicGroupsViewModeStore",
+ default: null,
effects: (modal) => [
getBrowserStorageEffectForKey(`dynamicGroupsViewMode-${modal}`),
],
});
+export const dynamicGroupsViewMode = selectorFamily({
+ key: "dynamicGroupsViewMode",
+ get:
+ (modal: boolean) =>
+ ({ get }) => {
+ const value = get(dynamicGroupsViewModeStore(modal));
+
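+      // when unset, the modal inherits the grid's stored mode; otherwise default to "pagination"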
+ if (!value) {
+ return modal
+ ? get(dynamicGroupsViewModeStore(false)) ?? "pagination"
+ : "pagination";
+ }
+
+ return value;
+ },
+ set:
+ (modal: boolean) =>
+ ({ reset, set }, newValue) => {
+ const instance = dynamicGroupsViewModeStore(modal);
+
+ newValue instanceof DefaultValue
+ ? reset(instance)
+ : set(instance, newValue);
+ },
+});
+
export const isLargeVideo = selector({
key: "isLargeVideo",
get: ({ get }) => {
diff --git a/app/packages/state/src/recoil/queryPerformance.ts b/app/packages/state/src/recoil/queryPerformance.ts
index 8392074e296..439f167889f 100644
--- a/app/packages/state/src/recoil/queryPerformance.ts
+++ b/app/packages/state/src/recoil/queryPerformance.ts
@@ -11,6 +11,7 @@ import { graphQLSelectorFamily } from "recoil-relay";
import type { ResponseFrom } from "../utils";
import { config } from "./config";
import { getBrowserStorageEffectForKey } from "./customEffects";
+import { groupSlice } from "./groups";
import { isLabelPath } from "./labels";
import { RelayEnvironmentKey } from "./relay";
import * as schemaAtoms from "./schema";
@@ -34,6 +35,7 @@ export const lightningQuery = graphQLSelectorFamily<
input: {
dataset: get(datasetName),
paths,
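+        // scope the query to the active group slice, if any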
+ slice: get(groupSlice),
},
};
},
diff --git a/app/packages/utilities/src/fetch.ts b/app/packages/utilities/src/fetch.ts
index 71c5366336e..260dfac82ba 100644
--- a/app/packages/utilities/src/fetch.ts
+++ b/app/packages/utilities/src/fetch.ts
@@ -18,7 +18,7 @@ export interface FetchFunction {
body?: A,
result?: "json" | "blob" | "text" | "arrayBuffer" | "json-stream",
retries?: number,
- retryCodes?: number[] | "arrayBuffer"
+ retryCodes?: number[]
): Promise<R>;
}
@@ -110,7 +110,7 @@ export const setFetchFunction = (
const fetchCall = retries
? fetchRetry(fetch, {
retries,
- retryDelay: 0,
+ retryDelay: 500,
retryOn: (attempt, error, response) => {
if (
(error !== null || retryCodes.includes(response.status)) &&
diff --git a/app/schema.graphql b/app/schema.graphql
index 2b843f3a7aa..f959dd13d2b 100644
--- a/app/schema.graphql
+++ b/app/schema.graphql
@@ -444,6 +444,7 @@ input LabelTagColorInput {
input LightningInput {
dataset: String!
paths: [LightningPathInput!]!
+ slice: String = null
}
input LightningPathInput {
diff --git a/docs/source/images/plugins/operators/operator-user-delegation.png b/docs/source/images/plugins/operators/operator-user-delegation.png
deleted file mode 100644
index 82a7f82c6cf..00000000000
Binary files a/docs/source/images/plugins/operators/operator-user-delegation.png and /dev/null differ
diff --git a/docs/source/plugins/developing_plugins.rst b/docs/source/plugins/developing_plugins.rst
index 9b079173b40..9be6827c5cd 100644
--- a/docs/source/plugins/developing_plugins.rst
+++ b/docs/source/plugins/developing_plugins.rst
@@ -992,9 +992,8 @@ contains the following properties:
instance that you can use to read and write the :ref:`state `
and :ref:`data ` of the current panel, if the operator was
invoked from a panel
-- `ctx.delegated` - whether delegated execution has been forced for the
- operation
-- `ctx.requesting_delegated_execution` - whether delegated execution has been
+- `ctx.delegated` - whether the operation was delegated
+- `ctx.requesting_delegated_execution` - whether delegated execution was
requested for the operation
- `ctx.delegation_target` - the orchestrator to which the operation should be
delegated, if applicable
@@ -1248,46 +1247,6 @@ of the current view:
to specify the available execution options as described in the previous
section.
-Alternatively, you could simply ask the user to decide:
-
-.. code-block:: python
- :linenos:
-
- def resolve_input(self, ctx):
- delegate = ctx.params.get("delegate", None)
-
- if delegate:
- description = "Uncheck this box to execute the operation immediately"
- else:
- description = "Check this box to delegate execution of this task"
-
- inputs.bool(
- "delegate",
- label="Delegate execution?",
- description=description,
- view=types.CheckboxView(),
- )
-
- if delegate:
- inputs.view(
- "notice",
- types.Notice(
- label=(
- "You've chosen delegated execution. Note that you must "
- "have a delegated operation service running in order for "
- "this task to be processed. See "
- "https://docs.voxel51.com/plugins/index.html#operators "
- "for more information"
- )
- ),
- )
-
- def resolve_delegation(self, ctx):
- return ctx.params.get("delegate", None)
-
-.. image:: /images/plugins/operators/operator-user-delegation.png
- :align: center
-
.. _operator-reporting-progress:
Reporting progress
diff --git a/docs/source/plugins/using_plugins.rst b/docs/source/plugins/using_plugins.rst
index eed8934be62..0a03ef9ebc5 100644
--- a/docs/source/plugins/using_plugins.rst
+++ b/docs/source/plugins/using_plugins.rst
@@ -891,6 +891,14 @@ FiftyOne Open Source users can run delegated operations via the
This command starts a service that will continuously check for any queued
delegated operations and execute them serially in its process.
+You must also ensure that the
+:ref:`allow_legacy_orchestrators ` config flag is set
+in the environment where you run the App, e.g. by setting:
+
+.. code-block:: shell
+
+ export FIFTYONE_ALLOW_LEGACY_ORCHESTRATORS=true
+
.. _delegated-orchestrator-teamas:
FiftyOne Teams
diff --git a/docs/source/user_guide/app.rst b/docs/source/user_guide/app.rst
index 36476a63593..a323876c8cb 100644
--- a/docs/source/user_guide/app.rst
+++ b/docs/source/user_guide/app.rst
@@ -488,8 +488,6 @@ perform initial filters on:
# Note: it is faster to declare indexes before adding samples
dataset.add_samples(...)
- fo.app_config.default_query_performance = True
-
session = fo.launch_app(dataset)
.. note::
@@ -520,8 +518,6 @@ compound index that includes the group slice name:
dataset.create_index("ground_truth.detections.label")
dataset.create_index([("group.name", 1), ("ground_truth.detections.label", 1)])
- fo.app_config.default_query_performance = True
-
session = fo.launch_app(dataset)
For datasets with a small number of fields, you can index all fields by adding
@@ -537,8 +533,6 @@ a single
dataset = foz.load_zoo_dataset("quickstart")
dataset.create_index("$**")
- fo.app_config.default_query_performance = True
-
session = fo.launch_app(dataset)
.. warning::
diff --git a/docs/source/user_guide/config.rst b/docs/source/user_guide/config.rst
index 74043e68eb2..73273a5fa0b 100644
--- a/docs/source/user_guide/config.rst
+++ b/docs/source/user_guide/config.rst
@@ -14,119 +14,121 @@ Configuration options
FiftyOne supports the configuration options described below:
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| Config field | Environment variable | Default value | Description |
-+===============================+=====================================+===============================+========================================================================================+
-| `database_admin` | `FIFTYONE_DATABASE_ADMIN` | `True` | Whether the client is allowed to trigger database migrations. See |
-| | | | :ref:`this section ` for more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `database_dir` | `FIFTYONE_DATABASE_DIR` | `~/.fiftyone/var/lib/mongo` | The directory in which to store FiftyOne's backing database. Only applicable if |
-| | | | `database_uri` is not defined. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `database_name` | `FIFTYONE_DATABASE_NAME` | `fiftyone` | A name to use for FiftyOne's backing database in your MongoDB instance. The database |
-| | | | is automatically created if necessary. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `database_uri` | `FIFTYONE_DATABASE_URI` | `None` | A `MongoDB URI `_ to |
-| | | | specifying a custom MongoDB database to which to connect. See |
-| | | | :ref:`this section ` for more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `database_validation` | `FIFTYONE_DATABASE_VALIDATION` | `True` | Whether to validate the compatibility of database before connecting to it. See |
-| | | | :ref:`this section ` for more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `dataset_zoo_dir` | `FIFTYONE_DATASET_ZOO_DIR` | `~/fiftyone` | The default directory in which to store datasets that are downloaded from the |
-| | | | :ref:`FiftyOne Dataset Zoo `. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `dataset_zoo_manifest_paths` | `FIFTYONE_ZOO_MANIFEST_PATHS` | `None` | A list of manifest JSON files specifying additional zoo datasets. See |
-| | | | :ref:`adding datasets to the zoo ` for more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_dataset_dir` | `FIFTYONE_DEFAULT_DATASET_DIR` | `~/fiftyone` | The default directory to use when performing FiftyOne operations that |
-| | | | require writing dataset contents to disk, such as ingesting datasets via |
-| | | | :meth:`ingest_labeled_images() `. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_ml_backend` | `FIFTYONE_DEFAULT_ML_BACKEND` | `torch` | The default ML backend to use when performing operations such as |
-| | | | downloading datasets from the FiftyOne Dataset Zoo that support multiple ML |
-| | | | backends. Supported values are `torch` and `tensorflow`. By default, |
-| | | | `torch` is used if `PyTorch `_ is installed in your |
-| | | | Python environment, and `tensorflow` is used if |
-| | | | `TensorFlow `_ is installed. If no supported backend |
-| | | | is detected, this defaults to `None`, and any operation that requires an |
-| | | | installed ML backend will raise an informative error message if invoked in |
-| | | | this state. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_batch_size` | `FIFTYONE_DEFAULT_BATCH_SIZE` | `None` | A default batch size to use when :ref:`applying models to datasets `. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_batcher` | `FIFTYONE_DEFAULT_BATCHER` | `latency` | Batching implementation to use in some batched database operations such as |
-| | | | :meth:`add_samples() `, |
-| | | | :meth:`set_values() `, and |
-| | | | :meth:`save_context() `. |
-| | | | Supported values are `latency`, `size`, and `static`. |
-| | | | |
-| | | | `latency` is the default, which uses a dynamic batch size to achieve a target latency |
-| | | | of `batcher_target_latency` between calls. The default changes to `size` for the |
-| | | | FiftyOne Teams SDK in :ref:`API connection mode `, which targets |
-| | | | a size of `batcher_target_size_bytes` for each call. `static` uses a fixed batch size |
-| | | | of `batcher_static_size`. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `batcher_static_size` | `FIFTYONE_BATCHER_STATIC_SIZE` | `100` | Fixed size of batches. Only used when `default_batcher` is `static`. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `batcher_target_size_bytes` | `FIFTYONE_BATCHER_TARGET_SIZE_BYTES`| `2 ** 20` | Target content size of batches, in bytes. Only used when `default_batcher` is `size`. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `batcher_target_latency` | `FIFTYONE_BATCHER_TARGET_LATENCY` | `0.2` | Target latency between batches, in seconds. Only used when `default_batcher` is |
-| | | | `latency`. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_sequence_idx` | `FIFTYONE_DEFAULT_SEQUENCE_IDX` | `%06d` | The default numeric string pattern to use when writing sequential lists of |
-| | | | files. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_image_ext` | `FIFTYONE_DEFAULT_IMAGE_EXT` | `.jpg` | The default image format to use when writing images to disk. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_video_ext` | `FIFTYONE_DEFAULT_VIDEO_EXT` | `.mp4` | The default video format to use when writing videos to disk. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_app_port` | `FIFTYONE_DEFAULT_APP_PORT` | `5151` | The default port to use to serve the :ref:`FiftyOne App `. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `default_app_address` | `FIFTYONE_DEFAULT_APP_ADDRESS` | `localhost` | The default address to use to serve the :ref:`FiftyOne App `. This may |
-| | | | be either an IP address or hostname. If it's a hostname, the App will listen to all |
-| | | | IP addresses associated with the name. The default is `localhost`, which means the App |
-| | | | will only listen on the local interface. See :ref:`this page `|
-| | | | for more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `do_not_track` | `FIFTYONE_DO_NOT_TRACK` | `False` | Controls whether UUID based import and App usage events are tracked. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `logging_level` | `FIFTYONE_LOGGING_LEVEL` | `INFO` | Controls FiftyOne's package-wide logging level. Can be any valid ``logging`` level as |
-| | | | a string: ``DEBUG, INFO, WARNING, ERROR, CRITICAL``. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `max_thread_pool_workers` | `FIFTYONE_MAX_THREAD_POOL_WORKERS` | `None` | An optional maximum number of workers to use when creating thread pools |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `max_process_pool_workers` | `FIFTYONE_MAX_PROCESS_POOL_WORKERS` | `None` | An optional maximum number of workers to use when creating process pools |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `model_zoo_dir` | `FIFTYONE_MODEL_ZOO_DIR` | `~/fiftyone/__models__` | The default directory in which to store models that are downloaded from the |
-| | | | :ref:`FiftyOne Model Zoo `. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `model_zoo_manifest_paths` | `FIFTYONE_MODEL_ZOO_MANIFEST_PATHS` | `None` | A list of manifest JSON files specifying additional zoo models. See |
-| | | | :ref:`adding models to the zoo ` for more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `module_path` | `FIFTYONE_MODULE_PATH` | `None` | A list of modules that should be automatically imported whenever FiftyOne is imported. |
-| | | | See :ref:`this page ` for an example usage. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `operator_timeout` | `FIFTYONE_OPERATOR_TIMEOUT` | `600` | The timeout for execution of an operator. See :ref:`this page ` for |
-| | | | more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `plugins_dir` | `FIFTYONE_PLUGINS_DIR` | `None` | A directory containing custom App plugins. See :ref:`this page ` for |
-| | | | more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `plugins_cache_enabled` | `FIFTYONE_PLUGINS_CACHE_ENABLED` | `False` | When set to ``True`` plugins will be cached until their directory's ``mtime`` changes. |
-| | | | This is intended to be used in production. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `requirement_error_level` | `FIFTYONE_REQUIREMENT_ERROR_LEVEL` | `0` | A default error level to use when ensuring/installing requirements such as third-party |
-| | | | packages. See :ref:`loading zoo models ` for an example usage. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `show_progress_bars` | `FIFTYONE_SHOW_PROGRESS_BARS` | `True` | Controls whether progress bars are printed to the terminal when performing |
-| | | | operations such reading/writing large datasets or activating FiftyOne |
-| | | | Brain methods on datasets. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
-| `timezone` | `FIFTYONE_TIMEZONE` | `None` | An optional timezone string. If provided, all datetimes read from FiftyOne datasets |
-| | | | will be expressed in this timezone. See :ref:`this section ` for |
-| | | | more information. |
-+-------------------------------+-------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| Config field | Environment variable | Default value | Description |
++===============================+=======================================+===============================+========================================================================================+
+| `database_admin` | `FIFTYONE_DATABASE_ADMIN` | `True` | Whether the client is allowed to trigger database migrations. See |
+| | | | :ref:`this section ` for more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `database_dir` | `FIFTYONE_DATABASE_DIR` | `~/.fiftyone/var/lib/mongo` | The directory in which to store FiftyOne's backing database. Only applicable if |
+| | | | `database_uri` is not defined. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `database_name` | `FIFTYONE_DATABASE_NAME` | `fiftyone` | A name to use for FiftyOne's backing database in your MongoDB instance. The database |
+| | | | is automatically created if necessary. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `database_uri` | `FIFTYONE_DATABASE_URI` | `None` | A `MongoDB URI `_ to |
+|                               |                                       |                               | specify a custom MongoDB database to which to connect. See                             |
+| | | | :ref:`this section ` for more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `database_validation` | `FIFTYONE_DATABASE_VALIDATION` | `True` | Whether to validate the compatibility of database before connecting to it. See |
+| | | | :ref:`this section ` for more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `dataset_zoo_dir` | `FIFTYONE_DATASET_ZOO_DIR` | `~/fiftyone` | The default directory in which to store datasets that are downloaded from the |
+| | | | :ref:`FiftyOne Dataset Zoo `. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `dataset_zoo_manifest_paths` | `FIFTYONE_ZOO_MANIFEST_PATHS` | `None` | A list of manifest JSON files specifying additional zoo datasets. See |
+| | | | :ref:`adding datasets to the zoo ` for more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_dataset_dir` | `FIFTYONE_DEFAULT_DATASET_DIR` | `~/fiftyone` | The default directory to use when performing FiftyOne operations that |
+| | | | require writing dataset contents to disk, such as ingesting datasets via |
+| | | | :meth:`ingest_labeled_images() `. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_ml_backend` | `FIFTYONE_DEFAULT_ML_BACKEND` | `torch` | The default ML backend to use when performing operations such as |
+| | | | downloading datasets from the FiftyOne Dataset Zoo that support multiple ML |
+| | | | backends. Supported values are `torch` and `tensorflow`. By default, |
+| | | | `torch` is used if `PyTorch `_ is installed in your |
+| | | | Python environment, and `tensorflow` is used if |
+| | | | `TensorFlow `_ is installed. If no supported backend |
+| | | | is detected, this defaults to `None`, and any operation that requires an |
+| | | | installed ML backend will raise an informative error message if invoked in |
+| | | | this state. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_batch_size` | `FIFTYONE_DEFAULT_BATCH_SIZE` | `None` | A default batch size to use when :ref:`applying models to datasets `. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_batcher` | `FIFTYONE_DEFAULT_BATCHER` | `latency` | Batching implementation to use in some batched database operations such as |
+| | | | :meth:`add_samples() `, |
+| | | | :meth:`set_values() `, and |
+| | | | :meth:`save_context() `. |
+| | | | Supported values are `latency`, `size`, and `static`. |
+| | | | |
+| | | | `latency` is the default, which uses a dynamic batch size to achieve a target latency |
+| | | | of `batcher_target_latency` between calls. The default changes to `size` for the |
+| | | | FiftyOne Teams SDK in :ref:`API connection mode `, which targets |
+| | | | a size of `batcher_target_size_bytes` for each call. `static` uses a fixed batch size |
+| | | | of `batcher_static_size`. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `batcher_static_size` | `FIFTYONE_BATCHER_STATIC_SIZE` | `100` | Fixed size of batches. Only used when `default_batcher` is `static`. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `batcher_target_size_bytes` | `FIFTYONE_BATCHER_TARGET_SIZE_BYTES` | `2 ** 20` | Target content size of batches, in bytes. Only used when `default_batcher` is `size`. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `batcher_target_latency` | `FIFTYONE_BATCHER_TARGET_LATENCY` | `0.2` | Target latency between batches, in seconds. Only used when `default_batcher` is |
+| | | | `latency`. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_sequence_idx` | `FIFTYONE_DEFAULT_SEQUENCE_IDX` | `%06d` | The default numeric string pattern to use when writing sequential lists of |
+| | | | files. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_image_ext` | `FIFTYONE_DEFAULT_IMAGE_EXT` | `.jpg` | The default image format to use when writing images to disk. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_video_ext` | `FIFTYONE_DEFAULT_VIDEO_EXT` | `.mp4` | The default video format to use when writing videos to disk. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_app_port` | `FIFTYONE_DEFAULT_APP_PORT` | `5151` | The default port to use to serve the :ref:`FiftyOne App `. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `default_app_address` | `FIFTYONE_DEFAULT_APP_ADDRESS` | `localhost` | The default address to use to serve the :ref:`FiftyOne App `. This may |
+| | | | be either an IP address or hostname. If it's a hostname, the App will listen to all |
+| | | | IP addresses associated with the name. The default is `localhost`, which means the App |
+| | | | will only listen on the local interface. See :ref:`this page `|
+| | | | for more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `do_not_track` | `FIFTYONE_DO_NOT_TRACK` | `False` | Controls whether UUID based import and App usage events are tracked. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `logging_level` | `FIFTYONE_LOGGING_LEVEL` | `INFO` | Controls FiftyOne's package-wide logging level. Can be any valid ``logging`` level as |
+| | | | a string: ``DEBUG, INFO, WARNING, ERROR, CRITICAL``. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `max_thread_pool_workers` | `FIFTYONE_MAX_THREAD_POOL_WORKERS` | `None` | An optional maximum number of workers to use when creating thread pools |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `max_process_pool_workers` | `FIFTYONE_MAX_PROCESS_POOL_WORKERS` | `None` | An optional maximum number of workers to use when creating process pools |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `model_zoo_dir` | `FIFTYONE_MODEL_ZOO_DIR` | `~/fiftyone/__models__` | The default directory in which to store models that are downloaded from the |
+| | | | :ref:`FiftyOne Model Zoo `. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `model_zoo_manifest_paths` | `FIFTYONE_MODEL_ZOO_MANIFEST_PATHS` | `None` | A list of manifest JSON files specifying additional zoo models. See |
+| | | | :ref:`adding models to the zoo ` for more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `module_path` | `FIFTYONE_MODULE_PATH` | `None` | A list of modules that should be automatically imported whenever FiftyOne is imported. |
+| | | | See :ref:`this page ` for an example usage. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `operator_timeout` | `FIFTYONE_OPERATOR_TIMEOUT` | `600` | The timeout for execution of an operator. See :ref:`this page ` for |
+| | | | more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `allow_legacy_orchestrators` | `FIFTYONE_ALLOW_LEGACY_ORCHESTRATORS` | `False` | Whether to allow delegated operations to be scheduled locally. |
+| | | | See :ref:`this page ` for more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `plugins_dir` | `FIFTYONE_PLUGINS_DIR` | `None` | A directory containing custom App plugins. See :ref:`this page ` for |
+| | | | more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `plugins_cache_enabled` | `FIFTYONE_PLUGINS_CACHE_ENABLED` | `False` | When set to ``True``, plugins will be cached until their directory's ``mtime`` changes.|
+| | | | This is intended to be used in production. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `show_progress_bars` | `FIFTYONE_SHOW_PROGRESS_BARS` | `True` | Controls whether progress bars are printed to the terminal when performing |
+| | | | operations such as reading/writing large datasets or activating FiftyOne |
+| | | | Brain methods on datasets. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
+| `timezone` | `FIFTYONE_TIMEZONE` | `None` | An optional timezone string. If provided, all datetimes read from FiftyOne datasets |
+| | | | will be expressed in this timezone. See :ref:`this section ` for |
+| | | | more information. |
++-------------------------------+---------------------------------------+-------------------------------+----------------------------------------------------------------------------------------+
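Like any other setting, the new `allow_legacy_orchestrators` flag can be supplied via
`~/.fiftyone/config.json` or its environment variable. A minimal sketch (the
session-level assignment assumes the standard `fo.config` accessor):

    # Option 1: environment variable
    #   export FIFTYONE_ALLOW_LEGACY_ORCHESTRATORS=true

    # Option 2: enable it for the current session
    import fiftyone as fo

    fo.config.allow_legacy_orchestrators = True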
Viewing your config
-------------------
@@ -178,6 +180,7 @@ and the CLI:
"model_zoo_manifest_paths": null,
"module_path": null,
"operator_timeout": 600,
+ "allow_legacy_orchestrators": false,
"plugins_cache_enabled": false,
"plugins_dir": null,
"requirement_error_level": 0,
@@ -227,6 +230,7 @@ and the CLI:
"model_zoo_manifest_paths": null,
"module_path": null,
"operator_timeout": 600,
+ "allow_legacy_orchestrators": false,
"plugins_cache_enabled": false,
"plugins_dir": null,
"requirement_error_level": 0,
diff --git a/fiftyone/core/config.py b/fiftyone/core/config.py
index 653648a37bb..1e7b04b9744 100644
--- a/fiftyone/core/config.py
+++ b/fiftyone/core/config.py
@@ -120,6 +120,12 @@ def __init__(self, d=None):
env_var="FIFTYONE_OPERATOR_TIMEOUT",
default=600, # 600 seconds (10 minutes)
)
+ self.allow_legacy_orchestrators = self.parse_bool(
+ d,
+ "allow_legacy_orchestrators",
+ env_var="FIFTYONE_ALLOW_LEGACY_ORCHESTRATORS",
+ default=False,
+ )
self.dataset_zoo_manifest_paths = self.parse_path_array(
d,
"dataset_zoo_manifest_paths",
diff --git a/fiftyone/factory/repos/delegated_operation.py b/fiftyone/factory/repos/delegated_operation.py
index 0224a07e95f..ae3b42c8076 100644
--- a/fiftyone/factory/repos/delegated_operation.py
+++ b/fiftyone/factory/repos/delegated_operation.py
@@ -177,15 +177,15 @@ def queue_operation(self, **kwargs: Any) -> DelegatedOperationDocument:
raise ValueError("Missing required property '%s'" % prop)
setattr(op, prop, kwargs.get(prop))
- # also set the delegation target (not required)
delegation_target = kwargs.get("delegation_target", None)
if delegation_target:
setattr(op, "delegation_target", delegation_target)
- # also set the metadata (not required)
metadata = kwargs.get("metadata", None)
if metadata:
setattr(op, "metadata", metadata)
+ else:
+ setattr(op, "metadata", {})
context = None
if isinstance(op.context, dict):
@@ -262,8 +262,6 @@ def update_run_state(
else None
)
- needs_pipeline_update = False
-
if run_state == ExecutionRunState.COMPLETED:
update = {
"$set": {
@@ -278,7 +276,6 @@ def update_run_state(
update["$set"]["metadata.outputs_schema"] = (
outputs_schema or {}
)
- needs_pipeline_update = True
elif run_state == ExecutionRunState.FAILED:
update = {
@@ -328,12 +325,6 @@ def update_run_state(
if required_state is not None:
collection_filter["run_state"] = required_state
- # Using pipeline update instead of a single update doc fixes a case
- # where `metadata` is null and so accessing the dotted field
- # `metadata.output_schema` creates the document instead of erroring.
- if needs_pipeline_update:
- update = [update]
-
doc = self._collection.find_one_and_update(
filter=collection_filter,
update=update,
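The pipeline-update workaround removed above is safe to drop because `queue_operation`
now guarantees `metadata` is a dict rather than null. A sketch of the failure mode it
guarded against, assuming a local `mongod` and `pymongo` installed:

    from pymongo import MongoClient

    coll = MongoClient()["scratch"]["ops"]
    coll.insert_one({"_id": 1, "metadata": None})

    # With `metadata: null`, a dotted $set raises a WriteError:
    # "Cannot create field 'outputs_schema' in element {metadata: null}"
    try:
        coll.update_one({"_id": 1}, {"$set": {"metadata.outputs_schema": {}}})
    except Exception as e:
        print(type(e).__name__, e)

    # With `metadata: {}`, the same dotted update succeeds
    coll.update_one({"_id": 1}, {"$set": {"metadata": {}}})
    coll.update_one({"_id": 1}, {"$set": {"metadata.outputs_schema": {}}})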
diff --git a/fiftyone/operators/delegated.py b/fiftyone/operators/delegated.py
index d4ef748473b..c03762348ad 100644
--- a/fiftyone/operators/delegated.py
+++ b/fiftyone/operators/delegated.py
@@ -54,7 +54,6 @@ def queue_operation(
- inputs_schema: the schema of the operator's inputs
- outputs_schema: the schema of the operator's outputs
-
Returns:
a :class:`fiftyone.factory.repos.DelegatedOperationDocument`
"""
@@ -514,11 +513,12 @@ async def _execute_operator(self, doc):
result = await do_execute_operator(operator, ctx, exhaust=True)
outputs_schema = None
- request_params = {**context.request_params, "results": result}
try:
- outputs = await resolve_type_with_context(
- request_params, "outputs"
- )
+ # Resolve output types now
+ ctx.request_params["target"] = "outputs"
+ ctx.request_params["results"] = result
+
+ outputs = await resolve_type_with_context(operator, ctx)
if outputs is not None:
outputs_schema = outputs.to_json()
except (AttributeError, Exception):
diff --git a/fiftyone/operators/executor.py b/fiftyone/operators/executor.py
index c15bbcc0699..a2b50e7af03 100644
--- a/fiftyone/operators/executor.py
+++ b/fiftyone/operators/executor.py
@@ -258,6 +258,7 @@ async def execute_or_delegate_operator(
try:
from .delegated import DelegatedOperationService
+ ctx.request_params["delegated"] = True
metadata = {"inputs_schema": None, "outputs_schema": None}
try:
@@ -378,31 +379,28 @@ async def resolve_type(registry, operator_uri, request_params):
required_secrets=operator._plugin_secrets,
)
await ctx.resolve_secret_values(operator._plugin_secrets)
- try:
- return operator.resolve_type(
- ctx, request_params.get("target", "inputs")
- )
- except Exception as e:
- return ExecutionResult(error=traceback.format_exc())
+
+ return await resolve_type_with_context(operator, ctx)
-async def resolve_type_with_context(request_params, target=None):
+async def resolve_type_with_context(operator, context):
"""Resolves the "inputs" or "outputs" schema of an operator with the given
context.
Args:
- request_params: a dictionary of request parameters
- target (None): the target schema ("inputs" or "outputs")
+ operator: the :class:`fiftyone.operators.Operator`
+ context: the :class:`ExecutionContext` of an operator
Returns:
- the schema of "inputs" or "outputs"
+ the "inputs" or "outputs" schema
:class:`fiftyone.operators.types.Property` of an operator, or None
"""
- computed_target = target or request_params.get("target", None)
- computed_request_params = {**request_params, "target": computed_target}
- operator_uri = request_params.get("operator_uri", None)
- registry = OperatorRegistry()
- return await resolve_type(registry, operator_uri, computed_request_params)
+ try:
+ return operator.resolve_type(
+ context, context.request_params.get("target", "inputs")
+ )
+ except Exception as e:
+ return ExecutionResult(error=traceback.format_exc())
async def resolve_execution_options(registry, operator_uri, request_params):
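Note that the refactored helper never raises: resolution errors come back as an
`ExecutionResult` carrying the traceback. A minimal usage sketch, assuming `operator`
and `ctx` come from an executing operation as in the `delegated.py` hunk above:

    ctx.request_params["target"] = "outputs"
    resolved = await resolve_type_with_context(operator, ctx)

    if isinstance(resolved, ExecutionResult):
        # resolution failed; the traceback was captured rather than raised
        print(resolved.error)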
@@ -694,12 +692,12 @@ def panel(self):
@property
def delegated(self):
- """Whether delegated execution has been forced for the operation."""
+ """Whether the operation was delegated."""
return self.request_params.get("delegated", False)
@property
def requesting_delegated_execution(self):
- """Whether delegated execution has been requested for the operation."""
+ """Whether delegated execution was requested for the operation."""
return self.request_params.get("request_delegation", False)
@property
@@ -1277,7 +1275,7 @@ def available_orchestrators(self):
@property
def orchestrator_registration_enabled(self):
- return False
+ return not fo.config.allow_legacy_orchestrators
def update(self, available_orchestrators=None):
self._available_orchestrators = available_orchestrators
diff --git a/fiftyone/operators/operations.py b/fiftyone/operators/operations.py
index f10c0a00f41..e0038aadfea 100644
--- a/fiftyone/operators/operations.py
+++ b/fiftyone/operators/operations.py
@@ -665,6 +665,21 @@ def set_group_slice(self, slice):
"""
return self._ctx.trigger("set_group_slice", {"slice": slice})
+ def open_sample(self, id=None, group_id=None):
+ """Opens the specified sample or group in the App's sample modal.
+
+ Args:
+ id (None): a sample ID to open in the modal
+ group_id (None): a group ID to open in the modal
+ """
+ return self._ctx.trigger(
+ "open_sample", {"id": id, "group_id": group_id}
+ )
+
+ def close_sample(self):
+ """Closes the App's sample modal."""
+ return self._ctx.trigger("close_sample")
+
def _serialize_view(view):
return json.loads(json_util.dumps(view._serialize()))
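A hypothetical operator exercising the new `open_sample` trigger (the operator itself
is illustrative and not part of this diff):

    import fiftyone.operators as foo

    class ShowFirstSample(foo.Operator):
        @property
        def config(self):
            return foo.OperatorConfig(
                name="show_first_sample", label="Show first sample"
            )

        def execute(self, ctx):
            first = ctx.view.first()

            # open the App's sample modal on the first sample in the view
            ctx.ops.open_sample(id=first.id)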
diff --git a/fiftyone/server/lightning.py b/fiftyone/server/lightning.py
index 2b1d22df3dd..701588864d5 100644
--- a/fiftyone/server/lightning.py
+++ b/fiftyone/server/lightning.py
@@ -9,7 +9,6 @@
from bson import ObjectId
from dataclasses import asdict, dataclass
from datetime import date, datetime
-import math
import typing as t
import asyncio
@@ -46,6 +45,7 @@ class LightningPathInput:
class LightningInput:
dataset: str
paths: t.List[LightningPathInput]
+ slice: t.Optional[str] = None
@gql.interface
@@ -138,7 +138,13 @@ async def lightning_resolver(
for collection, sublist in zip(collections, queries)
for item in sublist
]
- result = await _do_async_pooled_queries(dataset, flattened)
+
+ filter = (
+ {f"{dataset.group_field}.name": input.slice}
+ if dataset.group_field and input.slice
+ else None
+ )
+ result = await _do_async_pooled_queries(dataset, flattened, filter)
results = []
offset = 0
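For a grouped dataset whose group field is named `group`, the constructed filter is
just a `$match` on the slice name. Illustrative values (not from this diff):

    # dataset.group_field == "group", input.slice == "left"
    filter = {"group.name": "left"}

    # prepended to each aggregation, and passed to distinct() as its filter
    pipeline = [{"$match": filter}, {"$sort": {"numeric": 1}}]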
@@ -293,10 +299,11 @@ async def _do_async_pooled_queries(
queries: t.List[
t.Tuple[AsyncIOMotorCollection, t.Union[DistinctQuery, t.List[t.Dict]]]
],
+ filter: t.Optional[t.Mapping[str, str]],
):
return await asyncio.gather(
*[
- _do_async_query(dataset, collection, query)
+ _do_async_query(dataset, collection, query, filter)
for collection, query in queries
]
)
@@ -306,25 +313,31 @@ async def _do_async_query(
dataset: fo.Dataset,
collection: AsyncIOMotorCollection,
query: t.Union[DistinctQuery, t.List[t.Dict]],
+ filter: t.Optional[t.Mapping[str, str]],
):
if isinstance(query, DistinctQuery):
if query.has_list and not query.filters:
- return await _do_distinct_query(collection, query)
+ return await _do_distinct_query(collection, query, filter)
+
+ return await _do_distinct_pipeline(dataset, collection, query, filter)
- return await _do_distinct_pipeline(dataset, collection, query)
+ if filter:
+ query.insert(0, {"$match": filter})
return [i async for i in collection.aggregate(query)]
async def _do_distinct_query(
- collection: AsyncIOMotorCollection, query: DistinctQuery
+ collection: AsyncIOMotorCollection,
+ query: DistinctQuery,
+ filter: t.Optional[t.Mapping[str, str]],
):
match = None
if query.search:
match = query.search
try:
- result = await collection.distinct(query.path)
+ result = await collection.distinct(query.path, filter)
except:
# too many results
return None
@@ -350,12 +363,16 @@ async def _do_distinct_pipeline(
dataset: fo.Dataset,
collection: AsyncIOMotorCollection,
query: DistinctQuery,
+ filter: t.Optional[t.Mapping[str, str]],
):
pipeline = []
+ if filter:
+ pipeline.append({"$match": filter})
+
if query.filters:
pipeline += get_view(dataset, filters=query.filters)._pipeline()
- pipeline += [{"$sort": {query.path: 1}}]
+ pipeline.append({"$sort": {query.path: 1}})
if query.search:
if query.is_object_id_field:
diff --git a/fiftyone/server/routes/embeddings.py b/fiftyone/server/routes/embeddings.py
index 8f18bec06f1..cc7493ed49c 100644
--- a/fiftyone/server/routes/embeddings.py
+++ b/fiftyone/server/routes/embeddings.py
@@ -51,7 +51,6 @@ def _post_sync(self, data):
try:
results = dataset.load_brain_results(brain_key)
- assert results is not None
except:
msg = (
"Failed to load results for brain run with key '%s'. Try "
@@ -59,6 +58,13 @@ def _post_sync(self, data):
) % brain_key
return {"error": msg}
+ if results is None:
+ msg = (
+ "Results for brain run with key '%s' are not yet available"
+ % brain_key
+ )
+ return {"error": msg}
+
view = fosv.get_view(
dataset_name,
stages=stages,
diff --git a/tests/unittests/lightning_tests.py b/tests/unittests/lightning_tests.py
index 319315f89be..b631e8cf080 100644
--- a/tests/unittests/lightning_tests.py
+++ b/tests/unittests/lightning_tests.py
@@ -1053,6 +1053,91 @@ async def test_strings(self, dataset: fo.Dataset):
)
+class TestGroupDatasetLightningQueries(unittest.IsolatedAsyncioTestCase):
+ @drop_async_dataset
+ async def test_group_dataset(self, dataset: fo.Dataset):
+ group = fo.Group()
+ one = fo.Sample(
+ classifications=fo.Classifications(
+ classifications=[fo.Classification(label="one")]
+ ),
+ filepath="one.png",
+ group=group.element("one"),
+ numeric=1,
+ string="one",
+ )
+ two = fo.Sample(
+ classifications=fo.Classifications(
+ classifications=[fo.Classification(label="two")]
+ ),
+ filepath="two.png",
+ group=group.element("two"),
+ numeric=2,
+ string="two",
+ )
+ dataset.add_samples([one, two])
+
+ query = """
+ query Query($input: LightningInput!) {
+ lightning(input: $input) {
+ ... on IntLightningResult {
+ path
+ min
+ max
+ }
+ ... on StringLightningResult {
+ path
+ values
+ }
+ }
+ }
+ """
+
+ # only query "one" slice samples
+ result = await _execute(
+ query,
+ dataset,
+ (fo.IntField, fo.StringField),
+ ["classifications.classifications.label", "numeric", "string"],
+ frames=False,
+ slice="one",
+ )
+
+ self.assertListEqual(
+ result.data["lightning"],
+ [
+ {
+ "path": "classifications.classifications.label",
+ "values": ["one"],
+ },
+ {"path": "numeric", "min": 1.0, "max": 1.0},
+ {"path": "string", "values": ["one"]},
+ ],
+ )
+
+ # only query "two" slice samples
+ result = await _execute(
+ query,
+ dataset,
+ (fo.IntField, fo.StringField),
+ ["classifications.classifications.label", "numeric", "string"],
+ frames=False,
+ slice="two",
+ )
+
+ self.assertListEqual(
+ result.data["lightning"],
+ [
+ {
+ "path": "classifications.classifications.label",
+ "values": ["two"],
+ },
+ {"path": "numeric", "min": 2.0, "max": 2.0},
+ {"path": "string", "values": ["two"]},
+ ],
+ )
+
+
def _add_samples(dataset: fo.Dataset, *sample_data: t.List[t.Dict]):
samples = []
keys = set()
@@ -1067,7 +1152,12 @@ def _add_samples(dataset: fo.Dataset, *sample_data: t.List[t.Dict]):
async def _execute(
- query: str, dataset: fo.Dataset, field: fo.Field, keys: t.Set[str]
+ query: str,
+ dataset: fo.Dataset,
+ field: fo.Field,
+ keys: t.Set[str],
+ frames=True,
+ slice: t.Optional[str] = None,
):
return await execute(
schema,
@@ -1076,7 +1166,8 @@ async def _execute(
"input": asdict(
LightningInput(
dataset=dataset.name,
- paths=_get_paths(dataset, field, keys),
+ paths=_get_paths(dataset, field, keys, frames=frames),
+ slice=slice,
)
)
},
@@ -1084,17 +1175,23 @@ async def _execute(
def _get_paths(
- dataset: fo.Dataset, field_type: t.Type[fo.Field], keys: t.Set[str]
+ dataset: fo.Dataset,
+ field_type: t.Type[fo.Field],
+ keys: t.Set[str],
+ frames=True,
):
field_dict = dataset.get_field_schema(flat=True)
- field_dict.update(
- **{
- f"frames.{path}": field
- for path, field in dataset.get_frame_field_schema(
- flat=True
- ).items()
- }
- )
+
+ if frames:
+ field_dict.update(
+ **{
+ f"frames.{path}": field
+ for path, field in dataset.get_frame_field_schema(
+ flat=True
+ ).items()
+ }
+ )
+
paths: t.List[LightningPathInput] = []
for path in sorted(field_dict):
field = field_dict[path]
diff --git a/tests/unittests/operators/delegated_tests.py b/tests/unittests/operators/delegated_tests.py
index e2f8bdbc402..bb8ed3b208a 100644
--- a/tests/unittests/operators/delegated_tests.py
+++ b/tests/unittests/operators/delegated_tests.py
@@ -212,7 +212,7 @@ def test_delegate_operation(
self.assertIsNotNone(doc.queued_at)
self.assertEqual(doc.label, "Mock Operator")
self.assertEqual(doc.run_state, ExecutionRunState.QUEUED)
- self.assertIsNone(doc.metadata)
+ self.assertEqual(doc.metadata, {})
doc2_metadata = {"inputs_schema": {}}
doc2 = self.svc.queue_operation(
@@ -227,7 +227,6 @@ def test_delegate_operation(
self.assertIsNotNone(doc2.queued_at)
self.assertEqual(doc2.label, "@voxelfiftyone/operator/foo")
self.assertEqual(doc2.run_state, ExecutionRunState.QUEUED)
- self.assertIsNotNone(doc2.metadata)
self.assertEqual(doc2.metadata, doc2_metadata)
def test_list_operations(self, mock_get_operator, mock_operator_exists):
@@ -485,35 +484,6 @@ def test_sets_progress(
self.assertEqual(doc.status.label, "halfway there")
self.assertIsNotNone(doc.status.updated_at)
- def test_output_schema_null_metadata(
- self, mock_get_operator, mock_operator_exists
- ):
- mock_outputs = MockOutputs()
- doc = self.svc.queue_operation(
- operator="@voxelfiftyone/operator/foo",
- delegation_target="test_target",
- context=ExecutionContext(request_params={"foo": "bar"}),
- )
-
- # Set metadata to null instead of being unset, to test that corner case
- self.svc._repo._collection.find_one_and_update(
- {"_id": bson.ObjectId(doc.id)}, {"$set": {"metadata": None}}
- )
-
- self.svc.set_completed(
- doc.id,
- result=ExecutionResult(outputs_schema=mock_outputs.to_json()),
- )
-
- doc = self.svc.get(doc_id=doc.id)
- self.assertEqual(doc.run_state, ExecutionRunState.COMPLETED)
- self.assertEqual(
- doc.metadata,
- {
- "outputs_schema": mock_outputs.to_json(),
- },
- )
-
@patch(
"fiftyone.core.odm.utils.load_dataset",
)