Skip to content

Commit

Permalink
add pending evaluation cards
Browse files Browse the repository at this point in the history
  • Loading branch information
imanjra committed Nov 5, 2024
1 parent af727af commit 8d56e67
Show file tree
Hide file tree
Showing 3 changed files with 122 additions and 20 deletions.
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { Card, CardActionArea, Stack, Typography } from "@mui/material";
import { LoadingDots } from "@fiftyone/components";
import { Card, CardActionArea, Chip, Stack, Typography } from "@mui/material";
import React from "react";
import Evaluate from "./Evaluate";
import EvaluationNotes from "./EvaluationNotes";
Expand All @@ -12,6 +13,7 @@ export default function Overview(props: OverviewProps) {
statuses = {},
notes = {},
permissions = {},
pending_evaluations,
} = props;
const count = evaluations.length;

Expand All @@ -24,43 +26,75 @@ export default function Overview(props: OverviewProps) {
<Typography variant="body1" color="secondary">
{count} Model Evaluations
</Typography>
<Evaluate onEvaluate={onEvaluate} permissions={permissions} />
<Evaluate
onEvaluate={onEvaluate}
permissions={permissions}
variant="overview"
/>
</Stack>
{evaluations.map((evaluation) => {
const { key, id } = evaluation;
const status = statuses[id] || "needs_review";
const note = notes[id];

return (
<CardActionArea key={key}>
<Card
sx={{ p: 2, cursor: "pointer" }}
onClick={() => {
onSelect(key, id);
}}
>
<Stack direction="row" justifyContent="space-between">
<Typography sx={{ fontSize: 16, fontWeight: 600 }}>
{key}
</Typography>
<Status status={status} />
</Stack>
<EvaluationNotes notes={note} variant="overview" />
</Card>
</CardActionArea>
<EvaluationCard
key={key}
eval_key={key}
id={id}
status={status}
note={note}
onSelect={onSelect}
/>
);
})}
{pending_evaluations.map((evaluation) => {
const { eval_key } = evaluation;
return (
<EvaluationCard
key={eval_key}
eval_key={eval_key}
pending
onSelect={onSelect}
/>
);
})}
</Stack>
);
}

/**
 * Renders a single evaluation as a clickable card.
 *
 * Completed evaluations show their status/notes and select on click;
 * pending evaluations render a loading chip and are not clickable.
 */
function EvaluationCard(props: EvaluationCardProps) {
  const { pending, onSelect, eval_key, note, status, id } = props;
  return (
    // NOTE: `disabled` only affects the CardActionArea ripple/focus; the
    // click handler below must also guard on `pending`, because the
    // handler is attached to the inner Card, not the action area.
    <CardActionArea disabled={pending}>
      <Card
        sx={{ p: 2, cursor: pending ? "default" : "pointer" }}
        onClick={() => {
          // Pending evaluations have no results to select yet
          if (pending) return;
          onSelect(eval_key, id);
        }}
      >
        <Stack direction="row" justifyContent="space-between">
          <Typography sx={{ fontSize: 16, fontWeight: 600 }}>
            {eval_key}
          </Typography>
          {pending && (
            <Chip variant="filled" label={<LoadingDots text="Evaluating" />} />
          )}
          {status && <Status status={status} />}
        </Stack>
        {note && <EvaluationNotes notes={note} variant="overview" />}
      </Card>
    </CardActionArea>
  );
}

/**
 * Props for the evaluations overview list.
 */
type OverviewProps = {
// Completed evaluations to render as selectable cards
evaluations: EvaluationType[];
// Invoked with (eval_key, id) when a completed evaluation card is clicked
onSelect: (key: string, id: string) => void;
// Invoked when the "Evaluate" action is triggered
onEvaluate: () => void;
// Review status per evaluation id; missing ids default to "needs_review"
statuses?: Record<string, string>;
// Free-form note per evaluation id
notes?: Record<string, string>;
// Permission flags forwarded to the Evaluate action
permissions?: Record<string, boolean>;
// Evaluations that have been scheduled but have not finished running yet
pending_evaluations: PendingEvaluationType[];
};

type EvaluationType = {
Expand All @@ -69,3 +103,17 @@ type EvaluationType = {
description: string;
status: string;
};

/**
 * A scheduled-but-unfinished evaluation.
 */
type PendingEvaluationType = {
// Evaluation key the run was scheduled under
eval_key: string;
// Delegated-operation document id, when the run is delegated
// (absent for immediate/non-delegated runs)
doc_id?: string;
};

/**
 * Props for a single evaluation card (completed or pending).
 */
type EvaluationCardProps = {
// Evaluation key shown as the card title
eval_key: string;
// Evaluation id passed through to onSelect (absent for pending cards)
id?: string;
// Optional note rendered below the title
note?: string;
// Selection callback shared with the overview list
onSelect: OverviewProps["onSelect"];
// When true, the card is disabled and shows an "Evaluating" chip
pending?: boolean;
// Review status chip (omitted for pending cards)
status?: string;
};
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ export default function NativeModelEvaluationView(props) {
statuses = {},
notes = {},
permissions = {},
pending_evaluations = [],
} = data;
const computedEvaluations = useMemo(() => {
return evaluations.map(({ key, id }) => ({
Expand Down Expand Up @@ -77,6 +78,7 @@ export default function NativeModelEvaluationView(props) {
onEvaluate={() => {
triggerEvent(on_evaluate_model);
}}
permissions={permissions}
/>
) : (
<Overview
Expand All @@ -92,6 +94,7 @@ export default function NativeModelEvaluationView(props) {
statuses={statuses}
notes={notes}
permissions={permissions}
pending_evaluations={pending_evaluations}
/>
))}
</Box>
Expand Down
53 changes: 52 additions & 1 deletion fiftyone/operators/builtins/panels/model_evaluation/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,9 @@ def config(self):
is_new=is_new("2024-11-07"),
)

def get_dataset_id(self, ctx):
    """Returns the current dataset's backing document ID as a string.

    Args:
        ctx: the execution context

    Returns:
        the dataset's document ID, stringified
    """
    dataset_doc = ctx.dataset._doc
    return str(dataset_doc.id)

def get_store(self, ctx):
    """Returns this panel's execution store.

    Args:
        ctx: the execution context

    Returns:
        the execution store named by ``STORE_NAME``
    """
    store = ctx.create_store(STORE_NAME)
    return store

Expand Down Expand Up @@ -76,6 +79,7 @@ def on_load(self, ctx):
ctx.panel.set_data("statuses", statuses)
ctx.panel.set_data("notes", notes)
ctx.panel.set_data("permissions", permissions)
self.load_pending_evaluations(ctx)
# keys = ctx.dataset.list_evaluations()
# ctx.panel.set_state("keys", keys)

Expand Down Expand Up @@ -266,6 +270,50 @@ def on_change_view(self, ctx):
# Used only for triggering re-renders when the view changes
pass

def load_pending_evaluations(self, ctx, skip_update=False):
    """Pushes this dataset's pending evaluations to the panel's data.

    Unless ``skip_update`` is ``True``, entries whose ``eval_key`` now
    exists on the dataset are pruned first (the evaluation finished) and
    the pruned list is persisted back to the store.

    Args:
        ctx: the execution context
        skip_update (False): skip reconciling against the dataset's
            completed evaluation keys
    """
    store = self.get_store(ctx)
    dataset_id = self.get_dataset_id(ctx)
    pending_evaluations = store.get("pending_evaluations") or {}
    pending = pending_evaluations.get(dataset_id, [])
    if not skip_update:
        completed_keys = set(ctx.dataset.list_evaluations())
        still_pending = [
            item
            for item in pending
            if item.get("eval_key") not in completed_keys
        ]
        # Only write back to the store when something was actually pruned
        if len(still_pending) != len(pending):
            pending_evaluations[dataset_id] = still_pending
            store.set("pending_evaluations", pending_evaluations)
            pending = still_pending
    ctx.panel.set_data("pending_evaluations", pending)

def on_evaluate_model_success(self, ctx):
    """Records a newly scheduled evaluation as pending for this dataset.

    The eval key is read from the operation result directly, falling
    back to the delegated operation's invocation params. Delegated runs
    (which carry a document ``id``) also record that ``doc_id``.

    Args:
        ctx: the execution context
    """
    dataset_id = self.get_dataset_id(ctx)
    store = self.get_store(ctx)
    result = ctx.params.get("result", {})
    doc_id = result.get("id")
    delegated_eval_key = (
        result.get("context", {}).get("params", {}).get("eval_key")
    )
    eval_key = result.get("eval_key", delegated_eval_key)
    if doc_id is None:
        entry = {"eval_key": eval_key}
    else:
        entry = {"doc_id": str(doc_id), "eval_key": eval_key}

    pending_evaluations = store.get("pending_evaluations") or {}
    pending_evaluations.setdefault(dataset_id, []).append(entry)
    store.set("pending_evaluations", pending_evaluations)
    # Refresh panel data without re-reconciling against the dataset
    self.load_pending_evaluations(ctx, True)

def on_evaluate_model(self, ctx):
if not self.can_evaluate(ctx):
ctx.ops.notify(
Expand All @@ -274,7 +322,10 @@ def on_evaluate_model(self, ctx):
)
return
# Called when you click the "Evaluate Model" button
ctx.prompt("@voxel51/evaluation/evaluate_model")
ctx.prompt(
"@voxel51/operators/evaluate_model_async",
on_success=self.on_evaluate_model_success,
)
# ctx.panel.state.view = "eval"

def load_view(self, ctx):
Expand Down

0 comments on commit 8d56e67

Please sign in to comment.