Folders, Persistent Import Progress Tracking & UX Enhancements (#3841)

Co-authored-by: Jonathan Dobson <jon.m.dobson@gmail.com>
This commit is contained in:
Celal Zamanoglu
2025-11-05 18:37:18 +03:00
committed by GitHub
parent fcc3f30ba4
commit 75948053b9
32 changed files with 2886 additions and 538 deletions

View File

@@ -0,0 +1,90 @@
"""add_folders_table
Revision ID: 541870962332
Revises: d135e472622c
Create Date: 2025-10-30 14:14:37.593691+00:00
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "541870962332"
down_revision: Union[str, None] = "d135e472622c"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# Step 1: Create folders table
op.create_table(
"folders",
sa.Column("folder_id", sa.String(), nullable=False),
sa.Column("organization_id", sa.String(), nullable=False),
sa.Column("title", sa.String(), nullable=False),
sa.Column("description", sa.String(), nullable=True),
sa.Column("created_at", sa.DateTime(), nullable=False),
sa.Column("modified_at", sa.DateTime(), nullable=False),
sa.Column("deleted_at", sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint("folder_id"),
sa.ForeignKeyConstraint(
["organization_id"],
["organizations.organization_id"],
ondelete="CASCADE",
),
)
# Create indexes on folders table
op.create_index(
"folder_organization_id_idx",
"folders",
["organization_id"],
unique=False,
)
op.create_index(
"folder_organization_title_idx",
"folders",
["organization_id", "title"],
unique=False,
)
# Step 2: Add folder_id to workflows table
op.add_column("workflows", sa.Column("folder_id", sa.String(), nullable=True))
# Create index on workflows.folder_id
op.create_index(
"workflow_folder_id_idx",
"workflows",
["folder_id"],
unique=False,
)
# Create foreign key constraint
op.create_foreign_key(
"fk_workflows_folder_id",
"workflows",
"folders",
["folder_id"],
["folder_id"],
ondelete="SET NULL",
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# Step 1: Remove folder_id from workflows table (must be done before dropping folders table)
op.drop_constraint("fk_workflows_folder_id", "workflows", type_="foreignkey")
op.drop_index("workflow_folder_id_idx", table_name="workflows")
op.drop_column("workflows", "folder_id")
# Step 2: Drop folders table
op.drop_index("folder_organization_title_idx", table_name="folders")
op.drop_index("folder_organization_id_idx", table_name="folders")
op.drop_table("folders")
# ### end Alembic commands ###

View File

@@ -0,0 +1,30 @@
"""add_import_tracking_to_workflows
Revision ID: b61cf349aa4b
Revises: 541870962332
Create Date: 2025-10-30 14:25:37.010446+00:00
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "b61cf349aa4b"
down_revision: Union[str, None] = "541870962332"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Add import_error column to workflows table for tracking import failures
# Note: status column is a String, not an enum, so no schema changes needed for new status values
op.add_column("workflows", sa.Column("import_error", sa.String(), nullable=True))
def downgrade() -> None:
# Remove import_error column from workflows table
op.drop_column("workflows", "import_error")

View File

@@ -0,0 +1,23 @@
"""merge migration heads
Revision ID: 2c34dee3304e
Revises: b61cf349aa4b, 7fbf463be9a7
Create Date: 2025-11-05 15:23:24.380086+00:00
"""
from typing import Sequence, Union
# revision identifiers, used by Alembic.
revision: str = "2c34dee3304e"
down_revision: Union[str, None] = ("b61cf349aa4b", "7fbf463be9a7")
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,13 +1,11 @@
import { getClient } from "@/api/AxiosClient";
import { Label } from "@/components/ui/label";
import { ReloadIcon, UploadIcon } from "@radix-ui/react-icons";
import { useMutation, useQueryClient } from "@tanstack/react-query";
import { UploadIcon } from "@radix-ui/react-icons";
import { useQueryClient } from "@tanstack/react-query";
import { useId } from "react";
import { stringify as convertToYAML } from "yaml";
import { WorkflowApiResponse } from "./types/workflowTypes";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
import { useNavigate } from "react-router-dom";
import { AxiosError } from "axios";
import { toast } from "@/components/ui/use-toast";
import {
Tooltip,
@@ -15,6 +13,7 @@ import {
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { AxiosError } from "axios";
function isJsonString(str: string): boolean {
try {
@@ -25,16 +24,28 @@ function isJsonString(str: string): boolean {
return true;
}
function ImportWorkflowButton() {
/**
 * Derives a human-readable message from an unknown error value.
 *
 * For Axios errors, prefers the server-provided `detail` field, but only
 * when it is a non-empty string — FastAPI validation failures return a
 * structured `detail` (a list of objects), which would otherwise render as
 * "[object Object]" in the toast. Falls back to the error's own message,
 * then to the caller-supplied fallback.
 */
function getErrorMessage(error: unknown, fallback: string): string {
  if (error instanceof AxiosError) {
    const detail: unknown = error.response?.data?.detail;
    if (typeof detail === "string" && detail) {
      return detail;
    }
    return error.message || fallback;
  } else if (error instanceof Error) {
    return error.message;
  }
  return fallback;
}
interface ImportWorkflowButtonProps {
onImportStart?: () => void;
}
function ImportWorkflowButton({ onImportStart }: ImportWorkflowButtonProps) {
const inputId = useId();
const credentialGetter = useCredentialGetter();
const queryClient = useQueryClient();
const navigate = useNavigate();
const createWorkflowFromYamlMutation = useMutation({
mutationFn: async (yaml: string) => {
const createWorkflowFromYamlMutation = async (yaml: string) => {
try {
const client = await getClient(credentialGetter);
return client.post<string, { data: WorkflowApiResponse }>(
await client.post<string, { data: WorkflowApiResponse }>(
"/workflows",
yaml,
{
@@ -43,21 +54,51 @@ function ImportWorkflowButton() {
},
},
);
},
onSuccess: (response) => {
queryClient.invalidateQueries({
queryKey: ["workflows"],
});
navigate(`/workflows/${response.data.workflow_permanent_id}/debug`);
},
onError: (error: AxiosError) => {
toast({
variant: "success",
title: "Workflow imported",
description: "Successfully imported workflow",
});
} catch (error) {
toast({
variant: "destructive",
title: "Error importing workflow",
description: error.message || "An error occurred",
description: getErrorMessage(error, "Failed to import workflow"),
});
},
});
}
};
const createWorkflowFromPdfMutation = async (file: File) => {
try {
const formData = new FormData();
formData.append("file", file);
const client = await getClient(credentialGetter);
await client.post("/workflows/import-pdf", formData, {
headers: {
"Content-Type": "multipart/form-data",
},
});
// Notify parent to start polling
onImportStart?.();
toast({
title: "Import started",
description: `Importing ${file.name}...`,
});
} catch (error) {
toast({
title: "Import Failed",
description: getErrorMessage(error, "Failed to import PDF"),
variant: "destructive",
});
}
};
return (
<TooltipProvider>
@@ -75,38 +116,8 @@ function ImportWorkflowButton() {
const fileName = file.name.toLowerCase();
if (fileName.endsWith(".pdf")) {
// Handle PDF file - send as FormData to new endpoint
const formData = new FormData();
formData.append("file", file);
const client = await getClient(credentialGetter);
try {
const response = await client.post<WorkflowApiResponse>(
"/workflows/import-pdf",
formData,
{
headers: {
"Content-Type": "multipart/form-data",
},
},
);
queryClient.invalidateQueries({
queryKey: ["workflows"],
});
navigate(
`/workflows/${response.data.workflow_permanent_id}/debug`,
);
} catch (error) {
toast({
title: "Import Failed",
description:
error instanceof Error
? error.message
: "Failed to import PDF",
variant: "destructive",
});
}
// Handle PDF file
await createWorkflowFromPdfMutation(file);
} else {
// Non-pdf files like yaml, json
const fileTextContent = await file.text();
@@ -114,17 +125,14 @@ function ImportWorkflowButton() {
const content = isJson
? convertToYAML(JSON.parse(fileTextContent))
: fileTextContent;
createWorkflowFromYamlMutation.mutate(content);
await createWorkflowFromYamlMutation(content);
}
}
}}
/>
<div className="flex h-full cursor-pointer items-center gap-2 rounded-md bg-secondary px-4 py-2 font-bold text-secondary-foreground hover:bg-secondary/90">
{createWorkflowFromYamlMutation.isPending ? (
<ReloadIcon className="h-4 w-4 animate-spin" />
) : (
<UploadIcon className="h-4 w-4" />
)}
<UploadIcon className="h-4 w-4" />
Import
</div>
</Label>

View File

@@ -84,6 +84,9 @@ function WorkflowActions({ workflow, onSuccessfullyDeleted }: Props) {
queryClient.invalidateQueries({
queryKey: ["workflows"],
});
queryClient.invalidateQueries({
queryKey: ["folders"],
});
onSuccessfullyDeleted?.();
},
onError: (error: AxiosError) => {

View File

@@ -27,26 +27,34 @@ import { useCredentialGetter } from "@/hooks/useCredentialGetter";
import { basicLocalTimeFormat, basicTimeFormat } from "@/util/timeFormat";
import { cn } from "@/util/utils";
import {
DotsHorizontalIcon,
FileIcon,
LightningBoltIcon,
MagnifyingGlassIcon,
MixerHorizontalIcon,
Pencil2Icon,
PlayIcon,
PlusIcon,
ReloadIcon,
} from "@radix-ui/react-icons";
import { useQuery } from "@tanstack/react-query";
import { useState } from "react";
import React, { useCallback, useMemo, useState } from "react";
import { useNavigate, useSearchParams } from "react-router-dom";
import { useDebounce } from "use-debounce";
import { NarrativeCard } from "./components/header/NarrativeCard";
import { WorkflowParametersDialog } from "./components/WorkflowParametersDialog";
import { FolderCard } from "./components/FolderCard";
import { CreateFolderDialog } from "./components/CreateFolderDialog";
import { ViewAllFoldersDialog } from "./components/ViewAllFoldersDialog";
import { WorkflowFolderSelector } from "./components/WorkflowFolderSelector";
import { HighlightText } from "./components/HighlightText";
import { useCreateWorkflowMutation } from "./hooks/useCreateWorkflowMutation";
import { useFoldersQuery } from "./hooks/useFoldersQuery";
import { useActiveImportsPolling } from "./hooks/useActiveImportsPolling";
import { ImportWorkflowButton } from "./ImportWorkflowButton";
import { WorkflowApiResponse } from "./types/workflowTypes";
import { Parameter, WorkflowApiResponse } from "./types/workflowTypes";
import { WorkflowCreateYAMLRequest } from "./types/workflowYamlTypes";
import { WorkflowActions } from "./WorkflowActions";
import { WorkflowTemplates } from "../discover/WorkflowTemplates";
import { Skeleton } from "@/components/ui/skeleton";
const emptyWorkflowRequest: WorkflowCreateYAMLRequest = {
title: "New Workflow",
@@ -65,17 +73,55 @@ function Workflows() {
const createWorkflowMutation = useCreateWorkflowMutation();
const [searchParams, setSearchParams] = useSearchParams();
const [search, setSearch] = useState("");
const [openWorkflowId, setOpenWorkflowId] = useState<string | null>(null);
const [debouncedSearch] = useDebounce(search, 500);
const [debouncedSearch] = useDebounce(search, 250);
const page = searchParams.get("page") ? Number(searchParams.get("page")) : 1;
const itemsPerPage = searchParams.get("page_size")
? Number(searchParams.get("page_size"))
: 10;
const { data: workflows = [], isLoading } = useQuery<
Array<WorkflowApiResponse>
>({
queryKey: ["workflows", debouncedSearch, page, itemsPerPage],
// Folder state
const [selectedFolderId, setSelectedFolderId] = useState<string | null>(null);
const [isCreateFolderOpen, setIsCreateFolderOpen] = useState(false);
const [isViewAllFoldersOpen, setIsViewAllFoldersOpen] = useState(false);
// Parameter expansion state
const [manuallyExpandedRows, setManuallyExpandedRows] = useState<Set<string>>(
new Set(),
);
// Poll for active imports
const { activeImports, startPolling } = useActiveImportsPolling();
// Fetch folders
const { data: allFolders = [] } = useFoldersQuery({ page_size: 10 });
// Create folders map for O(1) lookup
const foldersMap = useMemo(() => {
return new Map(allFolders.map((f) => [f.folder_id, f]));
}, [allFolders]);
// Sort folders by modified date (most recent first) and get top 5
const recentFolders = useMemo(() => {
return [...allFolders]
.sort(
(a, b) =>
new Date(b.modified_at).getTime() - new Date(a.modified_at).getTime(),
)
.slice(0, 5);
}, [allFolders]);
const {
data: workflows = [],
isFetching,
isPlaceholderData,
} = useQuery<Array<WorkflowApiResponse>>({
queryKey: [
"workflows",
debouncedSearch,
page,
itemsPerPage,
selectedFolderId,
],
queryFn: async () => {
const client = await getClient(credentialGetter);
const params = new URLSearchParams();
@@ -85,16 +131,26 @@ function Workflows() {
if (debouncedSearch) {
params.append("search_key", debouncedSearch);
}
if (selectedFolderId) {
params.append("folder_id", selectedFolderId);
}
return client
.get(`/workflows`, {
params,
})
.then((response) => response.data);
},
placeholderData: (previousData) => previousData,
});
const { data: nextPageWorkflows } = useQuery<Array<WorkflowApiResponse>>({
queryKey: ["workflows", debouncedSearch, page + 1, itemsPerPage],
queryKey: [
"workflows",
debouncedSearch,
page + 1,
itemsPerPage,
selectedFolderId,
],
queryFn: async () => {
const client = await getClient(credentialGetter);
const params = new URLSearchParams();
@@ -104,6 +160,9 @@ function Workflows() {
if (debouncedSearch) {
params.append("search_key", debouncedSearch);
}
if (selectedFolderId) {
params.append("folder_id", selectedFolderId);
}
return client
.get(`/workflows`, {
params,
@@ -114,7 +173,61 @@ function Workflows() {
});
const isNextDisabled =
isLoading || !nextPageWorkflows || nextPageWorkflows.length === 0;
isFetching || !nextPageWorkflows || nextPageWorkflows.length === 0;
// Check if a specific parameter matches the search
const parameterMatchesSearch = useCallback(
(param: Parameter): boolean => {
if (!debouncedSearch.trim()) return false;
const lowerQuery = debouncedSearch.toLowerCase();
const keyMatch = param.key?.toLowerCase().includes(lowerQuery) ?? false;
const descMatch =
param.description?.toLowerCase().includes(lowerQuery) ?? false;
const valueMatch = Boolean(
param.parameter_type === "workflow" &&
param.default_value &&
String(param.default_value).toLowerCase().includes(lowerQuery),
);
return keyMatch || descMatch || valueMatch;
},
[debouncedSearch],
);
// Auto-expand rows when parameters match search
const autoExpandedRows = useMemo(() => {
if (!debouncedSearch.trim()) return new Set<string>();
const expanded = new Set<string>();
workflows.forEach((workflow) => {
const hasParameterMatch = workflow.workflow_definition.parameters?.some(
(param) => parameterMatchesSearch(param),
);
if (hasParameterMatch) {
expanded.add(workflow.workflow_permanent_id);
}
});
return expanded;
}, [workflows, debouncedSearch, parameterMatchesSearch]);
// Combine manual and auto-expanded rows
const expandedRows = useMemo(() => {
return new Set([...manuallyExpandedRows, ...autoExpandedRows]);
}, [manuallyExpandedRows, autoExpandedRows]);
const toggleParametersExpanded = (workflowId: string) => {
const newExpanded = new Set(manuallyExpandedRows);
if (newExpanded.has(workflowId)) {
newExpanded.delete(workflowId);
} else {
newExpanded.add(workflowId);
}
setManuallyExpandedRows(newExpanded);
};
function handleRowClick(
event: React.MouseEvent<HTMLTableCellElement>,
@@ -162,6 +275,18 @@ function Workflows() {
setParamPatch({ page: String(page + 1) });
}
// Show importing workflows from polling hook (only on page 1)
const displayWorkflows = useMemo(() => {
const importingOnly = activeImports.filter(
(imp) => imp.status === "importing",
);
if (page === 1 && importingOnly.length > 0) {
return [...importingOnly, ...workflows];
}
return workflows;
}, [activeImports, workflows, page]);
return (
<div className="space-y-10">
<div className="flex h-32 justify-between gap-6">
@@ -191,8 +316,88 @@ function Workflows() {
</div>
</div>
<div className="space-y-4">
<header>
{/* Folders Section */}
<div className="space-y-4">
<div className="flex items-center justify-between">
<div className="flex items-center gap-3">
<h2 className="text-lg font-semibold">Folders</h2>
<Button
variant="link"
size="sm"
className="h-auto p-0 text-blue-600 dark:text-blue-400"
onClick={() => setIsCreateFolderOpen(true)}
>
+ New folder
</Button>
</div>
{allFolders.length > 5 && (
<Button
variant="link"
size="sm"
className="text-blue-600 dark:text-blue-400"
onClick={() => setIsViewAllFoldersOpen(true)}
>
View all
</Button>
)}
</div>
{recentFolders.length > 0 ? (
<div className="grid grid-cols-5 gap-4">
{recentFolders.map((folder) => (
<FolderCard
key={folder.folder_id}
folder={folder}
isSelected={selectedFolderId === folder.folder_id}
onClick={() =>
setSelectedFolderId(
selectedFolderId === folder.folder_id
? null
: folder.folder_id,
)
}
/>
))}
</div>
) : (
<div className="rounded-lg border border-slate-200 bg-slate-elevation1 py-6 text-center dark:border-slate-700">
<div className="mx-auto max-w-md">
<FileIcon className="mx-auto mb-3 h-10 w-10 text-blue-400 opacity-50" />
<h3 className="mb-2 text-slate-900 dark:text-slate-100">
Organize Your Workflows with Folders
</h3>
<p className="mb-4 text-sm text-slate-500 dark:text-slate-400">
Keep your workflows organized by creating folders. Group
related workflows together by project, team, or workflow type
for easier management.
</p>
<Button
variant="link"
size="sm"
className="h-auto p-0 text-blue-600 dark:text-blue-400"
onClick={() => setIsCreateFolderOpen(true)}
>
<PlusIcon className="mr-2 h-4 w-4" />
Create Your First Folder
</Button>
</div>
</div>
)}
</div>
{/* Workflows Section */}
<header className="flex items-center justify-between">
<h1 className="text-xl">My Flows</h1>
{selectedFolderId && (
<Button
variant="link"
size="sm"
className="h-auto p-0 text-blue-600 dark:text-blue-400"
onClick={() => setSelectedFolderId(null)}
>
View all workflows
</Button>
)}
</header>
<div className="flex justify-between">
<div className="relative">
@@ -210,7 +415,7 @@ function Workflows() {
/>
</div>
<div className="flex gap-4">
<ImportWorkflowButton />
<ImportWorkflowButton onImportStart={startPolling} />
<Button
disabled={createWorkflowMutation.isPending}
onClick={() => {
@@ -230,134 +435,298 @@ function Workflows() {
<Table>
<TableHeader className="rounded-t-lg bg-slate-elevation2">
<TableRow>
<TableHead className="w-1/3 rounded-tl-lg text-slate-400">
<TableHead className="w-1/4 rounded-tl-lg text-slate-400">
ID
</TableHead>
<TableHead className="w-1/3 text-slate-400">Title</TableHead>
<TableHead className="w-1/3 text-slate-400">
<TableHead className="w-1/4 text-slate-400">Title</TableHead>
<TableHead className="w-1/6 text-slate-400">Folder</TableHead>
<TableHead className="w-1/6 text-slate-400">
Created At
</TableHead>
<TableHead className="rounded-tr-lg"></TableHead>
</TableRow>
</TableHeader>
<TableBody>
{isLoading ? (
{isFetching &&
!isPlaceholderData &&
displayWorkflows.length === 0 ? (
// Show skeleton rows only on initial load (not during search refinement)
Array.from({ length: 10 }).map((_, index) => (
<TableRow key={`skeleton-${index}`}>
<TableCell>
<Skeleton className="h-5 w-full" />
</TableCell>
<TableCell>
<Skeleton className="h-5 w-full" />
</TableCell>
<TableCell>
<Skeleton className="h-5 w-20" />
</TableCell>
<TableCell>
<Skeleton className="h-5 w-32" />
</TableCell>
<TableCell>
<div className="flex justify-end gap-2">
<Skeleton className="h-8 w-8 rounded" />
<Skeleton className="h-8 w-8 rounded" />
<Skeleton className="h-8 w-8 rounded" />
<Skeleton className="h-8 w-8 rounded" />
</div>
</TableCell>
</TableRow>
))
) : displayWorkflows?.length === 0 ? (
<TableRow>
<TableCell colSpan={4}>Loading...</TableCell>
</TableRow>
) : workflows?.length === 0 ? (
<TableRow>
<TableCell colSpan={4}>No workflows found</TableCell>
<TableCell colSpan={5}>No workflows found</TableCell>
</TableRow>
) : (
workflows?.map((workflow) => {
displayWorkflows?.map((workflow) => {
const hasParameters =
workflow.workflow_definition.parameters.filter(
(p) => p.parameter_type !== "output",
).length > 0;
const isExpanded = expandedRows.has(
workflow.workflow_permanent_id,
);
// Check if this is an importing workflow
const isUploading = workflow.status === "importing";
return (
<TableRow
key={workflow.workflow_permanent_id}
className="cursor-pointer"
>
<TableCell
onClick={(event) => {
handleRowClick(event, workflow.workflow_permanent_id);
}}
>
{workflow.workflow_permanent_id}
</TableCell>
<TableCell
onClick={(event) => {
handleRowClick(event, workflow.workflow_permanent_id);
}}
>
{workflow.title}
</TableCell>
<TableCell
onClick={(event) => {
handleRowClick(event, workflow.workflow_permanent_id);
}}
title={basicTimeFormat(workflow.created_at)}
>
{basicLocalTimeFormat(workflow.created_at)}
</TableCell>
<TableCell>
<div className="flex justify-end gap-2">
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
size="icon"
variant="outline"
onClick={(event) => {
handleIconClick(
event,
`/workflows/${workflow.workflow_permanent_id}/debug`,
<React.Fragment key={workflow.workflow_permanent_id}>
{/* Main workflow row */}
{isUploading ? (
<TableRow className="opacity-70">
<TableCell colSpan={2}>
<div className="flex items-center gap-2">
<ReloadIcon className="h-4 w-4 animate-spin text-blue-400" />
<span>{workflow.title}</span>
</div>
</TableCell>
<TableCell>
<span className="text-slate-400">-</span>
</TableCell>
<TableCell>
{basicLocalTimeFormat(workflow.created_at)}
</TableCell>
<TableCell>
<div className="flex justify-end gap-2">
<Button size="icon" variant="ghost" disabled>
<FileIcon className="h-4 w-4" />
</Button>
<Button size="icon" variant="ghost" disabled>
<MixerHorizontalIcon className="h-4 w-4" />
</Button>
<Button size="icon" variant="ghost" disabled>
<PlayIcon className="h-4 w-4" />
</Button>
<Button size="icon" variant="ghost" disabled>
<DotsHorizontalIcon className="h-4 w-4" />
</Button>
</div>
</TableCell>
</TableRow>
) : (
<TableRow className="cursor-pointer">
<TableCell
onClick={(event) => {
handleRowClick(
event,
workflow.workflow_permanent_id,
);
}}
>
<HighlightText
text={workflow.workflow_permanent_id}
query={debouncedSearch}
/>
</TableCell>
<TableCell
onClick={(event) => {
handleRowClick(
event,
workflow.workflow_permanent_id,
);
}}
>
<HighlightText
text={workflow.title}
query={debouncedSearch}
/>
</TableCell>
<TableCell
onClick={(event) => {
handleRowClick(
event,
workflow.workflow_permanent_id,
);
}}
>
{workflow.folder_id ? (
<div className="flex items-center gap-1.5">
<FileIcon className="h-3.5 w-3.5 text-blue-400" />
<span className="text-sm">
<HighlightText
text={
foldersMap.get(workflow.folder_id)
?.title || workflow.folder_id
}
query={debouncedSearch}
/>
</span>
</div>
) : (
<span className="text-slate-400">-</span>
)}
</TableCell>
<TableCell
onClick={(event) => {
handleRowClick(
event,
workflow.workflow_permanent_id,
);
}}
title={basicTimeFormat(workflow.created_at)}
>
{basicLocalTimeFormat(workflow.created_at)}
</TableCell>
<TableCell>
<div className="flex justify-end gap-2">
<WorkflowFolderSelector
workflowPermanentId={
workflow.workflow_permanent_id
}
currentFolderId={workflow.folder_id}
/>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
size="icon"
variant="outline"
onClick={() =>
toggleParametersExpanded(
workflow.workflow_permanent_id,
)
}
disabled={!hasParameters}
className={cn(
isExpanded && "text-blue-400",
)}
>
<MixerHorizontalIcon className="h-4 w-4" />
</Button>
</TooltipTrigger>
<TooltipContent>
{hasParameters
? isExpanded
? "Hide Parameters"
: "Show Parameters"
: "No Parameters"}
</TooltipContent>
</Tooltip>
</TooltipProvider>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
size="icon"
variant="outline"
onClick={(event) => {
handleIconClick(
event,
`/workflows/${workflow.workflow_permanent_id}/run`,
);
}}
>
<PlayIcon className="h-4 w-4" />
</Button>
</TooltipTrigger>
<TooltipContent>
Create New Run
</TooltipContent>
</Tooltip>
</TooltipProvider>
<WorkflowActions workflow={workflow} />
</div>
</TableCell>
</TableRow>
)}
{/* Expanded parameters section */}
{isExpanded && hasParameters && (
<TableRow
key={`${workflow.workflow_permanent_id}-params`}
>
<TableCell
colSpan={5}
className="bg-slate-50 dark:bg-slate-900/50"
>
<div className="ml-8 space-y-2 py-4">
<div className="mb-3 text-sm font-medium">
Parameters
</div>
<div className="space-y-2">
{workflow.workflow_definition.parameters
.filter((p) => p.parameter_type !== "output")
.map((param, idx) => {
const matchesParam =
parameterMatchesSearch(param);
return (
<div
key={idx}
className={cn(
"grid grid-cols-[140px_1fr_2fr] gap-4 rounded border bg-white p-3 text-sm dark:border-slate-800 dark:bg-slate-900",
matchesParam &&
"shadow-[0_0_15px_rgba(59,130,246,0.3)] ring-2 ring-blue-500/50",
)}
>
<div className="font-medium text-blue-600 dark:text-blue-400">
<HighlightText
text={param.key}
query={debouncedSearch}
/>
</div>
<div className="truncate">
{param.parameter_type ===
"workflow" &&
param.default_value ? (
<HighlightText
text={String(param.default_value)}
query={debouncedSearch}
/>
) : (
<span className="text-slate-400">
-
</span>
)}
</div>
<div className="text-slate-500">
{param.description ? (
<HighlightText
text={param.description}
query={debouncedSearch}
/>
) : (
<span className="text-slate-400">
No description
</span>
)}
</div>
</div>
);
}}
>
<Pencil2Icon className="h-4 w-4" />
</Button>
</TooltipTrigger>
<TooltipContent>Open in Editor</TooltipContent>
</Tooltip>
</TooltipProvider>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
size="icon"
variant="outline"
onClick={(event) => {
handleIconClick(
event,
`/workflows/${workflow.workflow_permanent_id}/run`,
);
}}
>
<PlayIcon className="h-4 w-4" />
</Button>
</TooltipTrigger>
<TooltipContent>Create New Run</TooltipContent>
</Tooltip>
</TooltipProvider>
<WorkflowActions workflow={workflow} />
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
size="icon"
variant="outline"
onClick={() =>
setOpenWorkflowId(
workflow.workflow_permanent_id,
)
}
disabled={
!workflow.workflow_definition.parameters.some(
(p) => p.parameter_type !== "output",
)
}
>
<MixerHorizontalIcon className="h-4 w-4" />
</Button>
</TooltipTrigger>
<TooltipContent>View Parameters</TooltipContent>
</Tooltip>
</TooltipProvider>
</div>
</TableCell>
</TableRow>
})}
</div>
</div>
</TableCell>
</TableRow>
)}
</React.Fragment>
);
})
)}
</TableBody>
</Table>
<WorkflowParametersDialog
open={openWorkflowId !== null}
onOpenChange={(open) => {
if (!open) setOpenWorkflowId(null);
}}
workflowId={openWorkflowId}
workflows={workflows}
/>
<div className="relative px-3 py-3">
<div className="absolute left-3 top-1/2 flex -translate-y-1/2 items-center gap-2 text-sm">
<span className="text-slate-400">Items per page</span>
@@ -403,6 +772,19 @@ function Workflows() {
</Pagination>
</div>
</div>
{/* Folder Dialogs */}
<CreateFolderDialog
open={isCreateFolderOpen}
onOpenChange={setIsCreateFolderOpen}
/>
<ViewAllFoldersDialog
open={isViewAllFoldersOpen}
onOpenChange={setIsViewAllFoldersOpen}
selectedFolderId={selectedFolderId}
onFolderSelect={setSelectedFolderId}
/>
<WorkflowTemplates />
</div>
</div>

View File

@@ -0,0 +1,101 @@
import { useState } from "react";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { Textarea } from "@/components/ui/textarea";
import { useCreateFolderMutation } from "../hooks/useFolderMutations";
interface CreateFolderDialogProps {
open: boolean;
onOpenChange: (open: boolean) => void;
}
/**
 * Modal dialog for creating a workflow folder.
 *
 * Collects a required title and optional description, submits via
 * useCreateFolderMutation, and resets its form whenever the dialog closes.
 */
function CreateFolderDialog({ open, onOpenChange }: CreateFolderDialogProps) {
  const [title, setTitle] = useState("");
  const [description, setDescription] = useState("");
  const createFolderMutation = useCreateFolderMutation();

  // Clears both form fields back to their initial state.
  const resetForm = () => {
    setTitle("");
    setDescription("");
  };

  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!title.trim()) return;
    try {
      await createFolderMutation.mutateAsync({
        title: title.trim(),
        description: description.trim() || null,
      });
    } catch {
      // The mutation hook is expected to surface its own error feedback —
      // TODO confirm. Catching here prevents an unhandled promise rejection
      // and keeps the dialog open (form intact) so the user can retry.
      return;
    }
    // Only reset and close once creation succeeded.
    resetForm();
    onOpenChange(false);
  };

  const handleOpenChange = (open: boolean) => {
    onOpenChange(open);
    if (!open) {
      // Discard any in-progress input when the dialog is dismissed.
      resetForm();
    }
  };

  return (
    <Dialog open={open} onOpenChange={handleOpenChange}>
      <DialogContent>
        <DialogHeader>
          <DialogTitle>Create New Folder</DialogTitle>
          <DialogDescription>
            Create a folder to organize your workflows.
          </DialogDescription>
        </DialogHeader>
        <form onSubmit={handleSubmit}>
          <div className="grid gap-4 py-4">
            <div className="grid gap-2">
              <Label htmlFor="folder-title">Title</Label>
              <Input
                id="folder-title"
                value={title}
                onChange={(e) => setTitle(e.target.value)}
                placeholder="e.g., Production Workflows"
                autoFocus
              />
            </div>
            <div className="grid gap-2">
              <Label htmlFor="folder-description">Description (optional)</Label>
              <Textarea
                id="folder-description"
                value={description}
                onChange={(e) => setDescription(e.target.value)}
                placeholder="Add a description..."
                rows={3}
              />
            </div>
          </div>
          <DialogFooter>
            <Button
              type="button"
              variant="outline"
              onClick={() => handleOpenChange(false)}
            >
              Cancel
            </Button>
            <Button
              type="submit"
              disabled={!title.trim() || createFolderMutation.isPending}
            >
              Create Folder
            </Button>
          </DialogFooter>
        </form>
      </DialogContent>
    </Dialog>
  );
}
export { CreateFolderDialog };

View File

@@ -0,0 +1,118 @@
import { GarbageIcon } from "@/components/icons/GarbageIcon";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogClose,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "@/components/ui/dialog";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { Label } from "@/components/ui/label";
import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group";
import { ReloadIcon } from "@radix-ui/react-icons";
import { useEffect, useState } from "react";
import { useDeleteFolderMutation } from "../hooks/useFolderMutations";
type Props = {
folderId: string;
folderTitle: string;
};
function DeleteFolderButton({ folderId, folderTitle }: Props) {
const [deleteOption, setDeleteOption] = useState<
"folder_only" | "folder_and_workflows"
>("folder_only");
const [isDialogOpen, setIsDialogOpen] = useState(false);
const {
mutate: deleteFolder,
isPending: isDeleteFolderPending,
isSuccess: isDeleteFolderSuccess,
} = useDeleteFolderMutation();
// Close dialog when deletion succeeds
useEffect(() => {
if (isDeleteFolderSuccess) setIsDialogOpen(false);
}, [isDeleteFolderSuccess]);
const handleDelete = () => {
const deleteWorkflows = deleteOption === "folder_and_workflows";
deleteFolder({ folderId, folderTitle, deleteWorkflows });
};
return (
<Dialog open={isDialogOpen} onOpenChange={setIsDialogOpen}>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<DialogTrigger asChild>
<button
onClick={(e) => e.stopPropagation()}
className="rounded p-1.5 text-red-400 transition-colors hover:bg-red-500/20 hover:text-red-300"
aria-label="Delete folder"
>
<GarbageIcon className="h-4 w-4" />
</button>
</DialogTrigger>
</TooltipTrigger>
<TooltipContent>Delete Folder</TooltipContent>
</Tooltip>
</TooltipProvider>
<DialogContent onCloseAutoFocus={(e) => e.preventDefault()}>
<DialogHeader>
<DialogTitle>Delete Folder: {folderTitle}</DialogTitle>
<DialogDescription>
Choose how you want to delete this folder.
</DialogDescription>
</DialogHeader>
<RadioGroup
value={deleteOption}
onValueChange={(value) =>
setDeleteOption(value as typeof deleteOption)
}
>
<div className="flex items-center space-x-2">
<RadioGroupItem value="folder_only" id="folder_only" />
<Label htmlFor="folder_only" className="font-normal">
Delete folder only (workflows will be unassigned)
</Label>
</div>
<div className="flex items-center space-x-2">
<RadioGroupItem
value="folder_and_workflows"
id="folder_and_workflows"
/>
<Label htmlFor="folder_and_workflows" className="font-normal">
Delete folder and all workflows inside it
</Label>
</div>
</RadioGroup>
<DialogFooter>
<DialogClose asChild>
<Button variant="secondary">Cancel</Button>
</DialogClose>
<Button
variant="destructive"
onClick={handleDelete}
disabled={isDeleteFolderPending}
>
{isDeleteFolderPending && (
<ReloadIcon className="mr-2 h-4 w-4 animate-spin" />
)}
Delete
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
);
}
export { DeleteFolderButton };

View File

@@ -0,0 +1,110 @@
import { useEffect, useState } from "react";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { Textarea } from "@/components/ui/textarea";
import { useUpdateFolderMutation } from "../hooks/useFolderMutations";
import type { Folder } from "../types/folderTypes";
/** Props for the edit-folder modal. */
interface EditFolderDialogProps {
  // Controlled open state of the dialog.
  open: boolean;
  // Called with the new open state when the dialog requests a change.
  onOpenChange: (open: boolean) => void;
  // The folder whose title/description are being edited.
  folder: Folder;
}
/**
 * Modal form for editing a folder's title and description.
 * Local form state is re-seeded from the `folder` prop each time the
 * dialog opens, so stale edits from a previous session are discarded.
 */
function EditFolderDialog({
  open,
  onOpenChange,
  folder,
}: EditFolderDialogProps) {
  const [title, setTitle] = useState(folder.title);
  const [description, setDescription] = useState(folder.description || "");
  const updateFolderMutation = useUpdateFolderMutation();
  // Reset form when folder changes or dialog opens
  useEffect(() => {
    if (open) {
      setTitle(folder.title);
      setDescription(folder.description || "");
    }
  }, [open, folder]);
  /**
   * Persist the edits, then close the dialog on success. On failure the
   * dialog stays open so the user can retry without losing their input.
   */
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();
    if (!title.trim()) return;
    try {
      await updateFolderMutation.mutateAsync({
        folderId: folder.folder_id,
        data: {
          title: title.trim(),
          description: description.trim() || null, // empty description stored as null
        },
      });
      onOpenChange(false);
    } catch {
      // useUpdateFolderMutation already shows a destructive toast in its
      // onError handler; catching here only prevents an unhandled promise
      // rejection from the rejected mutateAsync call.
    }
  };
  return (
    <Dialog open={open} onOpenChange={onOpenChange}>
      <DialogContent>
        <DialogHeader>
          <DialogTitle>Edit Folder</DialogTitle>
          <DialogDescription>
            Update the folder's title and description.
          </DialogDescription>
        </DialogHeader>
        <form onSubmit={handleSubmit}>
          <div className="grid gap-4 py-4">
            <div className="grid gap-2">
              <Label htmlFor="edit-folder-title">Title</Label>
              <Input
                id="edit-folder-title"
                value={title}
                onChange={(e) => setTitle(e.target.value)}
                placeholder="e.g., Production Workflows"
                autoFocus
              />
            </div>
            <div className="grid gap-2">
              <Label htmlFor="edit-folder-description">
                Description (optional)
              </Label>
              <Textarea
                id="edit-folder-description"
                value={description}
                onChange={(e) => setDescription(e.target.value)}
                placeholder="Add a description..."
                rows={3}
              />
            </div>
          </div>
          <DialogFooter>
            <Button
              type="button"
              variant="outline"
              onClick={() => onOpenChange(false)}
            >
              Cancel
            </Button>
            <Button
              type="submit"
              disabled={!title.trim() || updateFolderMutation.isPending}
            >
              Save Changes
            </Button>
          </DialogFooter>
        </form>
      </DialogContent>
    </Dialog>
  );
}
export { EditFolderDialog };

View File

@@ -0,0 +1,89 @@
import { FileIcon, Pencil1Icon } from "@radix-ui/react-icons";
import { cn } from "@/util/utils";
import type { Folder } from "../types/folderTypes";
import { DeleteFolderButton } from "./DeleteFolderButton";
import { EditFolderDialog } from "./EditFolderDialog";
import { useState } from "react";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
/** Props for a single folder card in the folder grid. */
interface FolderCardProps {
  // Folder to render (title + workflow count, with edit/delete actions).
  folder: Folder;
  // Whether this folder is the currently selected filter.
  isSelected: boolean;
  // Invoked when the card body is activated.
  onClick: () => void;
}
function FolderCard({ folder, isSelected, onClick }: FolderCardProps) {
const [isHovered, setIsHovered] = useState(false);
const [isEditDialogOpen, setIsEditDialogOpen] = useState(false);
return (
<>
<button
onClick={onClick}
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
className={cn(
"relative flex h-24 flex-col gap-3 rounded-lg border p-4 text-left transition-colors hover:border-blue-400",
isSelected
? "border-blue-400 bg-blue-50 ring-2 ring-blue-400/20 dark:bg-blue-950/20"
: "border-slate-200 bg-slate-elevation1 dark:border-slate-700",
)}
>
<div className="flex items-start gap-3">
<div className="mt-0.5">
<FileIcon className="h-5 w-5 text-blue-400" />
</div>
<div className="flex min-w-0 flex-1 flex-col gap-1">
<div className="flex items-start justify-between gap-2">
<h3 className="truncate text-sm font-medium text-slate-900 dark:text-slate-100">
{folder.title}
</h3>
<div
onClick={(e) => e.stopPropagation()}
className={cn(
"flex gap-1 transition-opacity",
isHovered ? "opacity-100" : "opacity-0",
)}
>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<button
onClick={() => setIsEditDialogOpen(true)}
className="rounded p-1.5 text-slate-400 transition-colors hover:bg-slate-500/20 hover:text-slate-300"
aria-label="Edit folder"
>
<Pencil1Icon className="h-4 w-4" />
</button>
</TooltipTrigger>
<TooltipContent>Edit Folder</TooltipContent>
</Tooltip>
</TooltipProvider>
<DeleteFolderButton
folderId={folder.folder_id}
folderTitle={folder.title}
/>
</div>
</div>
<p className="text-xs text-slate-500 dark:text-slate-400">
{folder.workflow_count}{" "}
{folder.workflow_count === 1 ? "workflow" : "workflows"}
</p>
</div>
</div>
</button>
<EditFolderDialog
open={isEditDialogOpen}
onOpenChange={setIsEditDialogOpen}
folder={folder}
/>
</>
);
}
export { FolderCard };

View File

@@ -0,0 +1,32 @@
/** Props for the search-match highlighter. */
interface HighlightTextProps {
  // Full text to render.
  text: string;
  // Case-insensitive substring to highlight; no-op when absent/blank.
  query?: string;
}
/**
 * Renders `text` with every case-insensitive occurrence of `query`
 * wrapped in a highlighted span. Returns the plain text unchanged when
 * the query is missing or whitespace-only.
 */
function HighlightText({ text, query }: HighlightTextProps) {
  const trimmed = query?.trim();
  if (!trimmed) {
    return <>{text}</>;
  }
  // Escape regex metacharacters so the query is matched literally.
  const escaped = trimmed.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  // The capturing group keeps matched substrings in split()'s output.
  const segments = text.split(new RegExp(`(${escaped})`, "gi"));
  const needle = trimmed.toLowerCase();
  return (
    <>
      {segments.map((segment, index) => {
        const isMatch = segment.toLowerCase() === needle;
        return isMatch ? (
          <span key={index} className="rounded bg-blue-500/30 px-0.5 text-blue-400">
            {segment}
          </span>
        ) : (
          <span key={index}>{segment}</span>
        );
      })}
    </>
  );
}
export { HighlightText };

View File

@@ -0,0 +1,147 @@
import { useState, useMemo } from "react";
import { MagnifyingGlassIcon, ReloadIcon } from "@radix-ui/react-icons";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { Input } from "@/components/ui/input";
import { FolderCard } from "./FolderCard";
import { useInfiniteFoldersQuery } from "../hooks/useInfiniteFoldersQuery";
import { handleInfiniteScroll } from "@/util/utils";
import { useDebounce } from "use-debounce";
import { Skeleton } from "@/components/ui/skeleton";
/** Props for the browse-all-folders modal. */
interface ViewAllFoldersDialogProps {
  // Controlled open state of the dialog.
  open: boolean;
  // Called when the dialog requests an open/close change.
  onOpenChange: (open: boolean) => void;
  // Currently active folder filter, or null for "no filter".
  selectedFolderId: string | null;
  // Called with the chosen folder id (null clears the filter).
  onFolderSelect: (folderId: string | null) => void;
}
/**
 * Modal listing every folder in a searchable, infinitely scrolling grid.
 * Clicking a card toggles it as the active folder filter and closes the
 * dialog; the search box is debounced and reset on dismiss.
 */
function ViewAllFoldersDialog({
  open,
  onOpenChange,
  selectedFolderId,
  onFolderSelect,
}: ViewAllFoldersDialogProps) {
  const [search, setSearch] = useState("");
  // Debounce keystrokes so the server sees at most one query per 500ms.
  const [debouncedSearch] = useDebounce(search, 500);
  const isTyping = search !== debouncedSearch;
  const { data, fetchNextPage, hasNextPage, isFetchingNextPage, isFetching } =
    useInfiniteFoldersQuery({
      search: debouncedSearch,
      page_size: 20, // Load 20 to ensure scrollbar appears
    });
  // Flatten pages into a single array
  const folders = useMemo(() => {
    return data?.pages.flatMap((page) => page) ?? [];
  }, [data]);
  // Clicking the already-selected folder clears the filter (toggle semantics).
  const handleFolderClick = (folderId: string) => {
    onFolderSelect(selectedFolderId === folderId ? null : folderId);
    onOpenChange(false);
    setSearch("");
  };
  // Clear the search box whenever the dialog is dismissed.
  const handleOpenChange = (open: boolean) => {
    onOpenChange(open);
    if (!open) {
      setSearch("");
    }
  };
  return (
    <Dialog open={open} onOpenChange={handleOpenChange}>
      <DialogContent className="max-h-[90vh] max-w-4xl">
        <DialogHeader>
          <DialogTitle>All Folders</DialogTitle>
          <DialogDescription>
            Browse and select from all folders. Scroll to load more.
          </DialogDescription>
        </DialogHeader>
        <div className="relative mb-4">
          <MagnifyingGlassIcon className="absolute left-3 top-1/2 h-4 w-4 -translate-y-1/2 text-slate-400" />
          <Input
            type="text"
            placeholder="Search folders..."
            value={search}
            onChange={(e) => setSearch(e.target.value)}
            className="pl-10"
          />
        </div>
        <div
          className="max-h-[500px] overflow-y-auto pr-2 [&::-webkit-scrollbar-thumb]:rounded-full [&::-webkit-scrollbar-thumb]:border-2 [&::-webkit-scrollbar-thumb]:border-slate-100 [&::-webkit-scrollbar-thumb]:bg-slate-300 dark:[&::-webkit-scrollbar-thumb]:border-slate-800 dark:[&::-webkit-scrollbar-thumb]:bg-slate-600 [&::-webkit-scrollbar-track]:bg-slate-100 dark:[&::-webkit-scrollbar-track]:bg-slate-800 [&::-webkit-scrollbar]:w-2"
          onScroll={(e) =>
            handleInfiniteScroll(
              e,
              fetchNextPage,
              hasNextPage,
              isFetchingNextPage,
            )
          }
        >
          {(isFetching || isTyping) && folders.length === 0 ? (
            // Show 12 skeleton cards while typing or fetching
            <div className="grid grid-cols-3 gap-4">
              {Array.from({ length: 12 }).map((_, index) => (
                <div
                  key={`skeleton-${index}`}
                  className="flex h-24 flex-col gap-3 rounded-lg border border-slate-200 bg-slate-elevation1 p-4 dark:border-slate-700"
                >
                  <div className="flex items-start gap-3">
                    <Skeleton className="mt-0.5 h-5 w-5" />
                    <div className="flex min-w-0 flex-1 flex-col gap-2">
                      <Skeleton className="h-4 w-3/4" />
                      <Skeleton className="h-3 w-1/2" />
                    </div>
                  </div>
                </div>
              ))}
            </div>
          ) : folders.length === 0 ? (
            <div className="flex flex-col items-center justify-center py-12 text-slate-400">
              <p>No folders found</p>
            </div>
          ) : (
            <>
              <div className="grid grid-cols-3 gap-4">
                {folders.map((folder) => (
                  <FolderCard
                    key={folder.folder_id}
                    folder={folder}
                    isSelected={selectedFolderId === folder.folder_id}
                    onClick={() => handleFolderClick(folder.folder_id)}
                  />
                ))}
              </div>
              {isFetchingNextPage && (
                <div className="mt-4 flex items-center justify-center py-4">
                  <ReloadIcon className="mr-2 h-4 w-4 animate-spin" />
                  <span className="text-sm text-slate-400">
                    Loading more folders...
                  </span>
                </div>
              )}
              {/* "> 20" means more than one page was loaded — presumably intended
                  to suppress the footer for single-page result sets; confirm. */}
              {!hasNextPage && folders.length > 20 && (
                <div className="mt-4 flex items-center justify-center py-4">
                  <span className="text-sm text-slate-400">
                    All folders loaded
                  </span>
                </div>
              )}
            </>
          )}
        </div>
      </DialogContent>
    </Dialog>
  );
}
export { ViewAllFoldersDialog };

View File

@@ -0,0 +1,172 @@
import { useState, useMemo } from "react";
import {
CheckIcon,
Cross2Icon,
FileIcon,
MagnifyingGlassIcon,
ReloadIcon,
} from "@radix-ui/react-icons";
import { Button } from "@/components/ui/button";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { Input } from "@/components/ui/input";
import { cn } from "@/util/utils";
import { useInfiniteFoldersQuery } from "../hooks/useInfiniteFoldersQuery";
import { useUpdateWorkflowFolderMutation } from "../hooks/useFolderMutations";
import { handleInfiniteScroll } from "@/util/utils";
import { useDebounce } from "use-debounce";
import { Skeleton } from "@/components/ui/skeleton";
/** Props for the move-workflow-to-folder popover. */
interface WorkflowFolderSelectorProps {
  // Permanent id of the workflow being moved.
  workflowPermanentId: string;
  // The workflow's current folder, or null when unassigned.
  currentFolderId: string | null;
}
/**
 * Popover for assigning a workflow to a folder (or removing it from one).
 * Folder list is searchable (debounced) and infinitely scrolling; the
 * workflow's current folder is checked and disabled.
 */
function WorkflowFolderSelector({
  workflowPermanentId,
  currentFolderId,
}: WorkflowFolderSelectorProps) {
  const [open, setOpen] = useState(false);
  const [search, setSearch] = useState("");
  // Debounce keystrokes so the folder search hits the server at most every 500ms.
  const [debouncedSearch] = useDebounce(search, 500);
  const isTyping = search !== debouncedSearch;
  const { data, fetchNextPage, hasNextPage, isFetchingNextPage, isFetching } =
    useInfiniteFoldersQuery({
      search: debouncedSearch,
      page_size: 20,
    });
  // Flatten pages into a single array
  const folders = useMemo(() => {
    return data?.pages.flatMap((page) => page) ?? [];
  }, [data]);
  const updateFolderMutation = useUpdateWorkflowFolderMutation();
  // Assign the workflow to `folderId` (null unassigns), then close and reset.
  const handleFolderSelect = async (folderId: string | null) => {
    await updateFolderMutation.mutateAsync({
      workflowPermanentId,
      data: { folder_id: folderId },
    });
    setOpen(false);
    setSearch("");
  };
  return (
    <Popover open={open} onOpenChange={setOpen}>
      <PopoverTrigger asChild>
        <Button
          variant="ghost"
          size="icon"
          className={cn(
            "h-8 w-8",
            currentFolderId ? "text-blue-400" : "text-slate-400",
          )}
          title="Move to folder"
        >
          <FileIcon className="h-4 w-4" />
        </Button>
      </PopoverTrigger>
      <PopoverContent className="w-80 p-0" align="end">
        <div className="border-b p-3">
          <h4 className="mb-2 text-sm font-medium">Move to folder</h4>
          <div className="relative">
            <MagnifyingGlassIcon className="absolute left-2 top-1/2 h-4 w-4 -translate-y-1/2 text-slate-400" />
            <Input
              placeholder="Search folders..."
              value={search}
              onChange={(e) => setSearch(e.target.value)}
              className="h-8 pl-8"
              autoFocus
            />
          </div>
        </div>
        <div
          className="max-h-[300px] overflow-y-auto [&::-webkit-scrollbar-thumb]:rounded-full [&::-webkit-scrollbar-thumb]:border-2 [&::-webkit-scrollbar-thumb]:border-slate-100 [&::-webkit-scrollbar-thumb]:bg-slate-300 dark:[&::-webkit-scrollbar-thumb]:border-slate-800 dark:[&::-webkit-scrollbar-thumb]:bg-slate-600 [&::-webkit-scrollbar-track]:bg-slate-100 dark:[&::-webkit-scrollbar-track]:bg-slate-800 [&::-webkit-scrollbar]:w-2"
          onScroll={(e) =>
            handleInfiniteScroll(
              e,
              fetchNextPage,
              hasNextPage,
              isFetchingNextPage,
            )
          }
        >
          {currentFolderId && (
            <button
              onClick={() => handleFolderSelect(null)}
              className="flex w-full items-center justify-between border-b px-3 py-2 text-left text-sm transition-colors hover:bg-slate-50 dark:hover:bg-slate-800"
            >
              <div className="flex items-center gap-2">
                <Cross2Icon className="h-4 w-4 text-red-400" />
                <span>Remove from folder</span>
              </div>
            </button>
          )}
          {(isFetching || isTyping) && folders.length === 0 ? (
            // Show 8 skeleton rows while typing or fetching
            <>
              {Array.from({ length: 8 }).map((_, index) => (
                <div
                  key={`skeleton-${index}`}
                  className="flex w-full items-center gap-2 px-3 py-2"
                >
                  <Skeleton className="h-4 w-4" />
                  <div className="flex flex-1 flex-col gap-1">
                    <Skeleton className="h-4 w-3/4" />
                    <Skeleton className="h-3 w-1/2" />
                  </div>
                </div>
              ))}
            </>
          ) : folders.length === 0 ? (
            <div className="px-3 py-8 text-center text-sm text-slate-400">
              No folders found
            </div>
          ) : (
            <>
              {folders.map((folder) => {
                const isCurrentFolder = currentFolderId === folder.folder_id;
                return (
                  <button
                    key={folder.folder_id}
                    onClick={() => handleFolderSelect(folder.folder_id)}
                    disabled={isCurrentFolder}
                    className="flex w-full items-center justify-between px-3 py-2 text-left text-sm transition-colors hover:bg-slate-50 disabled:opacity-50 dark:hover:bg-slate-800"
                  >
                    <div className="flex items-center gap-2">
                      <FileIcon className="h-4 w-4 text-blue-400" />
                      <div className="flex flex-col">
                        <span>{folder.title}</span>
                        {folder.description && (
                          <span className="text-xs text-slate-400">
                            {folder.description}
                          </span>
                        )}
                      </div>
                    </div>
                    {isCurrentFolder && (
                      <CheckIcon className="h-4 w-4 text-blue-400" />
                    )}
                  </button>
                );
              })}
              {isFetchingNextPage && (
                <div className="flex items-center justify-center py-2">
                  <ReloadIcon className="h-3 w-3 animate-spin text-slate-400" />
                </div>
              )}
            </>
          )}
        </div>
      </PopoverContent>
    </Popover>
  );
}
export { WorkflowFolderSelector };

View File

@@ -1,179 +0,0 @@
import { useMemo } from "react";
import { ParametersDialogBase } from "./ParametersDialogBase";
import {
WorkflowApiResponse,
WorkflowParameter,
WorkflowParameterTypes,
Parameter,
CredentialParameter,
AWSSecretParameter,
OnePasswordCredentialParameter,
AzureVaultCredentialParameter,
BitwardenLoginCredentialParameter,
BitwardenSensitiveInformationParameter,
BitwardenCreditCardDataParameter,
ContextParameter,
} from "../types/workflowTypes";
/** Props for the workflow-level parameters dialog. */
type Props = {
  open: boolean;
  onOpenChange: (open: boolean) => void;
  // Permanent id of the workflow to show parameters for; null shows none.
  workflowId: string | null;
  // All workflows known to the caller; the target is looked up here.
  workflows: Array<WorkflowApiResponse>;
};
/**
 * Returns a stable identifier for any parameter variant.
 * Each variant carries its own id field; probe them in a fixed order and
 * return the first one that is present and truthy, falling back to the
 * parameter's key when no id field is set.
 */
function getParameterId(param: Parameter): string {
  const idFields = [
    "workflow_parameter_id",
    "credential_parameter_id",
    "aws_secret_parameter_id",
    "onepassword_credential_parameter_id",
    "azure_vault_credential_parameter_id",
    "bitwarden_login_credential_parameter_id",
    "bitwarden_sensitive_information_parameter_id",
    "bitwarden_credit_card_data_parameter_id",
    "output_parameter_id",
  ] as const;
  const record = param as unknown as Record<string, string | null | undefined>;
  for (const field of idFields) {
    const id = record[field];
    if (id) {
      return id;
    }
  }
  return param.key;
}
/** The parameter's discriminant doubles as its user-facing type label. */
function getParameterDisplayType(param: Parameter): string {
  const { parameter_type: displayType } = param;
  return displayType;
}
/**
 * Best-effort, non-secret display string for a parameter's value.
 * Secret-backed parameters only expose their reference/identifier —
 * never the secret material itself. Returns null when there is nothing
 * meaningful to show for the variant.
 */
function getParameterDisplayValue(param: Parameter): string | null {
  const kind = param.parameter_type;

  if (kind === "workflow") {
    const value = (param as WorkflowParameter).default_value;
    try {
      if (value === null || value === undefined) {
        return "";
      }
      return typeof value === "string" ? value : JSON.stringify(value);
    } catch {
      // e.g. circular structures that JSON.stringify cannot serialize.
      return String(value);
    }
  }

  if (kind === "credential") {
    // Show referenced credential id; do not reveal secrets
    return "credential_id" in param
      ? String((param as CredentialParameter).credential_id)
      : null;
  }

  if (kind === "aws_secret") {
    // Show the AWS secret key reference only
    return "aws_key" in param
      ? String((param as AWSSecretParameter).aws_key)
      : null;
  }

  if (kind === "onepassword") {
    const p = param as OnePasswordCredentialParameter;
    return p.vault_id && p.item_id ? `${p.vault_id} / ${p.item_id}` : null;
  }

  if (kind === "azure_vault_credential") {
    const p = param as AzureVaultCredentialParameter;
    return p.vault_name ? `${p.vault_name}` : null;
  }

  if (kind === "bitwarden_login_credential") {
    const p = param as BitwardenLoginCredentialParameter;
    return p.bitwarden_item_id ?? p.bitwarden_collection_id ?? null;
  }

  if (kind === "bitwarden_sensitive_information") {
    const p = param as BitwardenSensitiveInformationParameter;
    return p.bitwarden_identity_key ?? null;
  }

  if (kind === "bitwarden_credit_card_data") {
    const p = param as BitwardenCreditCardDataParameter;
    return p.bitwarden_item_id ?? null;
  }

  if (kind === "context") {
    const p = param as ContextParameter;
    if ("value" in p && p.value !== undefined) {
      try {
        return typeof p.value === "string" ? p.value : JSON.stringify(p.value);
      } catch {
        return String(p.value);
      }
    }
    return null;
  }

  return null;
}
// Row rendering moved inside component to access local reveal state
/**
 * Dialog showing workflow-level parameters for the selected workflow.
 * Only the parameter types the dialog knows how to render are included;
 * each is mapped to a display row (id, key, description, type, value).
 */
export function WorkflowParametersDialog({
  open,
  onOpenChange,
  workflowId,
  workflows,
}: Props) {
  // Resolve the target workflow by its permanent id.
  const workflow = useMemo(
    () => workflows?.find((w) => w.workflow_permanent_id === workflowId),
    [workflows, workflowId],
  );
  const items = useMemo(() => {
    if (!workflow) {
      return [];
    }
    // Parameter types this dialog can display.
    const displayedTypes: string[] = [
      WorkflowParameterTypes.Workflow,
      "credential",
      "aws_secret",
      "onepassword",
      "azure_vault_credential",
      "bitwarden_login_credential",
      "bitwarden_sensitive_information",
      "bitwarden_credit_card_data",
      "context",
    ];
    const params = workflow.workflow_definition.parameters.filter((p) =>
      displayedTypes.includes(p.parameter_type),
    ) as Parameter[];
    return params.map((param) => ({
      id: getParameterId(param),
      key: param.key,
      description:
        "description" in param ? param.description ?? undefined : undefined,
      type: getParameterDisplayType(param),
      value: getParameterDisplayValue(param),
    }));
  }, [workflow]);
  return (
    <ParametersDialogBase
      open={open}
      onOpenChange={onOpenChange}
      title="Parameters"
      sectionLabel="Workflow-level parameters"
      items={items}
    />
  );
}

View File

@@ -0,0 +1,128 @@
import { useQueryClient } from "@tanstack/react-query";
import { useEffect, useRef, useState } from "react";
import { toast } from "@/components/ui/use-toast";
import { WorkflowApiResponse } from "../types/workflowTypes";
import { useActiveImportsQuery } from "./useActiveImportsQuery";
/**
 * Polls the active-imports query (every 3s while enabled) and converts
 * status transitions into user-facing toasts:
 *  - a workflow that was "importing" and disappears from the list is
 *    treated as a successful import;
 *  - a transition from "importing" to "import_failed" is reported as a
 *    failure (with the server-provided import_error when available).
 * Polling self-disables once nothing is importing. Returns the current
 * active imports plus `startPolling`, which callers invoke right after
 * kicking off a new import.
 */
export function useActiveImportsPolling() {
  const queryClient = useQueryClient();
  const previousCountRef = useRef<number>(0);
  const [shouldPoll, setShouldPoll] = useState(true); // Start with true to check on mount
  const justStartedPollingRef = useRef(false); // Prevent immediate stop after starting
  const seenFailuresRef = useRef<Set<string>>(new Set()); // Track failures we've already shown
  const seenCompletionsRef = useRef<Set<string>>(new Set()); // Track completions we've already shown
  const previousImportsRef = useRef<Map<string, WorkflowApiResponse>>(
    new Map(),
  ); // Track previous state
  const { data: activeImports = [] } = useActiveImportsQuery({
    enabled: shouldPoll,
    refetchInterval: shouldPoll ? 3000 : false,
  });
  // Monitor for completed/failed imports and invalidate workflows when count decreases
  useEffect(() => {
    const currentCount = activeImports.length;
    const previousCount = previousCountRef.current;
    const importingCount = activeImports.filter(
      (imp) => imp.status === "importing",
    ).length;
    // Reset completion/failure tracking when an import restarts
    activeImports.forEach((imp) => {
      if (imp.status === "importing") {
        seenCompletionsRef.current.delete(imp.workflow_permanent_id);
        seenFailuresRef.current.delete(imp.workflow_permanent_id);
      }
    });
    // Check for status changes and disappeared workflows
    const currentPermanentIds = new Set(
      activeImports.map((imp) => imp.workflow_permanent_id),
    );
    // Check for workflows that disappeared (importing -> completed successfully)
    previousImportsRef.current.forEach((prevImport, permanentId) => {
      // If it was importing and now it's gone, it completed successfully!
      if (
        prevImport.status === "importing" &&
        !currentPermanentIds.has(permanentId) &&
        !seenCompletionsRef.current.has(permanentId)
      ) {
        seenCompletionsRef.current.add(permanentId);
        toast({
          variant: "success",
          title: "Workflow imported",
          description: `Successfully imported ${prevImport.title || "workflow"}`,
        });
        // Refresh workflows to show new workflow
        queryClient.invalidateQueries({ queryKey: ["workflows"] });
      }
    });
    // Check for failed imports (status changed from importing → import_failed)
    activeImports.forEach((imp) => {
      const previousImport = previousImportsRef.current.get(
        imp.workflow_permanent_id,
      );
      // Only show toast if we SAW the transition from importing → import_failed
      if (
        imp.status === "import_failed" &&
        previousImport?.status === "importing" &&
        !seenFailuresRef.current.has(imp.workflow_permanent_id)
      ) {
        seenFailuresRef.current.add(imp.workflow_permanent_id);
        toast({
          variant: "destructive",
          title: "Import failed",
          description:
            imp.import_error || `Failed to import ${imp.title || "workflow"}`,
        });
        // Refresh workflows to update UI
        queryClient.invalidateQueries({ queryKey: ["workflows"] });
      }
    });
    // Update previous imports map for next comparison
    previousImportsRef.current = new Map(
      activeImports.map((imp) => [imp.workflow_permanent_id, imp]),
    );
    // If we have active IMPORTING imports, make sure polling is enabled
    if (importingCount > 0 && !shouldPoll) {
      setShouldPoll(true);
    }
    // Stop polling if there are no IMPORTING imports
    // BUT don't stop if we just started polling (to avoid race condition)
    if (importingCount === 0 && shouldPoll && !justStartedPollingRef.current) {
      setShouldPoll(false);
    }
    // Clear the "just started" flag once we have imports or if count changed
    if (
      justStartedPollingRef.current &&
      (currentCount > 0 || currentCount !== previousCount)
    ) {
      justStartedPollingRef.current = false;
    }
    previousCountRef.current = currentCount;
  }, [activeImports, queryClient, shouldPoll]);
  // Function to start polling (called when a new import starts)
  const startPolling = () => {
    justStartedPollingRef.current = true; // Set flag to prevent immediate stop
    setShouldPoll(true);
    // Force refetch immediately to get the latest state
    queryClient.refetchQueries({ queryKey: ["active-imports"] });
    queryClient.refetchQueries({ queryKey: ["workflows"] });
  };
  return { activeImports, startPolling };
}

View File

@@ -0,0 +1,38 @@
import { getClient } from "@/api/AxiosClient";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
import { useQuery } from "@tanstack/react-query";
import { WorkflowApiResponse } from "../types/workflowTypes";
/** Options for the active-imports polling query. */
type UseActiveImportsQueryParams = {
  // Whether the query should run at all (default true).
  enabled?: boolean;
  // Polling interval in ms, or false to disable polling (default false).
  refetchInterval?: number | false;
};
/**
 * Query for workflows that are mid-import or whose import failed.
 * Intended for polling: data is always treated as stale, and polling
 * continues while the tab is in the background.
 */
export function useActiveImportsQuery({
  enabled = true,
  refetchInterval = false,
}: UseActiveImportsQueryParams = {}) {
  const credentialGetter = useCredentialGetter();

  const fetchActiveImports = async (): Promise<WorkflowApiResponse[]> => {
    const client = await getClient(credentialGetter);
    const { data } = await client.get<WorkflowApiResponse[]>("/workflows", {
      params: {
        status: ["importing", "import_failed"],
        page: 1,
        page_size: 20,
      },
      paramsSerializer: {
        indexes: null, // Remove brackets from array params: status=a&status=b instead of status[]=a&status[]=b
      },
    });
    return data;
  };

  return useQuery({
    queryKey: ["active-imports"],
    queryFn: fetchActiveImports,
    enabled,
    refetchInterval,
    refetchIntervalInBackground: true,
    staleTime: 0, // Always consider data stale so it refetches immediately
  });
}

View File

@@ -0,0 +1,127 @@
import { useMutation, useQueryClient } from "@tanstack/react-query";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
import { getClient } from "@/api/AxiosClient";
import { toast } from "@/components/ui/use-toast";
import type {
Folder,
FolderCreate,
FolderUpdate,
UpdateWorkflowFolderRequest,
} from "../types/folderTypes";
/**
 * Creates a new folder. Invalidates the folder list on success and
 * surfaces a destructive toast on failure.
 */
function useCreateFolderMutation() {
  const credentialGetter = useCredentialGetter();
  const queryClient = useQueryClient();

  return useMutation({
    mutationFn: async (data: FolderCreate) => {
      const client = await getClient(credentialGetter);
      const response = await client.post<Folder>("/folders", data);
      return response.data;
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: ["folders"] });
    },
    onError: (error: Error) => {
      toast({
        variant: "destructive",
        title: "Failed to create folder",
        description: error.message,
      });
    },
  });
}
/**
 * Updates a folder's title/description. Invalidates the folder list on
 * success and surfaces a destructive toast on failure.
 */
function useUpdateFolderMutation() {
  const credentialGetter = useCredentialGetter();
  const queryClient = useQueryClient();

  type UpdateFolderArgs = {
    folderId: string;
    data: FolderUpdate;
  };

  return useMutation({
    mutationFn: async ({ folderId, data }: UpdateFolderArgs) => {
      const client = await getClient(credentialGetter);
      const response = await client.put<Folder>(`/folders/${folderId}`, data);
      return response.data;
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: ["folders"] });
    },
    onError: (error: Error) => {
      toast({
        variant: "destructive",
        title: "Failed to update folder",
        description: error.message,
      });
    },
  });
}
/**
 * Deletes a folder. `deleteWorkflows` decides whether workflows inside it
 * are deleted as well or merely unassigned. `folderTitle` is part of the
 * call signature for callers' convenience but is not sent to the server.
 * Invalidates both folder and workflow lists on success.
 */
function useDeleteFolderMutation() {
  const credentialGetter = useCredentialGetter();
  const queryClient = useQueryClient();

  type DeleteFolderArgs = {
    folderId: string;
    folderTitle: string;
    deleteWorkflows: boolean;
  };

  return useMutation({
    mutationFn: async ({ folderId, deleteWorkflows }: DeleteFolderArgs) => {
      const client = await getClient(credentialGetter);
      return client.delete(`/folders/${folderId}`, {
        params: { delete_workflows: deleteWorkflows },
      });
    },
    onSuccess: () => {
      // A delete can change both the folder list and workflow assignments.
      queryClient.invalidateQueries({ queryKey: ["folders"] });
      queryClient.invalidateQueries({ queryKey: ["workflows"] });
    },
    onError: (error: Error) => {
      toast({
        variant: "destructive",
        title: "Failed to delete folder",
        description: error.message,
      });
    },
  });
}
function useUpdateWorkflowFolderMutation() {
const credentialGetter = useCredentialGetter();
const queryClient = useQueryClient();
return useMutation({
mutationFn: async ({
workflowPermanentId,
data,
}: {
workflowPermanentId: string;
data: UpdateWorkflowFolderRequest;
}) => {
const client = await getClient(credentialGetter);
return client.put(`/workflows/${workflowPermanentId}/folder`, data);
},
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: ["workflows"] });
queryClient.invalidateQueries({ queryKey: ["folders"] });
},
});
}
export {
useCreateFolderMutation,
useUpdateFolderMutation,
useDeleteFolderMutation,
useUpdateWorkflowFolderMutation,
};

View File

@@ -0,0 +1,38 @@
import { getClient } from "@/api/AxiosClient";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
import { useQuery } from "@tanstack/react-query";
import type { Folder } from "../types/folderTypes";
/** Pagination and search options for the folder list query. */
interface UseFoldersQueryParams {
  page?: number;
  page_size?: number;
  search?: string;
}
/** Fetches a single page of folders, optionally filtered by a search term. */
function useFoldersQuery(params?: UseFoldersQueryParams) {
  const credentialGetter = useCredentialGetter();

  return useQuery({
    queryKey: ["folders", params],
    queryFn: async () => {
      const client = await getClient(credentialGetter);
      // Only include parameters the caller actually provided.
      const query = new URLSearchParams();
      const { page, page_size, search } = params ?? {};
      if (page) {
        query.append("page", String(page));
      }
      if (page_size) {
        query.append("page_size", String(page_size));
      }
      if (search) {
        query.append("search", search);
      }
      const response = await client.get<Array<Folder>>("/folders", {
        params: query,
      });
      return response.data;
    },
  });
}
export { useFoldersQuery };

View File

@@ -0,0 +1,45 @@
import { getClient } from "@/api/AxiosClient";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
import { useInfiniteQuery } from "@tanstack/react-query";
import type { Folder } from "../types/folderTypes";
/** Page-size and search options for the infinite folder list query. */
interface UseInfiniteFoldersQueryParams {
  page_size?: number;
  search?: string;
}
/**
 * Infinite-scrolling folder list. A full page implies more results may
 * exist; a short page ends pagination.
 */
function useInfiniteFoldersQuery(params?: UseInfiniteFoldersQueryParams) {
  const credentialGetter = useCredentialGetter();

  return useInfiniteQuery({
    queryKey: ["folders", "infinite", params],
    initialPageParam: 1,
    queryFn: async ({ pageParam = 1 }) => {
      const client = await getClient(credentialGetter);
      const query = new URLSearchParams();
      query.append("page", String(pageParam));
      if (params?.page_size) {
        query.append("page_size", String(params.page_size));
      }
      if (params?.search) {
        query.append("search", params.search);
      }
      const response = await client.get<Array<Folder>>("/folders", {
        params: query,
      });
      return response.data;
    },
    // If the last page has items equal to page_size, there might be more.
    // NOTE(review): the 10 fallback assumes the server's default page size
    // is 10 — confirm against the /folders endpoint.
    getNextPageParam: (lastPage, allPages) =>
      lastPage.length === (params?.page_size || 10)
        ? allPages.length + 1
        : undefined,
  });
}
export { useInfiniteFoldersQuery };

View File

@@ -0,0 +1,23 @@
/** A folder as returned by the API: groups workflows within an organization. */
export interface Folder {
  folder_id: string;
  organization_id: string;
  title: string;
  description: string | null;
  // Number of workflows currently assigned to this folder.
  workflow_count: number;
  // ISO timestamp strings.
  created_at: string;
  modified_at: string;
}
/** Request payload for creating a folder. */
export interface FolderCreate {
  title: string;
  description?: string | null;
}
/** Partial request payload for updating a folder's title and/or description. */
export interface FolderUpdate {
  title?: string;
  description?: string | null;
}
/** Request payload for moving a workflow into a folder; null unassigns it. */
export interface UpdateWorkflowFolderRequest {
  folder_id: string | null;
}

View File

@@ -559,6 +559,8 @@ export type WorkflowApiResponse = {
ai_fallback: boolean | null;
run_sequentially: boolean | null;
sequential_key: string | null;
folder_id: string | null;
import_error: string | null;
};
export type WorkflowSettings = {

View File

@@ -67,3 +67,31 @@ export function formatDate(date: Date): string {
};
return date.toLocaleString("en-US", options);
}
/**
 * Shared infinite-scroll handler: requests the next page once the user
 * has scrolled past `scrollThreshold` of the container's total height.
 * @param event - React scroll event from the scrollable container
 * @param fetchNextPage - Fetches the next page of results
 * @param hasNextPage - Whether more pages remain to be fetched
 * @param isFetchingNextPage - Whether a page fetch is already in flight
 * @param scrollThreshold - Fraction of scroll height that triggers a fetch (default 0.8)
 */
export const handleInfiniteScroll = (
  event: React.UIEvent<HTMLDivElement>,
  fetchNextPage: () => void,
  hasNextPage: boolean,
  isFetchingNextPage: boolean,
  scrollThreshold: number = 0.8,
) => {
  const { scrollTop, clientHeight, scrollHeight } = event.currentTarget;
  const scrolledFraction = (scrollTop + clientHeight) / scrollHeight;
  const shouldFetch =
    scrolledFraction >= scrollThreshold && hasNextPage && !isFetchingNextPage;
  if (shouldFetch) {
    fetchNextPage();
  }
};

View File

@@ -27,6 +27,7 @@ from skyvern.forge.sdk.db.models import (
CredentialModel,
CredentialParameterModel,
DebugSessionModel,
FolderModel,
OnePasswordCredentialParameterModel,
OrganizationAuthTokenModel,
OrganizationBitwardenCollectionModel,
@@ -1405,6 +1406,7 @@ class AgentDB:
cache_key: str | None = None,
run_sequentially: bool = False,
sequential_key: str | None = None,
folder_id: str | None = None,
) -> Workflow:
async with self.Session() as session:
workflow = WorkflowModel(
@@ -1427,6 +1429,7 @@ class AgentDB:
cache_key=cache_key or DEFAULT_SCRIPT_RUN_ID,
run_sequentially=run_sequentially,
sequential_key=sequential_key,
folder_id=folder_id,
)
if workflow_permanent_id:
workflow.workflow_permanent_id = workflow_permanent_id
@@ -1605,14 +1608,15 @@ class AgentDB:
only_saved_tasks: bool = False,
only_workflows: bool = False,
search_key: str | None = None,
folder_id: str | None = None,
statuses: list[WorkflowStatus] | None = None,
) -> list[Workflow]:
"""
Get all workflows with the latest version for the organization.
Search semantics:
- If `search_key` is provided, its value is used as a unified search term for both
`workflows.title` and workflow parameter metadata (key, description, and default_value).
- If `search_key` is provided, its value is used as a unified search term for
`workflows.title`, `folders.title`, and workflow parameter metadata (key, description, and default_value).
- If `search_key` is not provided, no search filtering is applied.
- Parameter metadata search excludes soft-deleted parameter rows across parameter tables.
"""
@@ -1635,11 +1639,19 @@ class AgentDB:
)
.subquery()
)
main_query = select(WorkflowModel).join(
subquery,
(WorkflowModel.organization_id == subquery.c.organization_id)
& (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
& (WorkflowModel.version == subquery.c.max_version),
main_query = (
select(WorkflowModel)
.join(
subquery,
(WorkflowModel.organization_id == subquery.c.organization_id)
& (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
& (WorkflowModel.version == subquery.c.max_version),
)
.outerjoin(
FolderModel,
(WorkflowModel.folder_id == FolderModel.folder_id)
& (FolderModel.organization_id == WorkflowModel.organization_id),
)
)
if only_saved_tasks:
main_query = main_query.where(WorkflowModel.is_saved_task.is_(True))
@@ -1647,9 +1659,12 @@ class AgentDB:
main_query = main_query.where(WorkflowModel.is_saved_task.is_(False))
if statuses:
main_query = main_query.where(WorkflowModel.status.in_(statuses))
if folder_id:
main_query = main_query.where(WorkflowModel.folder_id == folder_id)
if search_key:
search_like = f"%{search_key}%"
title_like = WorkflowModel.title.ilike(search_like)
folder_title_like = FolderModel.title.ilike(search_like)
parameter_filters = [
# WorkflowParameterModel
@@ -1771,7 +1786,7 @@ class AgentDB:
)
),
]
main_query = main_query.where(or_(title_like, or_(*parameter_filters)))
main_query = main_query.where(or_(title_like, folder_title_like, or_(*parameter_filters)))
main_query = (
main_query.order_by(WorkflowModel.created_at.desc()).limit(page_size).offset(db_page * page_size)
)
@@ -1791,6 +1806,8 @@ class AgentDB:
version: int | None = None,
run_with: str | None = None,
cache_key: str | None = None,
status: str | None = None,
import_error: str | None = None,
) -> Workflow:
try:
async with self.Session() as session:
@@ -1812,6 +1829,10 @@ class AgentDB:
workflow.run_with = run_with
if cache_key is not None:
workflow.cache_key = cache_key
if status is not None:
workflow.status = status
if import_error is not None:
workflow.import_error = import_error
await session.commit()
await session.refresh(workflow)
return convert_to_workflow(workflow, self.debug_enabled)
@@ -1846,6 +1867,372 @@ class AgentDB:
await session.execute(update_deleted_at_query)
await session.commit()
async def create_folder(
    self,
    organization_id: str,
    title: str,
    description: str | None = None,
) -> FolderModel:
    """Create a new folder.

    The folder_id and timestamps are filled in by the model's column
    defaults; the row is refreshed after commit so the caller sees them.
    Raises SQLAlchemyError (re-raised after logging) on DB failure.
    """
    try:
        async with self.Session() as session:
            folder = FolderModel(
                organization_id=organization_id,
                title=title,
                description=description,
            )
            session.add(folder)
            await session.commit()
            # Refresh to populate generated defaults (id, created/modified).
            await session.refresh(folder)
            return folder
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in create_folder", exc_info=True)
        raise
async def get_folders(
    self,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
    search_query: str | None = None,
) -> list[FolderModel]:
    """Return one page of live (non-deleted) folders for an organization.

    Results are ordered by most recently modified first. When
    `search_query` is provided it is matched case-insensitively against
    both the folder title and description.
    """
    try:
        async with self.Session() as session:
            # Collect all predicates first, then build a single statement.
            conditions = [
                FolderModel.organization_id == organization_id,
                FolderModel.deleted_at.is_(None),
            ]
            if search_query:
                search_pattern = f"%{search_query}%"
                conditions.append(
                    or_(
                        FolderModel.title.ilike(search_pattern),
                        FolderModel.description.ilike(search_pattern),
                    )
                )
            query = (
                select(FolderModel)
                .where(*conditions)
                .order_by(FolderModel.modified_at.desc())
                .offset((page - 1) * page_size)
                .limit(page_size)
            )
            rows = await session.execute(query)
            return list(rows.scalars().all())
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folders", exc_info=True)
        raise
async def get_folder(
    self,
    folder_id: str,
    organization_id: str,
) -> FolderModel | None:
    """Get a folder by ID.

    Scoped to the organization and excludes soft-deleted rows.
    Returns None when no matching live folder exists.
    """
    try:
        async with self.Session() as session:
            stmt = (
                select(FolderModel)
                .filter_by(folder_id=folder_id, organization_id=organization_id)
                .filter(FolderModel.deleted_at.is_(None))
            )
            result = await session.execute(stmt)
            return result.scalar_one_or_none()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder", exc_info=True)
        raise
async def update_folder(
    self,
    folder_id: str,
    organization_id: str,
    title: str | None = None,
    description: str | None = None,
) -> FolderModel | None:
    """Apply a partial update to a folder's title and/or description.

    Fields passed as None are left untouched, so a description cannot be
    cleared through this method. Returns the refreshed folder, or None
    when no live folder matches the (folder_id, organization_id) pair.
    """
    try:
        async with self.Session() as session:
            lookup = (
                select(FolderModel)
                .where(FolderModel.folder_id == folder_id)
                .where(FolderModel.organization_id == organization_id)
                .where(FolderModel.deleted_at.is_(None))
            )
            folder = (await session.execute(lookup)).scalar_one_or_none()
            if folder is None:
                return None
            if title is not None:
                folder.title = title
            if description is not None:
                folder.description = description
            # Touch modified_at explicitly so the folder surfaces first in
            # recency-ordered listings even when nothing else changed.
            folder.modified_at = datetime.utcnow()
            await session.commit()
            await session.refresh(folder)
            return folder
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in update_folder", exc_info=True)
        raise
async def get_workflow_permanent_ids_in_folder(
    self,
    folder_id: str,
    organization_id: str,
) -> list[str]:
    """Get workflow permanent IDs (latest versions only) in a folder.

    Folder membership is decided by the *latest* non-deleted version of
    each workflow: a workflow counts as "in" the folder only if its
    highest version row carries this folder_id.
    """
    try:
        async with self.Session() as session:
            # Subquery to get the latest version for each workflow
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )
            # Get workflow_permanent_ids where the latest version is in this folder
            stmt = (
                select(WorkflowModel.workflow_permanent_id)
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .where(WorkflowModel.folder_id == folder_id)
            )
            result = await session.execute(stmt)
            return list(result.scalars().all())
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_workflow_permanent_ids_in_folder", exc_info=True)
        raise
async def soft_delete_folder(
    self,
    folder_id: str,
    organization_id: str,
    delete_workflows: bool = False,
) -> bool:
    """Soft delete a folder. Optionally delete all workflows in the folder.

    Returns True when the folder existed and was marked deleted, False
    when no live folder matched. When `delete_workflows` is True, every
    workflow whose *latest* version is in this folder is soft-deleted
    (all versions of those workflows); otherwise the workflows are kept
    and merely detached (folder_id cleared). All updates and the folder
    deletion share one commit, so the operation is atomic.
    """
    try:
        async with self.Session() as session:
            # Check if folder exists (and is not already soft-deleted)
            folder_stmt = (
                select(FolderModel)
                .filter_by(folder_id=folder_id, organization_id=organization_id)
                .filter(FolderModel.deleted_at.is_(None))
            )
            folder_result = await session.execute(folder_stmt)
            folder = folder_result.scalar_one_or_none()
            if not folder:
                return False
            # If delete_workflows is True, delete all workflows in the folder
            if delete_workflows:
                # Get workflow permanent IDs in the folder (inline logic;
                # mirrors get_workflow_permanent_ids_in_folder but runs
                # inside this session/transaction).
                subquery = (
                    select(
                        WorkflowModel.organization_id,
                        WorkflowModel.workflow_permanent_id,
                        func.max(WorkflowModel.version).label("max_version"),
                    )
                    .where(WorkflowModel.organization_id == organization_id)
                    .where(WorkflowModel.deleted_at.is_(None))
                    .group_by(
                        WorkflowModel.organization_id,
                        WorkflowModel.workflow_permanent_id,
                    )
                    .subquery()
                )
                workflow_permanent_ids_stmt = (
                    select(WorkflowModel.workflow_permanent_id)
                    .join(
                        subquery,
                        (WorkflowModel.organization_id == subquery.c.organization_id)
                        & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                        & (WorkflowModel.version == subquery.c.max_version),
                    )
                    .where(WorkflowModel.folder_id == folder_id)
                )
                result = await session.execute(workflow_permanent_ids_stmt)
                workflow_permanent_ids = list(result.scalars().all())
                # Soft delete all workflows with these permanent IDs in a single
                # bulk update (covers every version, not just the latest).
                if workflow_permanent_ids:
                    update_workflows_query = (
                        update(WorkflowModel)
                        .where(WorkflowModel.workflow_permanent_id.in_(workflow_permanent_ids))
                        .where(WorkflowModel.organization_id == organization_id)
                        .where(WorkflowModel.deleted_at.is_(None))
                        .values(deleted_at=datetime.utcnow())
                    )
                    await session.execute(update_workflows_query)
            else:
                # Just remove folder_id from all workflows in this folder
                update_workflows_query = (
                    update(WorkflowModel)
                    .where(WorkflowModel.folder_id == folder_id)
                    .where(WorkflowModel.organization_id == organization_id)
                    .values(folder_id=None, modified_at=datetime.utcnow())
                )
                await session.execute(update_workflows_query)
            # Soft delete the folder itself; single commit keeps workflow
            # updates and the folder deletion consistent.
            folder.deleted_at = datetime.utcnow()
            await session.commit()
            return True
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in soft_delete_folder", exc_info=True)
        raise
async def get_folder_workflow_count(
    self,
    folder_id: str,
    organization_id: str,
) -> int:
    """Get the count of workflows (latest versions only) in a folder.

    A workflow is counted only when its latest non-deleted version
    carries this folder_id.
    """
    try:
        async with self.Session() as session:
            # Subquery to get the latest version for each workflow (same pattern as get_workflows_by_organization_id)
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )
            # Count workflows where the latest version is in this folder
            stmt = (
                select(func.count(WorkflowModel.workflow_permanent_id))
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .where(WorkflowModel.folder_id == folder_id)
            )
            result = await session.execute(stmt)
            return result.scalar_one()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder_workflow_count", exc_info=True)
        raise
async def get_folder_workflow_counts_batch(
    self,
    folder_ids: list[str],
    organization_id: str,
) -> dict[str, int]:
    """Get workflow counts for multiple folders in a single query.

    Only the latest non-deleted version of each workflow is counted.
    Folders with zero workflows are absent from the returned mapping;
    callers should default missing keys to 0.
    """
    # Guard: `.in_([])` renders an always-false clause and emits a
    # SQLAlchemy warning — skip the round trip entirely for empty input.
    if not folder_ids:
        return {}
    try:
        async with self.Session() as session:
            # Subquery to get the latest version for each workflow
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )
            # Count workflows grouped by folder_id
            stmt = (
                select(
                    WorkflowModel.folder_id,
                    func.count(WorkflowModel.workflow_permanent_id).label("count"),
                )
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .where(WorkflowModel.folder_id.in_(folder_ids))
                .group_by(WorkflowModel.folder_id)
            )
            result = await session.execute(stmt)
            rows = result.all()
            # Convert to dict; folders with no workflows are simply absent.
            return {row.folder_id: row.count for row in rows}
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder_workflow_counts_batch", exc_info=True)
        raise
async def update_workflow_folder(
    self,
    workflow_permanent_id: str,
    organization_id: str,
    folder_id: str | None,
) -> Workflow | None:
    """Update folder assignment for the latest version of a workflow.

    Passing folder_id=None detaches the workflow from any folder.
    Returns the updated Workflow, or None when the workflow (or its
    latest-version row) cannot be found. Raises ValueError when a
    non-None folder_id does not reference a live folder in the same
    organization.
    """
    try:
        # Get the latest version of the workflow
        latest_workflow = await self.get_workflow_by_permanent_id(
            workflow_permanent_id=workflow_permanent_id,
            organization_id=organization_id,
        )
        if not latest_workflow:
            return None
        async with self.Session() as session:
            # Validate folder exists in-org if folder_id is provided
            if folder_id:
                stmt = (
                    select(FolderModel.folder_id)
                    .where(FolderModel.folder_id == folder_id)
                    .where(FolderModel.organization_id == organization_id)
                    .where(FolderModel.deleted_at.is_(None))
                )
                if (await session.scalar(stmt)) is None:
                    raise ValueError(f"Folder {folder_id} not found")
            workflow_model = await session.get(WorkflowModel, latest_workflow.workflow_id)
            if workflow_model:
                workflow_model.folder_id = folder_id
                workflow_model.modified_at = datetime.utcnow()
                # Update folder's modified_at in the same transaction so the
                # destination folder surfaces first in recency-sorted listings.
                if folder_id:
                    folder_model = await session.get(FolderModel, folder_id)
                    if folder_model:
                        folder_model.modified_at = datetime.utcnow()
                await session.commit()
                await session.refresh(workflow_model)
                return convert_to_workflow(workflow_model, self.debug_enabled)
            return None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in update_workflow_folder", exc_info=True)
        raise
async def create_workflow_run(
self,
workflow_permanent_id: str,

View File

@@ -39,6 +39,7 @@ CREDENTIAL_AZURE_VAULT_PARAMETER_PREFIX = "azcp"
CREDENTIAL_PARAMETER_PREFIX = "cp"
CREDENTIAL_PREFIX = "cred"
DEBUG_SESSION_PREFIX = "ds"
FOLDER_PREFIX = "fld"
BROWSER_PROFILE_PREFIX = "bp"
ORGANIZATION_BITWARDEN_COLLECTION_PREFIX = "obc"
TASK_V2_ID = "tsk_v2"
@@ -220,6 +221,11 @@ def generate_debug_session_id() -> str:
return f"{DEBUG_SESSION_PREFIX}_{int_id}"
def generate_folder_id() -> str:
    # Folder primary keys look like "fld_<random int id>".
    int_id = generate_id()
    return f"{FOLDER_PREFIX}_{int_id}"
def generate_organization_bitwarden_collection_id() -> str:
    # Org Bitwarden collection keys look like "obc_<random int id>".
    int_id = generate_id()
    return f"{ORGANIZATION_BITWARDEN_COLLECTION_PREFIX}_{int_id}"

View File

@@ -32,6 +32,7 @@ from skyvern.forge.sdk.db.id import (
generate_credential_id,
generate_credential_parameter_id,
generate_debug_session_id,
generate_folder_id,
generate_onepassword_credential_parameter_id,
generate_org_id,
generate_organization_auth_token_id,
@@ -218,6 +219,28 @@ class ArtifactModel(Base):
)
class FolderModel(Base):
    """Organization-scoped folder used to group workflows; soft-deleted via deleted_at."""

    __tablename__ = "folders"
    __table_args__ = (
        # Supports org-scoped listing, and (org, title) search/sort.
        Index("folder_organization_id_idx", "organization_id"),
        Index("folder_organization_title_idx", "organization_id", "title"),
    )

    folder_id = Column(String, primary_key=True, default=generate_folder_id)
    # Folder rows are removed when the owning organization is deleted.
    organization_id = Column(String, ForeignKey("organizations.organization_id", ondelete="CASCADE"), nullable=False)
    title = Column(String, nullable=False)
    description = Column(String, nullable=True)
    created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
    # Bumped automatically on update; some code paths also touch it manually
    # so the folder surfaces first in recency-ordered listings.
    modified_at = Column(
        DateTime,
        default=datetime.datetime.utcnow,
        onupdate=datetime.datetime.utcnow,
        nullable=False,
    )
    # Non-null marks the row as soft-deleted; queries filter on IS NULL.
    deleted_at = Column(DateTime, nullable=True)
class WorkflowModel(Base):
__tablename__ = "workflows"
__table_args__ = (
@@ -230,6 +253,7 @@ class WorkflowModel(Base):
Index("permanent_id_version_idx", "workflow_permanent_id", "version"),
Index("organization_id_title_idx", "organization_id", "title"),
Index("workflow_oid_status_idx", "organization_id", "status"),
Index("workflow_folder_id_idx", "folder_id"),
)
workflow_id = Column(String, primary_key=True, default=generate_workflow_id)
@@ -252,6 +276,8 @@ class WorkflowModel(Base):
cache_key = Column(String, nullable=True)
run_sequentially = Column(Boolean, nullable=True)
sequential_key = Column(String, nullable=True)
folder_id = Column(String, ForeignKey("folders.folder_id", ondelete="SET NULL"), nullable=True)
import_error = Column(String, nullable=True) # Error message if import failed
created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
modified_at = Column(

View File

@@ -288,6 +288,8 @@ def convert_to_workflow(workflow_model: WorkflowModel, debug_enabled: bool = Fal
cache_key=workflow_model.cache_key,
run_sequentially=workflow_model.run_sequentially,
sequential_key=workflow_model.sequential_key,
folder_id=workflow_model.folder_id,
import_error=workflow_model.import_error,
)

View File

@@ -1,9 +1,22 @@
import asyncio
from enum import Enum
from typing import Annotated, Any
import structlog
import yaml
from fastapi import BackgroundTasks, Depends, Header, HTTPException, Path, Query, Request, Response, UploadFile, status
from fastapi import (
BackgroundTasks,
Body,
Depends,
Header,
HTTPException,
Path,
Query,
Request,
Response,
UploadFile,
)
from fastapi import status as http_status
from fastapi.responses import ORJSONResponse
from skyvern import analytics
@@ -84,6 +97,7 @@ from skyvern.forge.sdk.workflow.models.workflow import (
WorkflowRunWithWorkflowResponse,
)
from skyvern.schemas.artifacts import EntityType, entity_type_to_param
from skyvern.schemas.folders import Folder, FolderCreate, FolderUpdate, UpdateWorkflowFolderRequest
from skyvern.schemas.runs import (
CUA_ENGINES,
BlockRunRequest,
@@ -410,7 +424,7 @@ async def get_run(
run_response = await run_service.get_run_response(run_id, organization_id=current_org.organization_id)
if not run_response:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Task run not found {run_id}",
)
return run_response
@@ -604,6 +618,22 @@ async def create_workflow_from_prompt(
return workflow.model_dump(by_alias=True)
async def _validate_file_size(file: UploadFile) -> UploadFile:
    # FastAPI dependency: rejects uploads larger than MAX_UPLOAD_FILE_SIZE
    # (413), returning the rewound file unchanged so downstream readers
    # start from the beginning.
    try:
        file.file.seek(0, 2)  # Move the pointer to the end of the file
        size = file.file.tell()  # Get the current position of the pointer, which represents the file size
        file.file.seek(0)  # Reset the pointer back to the beginning
    except Exception as e:
        # Underlying file object may not be seekable — treat as server error.
        raise HTTPException(status_code=500, detail="Could not determine file size.") from e
    if size > app.SETTINGS_MANAGER.MAX_UPLOAD_FILE_SIZE:
        raise HTTPException(
            status_code=413,
            detail=f"File size exceeds the maximum allowed size ({app.SETTINGS_MANAGER.MAX_UPLOAD_FILE_SIZE / 1024 / 1024} MB)",
        )
    return file
@legacy_base_router.post(
"/workflows/import-pdf",
response_model=dict[str, Any],
@@ -636,13 +666,97 @@ async def create_workflow_from_prompt(
include_in_schema=False,
)
async def import_workflow_from_pdf(
    background_tasks: BackgroundTasks,
    file: UploadFile = Depends(_validate_file_size),
    current_org: Organization = Depends(org_auth_service.get_current_org),
) -> dict[str, Any]:
    """Import a workflow from a PDF file containing Standard Operating Procedures."""
    analytics.capture("skyvern-oss-workflow-import-pdf")

    # Read file and validate early (before creating import record)
    if not file.filename or not file.filename.lower().endswith(".pdf"):
        raise HTTPException(status_code=400, detail="Only PDF files are supported.")
    try:
        file_contents = await file.read()
        file_name = file.filename
    finally:
        # Release underlying SpooledTemporaryFile ASAP
        await file.close()

    # Extract text in executor to avoid blocking event loop (1-2 seconds)
    try:
        sop_text = await asyncio.to_thread(
            pdf_import_service.extract_text_from_pdf,
            file_contents,
            file_name,
        )
    except HTTPException:
        # Re-raise validation errors immediately
        raise

    # Validation passed! Create empty workflow v1 with status='importing'
    # so the UI can show a placeholder row while the slow LLM step runs.
    empty_workflow = await app.DATABASE.create_workflow(
        title=f"Importing {file_name}",
        workflow_definition={"parameters": [], "blocks": []},
        organization_id=current_org.organization_id,
        status=WorkflowStatus.importing,
    )

    # Process PDF import in background (LLM call is the slow part)
    async def process_pdf_import() -> None:
        # Runs after the HTTP response is sent; must never raise out of the task.
        try:
            # Create workflow from extracted text (LLM processing)
            result = await pdf_import_service.create_workflow_from_sop_text(sop_text, current_org)
            # Create v2 with real content
            await app.WORKFLOW_SERVICE.create_workflow_from_request(
                organization=current_org,
                request=WorkflowCreateYAMLRequest.model_validate(result),
                workflow_permanent_id=empty_workflow.workflow_permanent_id,
            )
            # Update v1 status to published (v1 won't show in list since v2 is latest version)
            await app.DATABASE.update_workflow(
                workflow_id=empty_workflow.workflow_id,
                organization_id=current_org.organization_id,
                status=WorkflowStatus.published,
            )
            LOG.info(
                "Workflow import completed",
                workflow_permanent_id=empty_workflow.workflow_permanent_id,
                organization_id=current_org.organization_id,
            )
        except Exception as e:
            # Log full error server-side for debugging
            LOG.exception(
                "Workflow import failed",
                workflow_permanent_id=empty_workflow.workflow_permanent_id,
                error=str(e),
                organization_id=current_org.organization_id,
            )
            # Provide sanitized user-facing error message (don't expose internal details/PII)
            sanitized_error = "Import failed. Please verify the PDF content and try again."
            # Mark v1 as import_failed with sanitized error
            await app.DATABASE.update_workflow(
                workflow_id=empty_workflow.workflow_id,
                organization_id=current_org.organization_id,
                status=WorkflowStatus.import_failed,
                import_error=sanitized_error,
            )

    background_tasks.add_task(process_pdf_import)

    # Respond immediately; clients poll the workflow by permanent id to
    # observe the importing -> published / import_failed transition.
    return {
        "workflow_permanent_id": empty_workflow.workflow_permanent_id,
        "status": "importing",
        "file_name": file.filename,
        "organization_id": current_org.organization_id,
        "created_at": empty_workflow.created_at.isoformat(),
    }
@legacy_base_router.put(
@@ -817,6 +931,251 @@ async def delete_workflow(
await app.WORKFLOW_SERVICE.delete_workflow_by_permanent_id(workflow_id, current_org.organization_id)
################# Folder Endpoints #################
@legacy_base_router.post("/folders", response_model=Folder, tags=["agent"])
@legacy_base_router.post("/folders/", response_model=Folder, include_in_schema=False)
@base_router.post(
    "/folders",
    response_model=Folder,
    tags=["Workflows"],
    description="Create a new folder to organize workflows",
    summary="Create folder",
    responses={
        200: {"description": "Successfully created folder"},
        400: {"description": "Invalid request"},
    },
)
@base_router.post("/folders/", response_model=Folder, include_in_schema=False)
async def create_folder(
    data: FolderCreate,
    current_org: Organization = Depends(org_auth_service.get_current_org),
) -> Folder:
    # Create a folder scoped to the authenticated organization.
    analytics.capture("skyvern-oss-folder-create")
    folder_model = await app.DATABASE.create_folder(
        organization_id=current_org.organization_id,
        title=data.title,
        description=data.description,
    )
    # A brand-new folder always has 0 workflows; the count is queried
    # anyway so the response construction matches the other folder routes.
    workflow_count = await app.DATABASE.get_folder_workflow_count(
        folder_id=folder_model.folder_id,
        organization_id=current_org.organization_id,
    )
    # Map the ORM row onto the public Folder schema.
    return Folder(
        folder_id=folder_model.folder_id,
        organization_id=folder_model.organization_id,
        title=folder_model.title,
        description=folder_model.description,
        workflow_count=workflow_count,
        created_at=folder_model.created_at,
        modified_at=folder_model.modified_at,
    )
@legacy_base_router.get("/folders/{folder_id}", response_model=Folder, tags=["agent"])
@legacy_base_router.get("/folders/{folder_id}/", response_model=Folder, include_in_schema=False)
@base_router.get(
    "/folders/{folder_id}",
    response_model=Folder,
    tags=["Workflows"],
    description="Get a specific folder by ID",
    summary="Get folder",
    responses={
        200: {"description": "Successfully retrieved folder"},
        404: {"description": "Folder not found"},
    },
)
@base_router.get("/folders/{folder_id}/", response_model=Folder, include_in_schema=False)
async def get_folder(
    folder_id: str = Path(..., description="Folder ID", examples=["fld_123"]),
    current_org: Organization = Depends(org_auth_service.get_current_org),
) -> Folder:
    # Look up the folder scoped to the caller's organization; 404 when absent.
    org_id = current_org.organization_id
    folder_row = await app.DATABASE.get_folder(
        folder_id=folder_id,
        organization_id=org_id,
    )
    if folder_row is None:
        raise HTTPException(status_code=http_status.HTTP_404_NOT_FOUND, detail=f"Folder {folder_id} not found")
    # Attach the live workflow count to the API representation.
    count = await app.DATABASE.get_folder_workflow_count(
        folder_id=folder_row.folder_id,
        organization_id=org_id,
    )
    return Folder(
        folder_id=folder_row.folder_id,
        organization_id=folder_row.organization_id,
        title=folder_row.title,
        description=folder_row.description,
        workflow_count=count,
        created_at=folder_row.created_at,
        modified_at=folder_row.modified_at,
    )
@legacy_base_router.get("/folders", response_model=list[Folder], tags=["agent"])
@legacy_base_router.get("/folders/", response_model=list[Folder], include_in_schema=False)
@base_router.get(
    "/folders",
    response_model=list[Folder],
    tags=["Workflows"],
    description="Get all folders for the organization",
    summary="Get folders",
    responses={
        200: {"description": "Successfully retrieved folders"},
    },
)
@base_router.get("/folders/", response_model=list[Folder], include_in_schema=False)
async def get_folders(
    page: int = Query(1, ge=1, description="Page number"),
    page_size: int = Query(100, ge=1, le=500, description="Number of folders per page"),
    search: str | None = Query(None, description="Search folders by title or description"),
    current_org: Organization = Depends(org_auth_service.get_current_org),
) -> list[Folder]:
    # Paginated, optionally searched folder listing for the caller's org.
    folders = await app.DATABASE.get_folders(
        organization_id=current_org.organization_id,
        page=page,
        page_size=page_size,
        search_query=search,
    )
    # Get workflow counts for all folders in a single query (avoids one
    # count query per folder).
    if folders:
        folder_ids = [folder.folder_id for folder in folders]
        workflow_counts = await app.DATABASE.get_folder_workflow_counts_batch(
            folder_ids=folder_ids,
            organization_id=current_org.organization_id,
        )
    else:
        workflow_counts = {}
    # Build result with workflow counts (folders absent from the batch
    # mapping simply have zero workflows).
    result = []
    for folder in folders:
        result.append(
            Folder(
                folder_id=folder.folder_id,
                organization_id=folder.organization_id,
                title=folder.title,
                description=folder.description,
                workflow_count=workflow_counts.get(folder.folder_id, 0),
                created_at=folder.created_at,
                modified_at=folder.modified_at,
            )
        )
    return result
@legacy_base_router.put("/folders/{folder_id}", response_model=Folder, tags=["agent"])
@legacy_base_router.put("/folders/{folder_id}/", response_model=Folder, include_in_schema=False)
@base_router.put(
    "/folders/{folder_id}",
    response_model=Folder,
    tags=["Workflows"],
    description="Update a folder's title or description",
    summary="Update folder",
    responses={
        200: {"description": "Successfully updated folder"},
        404: {"description": "Folder not found"},
    },
)
@base_router.put("/folders/{folder_id}/", response_model=Folder, include_in_schema=False)
async def update_folder(
    folder_id: str = Path(..., description="Folder ID", examples=["fld_123"]),
    data: FolderUpdate = Body(...),
    current_org: Organization = Depends(org_auth_service.get_current_org),
) -> Folder:
    # Partial update: fields left as None in FolderUpdate are not changed,
    # so this endpoint cannot clear a description back to null.
    folder = await app.DATABASE.update_folder(
        folder_id=folder_id,
        organization_id=current_org.organization_id,
        title=data.title,
        description=data.description,
    )
    if not folder:
        raise HTTPException(status_code=http_status.HTTP_404_NOT_FOUND, detail=f"Folder {folder_id} not found")
    workflow_count = await app.DATABASE.get_folder_workflow_count(
        folder_id=folder.folder_id,
        organization_id=current_org.organization_id,
    )
    return Folder(
        folder_id=folder.folder_id,
        organization_id=folder.organization_id,
        title=folder.title,
        description=folder.description,
        workflow_count=workflow_count,
        created_at=folder.created_at,
        modified_at=folder.modified_at,
    )
@legacy_base_router.delete("/folders/{folder_id}", tags=["agent"])
@legacy_base_router.delete("/folders/{folder_id}/", include_in_schema=False)
@base_router.delete(
    "/folders/{folder_id}",
    tags=["Workflows"],
    description="Delete a folder. Optionally delete all workflows in the folder.",
    summary="Delete folder",
    responses={
        200: {"description": "Successfully deleted folder"},
        404: {"description": "Folder not found"},
    },
)
@base_router.delete("/folders/{folder_id}/", include_in_schema=False)
async def delete_folder(
    folder_id: str = Path(..., description="Folder ID", examples=["fld_123"]),
    delete_workflows: bool = Query(False, description="If true, also delete all workflows in this folder"),
    current_org: Organization = Depends(org_auth_service.get_current_org),
) -> dict:
    # Soft-deletes the folder. When delete_workflows is false the contained
    # workflows are kept and merely detached from the folder.
    analytics.capture("skyvern-oss-folder-delete")
    success = await app.DATABASE.soft_delete_folder(
        folder_id=folder_id,
        organization_id=current_org.organization_id,
        delete_workflows=delete_workflows,
    )
    if not success:
        raise HTTPException(status_code=http_status.HTTP_404_NOT_FOUND, detail=f"Folder {folder_id} not found")
    return {"status": "deleted", "folder_id": folder_id, "workflows_deleted": delete_workflows}
@legacy_base_router.put("/workflows/{workflow_permanent_id}/folder", response_model=Workflow, tags=["agent"])
@legacy_base_router.put("/workflows/{workflow_permanent_id}/folder/", response_model=Workflow, include_in_schema=False)
@base_router.put(
    "/workflows/{workflow_permanent_id}/folder",
    response_model=Workflow,
    tags=["Workflows"],
    description="Update a workflow's folder assignment for the latest version",
    summary="Update workflow folder",
    responses={
        200: {"description": "Successfully updated workflow folder"},
        404: {"description": "Workflow not found"},
        400: {"description": "Folder not found"},
    },
)
@base_router.put("/workflows/{workflow_permanent_id}/folder/", response_model=Workflow, include_in_schema=False)
async def update_workflow_folder(
    workflow_permanent_id: str = Path(..., description="Workflow permanent ID", examples=["wpid_123"]),
    data: UpdateWorkflowFolderRequest = Body(...),
    current_org: Organization = Depends(org_auth_service.get_current_org),
) -> Workflow:
    # Moves (or detaches, when folder_id is null) the latest version of the
    # workflow. A ValueError from the DB layer means the target folder does
    # not exist in this org -> surfaced as 400; missing workflow -> 404.
    try:
        workflow = await app.DATABASE.update_workflow_folder(
            workflow_permanent_id=workflow_permanent_id,
            organization_id=current_org.organization_id,
            folder_id=data.folder_id,
        )
        if not workflow:
            raise HTTPException(
                status_code=http_status.HTTP_404_NOT_FOUND, detail=f"Workflow {workflow_permanent_id} not found"
            )
        return workflow
    except ValueError as e:
        raise HTTPException(status_code=http_status.HTTP_400_BAD_REQUEST, detail=str(e)) from e
@legacy_base_router.post(
"/utilities/curl-to-http",
tags=["Utilities"],
@@ -896,7 +1255,7 @@ async def get_artifact(
)
if not artifact:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Artifact not found {artifact_id}",
)
if settings.ENV != "local" or settings.GENERATE_PRESIGNED_URLS:
@@ -1027,7 +1386,7 @@ async def get_run_timeline(
run_response = await run_service.get_run_response(run_id, organization_id=current_org.organization_id)
if not run_response:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Run not found {run_id}",
)
@@ -1040,13 +1399,13 @@ async def get_run_timeline(
task_v2 = await app.DATABASE.get_task_v2(task_v2_id=run_id, organization_id=current_org.organization_id)
if not task_v2:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Task v2 not found {run_id}",
)
if not task_v2.workflow_run_id:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
status_code=http_status.HTTP_400_BAD_REQUEST,
detail=f"Task v2 {run_id} has no associated workflow run",
)
@@ -1054,7 +1413,7 @@ async def get_run_timeline(
# Timeline not available for other run types
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
status_code=http_status.HTTP_400_BAD_REQUEST,
detail=f"Timeline not available for run type {run_response.run_type}",
)
@@ -1149,7 +1508,7 @@ async def webhook(
payload=payload,
)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
status_code=http_status.HTTP_400_BAD_REQUEST,
detail="Missing webhook signature or timestamp",
)
@@ -1270,7 +1629,7 @@ async def cancel_task(
task_obj = await app.DATABASE.get_task(task_id, organization_id=current_org.organization_id)
if not task_obj:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Task not found {task_id}",
)
task = await app.agent.update_task(task_obj, status=TaskStatus.canceled)
@@ -1288,7 +1647,7 @@ async def _cancel_workflow_run(workflow_run_id: str, organization_id: str, x_api
if not workflow_run:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Workflow run not found {workflow_run_id}",
)
@@ -1324,7 +1683,7 @@ async def _continue_workflow_run(workflow_run_id: str, organization_id: str) ->
if not workflow_run:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Workflow run not found {workflow_run_id}",
)
@@ -1398,7 +1757,7 @@ async def retry_webhook(
task_obj = await app.DATABASE.get_task(task_id, organization_id=current_org.organization_id)
if not task_obj:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Task not found {task_id}",
)
@@ -1452,7 +1811,7 @@ async def get_tasks(
analytics.capture("skyvern-oss-agent-tasks-get")
if only_standalone_tasks and workflow_run_id:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
status_code=http_status.HTTP_400_BAD_REQUEST,
detail="only_standalone_tasks and workflow_run_id cannot be used together",
)
tasks = await app.DATABASE.get_tasks(
@@ -1560,7 +1919,7 @@ async def get_artifacts(
if entity_type not in entity_type_to_param:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
status_code=http_status.HTTP_400_BAD_REQUEST,
detail=f"Invalid entity_type: {entity_type}",
)
@@ -1828,7 +2187,7 @@ async def get_workflow_and_run_from_workflow_run_id(
if not workflow:
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
status_code=http_status.HTTP_404_NOT_FOUND,
detail=f"Workflow run not found {workflow_run_id}",
)
@@ -1926,9 +2285,11 @@ async def get_workflows(
only_workflows: bool = Query(False),
search_key: str | None = Query(
None,
description="Unified search across workflow title and parameter metadata (key, description, default_value).",
description="Unified search across workflow title, folder name, and parameter metadata (key, description, default_value).",
),
title: str = Query("", deprecated=True, description="Deprecated: use search_key instead."),
folder_id: str | None = Query(None, description="Filter workflows by folder ID"),
status: Annotated[list[WorkflowStatus] | None, Query()] = None,
current_org: Organization = Depends(org_auth_service.get_current_org),
template: bool = Query(False),
) -> list[Workflow]:
@@ -1936,8 +2297,8 @@ async def get_workflows(
Get all workflows with the latest version for the organization.
Search semantics:
- If `search_key` is provided, its value is used as a unified search term for both
`workflows.title` and workflow parameter metadata (key, description, and default_value for
- If `search_key` is provided, its value is used as a unified search term for
`workflows.title`, `folders.title`, and workflow parameter metadata (key, description, and default_value for
`WorkflowParameterModel`).
- Falls back to deprecated `title` (title-only search) if `search_key` is not provided.
- Parameter metadata search excludes soft-deleted parameter rows across all parameter tables.
@@ -1947,6 +2308,9 @@ async def get_workflows(
# Determine the effective search term: prioritize search_key, fallback to title
effective_search = search_key or (title if title else None)
# Default to published and draft if no status filter provided
effective_statuses = status if status else [WorkflowStatus.published, WorkflowStatus.draft]
if template:
global_workflows_permanent_ids = await app.STORAGE.retrieve_global_workflows()
if not global_workflows_permanent_ids:
@@ -1956,13 +2320,13 @@ async def get_workflows(
page=page,
page_size=page_size,
search_key=effective_search or "",
statuses=[WorkflowStatus.published, WorkflowStatus.draft],
statuses=effective_statuses,
)
return workflows
if only_saved_tasks and only_workflows:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
status_code=http_status.HTTP_400_BAD_REQUEST,
detail="only_saved_tasks and only_workflows cannot be used together",
)
@@ -1973,7 +2337,8 @@ async def get_workflows(
only_saved_tasks=only_saved_tasks,
only_workflows=only_workflows,
search_key=effective_search,
statuses=[WorkflowStatus.published, WorkflowStatus.draft],
folder_id=folder_id,
statuses=effective_statuses,
)
@@ -2182,22 +2547,6 @@ async def get_api_keys(
return GetOrganizationAPIKeysResponse(api_keys=api_keys)
async def _validate_file_size(file: UploadFile) -> UploadFile:
    """Reject uploads that exceed the configured maximum file size.

    The size is measured by seeking the underlying file object to EOF and
    reading the cursor position, then rewinding so downstream consumers can
    read the full content from the start.

    Raises:
        HTTPException: 500 if the size cannot be determined,
            413 if the file is larger than
            ``app.SETTINGS_MANAGER.MAX_UPLOAD_FILE_SIZE``.
    """
    try:
        underlying = file.file
        underlying.seek(0, 2)  # jump to end-of-file to measure total length
        size = underlying.tell()  # cursor position at EOF == file size in bytes
        underlying.seek(0)  # rewind so later readers see the whole file
    except Exception as e:
        raise HTTPException(status_code=500, detail="Could not determine file size.") from e

    if size > app.SETTINGS_MANAGER.MAX_UPLOAD_FILE_SIZE:
        raise HTTPException(
            status_code=413,
            detail=f"File size exceeds the maximum allowed size ({app.SETTINGS_MANAGER.MAX_UPLOAD_FILE_SIZE / 1024 / 1024} MB)",
        )
    return file
@legacy_base_router.post(
"/upload_file",
tags=["server"],

View File

@@ -83,6 +83,8 @@ class Workflow(BaseModel):
cache_key: str | None = None
run_sequentially: bool | None = None
sequential_key: str | None = None
folder_id: str | None = None
import_error: str | None = None
created_at: datetime
modified_at: datetime

View File

@@ -1098,6 +1098,7 @@ class WorkflowService:
ai_fallback: bool | None = None,
run_sequentially: bool = False,
sequential_key: str | None = None,
folder_id: str | None = None,
) -> Workflow:
return await app.DATABASE.create_workflow(
title=title,
@@ -1121,6 +1122,7 @@ class WorkflowService:
ai_fallback=False if ai_fallback is None else ai_fallback,
run_sequentially=run_sequentially,
sequential_key=sequential_key,
folder_id=folder_id,
)
async def create_workflow_from_prompt(
@@ -1378,13 +1380,15 @@ class WorkflowService:
only_saved_tasks: bool = False,
only_workflows: bool = False,
search_key: str | None = None,
folder_id: str | None = None,
statuses: list[WorkflowStatus] | None = None,
) -> list[Workflow]:
"""
Get all workflows with the latest version for the organization.
Args:
search_key: Unified search term for title and parameter metadata (replaces title/parameter).
search_key: Unified search term for title, folder name, and parameter metadata.
folder_id: Filter workflows by folder ID.
"""
return await app.DATABASE.get_workflows_by_organization_id(
organization_id=organization_id,
@@ -1393,6 +1397,7 @@ class WorkflowService:
only_saved_tasks=only_saved_tasks,
only_workflows=only_workflows,
search_key=search_key,
folder_id=folder_id,
statuses=statuses,
)
@@ -2723,6 +2728,7 @@ class WorkflowService:
ai_fallback=request.ai_fallback,
run_sequentially=request.run_sequentially,
sequential_key=request.sequential_key,
folder_id=existing_latest_workflow.folder_id,
)
else:
# NOTE: it's only potential, as it may be immediately deleted!

View File

@@ -0,0 +1,41 @@
from datetime import datetime

from pydantic import BaseModel, ConfigDict, Field
class FolderBase(BaseModel):
    """Shared folder fields (title and optional description) inherited by request/response models."""

    title: str = Field(..., description="Folder title", min_length=1, max_length=255)
    description: str | None = Field(None, description="Folder description")
class FolderCreate(FolderBase):
    """Request model for creating a folder"""

    # Inherits `title` (required) and `description` (optional) from FolderBase;
    # no additional fields are needed at creation time.
class FolderUpdate(BaseModel):
    """Request model for updating a folder"""

    # Both fields are optional so a caller can patch title and/or description independently.
    title: str | None = Field(None, description="Folder title", min_length=1, max_length=255)
    description: str | None = Field(None, description="Folder description")
class Folder(FolderBase):
    """Response model for a folder, including server-assigned identifiers and timestamps."""

    folder_id: str
    organization_id: str
    # Denormalized count for list views; defaults to 0 when not computed.
    workflow_count: int = Field(0, description="Number of workflows in this folder")
    created_at: datetime
    modified_at: datetime

    # `from_attributes` is a Pydantic v2 config key, so this codebase is on v2;
    # the class-based `Config` is deprecated there — use model_config instead.
    # Enables constructing this model from ORM/attribute objects.
    model_config = ConfigDict(from_attributes=True)
class UpdateWorkflowFolderRequest(BaseModel):
    """Request model for updating a workflow's folder assignment"""

    # None is meaningful here: it detaches the workflow from any folder.
    folder_id: str | None = Field(
        None,
        description="Folder ID to assign workflow to. Set to null to remove from folder.",
    )

View File

@@ -14,6 +14,8 @@ class WorkflowStatus(StrEnum):
published = "published"
draft = "draft"
auto_generated = "auto_generated"
importing = "importing"
import_failed = "import_failed"
class BlockType(StrEnum):

View File

@@ -4,11 +4,10 @@ import tempfile
from typing import Any
import structlog
from fastapi import HTTPException, UploadFile
from fastapi import HTTPException
from pypdf import PdfReader
from skyvern.config import settings
from skyvern.forge import app
from skyvern.forge.prompts import prompt_engine
from skyvern.forge.sdk.api.llm.api_handler_factory import LLMAPIHandlerFactory
from skyvern.forge.sdk.schemas.organizations import Organization
@@ -133,156 +132,129 @@ class PDFImportService:
return raw
async def import_workflow_from_pdf(self, file: UploadFile, organization: Organization) -> dict[str, Any]:
LOG.info("Starting PDF import", filename=file.filename, organization_id=organization.organization_id)
if not file.filename.lower().endswith(".pdf"):
raise HTTPException(status_code=400, detail="Only PDF files are supported.")
def extract_text_from_pdf(self, file_contents: bytes, file_name: str) -> str:
"""Extract text from PDF file contents. Raises HTTPException if invalid."""
LOG.info("Extracting text from PDF", filename=file_name)
# Save the uploaded file to a temporary location
LOG.info("Saving PDF to temporary file", filename=file.filename)
with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as temp_file:
temp_file.write(await file.read())
temp_file.write(file_contents)
temp_file_path = temp_file.name
try:
# Extract text from PDF
LOG.info("Extracting text from PDF", filename=file.filename, temp_file=temp_file_path)
reader = PdfReader(temp_file_path)
sop_text = ""
for page_num, page in enumerate(reader.pages, 1):
page_text = page.extract_text()
page_text = page.extract_text() or ""
sop_text += page_text + "\n"
LOG.debug("Extracted text from page", page=page_num, text_length=len(page_text))
LOG.info(
"PDF text extraction complete",
total_text_length=len(sop_text),
organization_id=organization.organization_id,
)
LOG.info("PDF text extraction complete", total_text_length=len(sop_text))
if not sop_text.strip():
raise HTTPException(status_code=400, detail="No readable content found in the PDF.")
# Load and render the prompt template
prompt = prompt_engine.load_prompt(
"build-workflow-from-pdf",
sop_text=sop_text,
return sop_text
except Exception as e:
LOG.warning(
"Failed to read/extract text from PDF",
filename=file_name,
error=str(e),
)
# Use the LLM to convert SOP to workflow
llm_key = settings.LLM_KEY or "gpt-4o-mini"
LOG.info(
"Calling LLM to convert SOP to workflow",
llm_key=llm_key,
prompt_length=len(prompt),
sop_text_length=len(sop_text),
sop_chars_sent=len(sop_text),
organization_id=organization.organization_id,
)
llm_api_handler = LLMAPIHandlerFactory.get_llm_api_handler(llm_key)
response = await llm_api_handler(
prompt=prompt,
prompt_name="sop_to_workflow_conversion",
organization_id=organization.organization_id,
parameters={"max_completion_tokens": 32768}, # Override the default 4096 limit for PDF conversion
)
LOG.info(
"LLM response received",
response_type=type(response),
response_keys=list(response.keys()) if isinstance(response, dict) else None,
organization_id=organization.organization_id,
)
# The LLM API handler automatically parses JSON responses
# The response should be a dict with the workflow structure
if not isinstance(response, dict):
LOG.error(
"LLM returned non-dict response",
response_type=type(response),
response=str(response)[:500],
organization_id=organization.organization_id,
)
raise HTTPException(
status_code=422, detail="LLM returned invalid response format - expected JSON object"
)
# Validate that it has the required structure
if "workflow_definition" not in response:
LOG.error(
"LLM response missing workflow_definition",
response_keys=list(response.keys()),
organization_id=organization.organization_id,
)
raise HTTPException(status_code=422, detail="LLM response missing 'workflow_definition' field")
if "blocks" not in response.get("workflow_definition", {}):
LOG.error(
"LLM workflow_definition missing blocks",
workflow_def_keys=list(response.get("workflow_definition", {}).keys()),
organization_id=organization.organization_id,
)
raise HTTPException(status_code=422, detail="LLM workflow definition missing 'blocks' field")
LOG.info(
"Workflow JSON validated",
title=response.get("title"),
block_count=len(response.get("workflow_definition", {}).get("blocks", [])),
organization_id=organization.organization_id,
)
LOG.info(
"Creating workflow from JSON",
response_keys=list(response.keys()),
organization_id=organization.organization_id,
)
try:
# Sanitize LLM output for Jinja and required fields before validation
response = self._sanitize_workflow_json(response)
workflow_create_request = WorkflowCreateYAMLRequest.model_validate(response)
except Exception as e:
LOG.error(
"Failed to validate workflow request",
error=str(e),
error_type=type(e).__name__,
response_sample=str(response)[:1000],
organization_id=organization.organization_id,
exc_info=True,
)
raise HTTPException(status_code=422, detail=f"Failed to validate workflow structure: {str(e)}")
try:
workflow = await app.WORKFLOW_SERVICE.create_workflow_from_request(
organization=organization,
request=workflow_create_request,
)
except Exception as e:
LOG.error(
"Failed to create workflow",
error=str(e),
error_type=type(e).__name__,
organization_id=organization.organization_id,
exc_info=True,
)
raise HTTPException(status_code=422, detail=f"Failed to create workflow: {str(e)}")
workflow_dict = workflow.model_dump(by_alias=True)
LOG.info(
"PDF import completed successfully",
workflow_id=workflow.workflow_permanent_id,
workflow_permanent_id_in_dict=workflow_dict.get("workflow_permanent_id"),
dict_keys=list(workflow_dict.keys()),
organization_id=organization.organization_id,
)
return workflow_dict
raise HTTPException(status_code=400, detail="Invalid or unreadable PDF file.") from e
finally:
# Clean up the temporary file
os.unlink(temp_file_path)
async def create_workflow_from_sop_text(self, sop_text: str, organization: Organization) -> dict[str, Any]:
    """Convert SOP text to workflow definition using LLM (does not create the workflow).

    Renders the "build-workflow-from-pdf" prompt with the SOP text, asks the
    configured LLM to emit a workflow JSON structure, validates its shape,
    sanitizes it, and returns the validated request as a dict. The caller is
    responsible for actually creating the workflow from the returned payload.

    Args:
        sop_text: Raw SOP text (e.g. extracted from an uploaded PDF).
        organization: Organization on whose behalf the LLM call is attributed/logged.

    Returns:
        The validated ``WorkflowCreateYAMLRequest`` dumped with ``by_alias=True``.

    Raises:
        HTTPException: 422 when the LLM response is not a dict, is missing the
            required "workflow_definition"/"blocks" structure, or fails
            ``WorkflowCreateYAMLRequest`` validation.
    """
    # Load and render the prompt template
    prompt = prompt_engine.load_prompt(
        "build-workflow-from-pdf",
        sop_text=sop_text,
    )

    # Use the LLM to convert SOP to workflow
    # Falls back to "gpt-4o-mini" when no LLM_KEY is configured.
    llm_key = settings.LLM_KEY or "gpt-4o-mini"
    LOG.info(
        "Calling LLM to convert SOP to workflow",
        llm_key=llm_key,
        prompt_length=len(prompt),
        sop_text_length=len(sop_text),
        sop_chars_sent=len(sop_text),
        organization_id=organization.organization_id,
    )

    llm_api_handler = LLMAPIHandlerFactory.get_llm_api_handler(llm_key)
    response = await llm_api_handler(
        prompt=prompt,
        prompt_name="sop_to_workflow_conversion",
        organization_id=organization.organization_id,
        parameters={"max_completion_tokens": 32768},  # Override the default 4096 limit for PDF conversion
    )

    LOG.info(
        "LLM response received",
        response_type=type(response),
        response_keys=list(response.keys()) if isinstance(response, dict) else None,
        organization_id=organization.organization_id,
    )

    # The LLM API handler automatically parses JSON responses
    # The response should be a dict with the workflow structure
    if not isinstance(response, dict):
        LOG.error(
            "LLM returned non-dict response",
            response_type=type(response),
            response=str(response)[:500],  # truncate to keep log lines bounded
            organization_id=organization.organization_id,
        )
        raise HTTPException(status_code=422, detail="LLM returned invalid response format - expected JSON object")

    # Validate that it has the required structure
    if "workflow_definition" not in response:
        LOG.error(
            "LLM response missing workflow_definition",
            response_keys=list(response.keys()),
            organization_id=organization.organization_id,
        )
        raise HTTPException(status_code=422, detail="LLM response missing 'workflow_definition' field")

    if "blocks" not in response.get("workflow_definition", {}):
        LOG.error(
            "LLM workflow_definition missing blocks",
            workflow_def_keys=list(response.get("workflow_definition", {}).keys()),
            organization_id=organization.organization_id,
        )
        raise HTTPException(status_code=422, detail="LLM workflow definition missing 'blocks' field")

    try:
        # Sanitize LLM output for Jinja and required fields before validation
        response = self._sanitize_workflow_json(response)
        workflow_create_request = WorkflowCreateYAMLRequest.model_validate(response)
        LOG.info(
            "Workflow JSON validated successfully",
            title=response.get("title"),
            block_count=len(response.get("workflow_definition", {}).get("blocks", [])),
            organization_id=organization.organization_id,
        )
    except Exception as e:
        # Any sanitization or Pydantic validation failure is surfaced as a 422
        # with the underlying error message for the caller to act on.
        LOG.error(
            "Failed to validate workflow request",
            error=str(e),
            error_type=type(e).__name__,
            organization_id=organization.organization_id,
            exc_info=True,
        )
        raise HTTPException(
            status_code=422,
            detail=f"Failed to validate workflow structure: {e!s}",
        ) from e

    # Return the validated request as a dict (caller will create the workflow)
    return workflow_create_request.model_dump(by_alias=True)
pdf_import_service = PDFImportService()