Save LLM prompt and response artifacts in TextPromptBlock (#4361)

This commit is contained in:
pedrohsdb
2025-12-23 09:48:36 -08:00
committed by GitHub
parent 9645960016
commit b81865b0b5
2 changed files with 69 additions and 1 deletions

View File

@@ -305,6 +305,45 @@ class ArtifactManager:
path=path,
)
async def create_workflow_run_block_artifacts(
    self,
    workflow_run_block: WorkflowRunBlock,
    artifacts: list[tuple[ArtifactType, bytes]],
) -> list[str]:
    """
    Persist several artifacts for one workflow run block with a single
    bulk database request instead of one insert per artifact.
    """
    # Nothing to persist -> skip issuing an empty bulk request.
    if not artifacts:
        return []

    def _to_batch_entry(artifact_type: ArtifactType, payload: bytes) -> ArtifactBatchData:
        # Each artifact gets a fresh id; the storage URI is derived from it
        # together with the owning block's identifiers.
        new_artifact_id = generate_artifact_id()
        storage_uri = app.STORAGE.build_workflow_run_block_uri(
            organization_id=workflow_run_block.organization_id,
            artifact_id=new_artifact_id,
            workflow_run_block=workflow_run_block,
            artifact_type=artifact_type,
        )
        model = self._build_artifact_model(
            artifact_id=new_artifact_id,
            artifact_type=artifact_type,
            uri=storage_uri,
            organization_id=workflow_run_block.organization_id,
            workflow_run_block_id=workflow_run_block.workflow_run_block_id,
            workflow_run_id=workflow_run_block.workflow_run_id,
        )
        return ArtifactBatchData(artifact_model=model, data=payload)

    batch = [_to_batch_entry(artifact_type, payload) for artifact_type, payload in artifacts]
    request = BulkArtifactCreationRequest(
        artifacts=batch,
        primary_key=workflow_run_block.workflow_run_block_id,
    )
    return await self._bulk_create_artifacts(request)
async def create_ai_suggestion_artifact(
self,
ai_suggestion: AISuggestion,

View File

@@ -2078,6 +2078,7 @@ class TextPromptBlock(Block):
parameter_values: dict[str, Any],
workflow_run_id: str,
organization_id: str | None = None,
workflow_run_block_id: str | None = None,
) -> dict[str, Any]:
default_llm_handler = await self._resolve_default_llm_handler(workflow_run_id, organization_id)
llm_api_handler = LLMAPIHandlerFactory.get_override_llm_api_handler(
@@ -2102,12 +2103,34 @@ class TextPromptBlock(Block):
+ json.dumps(self.json_schema, indent=2)
+ "\n```\n\n"
)
workflow_run_block = None
artifacts_to_persist: list[tuple[ArtifactType, bytes]] = []
if workflow_run_block_id:
try:
workflow_run_block = await app.DATABASE.get_workflow_run_block(workflow_run_block_id, organization_id)
if workflow_run_block:
artifacts_to_persist.append((ArtifactType.LLM_PROMPT, prompt.encode("utf-8")))
except Exception as e:
LOG.error("Failed to fetch workflow_run_block for TextPromptBlock artifacts", error=e)
LOG.info(
"TextPromptBlock: Sending prompt to LLM",
prompt=prompt,
llm_key=self.llm_key,
)
response = await llm_api_handler(prompt=prompt, prompt_name="text-prompt")
if workflow_run_block:
artifacts_to_persist.append((ArtifactType.LLM_RESPONSE, json.dumps(response).encode("utf-8")))
try:
await app.ARTIFACT_MANAGER.create_workflow_run_block_artifacts(
workflow_run_block=workflow_run_block,
artifacts=artifacts_to_persist,
)
except Exception as e:
LOG.error("Failed to save TextPromptBlock artifacts", error=e)
LOG.info("TextPromptBlock: Received response from LLM", response=response)
return response
@@ -2170,7 +2193,13 @@ class TextPromptBlock(Block):
else:
parameter_values[parameter.key] = value
response = await self.send_prompt(self.prompt, parameter_values, workflow_run_id, organization_id)
response = await self.send_prompt(
self.prompt,
parameter_values,
workflow_run_id,
organization_id,
workflow_run_block_id=workflow_run_block_id,
)
await self.record_output_parameter_value(workflow_run_context, workflow_run_id, response)
return await self.build_block_result(
success=True,