Support selecting an LLM for the file parser (#3291)
This commit is contained in:
@@ -109,6 +109,21 @@ class Block(BaseModel, abc.ABC):
|
||||
    # When True, a failure of this block does not abort the workflow run;
    # execution continues with subsequent blocks.
    continue_on_failure: bool = False
    # Optional per-block LLM override. When set, it is expected to carry a
    # "model_name" key (read by `override_llm_key`) that maps to a configured
    # llm_key; None means the default LLM handler is used.
    model: dict[str, Any] | None = None
@property
|
||||
def override_llm_key(self) -> str | None:
|
||||
"""
|
||||
If the `Block` has a `model` defined, then return the mapped llm_key for it.
|
||||
|
||||
Otherwise return `None`.
|
||||
"""
|
||||
if self.model:
|
||||
model_name = self.model.get("model_name")
|
||||
if model_name:
|
||||
mapping = settings.get_model_name_to_llm_key()
|
||||
return mapping.get(model_name, {}).get("llm_key")
|
||||
|
||||
return None
|
||||
|
||||
async def record_output_parameter_value(
|
||||
self,
|
||||
workflow_run_context: WorkflowRunContext,
|
||||
@@ -2564,7 +2579,12 @@ class FileParserBlock(Block):
|
||||
llm_prompt = prompt_engine.load_prompt(
|
||||
"extract-information-from-file-text", extracted_text_content=content_str, json_schema=schema_to_use
|
||||
)
|
||||
llm_response = await app.LLM_API_HANDLER(prompt=llm_prompt, prompt_name="extract-information-from-file-text")
|
||||
|
||||
llm_api_handler = LLMAPIHandlerFactory.get_override_llm_api_handler(
|
||||
self.override_llm_key, default=app.LLM_API_HANDLER
|
||||
)
|
||||
|
||||
llm_response = await llm_api_handler(prompt=llm_prompt, prompt_name="extract-information-from-file-text")
|
||||
return llm_response
|
||||
|
||||
async def execute(
|
||||
|
||||
@@ -1982,6 +1982,7 @@ class WorkflowService:
|
||||
file_type=block_yaml.file_type,
|
||||
json_schema=block_yaml.json_schema,
|
||||
continue_on_failure=block_yaml.continue_on_failure,
|
||||
model=block_yaml.model,
|
||||
)
|
||||
elif block_yaml.block_type == BlockType.PDF_PARSER:
|
||||
return PDFParserBlock(
|
||||
|
||||
Reference in New Issue
Block a user