Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "uipath-langchain"
version = "0.5.70"
version = "0.5.71"
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
readme = { file = "README.md", content-type = "text/markdown" }
requires-python = ">=3.11"
Expand Down
7 changes: 7 additions & 0 deletions src/uipath_langchain/agent/react/init_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@

from .job_attachments import (
get_job_attachments,
parse_attachments_from_conversation_messages,
)
from .types import AgentSettings

Expand Down Expand Up @@ -43,6 +44,12 @@ def graph_state_init(state: Any) -> Any:
job_attachments_dict = {
str(att.id): att for att in job_attachments if att.id is not None
}
# Merge attachments from preserved messages for conversational agents
if is_conversational:
message_attachments = parse_attachments_from_conversation_messages(
preserved_messages
)
job_attachments_dict.update(message_attachments)

return {
"messages": resolved_messages,
Expand Down
44 changes: 43 additions & 1 deletion src/uipath_langchain/agent/react/job_attachments.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@

import copy
import uuid
from typing import Any
from typing import Any, Sequence

from jsonpath_ng import parse # type: ignore[import-untyped]
from langchain_core.messages import BaseMessage, HumanMessage
from pydantic import BaseModel
from uipath.platform.attachments import Attachment

Expand Down Expand Up @@ -125,3 +126,44 @@ def _create_job_attachment_error_message(attachment_id_str: str) -> str:
f"Try invoking the tool again and please make sure that you pass "
f"valid JobAttachment IDs associated with existing JobAttachments in the current context."
)


def parse_attachments_from_conversation_messages(
    messages: Sequence[BaseMessage],
) -> dict[str, Attachment]:
    """Parse attachments from HumanMessage additional_kwargs.

    Extracts attachment information from HumanMessages where additional_kwargs
    contains an 'attachments' list with attachment details. Entries missing an
    ``id`` or ``full_name`` are skipped; on duplicate IDs the later message wins.

    Args:
        messages: Sequence of messages to parse

    Returns:
        Dictionary mapping attachment ID (stringified) to Attachment objects
    """
    attachments: dict[str, Attachment] = {}

    for message in messages:
        # Only user messages carry conversation attachments.
        if not isinstance(message, HumanMessage):
            continue

        kwargs = getattr(message, "additional_kwargs", None)
        if not kwargs:
            continue

        # Handle attachments list in additional_kwargs
        for att in kwargs.get("attachments", []):
            # `att_id` (not `id`) to avoid shadowing the builtin.
            att_id = att.get("id")
            full_name = att.get("full_name")
            mime_type = att.get("mime_type")

            if att_id and full_name:
                attachments[str(att_id)] = Attachment(
                    id=att_id,
                    full_name=full_name,
                    mime_type=mime_type,
                )

    return attachments
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,10 @@ def create_analyze_file_tool(
input_model = create_model(resource.input_schema)
output_model = create_model(resource.output_schema)

# Disable streaming so for conversational loops, the internal LLM call doesn't leak
# AIMessageChunk events into the graph stream.
non_streaming_llm = llm.model_copy(update={"disable_streaming": True})

@mockable(
name=resource.name,
description=resource.description,
Expand Down Expand Up @@ -81,7 +85,7 @@ async def tool_fn(**kwargs: Any):
cast(AnyMessage, human_message_with_files),
]
config = var_child_runnable_config.get(None)
result = await llm.ainvoke(messages, config=config)
result = await non_streaming_llm.ainvoke(messages, config=config)

analysis_result = extract_text_content(result)
return analysis_result
Expand Down
60 changes: 57 additions & 3 deletions src/uipath_langchain/runtime/messages.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import asyncio
import json
import logging
import uuid
from datetime import datetime, timezone
from typing import Any, cast

Expand Down Expand Up @@ -28,6 +29,7 @@
UiPathConversationToolCallEndEvent,
UiPathConversationToolCallEvent,
UiPathConversationToolCallStartEvent,
UiPathExternalValue,
UiPathInlineValue,
)
from uipath.runtime import UiPathRuntimeStorageProtocol
Expand Down Expand Up @@ -90,7 +92,6 @@ def map_messages(self, messages: list[Any]) -> list[Any]:
return self._map_messages_internal(
cast(list[UiPathConversationMessage], messages)
)

# Case3: List[dict] -> parse to List[UiPathConversationMessage]
if isinstance(first, dict):
try:
Expand Down Expand Up @@ -118,9 +119,9 @@ def _map_messages_internal(

for uipath_message in messages:
content_blocks: list[ContentBlock] = []
attachments: list[dict[str, Any]] = []

# Convert content_parts to content_blocks
# TODO: Convert file-attachment content-parts to content_blocks as well
if uipath_message.content_parts:
for uipath_content_part in uipath_message.content_parts:
data = uipath_content_part.data
Expand All @@ -134,13 +135,36 @@ def _map_messages_internal(
text, id=uipath_content_part.content_part_id
)
)
elif isinstance(data, UiPathExternalValue):
attachment_id = self.parse_attachment_id_from_content_part_uri(
data.uri
)
full_name = uipath_content_part.name
if attachment_id and full_name:
attachments.append(
{
"id": attachment_id,
"full_name": full_name,
"mime_type": uipath_content_part.mime_type,
}
)

# Add attachment references as a text block for LLM visibility
if attachments:
content_blocks.append(
create_text_block(
f"<uip:attachments>{json.dumps(attachments)}</uip:attachments>"
Copy link
Contributor

@maxduu maxduu Feb 18, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

When the text content_blocks are all combined, any new-lines here should be added? E.g. \n<uip:attachments>...? Just checking it doesn't get put at the end of the human message without any whitespace between.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The content blocks are still separated in their respective JSON text blocks (handled individually and differently than AIMessage content extraction) e.g. something like

HumanMessage(
      id="msg-123",
      content_blocks=[
          # 1. User inputted text
          {"type": "text", "text": "Please summarize these documents", "id": "cp-1"},
          # 2. Attachment references (all combined into one JSON text block)
          {"type": "text", "text": '<uip:attachments>[{"id": "a940a416-...", "full_name": "report.pdf", "mime_type":
  "application/pdf"}, {"id": "b123c456-...", "full_name": "data.csv", "mime_type": "text/csv"}]</uip:attachments>'}
      ],
      additional_kwargs={
          "message_id": "msg-123",
          "created_at": "2025-01-04T10:30:00.123Z",
          "updated_at": "2025-01-04T10:30:00.123Z",
          "attachments": [
              {"id": "a940a416-...", "full_name": "report.pdf", "mime_type": "application/pdf"},
              {"id": "b123c456-...", "full_name": "data.csv", "mime_type": "text/csv"},
          ]
      }
  )
image

)
)

# Metadata for the user/assistant message
metadata = {
metadata: dict[str, Any] = {
"message_id": uipath_message.message_id,
"created_at": uipath_message.created_at,
"updated_at": uipath_message.updated_at,
}
if attachments:
metadata["attachments"] = attachments

role = uipath_message.role
if role == "user":
Expand Down Expand Up @@ -244,6 +268,36 @@ def get_timestamp(self):
def get_content_part_id(self, message_id: str) -> str:
return f"chunk-{message_id}-0"

def parse_attachment_id_from_content_part_uri(self, uri: str) -> str | None:
"""Parse attachment ID from a URI.

Extracts the UUID from URIs like:
"urn:uipath:cas:file:orchestrator:a940a416-b97b-4146-3089-08de5f4d0a87"

Args:
uri: The URI to parse

Returns:
The attachment ID if found, None otherwise
"""
if not uri:
return None

# The UUID is the last segment after the final colon
parts = uri.rsplit(":", 1)
if len(parts) != 2:
return None

potential_uuid = parts[1]
if not potential_uuid:
return None

# Validate it's a proper UUID and normalize to lowercase
try:
return str(uuid.UUID(potential_uuid))
except (ValueError, AttributeError):
return None

async def map_ai_message_chunk_to_events(
self, message: AIMessageChunk
) -> list[UiPathConversationMessageEvent]:
Expand Down
32 changes: 32 additions & 0 deletions tests/agent/react/test_init_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,3 +214,35 @@ def test_inner_state_present_in_conversational_mode(self):

assert "inner_state" in result
assert "job_attachments" in result["inner_state"]

def test_conversational_merges_attachments_from_preserved_messages(self):
    """Conversational mode should merge attachments from preserved message metadata."""
    attachment_id = "a940a416-b97b-4146-3089-08de5f4d0a87"

    # Preserved conversation history: a system message plus a human message
    # carrying an attachment reference in additional_kwargs, shaped exactly
    # as the runtime message mapper writes it.
    preserved = [
        SystemMessage(content="Old system"),
        HumanMessage(
            content="File here",
            additional_kwargs={
                "attachments": [
                    {
                        "id": attachment_id,
                        "full_name": "document.pdf",
                        "mime_type": "application/pdf",
                    }
                ],
            },
        ),
    ]

    fresh_messages: list[SystemMessage | HumanMessage] = [
        SystemMessage(content="New system"),
    ]
    node = create_init_node(fresh_messages, input_schema=None, is_conversational=True)

    result = node(MockState(messages=preserved))

    merged = result["inner_state"]["job_attachments"]
    assert attachment_id in merged
    attachment = merged[attachment_id]
    assert attachment.full_name == "document.pdf"
    assert attachment.mime_type == "application/pdf"
Loading