20 changes: 10 additions & 10 deletions sdk/ai/azure-ai-projects/README.md
@@ -218,7 +218,7 @@ asset_file_path = os.path.abspath(

# Upload the CSV file for the code interpreter
file = openai_client.files.create(purpose="assistants", file=open(asset_file_path, "rb"))
tool = CodeInterpreterTool(container=CodeInterpreterToolAuto(file_ids=[file.id]))
tool = CodeInterpreterTool(container=CodeInterpreterContainerAuto(file_ids=[file.id]))
```

<!-- END SNIPPET -->
@@ -348,7 +348,7 @@ Call external APIs defined by OpenAPI specifications without additional client-s
with open(weather_asset_file_path, "r") as f:
openapi_weather = jsonref.loads(f.read())

tool = OpenApiAgentTool(
tool = OpenApiTool(
openapi=OpenApiFunctionDefinition(
name="get_weather",
spec=openapi_weather,
@@ -427,7 +427,7 @@ Integrate with Azure AI Search indexes for powerful knowledge retrieval and sema
<!-- SNIPPET:sample_agent_ai_search.tool_declaration -->

```python
tool = AzureAISearchAgentTool(
tool = AzureAISearchTool(
azure_ai_search=AzureAISearchToolResource(
indexes=[
AISearchIndexResource(
@@ -451,7 +451,7 @@ Ground agent responses with real-time web search results from Bing to provide up
<!-- SNIPPET:sample_agent_bing_grounding.tool_declaration -->

```python
tool = BingGroundingAgentTool(
tool = BingGroundingTool(
bing_grounding=BingGroundingSearchToolParameters(
search_configurations=[
BingGroundingSearchConfiguration(project_connection_id=os.environ["BING_PROJECT_CONNECTION_ID"])
@@ -471,7 +471,7 @@ Use custom-configured Bing search instances for domain-specific or filtered web
<!-- SNIPPET:sample_agent_bing_custom_search.tool_declaration -->

```python
tool = BingCustomSearchAgentTool(
tool = BingCustomSearchPreviewTool(
bing_custom_search_preview=BingCustomSearchToolParameters(
search_configurations=[
BingCustomSearchConfiguration(
@@ -494,7 +494,7 @@ Connect to and query Microsoft Fabric:
<!-- SNIPPET:sample_agent_fabric.tool_declaration -->

```python
tool = MicrosoftFabricAgentTool(
tool = MicrosoftFabricPreviewTool(
fabric_dataagent_preview=FabricDataAgentToolParameters(
project_connections=[
ToolProjectConnection(project_connection_id=os.environ["FABRIC_PROJECT_CONNECTION_ID"])
@@ -514,7 +514,7 @@ Access and search SharePoint documents, lists, and sites for enterprise knowledg
<!-- SNIPPET:sample_agent_sharepoint.tool_declaration -->

```python
tool = SharepointAgentTool(
tool = SharepointPreviewTool(
sharepoint_grounding_preview=SharepointGroundingToolParameters(
project_connections=[
ToolProjectConnection(project_connection_id=os.environ["SHAREPOINT_PROJECT_CONNECTION_ID"])
@@ -534,7 +534,7 @@ Automate browser interactions for web scraping, testing, and interaction with we
<!-- SNIPPET:sample_agent_browser_automation.tool_declaration -->

```python
tool = BrowserAutomationAgentTool(
tool = BrowserAutomationPreviewTool(
browser_automation_preview=BrowserAutomationToolParameters(
connection=BrowserAutomationToolConnectionParameters(
project_connection_id=os.environ["BROWSER_AUTOMATION_PROJECT_CONNECTION_ID"],
@@ -574,7 +574,7 @@ Enable multi-agent collaboration where agents can communicate and delegate tasks
<!-- SNIPPET:sample_agent_to_agent.tool_declaration -->

```python
tool = A2ATool(
tool = A2APreviewTool(
project_connection_id=os.environ["A2A_PROJECT_CONNECTION_ID"],
)
# If the connection is missing a target, we need to set the A2A endpoint URL.
@@ -596,7 +596,7 @@ Call external APIs defined by OpenAPI specifications using project connection au
with open(tripadvisor_asset_file_path, "r") as f:
openapi_tripadvisor = jsonref.loads(f.read())

tool = OpenApiAgentTool(
tool = OpenApiTool(
openapi=OpenApiFunctionDefinition(
name="tripadvisor",
spec=openapi_tripadvisor,
352 changes: 226 additions & 126 deletions sdk/ai/azure-ai-projects/apiview-properties.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions sdk/ai/azure-ai-projects/azure/ai/projects/_types.py
@@ -10,4 +10,5 @@

if TYPE_CHECKING:
from . import models as _models
ComparisonFilterValueItems = Union[str, float]
Filters = Union["_models.ComparisonFilter", "_models.CompoundFilter"]
112 changes: 109 additions & 3 deletions sdk/ai/azure-ai-projects/azure/ai/projects/_utils/model_base.py
@@ -171,6 +171,21 @@ def default(self, o): # pylint: disable=too-many-return-statements
r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT"
)

_ARRAY_ENCODE_MAPPING = {
"pipeDelimited": "|",
"spaceDelimited": " ",
"commaDelimited": ",",
"newlineDelimited": "\n",
}


def _deserialize_array_encoded(delimit: str, attr):
if isinstance(attr, str):
if attr == "":
return []
return attr.split(delimit)
return attr


def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime:
"""Deserialize ISO-8601 formatted string into Datetime object.
@@ -315,6 +330,8 @@ def _deserialize_int_as_str(attr):
def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None):
if annotation is int and rf and rf._format == "str":
return _deserialize_int_as_str
if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING:
return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format])
if rf and rf._format:
return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format)
return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore
@@ -353,9 +370,39 @@ def __contains__(self, key: typing.Any) -> bool:
return key in self._data

def __getitem__(self, key: str) -> typing.Any:
# If this key has been deserialized (for mutable types), we need to handle serialization
if hasattr(self, "_attr_to_rest_field"):
cache_attr = f"_deserialized_{key}"
if hasattr(self, cache_attr):
rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key)
if rf:
value = self._data.get(key)
if isinstance(value, (dict, list, set)):
# For mutable types, serialize and return
# But also update _data with serialized form and clear flag
# so mutations via this returned value affect _data
serialized = _serialize(value, rf._format)
# If serialized form is same type (no transformation needed),
# return _data directly so mutations work
if isinstance(serialized, type(value)) and serialized == value:
return self._data.get(key)
# Otherwise return serialized copy and clear flag
try:
object.__delattr__(self, cache_attr)
except AttributeError:
pass
# Store serialized form back
self._data[key] = serialized
return serialized
return self._data.__getitem__(key)

def __setitem__(self, key: str, value: typing.Any) -> None:
# Clear any cached deserialized value when setting through dictionary access
cache_attr = f"_deserialized_{key}"
try:
object.__delattr__(self, cache_attr)
except AttributeError:
pass
self._data.__setitem__(key, value)

def __delitem__(self, key: str) -> None:
@@ -483,6 +530,8 @@ def _is_model(obj: typing.Any) -> bool:

def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements
if isinstance(o, list):
if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o):
return _ARRAY_ENCODE_MAPPING[format].join(o)
return [_serialize(x, format) for x in o]
if isinstance(o, dict):
return {k: _serialize(v, format) for k, v in o.items()}
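
Taken together, the new `_ARRAY_ENCODE_MAPPING` table, the `_deserialize_array_encoded` helper, the `get_deserializer` hook, and the list branch added to `_serialize` implement a delimiter-based round trip for string arrays. Below is a minimal standalone sketch of that round trip; it mirrors the helpers in this diff rather than calling the SDK's private module.

```python
# Standalone sketch of the delimiter round trip added in this diff; illustrative only.
ARRAY_ENCODE_MAPPING = {
    "pipeDelimited": "|",
    "spaceDelimited": " ",
    "commaDelimited": ",",
    "newlineDelimited": "\n",
}

def encode_array(values, fmt):
    # mirrors the new list branch in _serialize: a list of strings with a known
    # format collapses into a single delimited string
    return ARRAY_ENCODE_MAPPING[fmt].join(values)

def decode_array(encoded, fmt):
    # mirrors _deserialize_array_encoded: "" becomes [], otherwise split on the delimiter
    if encoded == "":
        return []
    return encoded.split(ARRAY_ENCODE_MAPPING[fmt])

assert encode_array(["a", "b", "c"], "pipeDelimited") == "a|b|c"
assert decode_array("a|b|c", "pipeDelimited") == ["a", "b", "c"]
assert decode_array("", "commaDelimited") == []
```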
@@ -638,6 +687,10 @@ def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self:
if not rf._rest_name_input:
rf._rest_name_input = attr
cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items())
cls._backcompat_attr_to_rest_field: dict[str, _RestField] = {
Model._get_backcompat_attribute_name(cls._attr_to_rest_field, attr): rf
for attr, rf in cls._attr_to_rest_field.items()
}
cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}")

return super().__new__(cls)
@@ -647,6 +700,16 @@ def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None:
if hasattr(base, "__mapping__"):
base.__mapping__[discriminator or cls.__name__] = cls # type: ignore

@classmethod
def _get_backcompat_attribute_name(cls, attr_to_rest_field: dict[str, "_RestField"], attr_name: str) -> str:
rest_field_obj = attr_to_rest_field.get(attr_name) # pylint: disable=protected-access
if rest_field_obj is None:
return attr_name
original_tsp_name = getattr(rest_field_obj, "_original_tsp_name", None) # pylint: disable=protected-access
if original_tsp_name:
return original_tsp_name
return attr_name

@classmethod
def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]:
for v in cls.__dict__.values():
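
The new `original_tsp_name` parameter on `rest_field` and the `_backcompat_attr_to_rest_field` map built in `__new__` let a renamed attribute still be resolved under its original TypeSpec name. The sketch below shows what the two lookup tables would contain for an invented model; the field, names, and the private import path are assumptions for illustration, not part of this diff.

```python
# Hypothetical illustration only: assumes the private module from this diff
# (azure.ai.projects._utils.model_base) and an invented Widget model.
from azure.ai.projects._utils.model_base import Model, rest_field

class Widget(Model):
    # display_name was renamed; "name" is kept as its original TypeSpec name
    display_name: str = rest_field(name="displayName", original_tsp_name="name")

w = Widget(display_name="demo")  # first construction runs Model.__new__, which builds the maps
# Per the __new__ / _get_backcompat_attribute_name changes above:
#   type(w)._attr_to_rest_field            maps "display_name" -> the _RestField
#   type(w)._backcompat_attr_to_rest_field maps "name"         -> the same _RestField
```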
@@ -767,6 +830,17 @@ def _deserialize_sequence(
return obj
if isinstance(obj, ET.Element):
obj = list(obj)
try:
if (
isinstance(obj, str)
and isinstance(deserializer, functools.partial)
and isinstance(deserializer.args[0], functools.partial)
and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable
):
# encoded string may be deserialized to sequence
return deserializer(obj)
except: # pylint: disable=bare-except
Copilot AI commented on Jan 13, 2026:

The use of a bare except clause is a security and maintainability concern. While pylint is disabled for this line, bare except clauses can catch system exceptions like SystemExit and KeyboardInterrupt, making it difficult to terminate the program. Consider catching specific exceptions instead, such as (TypeError, AttributeError, IndexError), which are the exceptions most likely to be raised when checking the deserializer structure.

Suggested change:
- except: # pylint: disable=bare-except
+ except (TypeError, AttributeError, IndexError):
pass
return type(obj)(_deserialize(deserializer, entry, module) for entry in obj)


@@ -972,6 +1046,7 @@ def _failsafe_deserialize_xml(
return None


# pylint: disable=too-many-instance-attributes
class _RestField:
def __init__(
self,
@@ -984,6 +1059,7 @@ def __init__(
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
xml: typing.Optional[dict[str, typing.Any]] = None,
original_tsp_name: typing.Optional[str] = None,
):
self._type = type
self._rest_name_input = name
@@ -995,10 +1071,15 @@ def __init__(
self._format = format
self._is_multipart_file_input = is_multipart_file_input
self._xml = xml if xml is not None else {}
self._original_tsp_name = original_tsp_name

@property
def _class_type(self) -> typing.Any:
return getattr(self._type, "args", [None])[0]
result = getattr(self._type, "args", [None])[0]
# type may be wrapped by nested functools.partial so we need to check for that
if isinstance(result, functools.partial):
return getattr(result, "args", [None])[0]
return result

@property
def _rest_name(self) -> str:
@@ -1009,14 +1090,37 @@ def _rest_name(self) -> str:
def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin
# by this point, type and rest_name will have a value bc we default
# them in __new__ of the Model class
item = obj.get(self._rest_name)
# Use _data.get() directly to avoid triggering __getitem__ which clears the cache
item = obj._data.get(self._rest_name)
if item is None:
return item
if self._is_model:
return item
return _deserialize(self._type, _serialize(item, self._format), rf=self)

# For mutable types, we want mutations to directly affect _data
# Check if we've already deserialized this value
cache_attr = f"_deserialized_{self._rest_name}"
if hasattr(obj, cache_attr):
# Return the value from _data directly (it's been deserialized in place)
return obj._data.get(self._rest_name)

deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self)

# For mutable types, store the deserialized value back in _data
# so mutations directly affect _data
if isinstance(deserialized, (dict, list, set)):
obj._data[self._rest_name] = deserialized
object.__setattr__(obj, cache_attr, True) # Mark as deserialized
return deserialized

return deserialized

def __set__(self, obj: Model, value) -> None:
# Clear the cached deserialized object when setting a new value
cache_attr = f"_deserialized_{self._rest_name}"
if hasattr(obj, cache_attr):
object.__delattr__(obj, cache_attr)

if value is None:
# we want to wipe out entries if users set attr to None
try:
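
The `__getitem__`/`__setitem__` changes earlier in this file and the `_RestField.__get__`/`__set__` changes above work together so that mutating a deserialized list or dict obtained through attribute access is reflected in the model's underlying `_data`, while dictionary-style writes invalidate the cached deserialized value. Here is a hedged sketch of the intended behavior, assuming the private module path from this diff and an invented model with a single list field.

```python
# Illustrative only: assumes azure.ai.projects._utils.model_base behaves as shown in this diff.
from typing import List

from azure.ai.projects._utils.model_base import Model, rest_field

class Thing(Model):
    tags: List[str] = rest_field()  # REST name defaults to the attribute name

thing = Thing({"tags": ["a", "b"]})
thing.tags.append("c")                    # __get__ stored the deserialized list back into _data,
                                          # so the in-place mutation is visible everywhere
assert thing["tags"] == ["a", "b", "c"]   # dict-style read sees the mutation
thing["tags"] = ["x"]                     # __setitem__ clears the cached deserialized value
assert thing.tags == ["x"]                # attribute read re-deserializes the new value
```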
@@ -1046,6 +1150,7 @@ def rest_field(
format: typing.Optional[str] = None,
is_multipart_file_input: bool = False,
xml: typing.Optional[dict[str, typing.Any]] = None,
original_tsp_name: typing.Optional[str] = None,
) -> typing.Any:
return _RestField(
name=name,
@@ -1055,6 +1160,7 @@ def rest_field(
format=format,
is_multipart_file_input=is_multipart_file_input,
xml=xml,
original_tsp_name=original_tsp_name,
)


@@ -821,13 +821,20 @@ def serialize_basic(cls, data, data_type, **kwargs):
:param str data_type: Type of object in the iterable.
:rtype: str, int, float, bool
:return: serialized object
:raises TypeError: raised if data_type is not one of str, int, float, bool.
"""
custom_serializer = cls._get_custom_serializers(data_type, **kwargs)
if custom_serializer:
return custom_serializer(data)
if data_type == "str":
return cls.serialize_unicode(data)
return eval(data_type)(data) # nosec # pylint: disable=eval-used
if data_type == "int":
return int(data)
if data_type == "float":
return float(data)
if data_type == "bool":
return bool(data)
raise TypeError("Unknown basic data type: {}".format(data_type))

@classmethod
def serialize_unicode(cls, data):
@@ -1757,7 +1764,7 @@ def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return
:param str data_type: deserialization data type.
:return: Deserialized basic type.
:rtype: str, int, float or bool
:raises TypeError: if string format is not valid.
:raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool.
"""
# If we're here, data is supposed to be a basic type.
# If it's still an XML node, take the text
@@ -1783,7 +1790,11 @@

if data_type == "str":
return self.deserialize_unicode(attr)
return eval(data_type)(attr) # nosec # pylint: disable=eval-used
if data_type == "int":
return int(attr)
if data_type == "float":
return float(attr)
raise TypeError("Unknown basic data type: {}".format(data_type))

@staticmethod
def deserialize_unicode(data):
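
Both `serialize_basic` and `deserialize_basic` now replace `eval(data_type)(value)` with explicit branches over the supported basic types, so unknown type names raise `TypeError` instead of being evaluated. A standalone sketch of the same dispatch pattern (not the SDK's code) to make the design choice concrete:

```python
# Minimal sketch of the explicit dispatch that replaces eval(data_type)(value).
def convert_basic(value, data_type):
    converters = {"str": str, "int": int, "float": float, "bool": bool}
    try:
        converter = converters[data_type]
    except KeyError:
        raise TypeError("Unknown basic data type: {}".format(data_type)) from None
    return converter(value)

assert convert_basic("42", "int") == 42
assert convert_basic("2.5", "float") == 2.5
assert convert_basic(1, "bool") is True
```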