diff --git a/sdk/farmbeats/azure-farmbeats/README.md b/sdk/farmbeats/azure-farmbeats/README.md
new file mode 100644
index 000000000000..fb8e7f5a6973
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/README.md
@@ -0,0 +1,78 @@
+# Azure FarmBeats API client library for Python
+
+TODO
+
+## Getting started
+
+### Key concepts
+
+### Prerequisites
+
+* Azure subscription
+* Azure FarmBeats instance
+* Python 2.7 or 3.5.3+ TODO figure out supported versions
+
+If you need an Azure FarmBeats instance, you can follow this link TODO
+
+### Install the package
+
+```bash
+pip install azure-farmbeats
+```
+
+#### Configure a virtual environment (optional)
+
+Although not required, you can keep your base system and Azure SDK environments isolated from one another if you use a virtual environment. Execute the following commands to configure and then enter a virtual environment with venv:
+
+```bash
+python3 -m venv farmbeats-test-env
+source farmbeats-test-env/bin/activate
+```
+### Authenticate the client
+
+TODO
+
+### Create the client
+
+TODO
+
+## Examples
+
+Here are some common use case scenarios:
+
+TODO
+
+## Troubleshooting
+
+TODO
+
+
+## Next steps
+
+TODO
+
+
+[azure_cli]: https://docs.microsoft.com/cli/azure
+
+## Contributing
+
+This project welcomes contributions and suggestions.
+Most contributions require you to agree to a
+Contributor License Agreement (CLA) declaring that
+you have the right to, and actually do, grant us
+the rights to use your contribution.
+
+For details, visit https://cla.microsoft.com.
+
+When you submit a pull request, a CLA-bot will
+automatically determine whether you need to provide
+a CLA and decorate the PR appropriately (e.g., label, comment).
+Simply follow the instructions provided by the bot.
+You will only need to do this once across all repos using our CLA.
+
+This project has adopted the
+[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information see the
+[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com)
+with any additional questions or comments.
diff --git a/sdk/farmbeats/azure-farmbeats/azure/__init__.py b/sdk/farmbeats/azure-farmbeats/azure/__init__.py
new file mode 100644
index 000000000000..5960c353a898
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
\ No newline at end of file
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/__init__.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/__init__.py
new file mode 100644
index 000000000000..362891f7b69f
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/__init__.py
@@ -0,0 +1,17 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._farm_beats_client import FarmBeatsClient
+from ._version import VERSION
+
+__version__ = VERSION
+__all__ = ['FarmBeatsClient']
+
+try:
+ from ._patch import patch_sdk # type: ignore
+ patch_sdk()
+except ImportError:
+ pass
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_configuration.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_configuration.py
new file mode 100644
index 000000000000..3657c809364a
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_configuration.py
@@ -0,0 +1,62 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+from ._version import VERSION
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
+
+
+class FarmBeatsClientConfiguration(Configuration):
+ """Configuration for FarmBeatsClient.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ """
+
+ def __init__(
+ self,
+ credential, # type: "TokenCredential"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ super(FarmBeatsClientConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.api_version = "2021-03-31-preview"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://farmbeats-dogfood.azure.net/.default'])
+ kwargs.setdefault('sdk_moniker', 'farmbeatsclient/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_farm_beats_client.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_farm_beats_client.py
new file mode 100644
index 000000000000..1b7e87f40117
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_farm_beats_client.py
@@ -0,0 +1,169 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core import PipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
+ from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from ._configuration import FarmBeatsClientConfiguration
+from .operations import ApplicationDataOperations
+from .operations import AttachmentsOperations
+from .operations import BoundariesOperations
+from .operations import CropsOperations
+from .operations import CropVarietiesOperations
+from .operations import FarmersOperations
+from .operations import FarmOperationsOperations
+from .operations import FarmsOperations
+from .operations import FieldsOperations
+from .operations import HarvestDataOperations
+from .operations import ImageProcessingOperations
+from .operations import OAuthProvidersOperations
+from .operations import OAuthTokensOperations
+from .operations import PlantingDataOperations
+from .operations import ScenesOperations
+from .operations import SeasonalFieldsOperations
+from .operations import SeasonsOperations
+from .operations import TillageDataOperations
+from .operations import WeatherOperations
+from . import models
+
+
+class FarmBeatsClient(object):
+ """APIs documentation for Azure AgPlatform DataPlane Service.
+
+ :ivar application_data: ApplicationDataOperations operations
+ :vartype application_data: azure.farmbeats.operations.ApplicationDataOperations
+ :ivar attachments: AttachmentsOperations operations
+ :vartype attachments: azure.farmbeats.operations.AttachmentsOperations
+ :ivar boundaries: BoundariesOperations operations
+ :vartype boundaries: azure.farmbeats.operations.BoundariesOperations
+ :ivar crops: CropsOperations operations
+ :vartype crops: azure.farmbeats.operations.CropsOperations
+ :ivar crop_varieties: CropVarietiesOperations operations
+ :vartype crop_varieties: azure.farmbeats.operations.CropVarietiesOperations
+ :ivar farmers: FarmersOperations operations
+ :vartype farmers: azure.farmbeats.operations.FarmersOperations
+ :ivar farm_operations: FarmOperationsOperations operations
+ :vartype farm_operations: azure.farmbeats.operations.FarmOperationsOperations
+ :ivar farms: FarmsOperations operations
+ :vartype farms: azure.farmbeats.operations.FarmsOperations
+ :ivar fields: FieldsOperations operations
+ :vartype fields: azure.farmbeats.operations.FieldsOperations
+ :ivar harvest_data: HarvestDataOperations operations
+ :vartype harvest_data: azure.farmbeats.operations.HarvestDataOperations
+ :ivar image_processing: ImageProcessingOperations operations
+ :vartype image_processing: azure.farmbeats.operations.ImageProcessingOperations
+ :ivar oauth_providers: OAuthProvidersOperations operations
+ :vartype oauth_providers: azure.farmbeats.operations.OAuthProvidersOperations
+ :ivar oauth_tokens: OAuthTokensOperations operations
+ :vartype oauth_tokens: azure.farmbeats.operations.OAuthTokensOperations
+ :ivar planting_data: PlantingDataOperations operations
+ :vartype planting_data: azure.farmbeats.operations.PlantingDataOperations
+ :ivar scenes: ScenesOperations operations
+ :vartype scenes: azure.farmbeats.operations.ScenesOperations
+ :ivar seasonal_fields: SeasonalFieldsOperations operations
+ :vartype seasonal_fields: azure.farmbeats.operations.SeasonalFieldsOperations
+ :ivar seasons: SeasonsOperations operations
+ :vartype seasons: azure.farmbeats.operations.SeasonsOperations
+ :ivar tillage_data: TillageDataOperations operations
+ :vartype tillage_data: azure.farmbeats.operations.TillageDataOperations
+ :ivar weather: WeatherOperations operations
+ :vartype weather: azure.farmbeats.operations.WeatherOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ base_url,
+ credential, # type: "TokenCredential"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self._config = FarmBeatsClientConfiguration(credential, **kwargs)
+ self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._serialize.client_side_validation = False
+ self._deserialize = Deserializer(client_models)
+
+ self.application_data = ApplicationDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.attachments = AttachmentsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.boundaries = BoundariesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.crops = CropsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.crop_varieties = CropVarietiesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.farmers = FarmersOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.farm_operations = FarmOperationsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.farms = FarmsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.fields = FieldsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.harvest_data = HarvestDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.image_processing = ImageProcessingOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.oauth_providers = OAuthProvidersOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.oauth_tokens = OAuthTokensOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.planting_data = PlantingDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.scenes = ScenesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.seasonal_fields = SeasonalFieldsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.seasons = SeasonsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.tillage_data = TillageDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.weather = WeatherOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ def _send_request(self, http_request, **kwargs):
+ # type: (HttpRequest, Any) -> HttpResponse
+ """Runs the network request through the client's chained policies.
+
+ :param http_request: The network request you want to make. Required.
+ :type http_request: ~azure.core.pipeline.transport.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to True.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.pipeline.transport.HttpResponse
+ """
+ http_request.url = self._client.format_url(http_request.url)
+ stream = kwargs.pop("stream", True)
+ pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs)
+ return pipeline_response.http_response
+
+ def close(self):
+ # type: () -> None
+ self._client.close()
+
+ def __enter__(self):
+ # type: () -> FarmBeatsClient
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details):
+ # type: (Any) -> None
+ self._client.__exit__(*exc_details)
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_version.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_version.py
new file mode 100644
index 000000000000..eb384a58622d
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/_version.py
@@ -0,0 +1,7 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "1.0.0b1"
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/__init__.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/__init__.py
new file mode 100644
index 000000000000..ecf58379ad9c
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/__init__.py
@@ -0,0 +1,8 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._farm_beats_client import FarmBeatsClient
+__all__ = ['FarmBeatsClient']
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/_configuration.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/_configuration.py
new file mode 100644
index 000000000000..fd06810306a3
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/_configuration.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+from .._version import VERSION
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+
+class FarmBeatsClientConfiguration(Configuration):
+ """Configuration for FarmBeatsClient.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ **kwargs: Any
+ ) -> None:
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ super(FarmBeatsClientConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.api_version = "2021-03-31-preview"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://farmbeats-dogfood.azure.net/.default'])
+ kwargs.setdefault('sdk_moniker', 'farmbeatsclient/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs: Any
+ ) -> None:
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/_farm_beats_client.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/_farm_beats_client.py
new file mode 100644
index 000000000000..ee36e4922698
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/_farm_beats_client.py
@@ -0,0 +1,162 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core import AsyncPipelineClient
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+from ._configuration import FarmBeatsClientConfiguration
+from .operations import ApplicationDataOperations
+from .operations import AttachmentsOperations
+from .operations import BoundariesOperations
+from .operations import CropsOperations
+from .operations import CropVarietiesOperations
+from .operations import FarmersOperations
+from .operations import FarmOperationsOperations
+from .operations import FarmsOperations
+from .operations import FieldsOperations
+from .operations import HarvestDataOperations
+from .operations import ImageProcessingOperations
+from .operations import OAuthProvidersOperations
+from .operations import OAuthTokensOperations
+from .operations import PlantingDataOperations
+from .operations import ScenesOperations
+from .operations import SeasonalFieldsOperations
+from .operations import SeasonsOperations
+from .operations import TillageDataOperations
+from .operations import WeatherOperations
+from .. import models
+
+
+class FarmBeatsClient(object):
+ """APIs documentation for Azure AgPlatform DataPlane Service.
+
+ :ivar application_data: ApplicationDataOperations operations
+ :vartype application_data: azure.farmbeats.aio.operations.ApplicationDataOperations
+ :ivar attachments: AttachmentsOperations operations
+ :vartype attachments: azure.farmbeats.aio.operations.AttachmentsOperations
+ :ivar boundaries: BoundariesOperations operations
+ :vartype boundaries: azure.farmbeats.aio.operations.BoundariesOperations
+ :ivar crops: CropsOperations operations
+ :vartype crops: azure.farmbeats.aio.operations.CropsOperations
+ :ivar crop_varieties: CropVarietiesOperations operations
+ :vartype crop_varieties: azure.farmbeats.aio.operations.CropVarietiesOperations
+ :ivar farmers: FarmersOperations operations
+ :vartype farmers: azure.farmbeats.aio.operations.FarmersOperations
+ :ivar farm_operations: FarmOperationsOperations operations
+ :vartype farm_operations: azure.farmbeats.aio.operations.FarmOperationsOperations
+ :ivar farms: FarmsOperations operations
+ :vartype farms: azure.farmbeats.aio.operations.FarmsOperations
+ :ivar fields: FieldsOperations operations
+ :vartype fields: azure.farmbeats.aio.operations.FieldsOperations
+ :ivar harvest_data: HarvestDataOperations operations
+ :vartype harvest_data: azure.farmbeats.aio.operations.HarvestDataOperations
+ :ivar image_processing: ImageProcessingOperations operations
+ :vartype image_processing: azure.farmbeats.aio.operations.ImageProcessingOperations
+ :ivar oauth_providers: OAuthProvidersOperations operations
+ :vartype oauth_providers: azure.farmbeats.aio.operations.OAuthProvidersOperations
+ :ivar oauth_tokens: OAuthTokensOperations operations
+ :vartype oauth_tokens: azure.farmbeats.aio.operations.OAuthTokensOperations
+ :ivar planting_data: PlantingDataOperations operations
+ :vartype planting_data: azure.farmbeats.aio.operations.PlantingDataOperations
+ :ivar scenes: ScenesOperations operations
+ :vartype scenes: azure.farmbeats.aio.operations.ScenesOperations
+ :ivar seasonal_fields: SeasonalFieldsOperations operations
+ :vartype seasonal_fields: azure.farmbeats.aio.operations.SeasonalFieldsOperations
+ :ivar seasons: SeasonsOperations operations
+ :vartype seasons: azure.farmbeats.aio.operations.SeasonsOperations
+ :ivar tillage_data: TillageDataOperations operations
+ :vartype tillage_data: azure.farmbeats.aio.operations.TillageDataOperations
+ :ivar weather: WeatherOperations operations
+ :vartype weather: azure.farmbeats.aio.operations.WeatherOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ base_url,
+ credential: "AsyncTokenCredential",
+ **kwargs: Any
+ ) -> None:
+ self._config = FarmBeatsClientConfiguration(credential, **kwargs)
+ self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._serialize.client_side_validation = False
+ self._deserialize = Deserializer(client_models)
+
+ self.application_data = ApplicationDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.attachments = AttachmentsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.boundaries = BoundariesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.crops = CropsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.crop_varieties = CropVarietiesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.farmers = FarmersOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.farm_operations = FarmOperationsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.farms = FarmsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.fields = FieldsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.harvest_data = HarvestDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.image_processing = ImageProcessingOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.oauth_providers = OAuthProvidersOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.oauth_tokens = OAuthTokensOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.planting_data = PlantingDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.scenes = ScenesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.seasonal_fields = SeasonalFieldsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.seasons = SeasonsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.tillage_data = TillageDataOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.weather = WeatherOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
+ """Runs the network request through the client's chained policies.
+
+ :param http_request: The network request you want to make. Required.
+ :type http_request: ~azure.core.pipeline.transport.HttpRequest
+ :keyword bool stream: Whether the response payload will be streamed. Defaults to True.
+ :return: The response of your network call. Does not do error handling on your response.
+ :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
+ """
+ http_request.url = self._client.format_url(http_request.url)
+ stream = kwargs.pop("stream", True)
+ pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
+ return pipeline_response.http_response
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "FarmBeatsClient":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details) -> None:
+ await self._client.__aexit__(*exc_details)
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/__init__.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/__init__.py
new file mode 100644
index 000000000000..ee0bef1b3236
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/__init__.py
@@ -0,0 +1,47 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._application_data_operations import ApplicationDataOperations
+from ._attachments_operations import AttachmentsOperations
+from ._boundaries_operations import BoundariesOperations
+from ._crops_operations import CropsOperations
+from ._crop_varieties_operations import CropVarietiesOperations
+from ._farmers_operations import FarmersOperations
+from ._farm_operations_operations import FarmOperationsOperations
+from ._farms_operations import FarmsOperations
+from ._fields_operations import FieldsOperations
+from ._harvest_data_operations import HarvestDataOperations
+from ._image_processing_operations import ImageProcessingOperations
+from ._oauth_providers_operations import OAuthProvidersOperations
+from ._oauth_tokens_operations import OAuthTokensOperations
+from ._planting_data_operations import PlantingDataOperations
+from ._scenes_operations import ScenesOperations
+from ._seasonal_fields_operations import SeasonalFieldsOperations
+from ._seasons_operations import SeasonsOperations
+from ._tillage_data_operations import TillageDataOperations
+from ._weather_operations import WeatherOperations
+
+__all__ = [
+ 'ApplicationDataOperations',
+ 'AttachmentsOperations',
+ 'BoundariesOperations',
+ 'CropsOperations',
+ 'CropVarietiesOperations',
+ 'FarmersOperations',
+ 'FarmOperationsOperations',
+ 'FarmsOperations',
+ 'FieldsOperations',
+ 'HarvestDataOperations',
+ 'ImageProcessingOperations',
+ 'OAuthProvidersOperations',
+ 'OAuthTokensOperations',
+ 'PlantingDataOperations',
+ 'ScenesOperations',
+ 'SeasonalFieldsOperations',
+ 'SeasonsOperations',
+ 'TillageDataOperations',
+ 'WeatherOperations',
+]
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_application_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_application_data_operations.py
new file mode 100644
index 000000000000..ec25099059e2
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_application_data_operations.py
@@ -0,0 +1,637 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class ApplicationDataOperations:
    """ApplicationDataOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # The pipeline client, configuration and (de)serializers are owned by
        # the parent service client; this class only keeps references to them.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
+
    def list_by_farmer_id(
        self,
        farmer_id: str,
        min_avg_material: Optional[float] = None,
        max_avg_material: Optional[float] = None,
        min_total_material: Optional[float] = None,
        max_total_material: Optional[float] = None,
        sources: Optional[List[str]] = None,
        associated_boundary_ids: Optional[List[str]] = None,
        operation_boundary_ids: Optional[List[str]] = None,
        min_operation_start_date_time: Optional[datetime.datetime] = None,
        max_operation_start_date_time: Optional[datetime.datetime] = None,
        min_operation_end_date_time: Optional[datetime.datetime] = None,
        max_operation_end_date_time: Optional[datetime.datetime] = None,
        min_operation_modified_date_time: Optional[datetime.datetime] = None,
        max_operation_modified_date_time: Optional[datetime.datetime] = None,
        min_area: Optional[float] = None,
        max_area: Optional[float] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.ApplicationDataListResponse"]:
        """Returns a paginated list of application data resources under a particular farm.

        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param min_avg_material: Minimum average amount of material applied during the application
         (inclusive).
        :type min_avg_material: float
        :param max_avg_material: Maximum average amount of material applied during the application
         (inclusive).
        :type max_avg_material: float
        :param min_total_material: Minimum total amount of material applied during the application
         (inclusive).
        :type min_total_material: float
        :param max_total_material: Maximum total amount of material applied during the application
         (inclusive).
        :type max_total_material: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationDataListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.ApplicationDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ApplicationDataListResponse"]
        # Default HTTP-status -> exception mapping; callers may extend or
        # override it via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Builds the GET request for either the first page (full filter set
        # from the closed-over arguments) or a continuation page (next_link).
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_farmer_id.metadata['url']  # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if min_avg_material is not None:
                    query_parameters['minAvgMaterial'] = self._serialize.query("min_avg_material", min_avg_material, 'float')
                if max_avg_material is not None:
                    query_parameters['maxAvgMaterial'] = self._serialize.query("max_avg_material", max_avg_material, 'float')
                if min_total_material is not None:
                    query_parameters['minTotalMaterial'] = self._serialize.query("min_total_material", min_total_material, 'float')
                if max_total_material is not None:
                    query_parameters['maxTotalMaterial'] = self._serialize.query("max_total_material", max_total_material, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Service-side page-size bounds are enforced locally by the serializer.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                # NOTE(review): next_link is re-expanded with farmerId — this
                # assumes the link may still carry the {farmerId} placeholder;
                # confirm against actual service responses.
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        # Deserializes one page into (continuation token, async list of items).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('ApplicationDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        # Fetches a single page; raises on any non-200 response.
        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/application-data'}  # type: ignore
+
    def list(
        self,
        min_avg_material: Optional[float] = None,
        max_avg_material: Optional[float] = None,
        min_total_material: Optional[float] = None,
        max_total_material: Optional[float] = None,
        sources: Optional[List[str]] = None,
        associated_boundary_ids: Optional[List[str]] = None,
        operation_boundary_ids: Optional[List[str]] = None,
        min_operation_start_date_time: Optional[datetime.datetime] = None,
        max_operation_start_date_time: Optional[datetime.datetime] = None,
        min_operation_end_date_time: Optional[datetime.datetime] = None,
        max_operation_end_date_time: Optional[datetime.datetime] = None,
        min_operation_modified_date_time: Optional[datetime.datetime] = None,
        max_operation_modified_date_time: Optional[datetime.datetime] = None,
        min_area: Optional[float] = None,
        max_area: Optional[float] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.ApplicationDataListResponse"]:
        """Returns a paginated list of application data resources across all farmers.

        :param min_avg_material: Minimum average amount of material applied during the application
         (inclusive).
        :type min_avg_material: float
        :param max_avg_material: Maximum average amount of material applied during the application
         (inclusive).
        :type max_avg_material: float
        :param min_total_material: Minimum total amount of material applied during the application
         (inclusive).
        :type min_total_material: float
        :param max_total_material: Maximum total amount of material applied during the application
         (inclusive).
        :type max_total_material: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ApplicationDataListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.ApplicationDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ApplicationDataListResponse"]
        # Default HTTP-status -> exception mapping; callers may extend or
        # override it via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Builds the GET request for either the first page (full filter set
        # from the closed-over arguments) or a continuation page (next_link).
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                # NOTE(review): unlike list_by_farmer_id, this template has no
                # path parameters, so format_url is never called here.
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if min_avg_material is not None:
                    query_parameters['minAvgMaterial'] = self._serialize.query("min_avg_material", min_avg_material, 'float')
                if max_avg_material is not None:
                    query_parameters['maxAvgMaterial'] = self._serialize.query("max_avg_material", max_avg_material, 'float')
                if min_total_material is not None:
                    query_parameters['minTotalMaterial'] = self._serialize.query("min_total_material", min_total_material, 'float')
                if max_total_material is not None:
                    query_parameters['maxTotalMaterial'] = self._serialize.query("max_total_material", max_total_material, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Service-side page-size bounds are enforced locally by the serializer.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Continuation pages: the service-provided link is used as-is.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        # Deserializes one page into (continuation token, async list of items).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('ApplicationDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        # Fetches a single page; raises on any non-200 response.
        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/application-data'}  # type: ignore
+
+ async def get(
+ self,
+ farmer_id: str,
+ application_data_id: str,
+ **kwargs
+ ) -> "_models.ApplicationData":
+ """Get a specified application data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param application_data_id: ID of the application data resource.
+ :type application_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ApplicationData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.ApplicationData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'applicationDataId': self._serialize.url("application_data_id", application_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('ApplicationData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/farmers/{farmerId}/application-data/{applicationDataId}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ farmer_id: str,
+ application_data_id: str,
+ application_data: Optional["_models.ApplicationData"] = None,
+ **kwargs
+ ) -> "_models.ApplicationData":
+ """Creates or updates an application data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer.
+ :type farmer_id: str
+ :param application_data_id: ID of the application data resource.
+ :type application_data_id: str
+ :param application_data: Application data resource payload to create or update.
+ :type application_data: ~azure.farmbeats.models.ApplicationData
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ApplicationData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.ApplicationData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'applicationDataId': self._serialize.url("application_data_id", application_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if application_data is not None:
+ body_content = self._serialize.body(application_data, 'ApplicationData')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('ApplicationData', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('ApplicationData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/farmers/{farmerId}/application-data/{applicationDataId}'} # type: ignore
+
+ async def delete(
+ self,
+ farmer_id: str,
+ application_data_id: str,
+ **kwargs
+ ) -> None:
+ """Deletes a specified application data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param application_data_id: ID of the application data.
+ :type application_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'applicationDataId': self._serialize.url("application_data_id", application_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/farmers/{farmerId}/application-data/{applicationDataId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_attachments_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_attachments_operations.py
new file mode 100644
index 000000000000..591fe5b99829
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_attachments_operations.py
@@ -0,0 +1,464 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
# Generic response-hook plumbing: `cls` kwargs accepted by every operation are
# optional callables receiving (pipeline_response, deserialized, response_headers).
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class AttachmentsOperations:
    """AttachmentsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client, msrest-style serializer/deserializer and client
        # configuration are injected by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list_by_farmer_id(
        self,
        farmer_id: str,
        resource_ids: Optional[List[str]] = None,
        resource_types: Optional[List[str]] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.AttachmentListResponse"]:
        """Returns a paginated list of attachment resources under a particular farmer.

        :param farmer_id: Id of the associated farmer.
        :type farmer_id: str
        :param resource_ids: Resource Ids of the resource.
        :type resource_ids: list[str]
        :param resource_types: Resource Types of the resource.
        :type resource_types: list[str]
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AttachmentListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.AttachmentListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AttachmentListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the full URL and query string from the filters.
                # Construct URL
                url = self.list_by_farmer_id.metadata['url']  # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if resource_ids is not None:
                    query_parameters['resourceIds'] = [self._serialize.query("resource_ids", q, 'str') if q is not None else '' for q in resource_ids]
                if resource_types is not None:
                    query_parameters['resourceTypes'] = [self._serialize.query("resource_types", q, 'str') if q is not None else '' for q in resource_types]
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: follow the service-provided next link as-is;
                # no query parameters are re-added (the link presumably carries them).
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Pull one page's items and the continuation link out of the response.
            deserialized = self._deserialize('AttachmentListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # For codes in error_map, map_error raises the typed exception;
                # otherwise HttpResponseError carries the deserialized ErrorResponse.
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/attachments'}  # type: ignore

    async def get(
        self,
        farmer_id: str,
        attachment_id: str,
        **kwargs
    ) -> "_models.Attachment":
        """Gets a specified attachment resource under a particular farmer.

        :param farmer_id: Id of the associated farmer.
        :type farmer_id: str
        :param attachment_id: Id of the attachment.
        :type attachment_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Attachment, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.Attachment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Attachment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('Attachment', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}'}  # type: ignore

    async def create_or_update(
        self,
        farmer_id: str,
        attachment_id: str,
        file: Optional[IO] = None,
        farmer_id1: Optional[str] = None,
        resource_id: Optional[str] = None,
        resource_type: Optional[str] = None,
        original_file_name: Optional[str] = None,
        id: Optional[str] = None,
        status: Optional[str] = None,
        created_date_time: Optional[str] = None,
        modified_date_time: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        e_tag: Optional[str] = None,
        **kwargs
    ) -> "_models.Attachment":
        """Creates or updates an attachment resource under a particular farmer.

        :param farmer_id: Id of the associated farmer resource.
        :type farmer_id: str
        :param attachment_id: Id of the attachment resource.
        :type attachment_id: str
        :param file: File to be uploaded.
        :type file: IO
        :param farmer_id1: Farmer id for this attachment.
        :type farmer_id1: str
        :param resource_id: Associated Resource id for this attachment.
        :type resource_id: str
        :param resource_type: Associated Resource type for this attachment
         i.e. Farmer, Farm, Field, SeasonalField, Boundary, FarmOperationApplicationData, HarvestData,
         TillageData, PlantingData.
        :type resource_type: str
        :param original_file_name: Original File Name for this attachment.
        :type original_file_name: str
        :param id: Unique id.
        :type id: str
        :param status: Status of the resource.
        :type status: str
        :param created_date_time: Date when resource was created.
        :type created_date_time: str
        :param modified_date_time: Date when resource was last modified.
        :type modified_date_time: str
        :param name: Name to identify resource.
        :type name: str
        :param description: Textual description of resource.
        :type description: str
        :param e_tag: The ETag value to implement optimistic concurrency.
        :type e_tag: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Attachment, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.Attachment
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Attachment"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        # Upload is sent as multipart form data; callers may override via kwargs.
        content_type = kwargs.pop("content_type", "multipart/form-data")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Construct form data
        # NOTE: 'farmer_id1' is the FarmerId form field of the payload, distinct
        # from the 'farmer_id' path parameter above.
        _form_content = {
            'file': file,
            'FarmerId': farmer_id1,
            'ResourceId': resource_id,
            'ResourceType': resource_type,
            'OriginalFileName': original_file_name,
            'Id': id,
            'Status': status,
            'CreatedDateTime': created_date_time,
            'ModifiedDateTime': modified_date_time,
            'Name': name,
            'Description': description,
            'ETag': e_tag,
        }
        request = self._client.patch(url, query_parameters, header_parameters, form_content=_form_content)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        # Both 200 (updated) and 201 (created) carry an Attachment body.
        if response.status_code == 200:
            deserialized = self._deserialize('Attachment', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('Attachment', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}'}  # type: ignore

    async def delete(
        self,
        farmer_id: str,
        attachment_id: str,
        **kwargs
    ) -> None:
        """Deletes a specified attachment resource under a particular farmer.

        :param farmer_id: Id of the farmer.
        :type farmer_id: str
        :param attachment_id: Id of the attachment.
        :type attachment_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 204 (No Content) is a successful delete.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}'}  # type: ignore

    async def download(
        self,
        farmer_id: str,
        attachment_id: str,
        **kwargs
    ) -> IO:
        """Downloads and returns attachment as response for the given input filePath.

        :param farmer_id: Id of the associated farmer.
        :type farmer_id: str
        :param attachment_id: Id of attachment to be downloaded.
        :type attachment_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: IO, or the result of cls(response)
        :rtype: IO
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[IO]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/octet-stream, application/json"

        # Construct URL
        url = self.download.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        # stream=True: the body is not read eagerly; the result below is a
        # streaming download object. NOTE(review): caller is presumably
        # responsible for consuming/closing the stream - confirm against
        # azure-core stream_download semantics.
        pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = response.stream_download(self._client._pipeline)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    download.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}/file'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_boundaries_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_boundaries_operations.py
new file mode 100644
index 000000000000..8333e3d8efac
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_boundaries_operations.py
@@ -0,0 +1,928 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
# Generic response-hook plumbing: `cls` kwargs accepted by every operation are
# optional callables receiving (pipeline_response, deserialized, response_headers).
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class BoundariesOperations:
+ """BoundariesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
    def list_by_farmer_id(
        self,
        farmer_id: str,
        is_primary: Optional[bool] = None,
        parent_type: Optional[str] = None,
        parent_ids: Optional[List[str]] = None,
        min_acreage: Optional[float] = None,
        max_acreage: Optional[float] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.BoundaryListResponse"]:
        """Returns a paginated list of boundary resources under a particular farmer.

        :param farmer_id: Id of the associated farmer.
        :type farmer_id: str
        :param is_primary: Is the boundary primary.
        :type is_primary: bool
        :param parent_type: Type of the parent it belongs to.
        :type parent_type: str
        :param parent_ids: Parent Ids of the resource.
        :type parent_ids: list[str]
        :param min_acreage: Minimum acreage of the boundary (inclusive).
        :type min_acreage: float
        :param max_acreage: Maximum acreage of the boundary (inclusive).
        :type max_acreage: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.BoundaryListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BoundaryListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: build the full URL and query string from the filters.
                # Construct URL
                url = self.list_by_farmer_id.metadata['url']  # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                if is_primary is not None:
                    query_parameters['isPrimary'] = self._serialize.query("is_primary", is_primary, 'bool')
                if parent_type is not None:
                    query_parameters['parentType'] = self._serialize.query("parent_type", parent_type, 'str')
                if parent_ids is not None:
                    query_parameters['parentIds'] = [self._serialize.query("parent_ids", q, 'str') if q is not None else '' for q in parent_ids]
                if min_acreage is not None:
                    query_parameters['minAcreage'] = self._serialize.query("min_acreage", min_acreage, 'float')
                if max_acreage is not None:
                    query_parameters['maxAcreage'] = self._serialize.query("max_acreage", max_acreage, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: follow the service-provided next link as-is;
                # no query parameters are re-added (the link presumably carries them).
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Pull one page's items and the continuation link out of the response.
            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # For codes in error_map, map_error raises the typed exception;
                # otherwise HttpResponseError carries the deserialized ErrorResponse.
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/boundaries'}  # type: ignore
+
    def search_by_farmer_id(
        self,
        farmer_id: str,
        query: Optional["_models.SearchBoundaryQuery"] = None,
        **kwargs
    ) -> AsyncIterable["_models.BoundaryListResponse"]:
        """Search for boundaries by fields and intersecting geometry.

        :param farmer_id: Id of the farmer.
        :type farmer_id: str
        :param query: Query filters.
        :type query: ~azure.farmbeats.models.SearchBoundaryQuery
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.BoundaryListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BoundaryListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        content_type = "application/json"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # First page: POST the (optional) search query as the request body.
                # Construct URL
                url = self.search_by_farmer_id.metadata['url']  # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                body_content_kwargs = {}  # type: Dict[str, Any]
                if query is not None:
                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
                else:
                    body_content = None
                body_content_kwargs['content'] = body_content
                request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
            else:
                # NOTE(review): subsequent pages are issued as GET but still carry
                # the serialized search body - this is the generated pattern;
                # verify the service accepts a body on the next-link GET.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                body_content_kwargs = {}  # type: Dict[str, Any]
                if query is not None:
                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
                else:
                    body_content = None
                body_content_kwargs['content'] = body_content
                request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
            return request

        async def extract_data(pipeline_response):
            # Pull one page's items and the continuation link out of the response.
            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # For codes in error_map, map_error raises the typed exception;
                # otherwise HttpResponseError carries the deserialized ErrorResponse.
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    search_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/boundaries'}  # type: ignore
+
+    def list(
+        self,
+        is_primary: Optional[bool] = None,
+        parent_type: Optional[str] = None,
+        parent_ids: Optional[List[str]] = None,
+        min_acreage: Optional[float] = None,
+        max_acreage: Optional[float] = None,
+        ids: Optional[List[str]] = None,
+        names: Optional[List[str]] = None,
+        property_filters: Optional[List[str]] = None,
+        statuses: Optional[List[str]] = None,
+        min_created_date_time: Optional[datetime.datetime] = None,
+        max_created_date_time: Optional[datetime.datetime] = None,
+        min_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_page_size: Optional[int] = 50,
+        skip_token: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["_models.BoundaryListResponse"]:
+        """Returns a paginated list of boundary resources across all farmers.
+
+        :param is_primary: Is the boundary primary.
+        :type is_primary: bool
+        :param parent_type: Type of the parent it belongs to.
+        :type parent_type: str
+        :param parent_ids: Parent Ids of the resource.
+        :type parent_ids: list[str]
+        :param min_acreage: Minimum acreage of the boundary (inclusive).
+        :type min_acreage: float
+        :param max_acreage: Maximum acreage of the boundary (inclusive).
+        :type max_acreage: float
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.BoundaryListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook that lets the caller transform each deserialized page of items.
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryListResponse"]
+        # Map well-known HTTP status codes to azure-core exception types; callers may
+        # override or extend this mapping through the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build either the first-page request (all filter query parameters applied)
+            # or a follow-up request against the service-provided nextLink URL.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters; every filter is optional and only emitted when set.
+                # List-valued filters serialize each element individually (None -> '').
+                query_parameters = {}  # type: Dict[str, Any]
+                if is_primary is not None:
+                    query_parameters['isPrimary'] = self._serialize.query("is_primary", is_primary, 'bool')
+                if parent_type is not None:
+                    query_parameters['parentType'] = self._serialize.query("parent_type", parent_type, 'str')
+                if parent_ids is not None:
+                    query_parameters['parentIds'] = [self._serialize.query("parent_ids", q, 'str') if q is not None else '' for q in parent_ids]
+                if min_acreage is not None:
+                    query_parameters['minAcreage'] = self._serialize.query("min_acreage", min_acreage, 'float')
+                if max_acreage is not None:
+                    query_parameters['maxAcreage'] = self._serialize.query("max_acreage", max_acreage, 'float')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Client-side range validation (10..1000) enforced by the serializer.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # nextLink already encodes all query parameters, so none are re-added.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, async iterable of items).
+            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch a single page and surface non-200 responses as azure-core exceptions.
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # failsafe_deserialize never raises on malformed error bodies.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/boundaries'}  # type: ignore
+
+    def search(
+        self,
+        query: Optional["_models.SearchBoundaryQuery"] = None,
+        **kwargs
+    ) -> AsyncIterable["_models.BoundaryListResponse"]:
+        """Search for boundaries across all farmers by fields and intersecting geometry.
+
+        :param query: Query filters.
+        :type query: ~azure.farmbeats.models.SearchBoundaryQuery
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.BoundaryListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook that lets the caller transform each deserialized page of items.
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryListResponse"]
+        # Map well-known HTTP status codes to azure-core exception types; callers may
+        # override or extend this mapping through the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        content_type = "application/json"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page is a POST carrying the search query as the JSON body;
+            # subsequent pages follow the service-provided nextLink.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.search.metadata['url']  # type: ignore
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                body_content_kwargs = {}  # type: Dict[str, Any]
+                if query is not None:
+                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
+                else:
+                    body_content = None
+                body_content_kwargs['content'] = body_content
+                request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                # NOTE(review): continuation pages are fetched with GET yet still attach the
+                # search body (autorest-generated POST-paging pattern) -- presumably the
+                # service requires the query to be re-sent with the nextLink; confirm
+                # against the FarmBeats paging contract before changing.
+                body_content_kwargs = {}  # type: Dict[str, Any]
+                if query is not None:
+                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
+                else:
+                    body_content = None
+                body_content_kwargs['content'] = body_content
+                request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, async iterable of items).
+            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch a single page and surface non-200 responses as azure-core exceptions.
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    search.metadata = {'url': '/boundaries'}  # type: ignore
+
+    async def get_cascade_delete_job_details(
+        self,
+        job_id: str,
+        **kwargs
+    ) -> "_models.CascadeDeleteJob":
+        """Get cascade delete job for specified boundary.
+
+        :param job_id: Id of the job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook that lets the caller transform the deserialized response.
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+        # Map well-known HTTP status codes to azure-core exception types; callers may
+        # override or extend this mapping through the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL by substituting the serialized job id into the path template.
+        url = self.get_cascade_delete_job_details.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # map_error raises the mapped exception when the status code is in error_map;
+            # otherwise fall through to a generic HttpResponseError with the parsed body.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_cascade_delete_job_details.metadata = {'url': '/boundaries/cascade-delete/{jobId}'}  # type: ignore
+
+    async def _create_cascade_delete_job_initial(
+        self,
+        job_id: str,
+        farmer_id: str,
+        boundary_id: str,
+        **kwargs
+    ) -> "_models.CascadeDeleteJob":
+        """Issue the initial PUT that starts a cascade-delete job (LRO first request).
+
+        Used by :meth:`begin_create_cascade_delete_job`; the service is expected to
+        accept the job asynchronously, hence only 202 is treated as success.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param boundary_id: ID of the boundary to be deleted.
+        :type boundary_id: str
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: the job id goes in the path, farmer/boundary ids in the query.
+        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+        query_parameters['boundaryId'] = self._serialize.query("boundary_id", boundary_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # PUT with no request body; all inputs travel in the path/query.
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_cascade_delete_job_initial.metadata = {'url': '/boundaries/cascade-delete/{jobId}'}  # type: ignore
+
+    async def begin_create_cascade_delete_job(
+        self,
+        job_id: str,
+        farmer_id: str,
+        boundary_id: str,
+        **kwargs
+    ) -> AsyncLROPoller["_models.CascadeDeleteJob"]:
+        """Create a cascade delete job for specified boundary.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param boundary_id: ID of the boundary to be deleted.
+        :type boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncLROBasePolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either CascadeDeleteJob or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # Fresh start: issue the initial PUT. raw_result is only bound (and only
+            # used, below) on this branch; a continuation token skips it entirely.
+            # cls=lambda keeps the raw pipeline response for the poller.
+            raw_result = await self._create_cascade_delete_job_initial(
+                job_id=job_id,
+                farmer_id=farmer_id,
+                boundary_id=boundary_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; don't forward them to polling.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the final polling response into the job model.
+            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+
+        # polling=True -> default LRO strategy (final state read from the Location
+        # header); polling=False -> single-shot; otherwise a caller-supplied method.
+        if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_cascade_delete_job.metadata = {'url': '/boundaries/cascade-delete/{jobId}'}  # type: ignore
+
+    async def get(
+        self,
+        farmer_id: str,
+        boundary_id: str,
+        **kwargs
+    ) -> "_models.Boundary":
+        """Gets a specified boundary resource under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary.
+        :type boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Boundary, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Boundary
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional hook that lets the caller transform the deserialized response.
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Boundary"]
+        # Map well-known HTTP status codes to azure-core exception types; callers may
+        # override or extend this mapping through the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL by substituting both serialized ids into the path template.
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('Boundary', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}'}  # type: ignore
+
+    async def create_or_update(
+        self,
+        farmer_id: str,
+        boundary_id: str,
+        boundary: Optional["_models.Boundary"] = None,
+        **kwargs
+    ) -> "_models.Boundary":
+        """Creates or updates a boundary resource.
+
+        Sends a PATCH with ``application/merge-patch+json`` semantics: omitted fields
+        are left unchanged on the service side. 200 means updated, 201 means created.
+
+        :param farmer_id: Id of the farmer resource.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary resource.
+        :type boundary_id: str
+        :param boundary: Boundary resource payload to create or update.
+        :type boundary: ~azure.farmbeats.models.Boundary
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Boundary, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Boundary
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Boundary"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # Callers may override the content type, e.g. for alternative patch formats.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Serialize the optional payload; an absent boundary produces an empty body.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if boundary is not None:
+            body_content = self._serialize.body(boundary, 'Boundary')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # Both success codes carry the same Boundary body (generated per-status branches).
+        if response.status_code == 200:
+            deserialized = self._deserialize('Boundary', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Boundary', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}'}  # type: ignore
+
+    async def delete(
+        self,
+        farmer_id: str,
+        boundary_id: str,
+        **kwargs
+    ) -> None:
+        """Deletes a specified boundary resource under a particular farmer.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary.
+        :type boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 No Content is the only success status; nothing is deserialized.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}'}  # type: ignore
+
+    async def get_overlap(
+        self,
+        farmer_id: str,
+        boundary_id: str,
+        other_farmer_id: str,
+        other_boundary_id: str,
+        **kwargs
+    ) -> "_models.BoundaryOverlapResponse":
+        """Returns overlapping acreage between two boundary Ids.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary.
+        :type boundary_id: str
+        :param other_farmer_id: FarmerId of the other field.
+        :type other_farmer_id: str
+        :param other_boundary_id: Id of the other boundary.
+        :type other_boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BoundaryOverlapResponse, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.BoundaryOverlapResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryOverlapResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: the primary boundary goes in the path, the comparison
+        # boundary is identified via query parameters.
+        url = self.get_overlap.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['otherFarmerId'] = self._serialize.query("other_farmer_id", other_farmer_id, 'str')
+        query_parameters['otherBoundaryId'] = self._serialize.query("other_boundary_id", other_boundary_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('BoundaryOverlapResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_overlap.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}/overlap'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_crop_varieties_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_crop_varieties_operations.py
new file mode 100644
index 000000000000..6d393cb1e80f
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_crop_varieties_operations.py
@@ -0,0 +1,497 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class CropVarietiesOperations:
+ """CropVarietiesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ # Alias so callers can reach the generated model classes from an operations-group instance.
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ # Pipeline client and (de)serializers are owned by the service client and shared here.
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_crop_id(
+ self,
+ crop_id: str,
+ crop_ids: Optional[List[str]] = None,
+ brands: Optional[List[str]] = None,
+ products: Optional[List[str]] = None,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.CropVarietyListResponse"]:
+ """Returns a paginated list of crop variety resources under a particular crop.
+
+ :param crop_id: Id of the associated crop.
+ :type crop_id: str
+ :param crop_ids: CropIds of the resource.
+ :type crop_ids: list[str]
+ :param brands: Brands of the resource.
+ :type brands: list[str]
+ :param products: Products of the resource.
+ :type products: list[str]
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either CropVarietyListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.CropVarietyListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVarietyListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Caller-supplied error_map entries override the defaults above.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Builds the request for one page: the first page uses the operation URL plus all
+ # filter query parameters; subsequent pages reuse the service-returned next_link
+ # (with path arguments re-applied) and carry no query parameters of their own.
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_crop_id.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if crop_ids is not None:
+ query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
+ if brands is not None:
+ query_parameters['brands'] = [self._serialize.query("brands", q, 'str') if q is not None else '' for q in brands]
+ if products is not None:
+ query_parameters['products'] = [self._serialize.query("products", q, 'str') if q is not None else '' for q in products]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ # Deserializes one page and returns (continuation token, async iterable of items).
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('CropVarietyListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ # NOTE(review): here the error model is deserialized before map_error, so mapped
+ # errors (401/404/409) raise without the model attached; the non-paging methods in
+ # this file call map_error first. Looks like generator ordering — confirm intended.
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_crop_id.metadata = {'url': '/crops/{cropId}/crop-varieties'} # type: ignore
+
+ def list(
+ self,
+ crop_ids: Optional[List[str]] = None,
+ brands: Optional[List[str]] = None,
+ products: Optional[List[str]] = None,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.CropVarietyListResponse"]:
+ """Returns a paginated list of crop variety resources across all crops.
+
+ :param crop_ids: CropIds of the resource.
+ :type crop_ids: list[str]
+ :param brands: Brands of the resource.
+ :type brands: list[str]
+ :param products: Products of the resource.
+ :type products: list[str]
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either CropVarietyListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.CropVarietyListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVarietyListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Caller-supplied error_map entries override the defaults above.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # First page: operation URL plus all filter query parameters.
+ # Later pages: the service-returned next_link is used verbatim (no path args here).
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if crop_ids is not None:
+ query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
+ if brands is not None:
+ query_parameters['brands'] = [self._serialize.query("brands", q, 'str') if q is not None else '' for q in brands]
+ if products is not None:
+ query_parameters['products'] = [self._serialize.query("products", q, 'str') if q is not None else '' for q in products]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ # Deserializes one page and returns (continuation token, async iterable of items).
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('CropVarietyListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ # NOTE(review): error deserialized before map_error (reverse of the non-paging
+ # methods below) — mapped errors raise without the model attached; confirm intended.
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/crop-varieties'} # type: ignore
+
+ async def get(
+ self,
+ crop_id: str,
+ crop_variety_id: str,
+ **kwargs
+ ) -> "_models.CropVariety":
+ """Gets a specified crop variety resource under a particular crop.
+
+ :param crop_id: Id of the associated crop.
+ :type crop_id: str
+ :param crop_variety_id: Id of the crop variety.
+ :type crop_variety_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CropVariety, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.CropVariety
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVariety"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Caller-supplied error_map entries override the defaults above.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ 'cropVarietyId': self._serialize.url("crop_variety_id", crop_variety_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # Only 200 is a success; mapped statuses (401/404/409) raise typed errors first,
+ # anything else raises HttpResponseError carrying the parsed ErrorResponse.
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('CropVariety', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/crops/{cropId}/crop-varieties/{cropVarietyId}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ crop_id: str,
+ crop_variety_id: str,
+ crop_variety: Optional["_models.CropVariety"] = None,
+ **kwargs
+ ) -> "_models.CropVariety":
+ """Creates or updates a crop variety resource.
+
+ :param crop_id: Id of the crop resource.
+ :type crop_id: str
+ :param crop_variety_id: Id of the crop variety resource.
+ :type crop_variety_id: str
+ :param crop_variety: Crop variety resource payload to create or update.
+ :type crop_variety: ~azure.farmbeats.models.CropVariety
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CropVariety, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.CropVariety
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVariety"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Caller-supplied error_map entries override the defaults above.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ # Upsert is sent as an HTTP PATCH with merge-patch semantics; callers may override
+ # the content type via the content_type keyword.
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ 'cropVarietyId': self._serialize.url("crop_variety_id", crop_variety_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # A None payload is sent as an empty body rather than serialized.
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if crop_variety is not None:
+ body_content = self._serialize.body(crop_variety, 'CropVariety')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ # 200 (updated) and 201 (created) both return the CropVariety body.
+ if response.status_code == 200:
+ deserialized = self._deserialize('CropVariety', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('CropVariety', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/crops/{cropId}/crop-varieties/{cropVarietyId}'} # type: ignore
+
+ async def delete(
+ self,
+ crop_id: str,
+ crop_variety_id: str,
+ **kwargs
+ ) -> None:
+ """Deletes a specified crop variety resource under a particular crop.
+
+ :param crop_id: Id of the crop.
+ :type crop_id: str
+ :param crop_variety_id: Id of the crop variety.
+ :type crop_variety_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Caller-supplied error_map entries override the defaults above.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ 'cropVarietyId': self._serialize.url("crop_variety_id", crop_variety_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # Successful delete returns 204 No Content; there is no body to deserialize.
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/crops/{cropId}/crop-varieties/{cropVarietyId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_crops_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_crops_operations.py
new file mode 100644
index 000000000000..93c63ffcb5a7
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_crops_operations.py
@@ -0,0 +1,334 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class CropsOperations:
+ """CropsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ # Alias so callers can reach the generated model classes from an operations-group instance.
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ # Pipeline client and (de)serializers are owned by the service client and shared here.
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ phenotypes: Optional[List[str]] = None,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.CropListResponse"]:
+ """Returns a paginated list of crop resources.
+
+ :param phenotypes: Crop phenotypes of the resource.
+ :type phenotypes: list[str]
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either CropListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.CropListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CropListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Caller-supplied error_map entries override the defaults above.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # First page: operation URL plus all filter query parameters.
+ # Later pages: the service-returned next_link is used verbatim.
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if phenotypes is not None:
+ query_parameters['phenotypes'] = [self._serialize.query("phenotypes", q, 'str') if q is not None else '' for q in phenotypes]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ # Deserializes one page and returns (continuation token, async iterable of items).
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('CropListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ # NOTE(review): error deserialized before map_error (reverse of the non-paging
+ # methods) — mapped errors raise without the model attached; confirm intended.
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/crops'} # type: ignore
+
+ async def get(
+ self,
+ crop_id: str,
+ **kwargs
+ ) -> "_models.Crop":
+ """Gets a specified crop resource.
+
+ :param crop_id: Id of the crop.
+ :type crop_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Crop, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.Crop
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.Crop"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Caller-supplied error_map entries override the defaults above.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # Only 200 is a success; mapped statuses (401/404/409) raise typed errors first,
+ # anything else raises HttpResponseError carrying the parsed ErrorResponse.
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('Crop', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/crops/{cropId}'} # type: ignore
+
    async def create_or_update(
        self,
        crop_id: str,
        crop: Optional["_models.Crop"] = None,
        **kwargs
    ) -> "_models.Crop":
        """Creates or updates a crop resource.

        :param crop_id: Id of the crop resource.
        :type crop_id: str
        :param crop: Crop resource payload to create or update.
        :type crop: ~azure.farmbeats.models.Crop
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Crop, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.Crop
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Crop"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        # Default body content type is JSON merge-patch; callers can override via
        # the 'content_type' keyword.
        content_type = kwargs.pop("content_type", "application/merge-patch+json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'cropId': self._serialize.url("crop_id", crop_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # A None payload is sent as an empty body rather than omitted.
        body_content_kwargs = {}  # type: Dict[str, Any]
        if crop is not None:
            body_content = self._serialize.body(crop, 'Crop')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        # Both 200 (updated) and 201 (created) carry a Crop body.
        if response.status_code == 200:
            deserialized = self._deserialize('Crop', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('Crop', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/crops/{cropId}'}  # type: ignore
+
    async def delete(
        self,
        crop_id: str,
        **kwargs
    ) -> None:
        """Deletes Crop for given crop id.

        :param crop_id: Id of crop to be deleted.
        :type crop_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'cropId': self._serialize.url("crop_id", crop_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 No Content is the only success code; there is no body to deserialize.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/crops/{cropId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farm_operations_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farm_operations_operations.py
new file mode 100644
index 000000000000..bec9e747bad4
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farm_operations_operations.py
@@ -0,0 +1,213 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
T = TypeVar('T')
# Signature of the optional 'cls' response hook passed to every operation:
# (pipeline_response, deserialized_result, response_headers) -> Any.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class FarmOperationsOperations:
    """FarmOperationsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # State injected by the service client: transport pipeline client,
        # model (de)serializers, and client configuration shared by all operations.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
+
    async def _create_data_ingestion_job_initial(
        self,
        job_id: str,
        job: Optional["_models.FarmOperationDataIngestionJob"] = None,
        **kwargs
    ) -> "_models.FarmOperationDataIngestionJob":
        """Initial PUT of the data-ingestion long-running operation.

        Only 202 Accepted is treated as success; the returned job body is then
        polled by :meth:`begin_create_data_ingestion_job`.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FarmOperationDataIngestionJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_data_ingestion_job_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        if job is not None:
            body_content = self._serialize.body(job, 'FarmOperationDataIngestionJob')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('FarmOperationDataIngestionJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_data_ingestion_job_initial.metadata = {'url': '/farm-operations/ingest-data/{jobId}'}  # type: ignore
+
    async def begin_create_data_ingestion_job(
        self,
        job_id: str,
        job: Optional["_models.FarmOperationDataIngestionJob"] = None,
        **kwargs
    ) -> AsyncLROPoller["_models.FarmOperationDataIngestionJob"]:
        """Create a farm operation data ingestion job.

        :param job_id: Job Id supplied by user.
        :type job_id: str
        :param job: Job parameters supplied by user.
        :type job: ~azure.farmbeats.models.FarmOperationDataIngestionJob
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncLROBasePolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either FarmOperationDataIngestionJob or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.FarmOperationDataIngestionJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FarmOperationDataIngestionJob"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda keeps the raw PipelineResponse so the poller can inspect it.
            raw_result = await self._create_data_ingestion_job_initial(
                job_id=job_id,
                job=job,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs only apply to the initial request, not to polling calls.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response into the job model.
            deserialized = self._deserialize('FarmOperationDataIngestionJob', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }

        # Final state is retrieved from the URL in the 'Location' response header.
        if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_data_ingestion_job.metadata = {'url': '/farm-operations/ingest-data/{jobId}'}  # type: ignore
+
    async def get_data_ingestion_job_details(
        self,
        job_id: str,
        **kwargs
    ) -> "_models.FarmOperationDataIngestionJob":
        """Get a farm operation data ingestion job.

        :param job_id: Id of the job.
        :type job_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: FarmOperationDataIngestionJob, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.FarmOperationDataIngestionJob
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FarmOperationDataIngestionJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get_data_ingestion_job_details.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('FarmOperationDataIngestionJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_data_ingestion_job_details.metadata = {'url': '/farm-operations/ingest-data/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farmers_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farmers_operations.py
new file mode 100644
index 000000000000..53a01921b046
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farmers_operations.py
@@ -0,0 +1,497 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
T = TypeVar('T')
# Signature of the optional 'cls' response hook passed to every operation:
# (pipeline_response, deserialized_result, response_headers) -> Any.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class FarmersOperations:
    """FarmersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # State injected by the service client: transport pipeline client,
        # model (de)serializers, and client configuration shared by all operations.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
+
+ def list(
+ self,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.FarmerListResponse"]:
+ """Returns a paginated list of farmer resources.
+
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either FarmerListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.FarmerListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.FarmerListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('FarmerListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/farmers'} # type: ignore
+
    async def get(
        self,
        farmer_id: str,
        **kwargs
    ) -> "_models.Farmer":
        """Gets a specified farmer resource.

        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Farmer, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.Farmer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Farmer"]
        # Map well-known status codes to typed azure-core exceptions; extendable
        # via the 'error_map' keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('Farmer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/farmers/{farmerId}'}  # type: ignore
+
    async def create_or_update(
        self,
        farmer_id: str,
        farmer: Optional["_models.Farmer"] = None,
        **kwargs
    ) -> "_models.Farmer":
        """Creates or updates a farmer resource.

        :param farmer_id: Id of the farmer resource.
        :type farmer_id: str
        :param farmer: Farmer resource payload to create or update.
        :type farmer: ~azure.farmbeats.models.Farmer
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Farmer, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.Farmer
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Farmer"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        # Default body content type is JSON merge-patch; override via 'content_type'.
        content_type = kwargs.pop("content_type", "application/merge-patch+json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # A None payload is sent as an empty body rather than omitted.
        body_content_kwargs = {}  # type: Dict[str, Any]
        if farmer is not None:
            body_content = self._serialize.body(farmer, 'Farmer')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        # Both 200 (updated) and 201 (created) carry a Farmer body.
        if response.status_code == 200:
            deserialized = self._deserialize('Farmer', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('Farmer', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/farmers/{farmerId}'}  # type: ignore
+
    async def delete(
        self,
        farmer_id: str,
        **kwargs
    ) -> None:
        """Deletes a specified farmer resource.

        :param farmer_id: Id of farmer to be deleted.
        :type farmer_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 204 No Content is the only success code; there is no body to deserialize.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/farmers/{farmerId}'}  # type: ignore
+
    async def get_cascade_delete_job_details(
        self,
        job_id: str,
        **kwargs
    ) -> "_models.CascadeDeleteJob":
        """Get a cascade delete job for specified farmer.

        :param job_id: Id of the job.
        :type job_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CascadeDeleteJob, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get_cascade_delete_job_details.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_cascade_delete_job_details.metadata = {'url': '/farmers/cascade-delete/{jobId}'}  # type: ignore
+
    async def _create_cascade_delete_job_initial(
        self,
        job_id: str,
        farmer_id: str,
        **kwargs
    ) -> "_models.CascadeDeleteJob":
        """Initial PUT of the cascade-delete long-running operation.

        Only 202 Accepted is treated as success; the returned job body is then
        polled by :meth:`begin_create_cascade_delete_job`. The farmer to delete is
        passed as the 'farmerId' query parameter (there is no request body).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.put(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_cascade_delete_job_initial.metadata = {'url': '/farmers/cascade-delete/{jobId}'}  # type: ignore
+
    async def begin_create_cascade_delete_job(
        self,
        job_id: str,
        farmer_id: str,
        **kwargs
    ) -> AsyncLROPoller["_models.CascadeDeleteJob"]:
        """Create a cascade delete job for specified farmer.

        :param job_id: Job ID supplied by end user.
        :type job_id: str
        :param farmer_id: ID of the farmer to be deleted.
        :type farmer_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncLROBasePolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either CascadeDeleteJob or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.CascadeDeleteJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # cls=lambda keeps the raw PipelineResponse so the poller can inspect it.
            raw_result = await self._create_cascade_delete_job_initial(
                job_id=job_id,
                farmer_id=farmer_id,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs only apply to the initial request, not to polling calls.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response into the job model.
            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }

        # Final state is retrieved from the URL in the 'Location' response header.
        if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_cascade_delete_job.metadata = {'url': '/farmers/cascade-delete/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farms_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farms_operations.py
new file mode 100644
index 000000000000..920dceab13e4
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_farms_operations.py
@@ -0,0 +1,641 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class FarmsOperations:
+    """FarmsOperations async operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.farmbeats.models
+    :param client: Client for service requests.
+    :param config: Configuration of the service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id: str,
+        ids: Optional[List[str]] = None,
+        names: Optional[List[str]] = None,
+        property_filters: Optional[List[str]] = None,
+        statuses: Optional[List[str]] = None,
+        min_created_date_time: Optional[datetime.datetime] = None,
+        max_created_date_time: Optional[datetime.datetime] = None,
+        min_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_page_size: Optional[int] = 50,
+        skip_token: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["_models.FarmListResponse"]:
+        """Returns a paginated list of farm resources under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param ids: IDs of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either FarmListResponse or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.FarmListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.FarmListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('FarmListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/farms'} # type: ignore
+
+    def list(
+        self,
+        ids: Optional[List[str]] = None,
+        names: Optional[List[str]] = None,
+        property_filters: Optional[List[str]] = None,
+        statuses: Optional[List[str]] = None,
+        min_created_date_time: Optional[datetime.datetime] = None,
+        max_created_date_time: Optional[datetime.datetime] = None,
+        min_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_page_size: Optional[int] = 50,
+        skip_token: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["_models.FarmListResponse"]:
+        """Returns a paginated list of farm resources across all farmers.
+
+        :param ids: IDs of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either FarmListResponse or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.FarmListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.FarmListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('FarmListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/farms'} # type: ignore
+
+    async def get(
+        self,
+        farmer_id: str,
+        farm_id: str,
+        **kwargs
+    ) -> "_models.Farm":
+        """Gets a specified farm resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param farm_id: ID of the farm resource.
+        :type farm_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Farm, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Farm
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Farm"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'farmId': self._serialize.url("farm_id", farm_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('Farm', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/farms/{farmId}'} # type: ignore
+
+    async def create_or_update(
+        self,
+        farmer_id: str,
+        farm_id: str,
+        farm: Optional["_models.Farm"] = None,
+        **kwargs
+    ) -> "_models.Farm":
+        """Creates or updates a farm resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param farm_id: ID of the farm resource.
+        :type farm_id: str
+        :param farm: Farm resource payload to create or update.
+        :type farm: ~azure.farmbeats.models.Farm
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Farm, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Farm
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Farm"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'farmId': self._serialize.url("farm_id", farm_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        if farm is not None:
+            body_content = self._serialize.body(farm, 'Farm')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize('Farm', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Farm', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/farms/{farmId}'} # type: ignore
+
+    async def delete(
+        self,
+        farmer_id: str,
+        farm_id: str,
+        **kwargs
+    ) -> None:
+        """Deletes a specified farm resource under a particular farmer.
+
+        :param farmer_id: ID of the farmer.
+        :type farmer_id: str
+        :param farm_id: ID of the farm.
+        :type farm_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'farmId': self._serialize.url("farm_id", farm_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/farms/{farmId}'} # type: ignore
+
+    async def get_cascade_delete_job_details(
+        self,
+        job_id: str,
+        **kwargs
+    ) -> "_models.CascadeDeleteJob":
+        """Get a cascade delete job for specified farm.
+
+        :param job_id: ID of the cascade delete job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_cascade_delete_job_details.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_cascade_delete_job_details.metadata = {'url': '/farms/cascade-delete/{jobId}'} # type: ignore
+
+    async def _create_cascade_delete_job_initial(
+        self,
+        job_id: str,
+        farmer_id: str,
+        farm_id: str,
+        **kwargs
+    ) -> "_models.CascadeDeleteJob":
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_cascade_delete_job_initial.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+        query_parameters['farmId'] = self._serialize.query("farm_id", farm_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_cascade_delete_job_initial.metadata = {'url': '/farms/cascade-delete/{jobId}'} # type: ignore
+
+    async def begin_create_cascade_delete_job(
+        self,
+        job_id: str,
+        farmer_id: str,
+        farm_id: str,
+        **kwargs
+    ) -> AsyncLROPoller["_models.CascadeDeleteJob"]:
+        """Create a cascade delete job for specified farm.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param farm_id: ID of the farm to be deleted.
+        :type farm_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncLROBasePolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either CascadeDeleteJob or the result of cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            raw_result = await self._create_cascade_delete_job_initial(
+                job_id=job_id,
+                farmer_id=farmer_id,
+                farm_id=farm_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+
+        if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = AsyncNoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return AsyncLROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_cascade_delete_job.metadata = {'url': '/farms/cascade-delete/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_fields_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_fields_operations.py
new file mode 100644
index 000000000000..a1a1a9f94c7d
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_fields_operations.py
@@ -0,0 +1,651 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class FieldsOperations:
+ """FieldsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_farmer_id(
+ self,
+ farmer_id: str,
+ farm_ids: Optional[List[str]] = None,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.FieldListResponse"]:
+ """Returns a paginated list of field resources under a particular farmer.
+
+ :param farmer_id: Id of the associated farmer.
+ :type farmer_id: str
+ :param farm_ids: Farm Ids of the resource.
+ :type farm_ids: list[str]
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ e.g. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either FieldListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.FieldListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.FieldListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_farmer_id.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if farm_ids is not None:
+ query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('FieldListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/fields'} # type: ignore
+
+ def list(
+ self,
+ farm_ids: Optional[List[str]] = None,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.FieldListResponse"]:
+ """Returns a paginated list of field resources across all farmers.
+
+ :param farm_ids: Farm Ids of the resource.
+ :type farm_ids: list[str]
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ e.g. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either FieldListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.FieldListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.FieldListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if farm_ids is not None:
+ query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('FieldListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/fields'} # type: ignore
+
+ async def get(
+ self,
+ farmer_id: str,
+ field_id: str,
+ **kwargs
+ ) -> "_models.Field":
+ """Gets a specified field resource under a particular farmer.
+
+ :param farmer_id: Id of the associated farmer.
+ :type farmer_id: str
+ :param field_id: Id of the field.
+ :type field_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Field, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.Field
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.Field"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'fieldId': self._serialize.url("field_id", field_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('Field', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/farmers/{farmerId}/fields/{fieldId}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ farmer_id: str,
+ field_id: str,
+ field: Optional["_models.Field"] = None,
+ **kwargs
+ ) -> "_models.Field":
+ """Creates or updates a field resource under a particular farmer.
+
+ :param farmer_id: Id of the associated farmer resource.
+ :type farmer_id: str
+ :param field_id: Id of the field resource.
+ :type field_id: str
+ :param field: Field resource payload to create or update.
+ :type field: ~azure.farmbeats.models.Field
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Field, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.Field
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.Field"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'fieldId': self._serialize.url("field_id", field_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if field is not None:
+ body_content = self._serialize.body(field, 'Field')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('Field', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Field', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/farmers/{farmerId}/fields/{fieldId}'} # type: ignore
+
+ async def delete(
+ self,
+ farmer_id: str,
+ field_id: str,
+ **kwargs
+ ) -> None:
+ """Deletes a specified field resource under a particular farmer.
+
+ :param farmer_id: Id of the farmer.
+ :type farmer_id: str
+ :param field_id: Id of the field.
+ :type field_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'fieldId': self._serialize.url("field_id", field_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/farmers/{farmerId}/fields/{fieldId}'} # type: ignore
+
+ async def get_cascade_delete_job_details(
+ self,
+ job_id: str,
+ **kwargs
+ ) -> "_models.CascadeDeleteJob":
+ """Gets a cascade delete job for a specified field.
+
+ :param job_id: Id of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CascadeDeleteJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_cascade_delete_job_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_cascade_delete_job_details.metadata = {'url': '/fields/cascade-delete/{jobId}'} # type: ignore
+
+ async def _create_cascade_delete_job_initial(
+ self,
+ job_id: str,
+ farmer_id: str,
+ field_id: str,
+ **kwargs
+ ) -> "_models.CascadeDeleteJob":
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_cascade_delete_job_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+ query_parameters['fieldId'] = self._serialize.query("field_id", field_id, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_cascade_delete_job_initial.metadata = {'url': '/fields/cascade-delete/{jobId}'} # type: ignore
+
+ async def begin_create_cascade_delete_job(
+ self,
+ job_id: str,
+ farmer_id: str,
+ field_id: str,
+ **kwargs
+ ) -> AsyncLROPoller["_models.CascadeDeleteJob"]:
+ """Creates a cascade delete job for the specified field.
+
+ :param job_id: Job ID supplied by end user.
+ :type job_id: str
+ :param farmer_id: ID of the associated farmer.
+ :type farmer_id: str
+ :param field_id: ID of the field to be deleted.
+ :type field_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncLROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either CascadeDeleteJob or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_cascade_delete_job_initial(
+ job_id=job_id,
+ farmer_id=farmer_id,
+ field_id=field_id,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_cascade_delete_job.metadata = {'url': '/fields/cascade-delete/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_harvest_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_harvest_data_operations.py
new file mode 100644
index 000000000000..993b70924043
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_harvest_data_operations.py
@@ -0,0 +1,709 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class HarvestDataOperations:
+ """HarvestDataOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
    def list_by_farmer_id(
        self,
        farmer_id: str,
        min_total_yield: Optional[float] = None,
        max_total_yield: Optional[float] = None,
        min_avg_yield: Optional[float] = None,
        max_avg_yield: Optional[float] = None,
        min_total_wet_mass: Optional[float] = None,
        max_total_wet_mass: Optional[float] = None,
        min_avg_wet_mass: Optional[float] = None,
        max_avg_wet_mass: Optional[float] = None,
        min_avg_moisture: Optional[float] = None,
        max_avg_moisture: Optional[float] = None,
        min_avg_speed: Optional[float] = None,
        max_avg_speed: Optional[float] = None,
        sources: Optional[List[str]] = None,
        associated_boundary_ids: Optional[List[str]] = None,
        operation_boundary_ids: Optional[List[str]] = None,
        min_operation_start_date_time: Optional[datetime.datetime] = None,
        max_operation_start_date_time: Optional[datetime.datetime] = None,
        min_operation_end_date_time: Optional[datetime.datetime] = None,
        max_operation_end_date_time: Optional[datetime.datetime] = None,
        min_operation_modified_date_time: Optional[datetime.datetime] = None,
        max_operation_modified_date_time: Optional[datetime.datetime] = None,
        min_area: Optional[float] = None,
        max_area: Optional[float] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.HarvestDataListResponse"]:
        """Returns a paginated list of harvest data resources under a particular farmer.

        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param min_total_yield: Minimum Yield value(inclusive).
        :type min_total_yield: float
        :param max_total_yield: Maximum Yield value (inclusive).
        :type max_total_yield: float
        :param min_avg_yield: Minimum AvgYield value(inclusive).
        :type min_avg_yield: float
        :param max_avg_yield: Maximum AvgYield value (inclusive).
        :type max_avg_yield: float
        :param min_total_wet_mass: Minimum Total WetMass value(inclusive).
        :type min_total_wet_mass: float
        :param max_total_wet_mass: Maximum Total WetMass value (inclusive).
        :type max_total_wet_mass: float
        :param min_avg_wet_mass: Minimum AvgWetMass value(inclusive).
        :type min_avg_wet_mass: float
        :param max_avg_wet_mass: Maximum AvgWetMass value (inclusive).
        :type max_avg_wet_mass: float
        :param min_avg_moisture: Minimum AvgMoisture value(inclusive).
        :type min_avg_moisture: float
        :param max_avg_moisture: Maximum AvgMoisture value (inclusive).
        :type max_avg_moisture: float
        :param min_avg_speed: Minimum AvgSpeed value(inclusive).
        :type min_avg_speed: float
        :param max_avg_speed: Maximum AvgSpeed value (inclusive).
        :type max_avg_speed: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either HarvestDataListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.HarvestDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HarvestDataListResponse"]
        # Map auth / not-found / conflict statuses to typed azure.core exceptions;
        # callers may extend or override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds the GET request for one page: the first page is assembled from
            # the method's filter parameters; follow-up pages reuse the opaque
            # next_link returned by the service.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_farmer_id.metadata['url']  # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters; every filter is optional and only emitted
                # when the caller supplied a value.
                query_parameters = {}  # type: Dict[str, Any]
                if min_total_yield is not None:
                    query_parameters['minTotalYield'] = self._serialize.query("min_total_yield", min_total_yield, 'float')
                if max_total_yield is not None:
                    query_parameters['maxTotalYield'] = self._serialize.query("max_total_yield", max_total_yield, 'float')
                if min_avg_yield is not None:
                    query_parameters['minAvgYield'] = self._serialize.query("min_avg_yield", min_avg_yield, 'float')
                if max_avg_yield is not None:
                    query_parameters['maxAvgYield'] = self._serialize.query("max_avg_yield", max_avg_yield, 'float')
                if min_total_wet_mass is not None:
                    query_parameters['minTotalWetMass'] = self._serialize.query("min_total_wet_mass", min_total_wet_mass, 'float')
                if max_total_wet_mass is not None:
                    query_parameters['maxTotalWetMass'] = self._serialize.query("max_total_wet_mass", max_total_wet_mass, 'float')
                if min_avg_wet_mass is not None:
                    query_parameters['minAvgWetMass'] = self._serialize.query("min_avg_wet_mass", min_avg_wet_mass, 'float')
                if max_avg_wet_mass is not None:
                    query_parameters['maxAvgWetMass'] = self._serialize.query("max_avg_wet_mass", max_avg_wet_mass, 'float')
                if min_avg_moisture is not None:
                    query_parameters['minAvgMoisture'] = self._serialize.query("min_avg_moisture", min_avg_moisture, 'float')
                if max_avg_moisture is not None:
                    query_parameters['maxAvgMoisture'] = self._serialize.query("max_avg_moisture", max_avg_moisture, 'float')
                if min_avg_speed is not None:
                    query_parameters['minAvgSpeed'] = self._serialize.query("min_avg_speed", min_avg_speed, 'float')
                if max_avg_speed is not None:
                    query_parameters['maxAvgSpeed'] = self._serialize.query("max_avg_speed", max_avg_speed, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Serializer enforces the documented 10..1000 page-size bounds.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries the full query string from the service.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, async iterable of items).
            deserialized = self._deserialize('HarvestDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch a single page, surfacing non-200 statuses as typed exceptions
            # (via error_map) or HttpResponseError carrying the parsed error model.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/harvest-data'}  # type: ignore
+
    def list(
        self,
        min_total_yield: Optional[float] = None,
        max_total_yield: Optional[float] = None,
        min_avg_yield: Optional[float] = None,
        max_avg_yield: Optional[float] = None,
        min_total_wet_mass: Optional[float] = None,
        max_total_wet_mass: Optional[float] = None,
        min_avg_wet_mass: Optional[float] = None,
        max_avg_wet_mass: Optional[float] = None,
        min_avg_moisture: Optional[float] = None,
        max_avg_moisture: Optional[float] = None,
        min_avg_speed: Optional[float] = None,
        max_avg_speed: Optional[float] = None,
        sources: Optional[List[str]] = None,
        associated_boundary_ids: Optional[List[str]] = None,
        operation_boundary_ids: Optional[List[str]] = None,
        min_operation_start_date_time: Optional[datetime.datetime] = None,
        max_operation_start_date_time: Optional[datetime.datetime] = None,
        min_operation_end_date_time: Optional[datetime.datetime] = None,
        max_operation_end_date_time: Optional[datetime.datetime] = None,
        min_operation_modified_date_time: Optional[datetime.datetime] = None,
        max_operation_modified_date_time: Optional[datetime.datetime] = None,
        min_area: Optional[float] = None,
        max_area: Optional[float] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.HarvestDataListResponse"]:
        """Returns a paginated list of harvest data resources across all farmers.

        :param min_total_yield: Minimum Yield value(inclusive).
        :type min_total_yield: float
        :param max_total_yield: Maximum Yield value (inclusive).
        :type max_total_yield: float
        :param min_avg_yield: Minimum AvgYield value(inclusive).
        :type min_avg_yield: float
        :param max_avg_yield: Maximum AvgYield value (inclusive).
        :type max_avg_yield: float
        :param min_total_wet_mass: Minimum Total WetMass value(inclusive).
        :type min_total_wet_mass: float
        :param max_total_wet_mass: Maximum Total WetMass value (inclusive).
        :type max_total_wet_mass: float
        :param min_avg_wet_mass: Minimum AvgWetMass value(inclusive).
        :type min_avg_wet_mass: float
        :param max_avg_wet_mass: Maximum AvgWetMass value (inclusive).
        :type max_avg_wet_mass: float
        :param min_avg_moisture: Minimum AvgMoisture value(inclusive).
        :type min_avg_moisture: float
        :param max_avg_moisture: Maximum AvgMoisture value (inclusive).
        :type max_avg_moisture: float
        :param min_avg_speed: Minimum AvgSpeed value(inclusive).
        :type min_avg_speed: float
        :param max_avg_speed: Maximum AvgSpeed value (inclusive).
        :type max_avg_speed: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either HarvestDataListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.HarvestDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HarvestDataListResponse"]
        # Map auth / not-found / conflict statuses to typed azure.core exceptions;
        # callers may extend or override the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds the GET request for one page: the first page is assembled from
            # the method's filter parameters; follow-up pages reuse the opaque
            # next_link returned by the service.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL (no path parameters for the cross-farmer listing)
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters; every filter is optional and only emitted
                # when the caller supplied a value.
                query_parameters = {}  # type: Dict[str, Any]
                if min_total_yield is not None:
                    query_parameters['minTotalYield'] = self._serialize.query("min_total_yield", min_total_yield, 'float')
                if max_total_yield is not None:
                    query_parameters['maxTotalYield'] = self._serialize.query("max_total_yield", max_total_yield, 'float')
                if min_avg_yield is not None:
                    query_parameters['minAvgYield'] = self._serialize.query("min_avg_yield", min_avg_yield, 'float')
                if max_avg_yield is not None:
                    query_parameters['maxAvgYield'] = self._serialize.query("max_avg_yield", max_avg_yield, 'float')
                if min_total_wet_mass is not None:
                    query_parameters['minTotalWetMass'] = self._serialize.query("min_total_wet_mass", min_total_wet_mass, 'float')
                if max_total_wet_mass is not None:
                    query_parameters['maxTotalWetMass'] = self._serialize.query("max_total_wet_mass", max_total_wet_mass, 'float')
                if min_avg_wet_mass is not None:
                    query_parameters['minAvgWetMass'] = self._serialize.query("min_avg_wet_mass", min_avg_wet_mass, 'float')
                if max_avg_wet_mass is not None:
                    query_parameters['maxAvgWetMass'] = self._serialize.query("max_avg_wet_mass", max_avg_wet_mass, 'float')
                if min_avg_moisture is not None:
                    query_parameters['minAvgMoisture'] = self._serialize.query("min_avg_moisture", min_avg_moisture, 'float')
                if max_avg_moisture is not None:
                    query_parameters['maxAvgMoisture'] = self._serialize.query("max_avg_moisture", max_avg_moisture, 'float')
                if min_avg_speed is not None:
                    query_parameters['minAvgSpeed'] = self._serialize.query("min_avg_speed", min_avg_speed, 'float')
                if max_avg_speed is not None:
                    query_parameters['maxAvgSpeed'] = self._serialize.query("max_avg_speed", max_avg_speed, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Serializer enforces the documented 10..1000 page-size bounds.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries the full query string from the service.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, async iterable of items).
            deserialized = self._deserialize('HarvestDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch a single page, surfacing non-200 statuses as typed exceptions
            # (via error_map) or HttpResponseError carrying the parsed error model.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/harvest-data'}  # type: ignore
+
+ async def get(
+ self,
+ farmer_id: str,
+ harvest_data_id: str,
+ **kwargs
+ ) -> "_models.HarvestData":
+ """Get a specified harvest data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param harvest_data_id: ID of the harvest data resource.
+ :type harvest_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: HarvestData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.HarvestData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.HarvestData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'harvestDataId': self._serialize.url("harvest_data_id", harvest_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('HarvestData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/farmers/{farmerId}/harvest-data/{harvestDataId}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ farmer_id: str,
+ harvest_data_id: str,
+ harvest_data: Optional["_models.HarvestData"] = None,
+ **kwargs
+ ) -> "_models.HarvestData":
+ """Creates or updates harvest data resource under a particular farmer.
+
+ :param farmer_id: ID of the farmer.
+ :type farmer_id: str
+ :param harvest_data_id: ID of the harvest data resource.
+ :type harvest_data_id: str
+ :param harvest_data: Harvest data resource payload to create or update.
+ :type harvest_data: ~azure.farmbeats.models.HarvestData
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: HarvestData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.HarvestData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.HarvestData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))  # caller-supplied map entries override the defaults
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")  # upsert uses JSON Merge Patch semantics over HTTP PATCH
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'harvestDataId': self._serialize.url("harvest_data_id", harvest_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if harvest_data is not None:  # payload is optional; when omitted the request is sent with an empty body
+ body_content = self._serialize.body(harvest_data, 'HarvestData')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200:  # 200: an existing resource was updated
+ deserialized = self._deserialize('HarvestData', pipeline_response)
+
+ if response.status_code == 201:  # 201: a new resource was created
+ deserialized = self._deserialize('HarvestData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/farmers/{farmerId}/harvest-data/{harvestDataId}'} # type: ignore
+
+ async def delete(
+ self,
+ farmer_id: str,
+ harvest_data_id: str,
+ **kwargs
+ ) -> None:
+ """Deletes a specified harvest data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param harvest_data_id: ID of the harvest data.
+ :type harvest_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))  # caller-supplied map entries override the defaults
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'harvestDataId': self._serialize.url("harvest_data_id", harvest_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:  # only 204 No Content signals a successful delete
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/farmers/{farmerId}/harvest-data/{harvestDataId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_image_processing_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_image_processing_operations.py
new file mode 100644
index 000000000000..a226482e8e64
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_image_processing_operations.py
@@ -0,0 +1,211 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ImageProcessingOperations:
+ """ImageProcessingOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def _create_rasterize_job_initial(
+ self,
+ job_id: str,
+ job: Optional["_models.ImageProcessingRasterizeJob"] = None,
+ **kwargs
+ ) -> "_models.ImageProcessingRasterizeJob":
+ # Initial PUT of the long-running rasterize job; polled by begin_create_rasterize_job.
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.ImageProcessingRasterizeJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_rasterize_job_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if job is not None:  # job payload is optional; when omitted the request is sent with an empty body
+ body_content = self._serialize.body(job, 'ImageProcessingRasterizeJob')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:  # service accepts the LRO with 202 Accepted
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ deserialized = self._deserialize('ImageProcessingRasterizeJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_rasterize_job_initial.metadata = {'url': '/image-processing/rasterize/{jobId}'} # type: ignore
+
+ async def begin_create_rasterize_job(
+ self,
+ job_id: str,
+ job: Optional["_models.ImageProcessingRasterizeJob"] = None,
+ **kwargs
+ ) -> AsyncLROPoller["_models.ImageProcessingRasterizeJob"]:
+ """Create a ImageProcessing Rasterize job.
+
+ :param job_id: JobId provided by user.
+ :type job_id: str
+ :param job: Job parameters supplied by user.
+ :type job: ~azure.farmbeats.models.ImageProcessingRasterizeJob
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncLROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ImageProcessingRasterizeJob or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.ImageProcessingRasterizeJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.ImageProcessingRasterizeJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:  # fresh operation: issue the initial request; otherwise resume from the saved token below
+ raw_result = await self._create_rasterize_job_initial(
+ job_id=job_id,
+ job=job,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ # Deserialize the terminal polling response into the job model.
+ deserialized = self._deserialize('ImageProcessingRasterizeJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_rasterize_job.metadata = {'url': '/image-processing/rasterize/{jobId}'} # type: ignore
+
+ async def get_rasterize_job(
+ self,
+ job_id: str,
+ **kwargs
+ ) -> "_models.ImageProcessingRasterizeJob":
+ """Get ImageProcessing Rasterize job's details.
+
+ :param job_id: Id of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ImageProcessingRasterizeJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.ImageProcessingRasterizeJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.ImageProcessingRasterizeJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_rasterize_job.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)  # NOTE(review): no ErrorResponse model attached here, unlike other operation groups — confirm generator settings
+
+ deserialized = self._deserialize('ImageProcessingRasterizeJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_rasterize_job.metadata = {'url': '/image-processing/rasterize/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_oauth_providers_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_oauth_providers_operations.py
new file mode 100644
index 000000000000..75c9623367c1
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_oauth_providers_operations.py
@@ -0,0 +1,329 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class OAuthProvidersOperations:
+ """OAuthProvidersOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.OAuthProviderListResponse"]:
+ """Returns a paginated list of oauthProvider resources.
+
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OAuthProviderListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.OAuthProviderListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthProviderListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Builds the request for the first page (next_link is None) or a follow-up page.
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if ids is not None:  # list-valued filters are serialized one query entry per element
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:  # client-side validation of the documented 10..1000 bounds
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link  # next_link from the service already carries the full query string
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ # Turns one page response into (continuation token, async iterable of items).
+ deserialized = self._deserialize('OAuthProviderListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/oauth/providers'} # type: ignore
+
+ async def get(
+ self,
+ oauth_provider_id: str,
+ **kwargs
+ ) -> "_models.OAuthProvider":
+ """Get a specified oauthProvider resource.
+
+ :param oauth_provider_id: ID of the oauthProvider resource.
+ :type oauth_provider_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: OAuthProvider, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.OAuthProvider
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthProvider"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'oauthProviderId': self._serialize.url("oauth_provider_id", oauth_provider_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('OAuthProvider', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/oauth/providers/{oauthProviderId}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ oauth_provider_id: str,
+ oauth_provider: Optional["_models.OAuthProvider"] = None,
+ **kwargs
+ ) -> "_models.OAuthProvider":
+ """Creates or updates an oauthProvider resource.
+
+ :param oauth_provider_id: ID of oauthProvider resource.
+ :type oauth_provider_id: str
+ :param oauth_provider: OauthProvider resource payload to create or update.
+ :type oauth_provider: ~azure.farmbeats.models.OAuthProvider
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: OAuthProvider, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.OAuthProvider
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthProvider"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")  # upsert uses JSON Merge Patch semantics over HTTP PATCH
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'oauthProviderId': self._serialize.url("oauth_provider_id", oauth_provider_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if oauth_provider is not None:  # payload is optional; when omitted the request is sent with an empty body
+ body_content = self._serialize.body(oauth_provider, 'OAuthProvider')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200:  # 200: an existing resource was updated
+ deserialized = self._deserialize('OAuthProvider', pipeline_response)
+
+ if response.status_code == 201:  # 201: a new resource was created
+ deserialized = self._deserialize('OAuthProvider', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/oauth/providers/{oauthProviderId}'} # type: ignore
+
+ async def delete(
+ self,
+ oauth_provider_id: str,
+ **kwargs
+ ) -> None:
+ """Deletes an specified oauthProvider resource.
+
+ :param oauth_provider_id: ID of oauthProvider.
+ :type oauth_provider_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'oauthProviderId': self._serialize.url("oauth_provider_id", oauth_provider_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:  # only 204 No Content signals a successful delete
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/oauth/providers/{oauthProviderId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_oauth_tokens_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_oauth_tokens_operations.py
new file mode 100644
index 000000000000..80bd11535be0
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_oauth_tokens_operations.py
@@ -0,0 +1,381 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class OAuthTokensOperations:
    """OAuthTokensOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to build and send requests for this operation group.
        self._client = client
        # msrest-style serializer/deserializer pair supplied by the service client.
        self._serialize = serializer
        self._deserialize = deserializer
        # Holds client-wide settings (e.g. polling_interval used by LRO methods).
        self._config = config
+
    def list(
        self,
        auth_provider_ids: Optional[List[str]] = None,
        farmer_ids: Optional[List[str]] = None,
        is_valid: Optional[bool] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.OAuthTokenListResponse"]:
        """Returns a list of OAuthToken documents.

        :param auth_provider_ids: Names of the AuthProviders to filter by.
        :type auth_provider_ids: list[str]
        :param farmer_ids: Ids of the farmers to filter by.
        :type farmer_ids: list[str]
        :param is_valid: If the token object is valid.
        :type is_valid: bool
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OAuthTokenListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.OAuthTokenListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthTokenListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                if auth_provider_ids is not None:
                    query_parameters['authProviderIds'] = [self._serialize.query("auth_provider_ids", q, 'str') if q is not None else '' for q in auth_provider_ids]
                if farmer_ids is not None:
                    query_parameters['farmerIds'] = [self._serialize.query("farmer_ids", q, 'str') if q is not None else '' for q in farmer_ids]
                if is_valid is not None:
                    query_parameters['isValid'] = self._serialize.query("is_valid", is_valid, 'bool')
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Client-side enforcement of the documented bounds: 10 <= $maxPageSize <= 1000.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # The service-returned nextLink already carries the full query
                # string, so no query parameters are re-applied here.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('OAuthTokenListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): the error body is deserialized before map_error, so the
                # typed exceptions raised by map_error (401/404/409) do not carry the
                # model; only the generic HttpResponseError below does. This matches
                # the other paging operations in this file but differs from the
                # non-paging methods, where map_error is called first.
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/oauth/tokens'} # type: ignore
+
    async def get_o_auth_connection_link(
        self,
        connect_request: Optional["_models.OAuthConnectRequest"] = None,
        **kwargs
    ) -> str:
        """Returns Connection link needed in the OAuth flow.

        :param connect_request: OAuth Connect Request.
        :type connect_request: ~azure.farmbeats.models.OAuthConnectRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.get_o_auth_connection_link.metadata['url'] # type: ignore

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # The request body is optional: POST with an empty body when no
        # connect_request is supplied.
        body_content_kwargs = {} # type: Dict[str, Any]
        if connect_request is not None:
            body_content = self._serialize.body(connect_request, 'OAuthConnectRequest')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        # The service returns the connection link as a bare JSON string.
        deserialized = self._deserialize('str', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_o_auth_connection_link.metadata = {'url': '/oauth/tokens/:connect'} # type: ignore
+
    async def get_cascade_delete_job_details(
        self,
        job_id: str,
        **kwargs
    ) -> "_models.CascadeDeleteJob":
        """Get remove job for OAuth token.

        :param job_id: Id of the job.
        :type job_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CascadeDeleteJob, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get_cascade_delete_job_details.metadata['url'] # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_cascade_delete_job_details.metadata = {'url': '/oauth/tokens/remove/{jobId}'} # type: ignore
+
    async def _create_cascade_delete_job_initial(
        self,
        job_id: str,
        farmer_id: str,
        oauth_provider_id: str,
        **kwargs
    ) -> "_models.CascadeDeleteJob":
        """Issue the initial PUT for the OAuth-token cascade-delete LRO.

        Expects HTTP 202 Accepted with a CascadeDeleteJob body; polling to
        completion is handled by :meth:`begin_create_cascade_delete_job`.

        :param job_id: Job Id supplied by end user (becomes the {jobId} path segment).
        :param farmer_id: Id of the farmer, sent as the ``farmerId`` query parameter.
        :param oauth_provider_id: Id of the OAuthProvider, sent as ``oauthProviderId``.
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self._create_cascade_delete_job_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
        query_parameters['oauthProviderId'] = self._serialize.query("oauth_provider_id", oauth_provider_id, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # PUT with no body: the job is fully described by the path and query parameters.
        request = self._client.put(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_cascade_delete_job_initial.metadata = {'url': '/oauth/tokens/remove/{jobId}'} # type: ignore
+
    async def begin_create_cascade_delete_job(
        self,
        job_id: str,
        farmer_id: str,
        oauth_provider_id: str,
        **kwargs
    ) -> AsyncLROPoller["_models.CascadeDeleteJob"]:
        """Create remove job for OAuth token.

        :param job_id: Job Id supplied by end user.
        :type job_id: str
        :param farmer_id: Id of the farmer.
        :type farmer_id: str
        :param oauth_provider_id: Id of the OAuthProvider.
        :type oauth_provider_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncLROBasePolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either CascadeDeleteJob or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.CascadeDeleteJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # cls=lambda x,y,z: x makes the initial call hand back the raw
            # PipelineResponse, which the poller needs to read LRO headers.
            raw_result = await self._create_cascade_delete_job_initial(
                job_id=job_id,
                farmer_id=farmer_id,
                oauth_provider_id=oauth_provider_id,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were only meaningful for the initial request; drop them
        # so they are not forwarded to the polling requests.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final deserialization applied once polling reports completion.
            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }

        # 'final-state-via': 'location' — the terminal resource is fetched from
        # the Location header rather than the original PUT URL.
        if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        # raw_result is only defined when cont_token is None; the two branches
        # below are therefore mutually exclusive with the initial call above.
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_cascade_delete_job.metadata = {'url': '/oauth/tokens/remove/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_planting_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_planting_data_operations.py
new file mode 100644
index 000000000000..cd65fa887e1b
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_planting_data_operations.py
@@ -0,0 +1,649 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class PlantingDataOperations:
    """PlantingDataOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to build and send requests for this operation group.
        self._client = client
        # msrest-style serializer/deserializer pair supplied by the service client.
        self._serialize = serializer
        self._deserialize = deserializer
        # Client-wide configuration object.
        self._config = config
+
    def list_by_farmer_id(
        self,
        farmer_id: str,
        min_avg_planting_rate: Optional[float] = None,
        max_avg_planting_rate: Optional[float] = None,
        min_total_material: Optional[float] = None,
        max_total_material: Optional[float] = None,
        min_avg_material: Optional[float] = None,
        max_avg_material: Optional[float] = None,
        sources: Optional[List[str]] = None,
        associated_boundary_ids: Optional[List[str]] = None,
        operation_boundary_ids: Optional[List[str]] = None,
        min_operation_start_date_time: Optional[datetime.datetime] = None,
        max_operation_start_date_time: Optional[datetime.datetime] = None,
        min_operation_end_date_time: Optional[datetime.datetime] = None,
        max_operation_end_date_time: Optional[datetime.datetime] = None,
        min_operation_modified_date_time: Optional[datetime.datetime] = None,
        max_operation_modified_date_time: Optional[datetime.datetime] = None,
        min_area: Optional[float] = None,
        max_area: Optional[float] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.PlantingDataListResponse"]:
        """Returns a paginated list of planting data resources under a particular farmer.

        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param min_avg_planting_rate: Minimum AvgPlantingRate value (inclusive).
        :type min_avg_planting_rate: float
        :param max_avg_planting_rate: Maximum AvgPlantingRate value (inclusive).
        :type max_avg_planting_rate: float
        :param min_total_material: Minimum TotalMaterial value (inclusive).
        :type min_total_material: float
        :param max_total_material: Maximum TotalMaterial value (inclusive).
        :type max_total_material: float
        :param min_avg_material: Minimum AvgMaterial value (inclusive).
        :type min_avg_material: float
        :param max_avg_material: Maximum AvgMaterial value (inclusive).
        :type max_avg_material: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either PlantingDataListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.PlantingDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.PlantingDataListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_farmer_id.metadata['url'] # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                if min_avg_planting_rate is not None:
                    query_parameters['minAvgPlantingRate'] = self._serialize.query("min_avg_planting_rate", min_avg_planting_rate, 'float')
                if max_avg_planting_rate is not None:
                    query_parameters['maxAvgPlantingRate'] = self._serialize.query("max_avg_planting_rate", max_avg_planting_rate, 'float')
                if min_total_material is not None:
                    query_parameters['minTotalMaterial'] = self._serialize.query("min_total_material", min_total_material, 'float')
                if max_total_material is not None:
                    query_parameters['maxTotalMaterial'] = self._serialize.query("max_total_material", max_total_material, 'float')
                if min_avg_material is not None:
                    query_parameters['minAvgMaterial'] = self._serialize.query("min_avg_material", min_avg_material, 'float')
                if max_avg_material is not None:
                    query_parameters['maxAvgMaterial'] = self._serialize.query("max_avg_material", max_avg_material, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Client-side enforcement of the documented bounds: 10 <= $maxPageSize <= 1000.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Follow-up pages: the nextLink carries the query string, but it is
                # re-run through format_url — presumably it may still contain the
                # {farmerId} template. TODO confirm against the service's nextLink shape.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('PlantingDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                # NOTE(review): error body deserialized before map_error, so typed
                # 401/404/409 exceptions do not carry the model (matches the other
                # paging operations in this file).
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/planting-data'} # type: ignore
+
def list(
    self,
    min_avg_planting_rate: Optional[float] = None,
    max_avg_planting_rate: Optional[float] = None,
    min_total_material: Optional[float] = None,
    max_total_material: Optional[float] = None,
    min_avg_material: Optional[float] = None,
    max_avg_material: Optional[float] = None,
    sources: Optional[List[str]] = None,
    associated_boundary_ids: Optional[List[str]] = None,
    operation_boundary_ids: Optional[List[str]] = None,
    min_operation_start_date_time: Optional[datetime.datetime] = None,
    max_operation_start_date_time: Optional[datetime.datetime] = None,
    min_operation_end_date_time: Optional[datetime.datetime] = None,
    max_operation_end_date_time: Optional[datetime.datetime] = None,
    min_operation_modified_date_time: Optional[datetime.datetime] = None,
    max_operation_modified_date_time: Optional[datetime.datetime] = None,
    min_area: Optional[float] = None,
    max_area: Optional[float] = None,
    ids: Optional[List[str]] = None,
    names: Optional[List[str]] = None,
    property_filters: Optional[List[str]] = None,
    statuses: Optional[List[str]] = None,
    min_created_date_time: Optional[datetime.datetime] = None,
    max_created_date_time: Optional[datetime.datetime] = None,
    min_last_modified_date_time: Optional[datetime.datetime] = None,
    max_last_modified_date_time: Optional[datetime.datetime] = None,
    max_page_size: Optional[int] = 50,
    skip_token: Optional[str] = None,
    **kwargs
) -> AsyncIterable["_models.PlantingDataListResponse"]:
    """Returns a paginated list of planting data resources across all farmers.

    All ``min_*``/``max_*`` filters are inclusive; date-time filters use the
    sample format yyyy-MM-ddTHH:mm:ssZ.

    :param min_avg_planting_rate: Minimum AvgPlantingRate value (inclusive).
    :type min_avg_planting_rate: float
    :param max_avg_planting_rate: Maximum AvgPlantingRate value (inclusive).
    :type max_avg_planting_rate: float
    :param min_total_material: Minimum TotalMaterial value (inclusive).
    :type min_total_material: float
    :param max_total_material: Maximum TotalMaterial value (inclusive).
    :type max_total_material: float
    :param min_avg_material: Minimum AvgMaterial value (inclusive).
    :type min_avg_material: float
    :param max_avg_material: Maximum AvgMaterial value (inclusive).
    :type max_avg_material: float
    :param sources: Sources of the operation data.
    :type sources: list[str]
    :param associated_boundary_ids: Boundary IDs associated with operation data.
    :type associated_boundary_ids: list[str]
    :param operation_boundary_ids: Operation boundary IDs associated with operation data.
    :type operation_boundary_ids: list[str]
    :param min_operation_start_date_time: Minimum start date-time of the operation data.
    :type min_operation_start_date_time: ~datetime.datetime
    :param max_operation_start_date_time: Maximum start date-time of the operation data.
    :type max_operation_start_date_time: ~datetime.datetime
    :param min_operation_end_date_time: Minimum end date-time of the operation data.
    :type min_operation_end_date_time: ~datetime.datetime
    :param max_operation_end_date_time: Maximum end date-time of the operation data.
    :type max_operation_end_date_time: ~datetime.datetime
    :param min_operation_modified_date_time: Minimum modified date-time of the operation data.
    :type min_operation_modified_date_time: ~datetime.datetime
    :param max_operation_modified_date_time: Maximum modified date-time of the operation data.
    :type max_operation_modified_date_time: ~datetime.datetime
    :param min_area: Minimum area for which operation was applied (inclusive).
    :type min_area: float
    :param max_area: Maximum area for which operation was applied (inclusive).
    :type max_area: float
    :param ids: Ids of the resource.
    :type ids: list[str]
    :param names: Names of the resource.
    :type names: list[str]
    :param property_filters: Filters on key-value pairs within the Properties object.
     eg. "{testKey} eq {testValue}".
    :type property_filters: list[str]
    :param statuses: Statuses of the resource.
    :type statuses: list[str]
    :param min_created_date_time: Minimum creation date of resource (inclusive).
    :type min_created_date_time: ~datetime.datetime
    :param max_created_date_time: Maximum creation date of resource (inclusive).
    :type max_created_date_time: ~datetime.datetime
    :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
    :type min_last_modified_date_time: ~datetime.datetime
    :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
    :type max_last_modified_date_time: ~datetime.datetime
    :param max_page_size: Maximum number of items needed (inclusive).
     Minimum = 10, Maximum = 1000, Default value = 50.
    :type max_page_size: int
    :param skip_token: Skip token for getting next set of results.
    :type skip_token: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either PlantingDataListResponse or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.PlantingDataListResponse]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PlantingDataListResponse"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    accept = "application/json"

    # (wire name, python name, value, serialization type, multi-valued?),
    # listed in the exact order the query string is assembled on the wire.
    filter_specs = [
        ('minAvgPlantingRate', 'min_avg_planting_rate', min_avg_planting_rate, 'float', False),
        ('maxAvgPlantingRate', 'max_avg_planting_rate', max_avg_planting_rate, 'float', False),
        ('minTotalMaterial', 'min_total_material', min_total_material, 'float', False),
        ('maxTotalMaterial', 'max_total_material', max_total_material, 'float', False),
        ('minAvgMaterial', 'min_avg_material', min_avg_material, 'float', False),
        ('maxAvgMaterial', 'max_avg_material', max_avg_material, 'float', False),
        ('sources', 'sources', sources, 'str', True),
        ('associatedBoundaryIds', 'associated_boundary_ids', associated_boundary_ids, 'str', True),
        ('operationBoundaryIds', 'operation_boundary_ids', operation_boundary_ids, 'str', True),
        ('minOperationStartDateTime', 'min_operation_start_date_time', min_operation_start_date_time, 'iso-8601', False),
        ('maxOperationStartDateTime', 'max_operation_start_date_time', max_operation_start_date_time, 'iso-8601', False),
        ('minOperationEndDateTime', 'min_operation_end_date_time', min_operation_end_date_time, 'iso-8601', False),
        ('maxOperationEndDateTime', 'max_operation_end_date_time', max_operation_end_date_time, 'iso-8601', False),
        ('minOperationModifiedDateTime', 'min_operation_modified_date_time', min_operation_modified_date_time, 'iso-8601', False),
        ('maxOperationModifiedDateTime', 'max_operation_modified_date_time', max_operation_modified_date_time, 'iso-8601', False),
        ('minArea', 'min_area', min_area, 'float', False),
        ('maxArea', 'max_area', max_area, 'float', False),
        ('ids', 'ids', ids, 'str', True),
        ('names', 'names', names, 'str', True),
        ('propertyFilters', 'property_filters', property_filters, 'str', True),
        ('statuses', 'statuses', statuses, 'str', True),
        ('minCreatedDateTime', 'min_created_date_time', min_created_date_time, 'iso-8601', False),
        ('maxCreatedDateTime', 'max_created_date_time', max_created_date_time, 'iso-8601', False),
        ('minLastModifiedDateTime', 'min_last_modified_date_time', min_last_modified_date_time, 'iso-8601', False),
        ('maxLastModifiedDateTime', 'max_last_modified_date_time', max_last_modified_date_time, 'iso-8601', False),
    ]

    def prepare_request(next_link=None):
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if next_link:
            # Continuation links already carry the full query string.
            return self._client.get(next_link, {}, header_parameters)

        url = self.list.metadata['url']  # type: ignore
        query_parameters = {}  # type: Dict[str, Any]
        for wire_name, py_name, value, ser_type, multi in filter_specs:
            if value is None:
                continue
            if multi:
                # Each element is serialized individually; None entries become ''.
                query_parameters[wire_name] = [
                    self._serialize.query(py_name, item, ser_type) if item is not None else ''
                    for item in value
                ]
            else:
                query_parameters[wire_name] = self._serialize.query(py_name, value, ser_type)
        if max_page_size is not None:
            # Service-enforced page-size bounds: 10 <= $maxPageSize <= 1000.
            query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
        if skip_token is not None:
            query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        return self._client.get(url, query_parameters, header_parameters)

    async def extract_data(pipeline_response):
        page = self._deserialize('PlantingDataListResponse', pipeline_response)
        elements = page.value
        if cls:
            elements = cls(elements)
        return page.next_link or None, AsyncList(elements)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            # map_error first: mapped statuses (401/404/409) raise their typed
            # exception immediately, so the error model is only deserialized
            # when it is actually attached to HttpResponseError. This matches
            # the ordering used by the non-paging operations in this group.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)
list.metadata = {'url': '/planting-data'}  # type: ignore
+
async def get(
    self,
    farmer_id: str,
    planting_data_id: str,
    **kwargs
) -> "_models.PlantingData":
    """Get a specified planting data resource under a particular farmer.

    :param farmer_id: ID of the associated farmer resource.
    :type farmer_id: str
    :param planting_data_id: ID of the planting data resource.
    :type planting_data_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PlantingData, or the result of cls(response)
    :rtype: ~azure.farmbeats.models.PlantingData
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PlantingData"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    accept = "application/json"

    # Expand the route template with the URL-encoded path arguments.
    url = self._client.format_url(
        self.get.metadata['url'],  # type: ignore
        farmerId=self._serialize.url("farmer_id", farmer_id, 'str'),
        plantingDataId=self._serialize.url("planting_data_id", planting_data_id, 'str'),
    )
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        # Mapped statuses raise typed exceptions; anything else carries the
        # best-effort-deserialized service error body.
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('PlantingData', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get.metadata = {'url': '/farmers/{farmerId}/planting-data/{plantingDataId}'}  # type: ignore
+
async def create_or_update(
    self,
    farmer_id: str,
    planting_data_id: str,
    planting_data: Optional["_models.PlantingData"] = None,
    **kwargs
) -> "_models.PlantingData":
    """Creates or updates a planting data resource under a particular farmer.

    :param farmer_id: ID of the associated farmer.
    :type farmer_id: str
    :param planting_data_id: ID of the planting data resource.
    :type planting_data_id: str
    :param planting_data: Planting data resource payload to create or update.
    :type planting_data: ~azure.farmbeats.models.PlantingData
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: PlantingData, or the result of cls(response)
    :rtype: ~azure.farmbeats.models.PlantingData
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.PlantingData"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    # PATCH uses merge-patch semantics: fields absent from the payload are untouched.
    content_type = kwargs.pop("content_type", "application/merge-patch+json")
    accept = "application/json"

    url = self._client.format_url(
        self.create_or_update.metadata['url'],  # type: ignore
        farmerId=self._serialize.url("farmer_id", farmer_id, 'str'),
        plantingDataId=self._serialize.url("planting_data_id", planting_data_id, 'str'),
    )
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content = self._serialize.body(planting_data, 'PlantingData') if planting_data is not None else None
    request = self._client.patch(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in (200, 201):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error)

    # 200 (updated) and 201 (created) carry the same payload shape.
    deserialized = self._deserialize('PlantingData', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
create_or_update.metadata = {'url': '/farmers/{farmerId}/planting-data/{plantingDataId}'}  # type: ignore
+
async def delete(
    self,
    farmer_id: str,
    planting_data_id: str,
    **kwargs
) -> None:
    """Deletes a specified planting data resource under a particular farmer.

    :param farmer_id: ID of the associated farmer resource.
    :type farmer_id: str
    :param planting_data_id: ID of the planting data.
    :type planting_data_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    accept = "application/json"

    url = self._client.format_url(
        self.delete.metadata['url'],  # type: ignore
        farmerId=self._serialize.url("farmer_id", farmer_id, 'str'),
        plantingDataId=self._serialize.url("planting_data_id", planting_data_id, 'str'),
    )
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # A successful delete returns 204 No Content with no body to deserialize.
    if response.status_code != 204:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error)

    if cls:
        return cls(pipeline_response, None, {})
delete.metadata = {'url': '/farmers/{farmerId}/planting-data/{plantingDataId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_scenes_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_scenes_operations.py
new file mode 100644
index 000000000000..46901e4d7f1b
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_scenes_operations.py
@@ -0,0 +1,395 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, IO, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class ScenesOperations:
    """ScenesOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to build and send HTTP requests.
        self._client = client
        # Serializer/deserializer pair mapping between models and the wire format.
        self._serialize = serializer
        self._deserialize = deserializer
        # Service client configuration (e.g. polling interval for LROs).
        self._config = config
+
def list(
    self,
    farmer_id: str,
    boundary_id: str,
    provider: str = "Microsoft",
    source: Optional[str] = "Sentinel_2_L2A",
    start_date_time: Optional[datetime.datetime] = None,
    end_date_time: Optional[datetime.datetime] = None,
    max_cloud_coverage_percentage: Optional[float] = 100,
    max_dark_pixel_coverage_percentage: Optional[float] = 100,
    image_names: Optional[List[str]] = None,
    image_resolutions: Optional[List[float]] = None,
    image_formats: Optional[List[str]] = None,
    max_page_size: Optional[int] = 50,
    skip_token: Optional[str] = None,
    **kwargs
) -> AsyncIterable["_models.SceneListResponse"]:
    """Returns a paginated list of scene resources.

    :param farmer_id: FarmerId.
    :type farmer_id: str
    :param boundary_id: BoundaryId.
    :type boundary_id: str
    :param provider: Provider name of scene data.
    :type provider: str
    :param source: Source name of scene data, default value Sentinel_2_L2A (Sentinel 2 L2A).
    :type source: str
    :param start_date_time: Scene start UTC datetime (inclusive), sample format:
     yyyy-MM-ddThh:mm:ssZ.
    :type start_date_time: ~datetime.datetime
    :param end_date_time: Scene end UTC datetime (inclusive), sample format:
     yyyy-MM-ddThh:mm:ssZ.
    :type end_date_time: ~datetime.datetime
    :param max_cloud_coverage_percentage: Filter scenes with cloud coverage percentage less than
     max value. Range [0 to 100.0].
    :type max_cloud_coverage_percentage: float
    :param max_dark_pixel_coverage_percentage: Filter scenes with dark pixel coverage percentage
     less than max value. Range [0 to 100.0].
    :type max_dark_pixel_coverage_percentage: float
    :param image_names: List of image names to be filtered.
    :type image_names: list[str]
    :param image_resolutions: List of image resolutions in meters to be filtered.
    :type image_resolutions: list[float]
    :param image_formats: List of image formats to be filtered.
    :type image_formats: list[str]
    :param max_page_size: Maximum number of items needed (inclusive).
     Minimum = 10, Maximum = 1000, Default value = 50.
    :type max_page_size: int
    :param skip_token: Skip token for getting next set of results.
    :type skip_token: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either SceneListResponse or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.SceneListResponse]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SceneListResponse"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    accept = "application/json"

    def prepare_request(next_link=None):
        headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

        if next_link:
            # Continuation links already embed the full query string.
            return self._client.get(next_link, {}, headers)

        url = self.list.metadata['url']  # type: ignore
        params = {}  # type: Dict[str, Any]
        # Required identifiers go first, in wire order.
        params['provider'] = self._serialize.query("provider", provider, 'str')
        params['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
        params['boundaryId'] = self._serialize.query("boundary_id", boundary_id, 'str')
        if source is not None:
            params['source'] = self._serialize.query("source", source, 'str')
        if start_date_time is not None:
            params['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601')
        if end_date_time is not None:
            params['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601')
        if max_cloud_coverage_percentage is not None:
            # Coverage filters are validated client-side to the service range [0, 100].
            params['maxCloudCoveragePercentage'] = self._serialize.query("max_cloud_coverage_percentage", max_cloud_coverage_percentage, 'float', maximum=100, minimum=0)
        if max_dark_pixel_coverage_percentage is not None:
            params['maxDarkPixelCoveragePercentage'] = self._serialize.query("max_dark_pixel_coverage_percentage", max_dark_pixel_coverage_percentage, 'float', maximum=100, minimum=0)
        if image_names is not None:
            params['imageNames'] = [self._serialize.query("image_names", item, 'str') if item is not None else '' for item in image_names]
        if image_resolutions is not None:
            params['imageResolutions'] = [self._serialize.query("image_resolutions", item, 'float') if item is not None else '' for item in image_resolutions]
        if image_formats is not None:
            params['imageFormats'] = [self._serialize.query("image_formats", item, 'str') if item is not None else '' for item in image_formats]
        if max_page_size is not None:
            params['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
        if skip_token is not None:
            params['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
        params['api-version'] = self._serialize.query("api_version", api_version, 'str')
        return self._client.get(url, params, headers)

    async def extract_data(pipeline_response):
        page = self._deserialize('SceneListResponse', pipeline_response)
        elements = page.value
        if cls:
            elements = cls(elements)
        return page.next_link or None, AsyncList(elements)

    async def get_next(next_link=None):
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, model=error)
        return pipeline_response

    return AsyncItemPaged(get_next, extract_data)
list.metadata = {'url': '/scenes'}  # type: ignore
+
async def _create_satellite_data_ingestion_job_initial(
    self,
    job_id: str,
    job: Optional["_models.SatelliteDataIngestionJob"] = None,
    **kwargs
) -> "_models.SatelliteDataIngestionJob":
    """Issue the initial PUT of the satellite data ingestion long-running operation.

    The service accepts the job asynchronously, so the only success status is 202.
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SatelliteDataIngestionJob"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    url = self._client.format_url(
        self._create_satellite_data_ingestion_job_initial.metadata['url'],  # type: ignore
        jobId=self._serialize.url("job_id", job_id, 'str'),
    )
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    body_content = self._serialize.body(job, 'SatelliteDataIngestionJob') if job is not None else None
    request = self._client.put(url, query_parameters, header_parameters, content=body_content)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 202:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SatelliteDataIngestionJob', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
_create_satellite_data_ingestion_job_initial.metadata = {'url': '/scenes/satellite/ingest-data/{jobId}'}  # type: ignore
+
async def begin_create_satellite_data_ingestion_job(
    self,
    job_id: str,
    job: Optional["_models.SatelliteDataIngestionJob"] = None,
    **kwargs
) -> AsyncLROPoller["_models.SatelliteDataIngestionJob"]:
    """Create a satellite data ingestion job.

    :param job_id: JobId provided by user.
    :type job_id: str
    :param job: Job parameters supplied by user.
    :type job: ~azure.farmbeats.models.SatelliteDataIngestionJob
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncLROBasePolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either SatelliteDataIngestionJob or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.SatelliteDataIngestionJob]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SatelliteDataIngestionJob"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # No saved state: issue the initial PUT. The 'cls' passthrough keeps the
        # raw pipeline response for the poller instead of a deserialized model.
        raw_result = await self._create_satellite_data_ingestion_job_initial(
            job_id=job_id,
            job=job,
            cls=lambda x,y,z: x,
            **kwargs
        )

    # These kwargs applied only to the initial request; drop them so they are
    # not forwarded to the polling requests below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the terminal polling response into the job model.
        deserialized = self._deserialize('SatelliteDataIngestionJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    path_format_arguments = {
        'jobId': self._serialize.url("job_id", job_id, 'str'),
    }

    # 'location' final-state: the final resource is fetched from the location URL.
    if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume an existing operation from its saved continuation token.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_satellite_data_ingestion_job.metadata = {'url': '/scenes/satellite/ingest-data/{jobId}'}  # type: ignore
+
async def get_satellite_data_ingestion_job_details(
    self,
    job_id: str,
    **kwargs
) -> "_models.SatelliteDataIngestionJob":
    """Get a satellite data ingestion job.

    :param job_id: Id of the job.
    :type job_id: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: SatelliteDataIngestionJob, or the result of cls(response)
    :rtype: ~azure.farmbeats.models.SatelliteDataIngestionJob
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.SatelliteDataIngestionJob"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    accept = "application/json"

    url = self._client.format_url(
        self.get_satellite_data_ingestion_job_details.metadata['url'],  # type: ignore
        jobId=self._serialize.url("job_id", job_id, 'str'),
    )
    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = self._deserialize('SatelliteDataIngestionJob', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_satellite_data_ingestion_job_details.metadata = {'url': '/scenes/satellite/ingest-data/{jobId}'}  # type: ignore
+
async def download(
    self,
    file_path: str,
    **kwargs
) -> IO:
    """Downloads and returns file Stream as response for the given input filePath.

    :param file_path: cloud storage path of scene file.
    :type file_path: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: IO, or the result of cls(response)
    :rtype: IO
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[IO]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2021-03-31-preview"
    accept = "application/octet-stream, application/json"

    url = self.download.metadata['url']  # type: ignore
    query_parameters = {
        'filePath': self._serialize.query("file_path", file_path, 'str'),
        'api-version': self._serialize.query("api_version", api_version, 'str'),
    }  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    # stream=True: the body is not read eagerly; the caller consumes the stream.
    pipeline_response = await self._client._pipeline.run(request, stream=True, **kwargs)
    response = pipeline_response.http_response

    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error)

    deserialized = response.stream_download(self._client._pipeline)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
download.metadata = {'url': '/scenes/downloadFiles'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_seasonal_fields_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_seasonal_fields_operations.py
new file mode 100644
index 000000000000..03d86332c20b
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_seasonal_fields_operations.py
@@ -0,0 +1,775 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class SeasonalFieldsOperations:
+ """SeasonalFieldsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wire up the shared pipeline client and msrest (de)serializers.
+        # This operation group is constructed by the service client, which
+        # attaches it as an attribute; users never instantiate it directly.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id: str,
+        farm_ids: Optional[List[str]] = None,
+        field_ids: Optional[List[str]] = None,
+        season_ids: Optional[List[str]] = None,
+        crop_variety_ids: Optional[List[str]] = None,
+        crop_ids: Optional[List[str]] = None,
+        min_avg_yield_value: Optional[float] = None,
+        max_avg_yield_value: Optional[float] = None,
+        avg_yield_unit: Optional[str] = None,
+        min_avg_seed_population_value: Optional[float] = None,
+        max_avg_seed_population_value: Optional[float] = None,
+        avg_seed_population_unit: Optional[str] = None,
+        min_planting_date_time: Optional[datetime.datetime] = None,
+        max_planting_date_time: Optional[datetime.datetime] = None,
+        ids: Optional[List[str]] = None,
+        names: Optional[List[str]] = None,
+        property_filters: Optional[List[str]] = None,
+        statuses: Optional[List[str]] = None,
+        min_created_date_time: Optional[datetime.datetime] = None,
+        max_created_date_time: Optional[datetime.datetime] = None,
+        min_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_page_size: Optional[int] = 50,
+        skip_token: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["_models.SeasonalFieldListResponse"]:
+        """Returns a paginated list of seasonal field resources under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param farm_ids: Farm Ids of the resource.
+        :type farm_ids: list[str]
+        :param field_ids: Field Ids of the resource.
+        :type field_ids: list[str]
+        :param season_ids: Season Ids of the resource.
+        :type season_ids: list[str]
+        :param crop_variety_ids: CropVarietyIds of the resource.
+        :type crop_variety_ids: list[str]
+        :param crop_ids: Ids of the crop it belongs to.
+        :type crop_ids: list[str]
+        :param min_avg_yield_value: Minimum average yield value of the seasonal field(inclusive).
+        :type min_avg_yield_value: float
+        :param max_avg_yield_value: Maximum average yield value of the seasonal field(inclusive).
+        :type max_avg_yield_value: float
+        :param avg_yield_unit: Unit of the average yield value attribute.
+        :type avg_yield_unit: str
+        :param min_avg_seed_population_value: Minimum average seed population value of the seasonal
+         field(inclusive).
+        :type min_avg_seed_population_value: float
+        :param max_avg_seed_population_value: Maximum average seed population value of the seasonal
+         field(inclusive).
+        :type max_avg_seed_population_value: float
+        :param avg_seed_population_unit: Unit of average seed population value attribute.
+        :type avg_seed_population_unit: str
+        :param min_planting_date_time: Minimum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
+        :type min_planting_date_time: ~datetime.datetime
+        :param max_planting_date_time: Maximum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
+        :type max_planting_date_time: ~datetime.datetime
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either SeasonalFieldListResponse or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.SeasonalFieldListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalFieldListResponse"]
+        # Map auth/not-found/conflict codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Builds the HttpRequest for one page: the full filtered URL for the
+            # first page, or the service-provided nextLink for subsequent pages.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                # Each optional filter is added only when provided; list-valued
+                # filters become repeated query parameters (None items -> '').
+                query_parameters = {}  # type: Dict[str, Any]
+                if farm_ids is not None:
+                    query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
+                if field_ids is not None:
+                    query_parameters['fieldIds'] = [self._serialize.query("field_ids", q, 'str') if q is not None else '' for q in field_ids]
+                if season_ids is not None:
+                    query_parameters['seasonIds'] = [self._serialize.query("season_ids", q, 'str') if q is not None else '' for q in season_ids]
+                if crop_variety_ids is not None:
+                    query_parameters['cropVarietyIds'] = [self._serialize.query("crop_variety_ids", q, 'str') if q is not None else '' for q in crop_variety_ids]
+                if crop_ids is not None:
+                    query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
+                if min_avg_yield_value is not None:
+                    query_parameters['minAvgYieldValue'] = self._serialize.query("min_avg_yield_value", min_avg_yield_value, 'float')
+                if max_avg_yield_value is not None:
+                    query_parameters['maxAvgYieldValue'] = self._serialize.query("max_avg_yield_value", max_avg_yield_value, 'float')
+                if avg_yield_unit is not None:
+                    query_parameters['avgYieldUnit'] = self._serialize.query("avg_yield_unit", avg_yield_unit, 'str')
+                if min_avg_seed_population_value is not None:
+                    query_parameters['minAvgSeedPopulationValue'] = self._serialize.query("min_avg_seed_population_value", min_avg_seed_population_value, 'float')
+                if max_avg_seed_population_value is not None:
+                    query_parameters['maxAvgSeedPopulationValue'] = self._serialize.query("max_avg_seed_population_value", max_avg_seed_population_value, 'float')
+                if avg_seed_population_unit is not None:
+                    query_parameters['avgSeedPopulationUnit'] = self._serialize.query("avg_seed_population_unit", avg_seed_population_unit, 'str')
+                if min_planting_date_time is not None:
+                    query_parameters['minPlantingDateTime'] = self._serialize.query("min_planting_date_time", min_planting_date_time, 'iso-8601')
+                if max_planting_date_time is not None:
+                    query_parameters['maxPlantingDateTime'] = self._serialize.query("max_planting_date_time", max_planting_date_time, 'iso-8601')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Service-enforced bounds (10..1000) are validated client-side.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # nextLink already encodes the query string; only re-expand the
+                # farmerId path placeholder in case the link is a template.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, items).
+            deserialized = self._deserialize('SeasonalFieldListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch one page; non-200 responses raise a typed or generic error.
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # failsafe_deserialize returns None rather than raising if the
+                # error body does not match the ErrorResponse schema.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/seasonal-fields'}  # type: ignore
+
+    def list(
+        self,
+        farm_ids: Optional[List[str]] = None,
+        field_ids: Optional[List[str]] = None,
+        season_ids: Optional[List[str]] = None,
+        crop_variety_ids: Optional[List[str]] = None,
+        crop_ids: Optional[List[str]] = None,
+        min_avg_yield_value: Optional[float] = None,
+        max_avg_yield_value: Optional[float] = None,
+        avg_yield_unit: Optional[str] = None,
+        min_avg_seed_population_value: Optional[float] = None,
+        max_avg_seed_population_value: Optional[float] = None,
+        avg_seed_population_unit: Optional[str] = None,
+        min_planting_date_time: Optional[datetime.datetime] = None,
+        max_planting_date_time: Optional[datetime.datetime] = None,
+        ids: Optional[List[str]] = None,
+        names: Optional[List[str]] = None,
+        property_filters: Optional[List[str]] = None,
+        statuses: Optional[List[str]] = None,
+        min_created_date_time: Optional[datetime.datetime] = None,
+        max_created_date_time: Optional[datetime.datetime] = None,
+        min_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_last_modified_date_time: Optional[datetime.datetime] = None,
+        max_page_size: Optional[int] = 50,
+        skip_token: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["_models.SeasonalFieldListResponse"]:
+        """Returns a paginated list of seasonal field resources across all farmers.
+
+        :param farm_ids: Farm Ids of the resource.
+        :type farm_ids: list[str]
+        :param field_ids: Field Ids of the resource.
+        :type field_ids: list[str]
+        :param season_ids: Season Ids of the resource.
+        :type season_ids: list[str]
+        :param crop_variety_ids: CropVarietyIds of the resource.
+        :type crop_variety_ids: list[str]
+        :param crop_ids: Ids of the crop it belongs to.
+        :type crop_ids: list[str]
+        :param min_avg_yield_value: Minimum average yield value of the seasonal field(inclusive).
+        :type min_avg_yield_value: float
+        :param max_avg_yield_value: Maximum average yield value of the seasonal field(inclusive).
+        :type max_avg_yield_value: float
+        :param avg_yield_unit: Unit of the average yield value attribute.
+        :type avg_yield_unit: str
+        :param min_avg_seed_population_value: Minimum average seed population value of the seasonal
+         field(inclusive).
+        :type min_avg_seed_population_value: float
+        :param max_avg_seed_population_value: Maximum average seed population value of the seasonal
+         field(inclusive).
+        :type max_avg_seed_population_value: float
+        :param avg_seed_population_unit: Unit of average seed population value attribute.
+        :type avg_seed_population_unit: str
+        :param min_planting_date_time: Minimum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
+        :type min_planting_date_time: ~datetime.datetime
+        :param max_planting_date_time: Maximum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
+        :type max_planting_date_time: ~datetime.datetime
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either SeasonalFieldListResponse or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.SeasonalFieldListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalFieldListResponse"]
+        # Map auth/not-found/conflict codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Builds the HttpRequest for one page: the full filtered URL for the
+            # first page, or the service-provided nextLink for subsequent pages.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters
+                # Each optional filter is added only when provided; list-valued
+                # filters become repeated query parameters (None items -> '').
+                query_parameters = {}  # type: Dict[str, Any]
+                if farm_ids is not None:
+                    query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
+                if field_ids is not None:
+                    query_parameters['fieldIds'] = [self._serialize.query("field_ids", q, 'str') if q is not None else '' for q in field_ids]
+                if season_ids is not None:
+                    query_parameters['seasonIds'] = [self._serialize.query("season_ids", q, 'str') if q is not None else '' for q in season_ids]
+                if crop_variety_ids is not None:
+                    query_parameters['cropVarietyIds'] = [self._serialize.query("crop_variety_ids", q, 'str') if q is not None else '' for q in crop_variety_ids]
+                if crop_ids is not None:
+                    query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
+                if min_avg_yield_value is not None:
+                    query_parameters['minAvgYieldValue'] = self._serialize.query("min_avg_yield_value", min_avg_yield_value, 'float')
+                if max_avg_yield_value is not None:
+                    query_parameters['maxAvgYieldValue'] = self._serialize.query("max_avg_yield_value", max_avg_yield_value, 'float')
+                if avg_yield_unit is not None:
+                    query_parameters['avgYieldUnit'] = self._serialize.query("avg_yield_unit", avg_yield_unit, 'str')
+                if min_avg_seed_population_value is not None:
+                    query_parameters['minAvgSeedPopulationValue'] = self._serialize.query("min_avg_seed_population_value", min_avg_seed_population_value, 'float')
+                if max_avg_seed_population_value is not None:
+                    query_parameters['maxAvgSeedPopulationValue'] = self._serialize.query("max_avg_seed_population_value", max_avg_seed_population_value, 'float')
+                if avg_seed_population_unit is not None:
+                    query_parameters['avgSeedPopulationUnit'] = self._serialize.query("avg_seed_population_unit", avg_seed_population_unit, 'str')
+                if min_planting_date_time is not None:
+                    query_parameters['minPlantingDateTime'] = self._serialize.query("min_planting_date_time", min_planting_date_time, 'iso-8601')
+                if max_planting_date_time is not None:
+                    query_parameters['maxPlantingDateTime'] = self._serialize.query("max_planting_date_time", max_planting_date_time, 'iso-8601')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Service-enforced bounds (10..1000) are validated client-side.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # nextLink already encodes the query string; no path placeholders
+                # exist on this route, so it is used verbatim.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        async def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, items).
+            deserialized = self._deserialize('SeasonalFieldListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        async def get_next(next_link=None):
+            # Fetch one page; non-200 responses raise a typed or generic error.
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # failsafe_deserialize returns None rather than raising if the
+                # error body does not match the ErrorResponse schema.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/seasonal-fields'}  # type: ignore
+
+    async def get(
+        self,
+        farmer_id: str,
+        seasonal_field_id: str,
+        **kwargs
+    ) -> "_models.SeasonalField":
+        """Gets a specified seasonal field resource under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param seasonal_field_id: Id of the seasonal field.
+        :type seasonal_field_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: SeasonalField, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.SeasonalField
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalField"]
+        # Map auth/not-found/conflict codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'seasonalFieldId': self._serialize.url("seasonal_field_id", seasonal_field_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # map_error raises a typed exception for mapped codes; otherwise fall
+            # through to a generic HttpResponseError carrying the error model.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('SeasonalField', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/seasonal-fields/{seasonalFieldId}'}  # type: ignore
+
+    async def create_or_update(
+        self,
+        farmer_id: str,
+        seasonal_field_id: str,
+        seasonal_field: Optional["_models.SeasonalField"] = None,
+        **kwargs
+    ) -> "_models.SeasonalField":
+        """Creates or Updates a seasonal field resource under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer resource.
+        :type farmer_id: str
+        :param seasonal_field_id: Id of the seasonal field resource.
+        :type seasonal_field_id: str
+        :param seasonal_field: Seasonal field resource payload to create or update.
+        :type seasonal_field: ~azure.farmbeats.models.SeasonalField
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: SeasonalField, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.SeasonalField
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalField"]
+        # Map auth/not-found/conflict codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # PATCH with merge-patch semantics: omitted fields are left unchanged.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'seasonalFieldId': self._serialize.url("seasonal_field_id", seasonal_field_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if seasonal_field is not None:
+            body_content = self._serialize.body(seasonal_field, 'SeasonalField')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # 200 = updated existing resource, 201 = created new resource; the
+        # response body shape is the same for both.
+        if response.status_code == 200:
+            deserialized = self._deserialize('SeasonalField', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('SeasonalField', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/seasonal-fields/{seasonalFieldId}'}  # type: ignore
+
+    async def delete(
+        self,
+        farmer_id: str,
+        seasonal_field_id: str,
+        **kwargs
+    ) -> None:
+        """Deletes a specified seasonal-field resource under a particular farmer.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param seasonal_field_id: Id of the seasonal field.
+        :type seasonal_field_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        # Map auth/not-found/conflict codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'seasonalFieldId': self._serialize.url("seasonal_field_id", seasonal_field_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 No Content is the only success status for delete.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/seasonal-fields/{seasonalFieldId}'}  # type: ignore
+
+    async def get_cascade_delete_job_details(
+        self,
+        job_id: str,
+        **kwargs
+    ) -> "_models.CascadeDeleteJob":
+        """Get cascade delete job for specified seasonal field.
+
+        :param job_id: Id of the job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        # Map auth/not-found/conflict codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_cascade_delete_job_details.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_cascade_delete_job_details.metadata = {'url': '/seasonal-fields/cascade-delete/{jobId}'}  # type: ignore
+
+    async def _create_cascade_delete_job_initial(
+        self,
+        job_id: str,
+        farmer_id: str,
+        seasonal_field_id: str,
+        **kwargs
+    ) -> "_models.CascadeDeleteJob":
+        """Issue the initial PUT that starts a cascade-delete job.
+
+        Private helper for the ``begin_create_cascade_delete_job`` long-running
+        operation; it submits the job and returns the 202 response body without
+        polling for completion.
+
+        :param job_id: Job ID supplied by the end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param seasonal_field_id: ID of the seasonal field to be deleted.
+        :type seasonal_field_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        # Map auth/not-found/conflict codes to typed azure-core exceptions;
+        # callers may extend or override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        # The target resource is identified via query parameters, not the body.
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+        query_parameters['seasonalFieldId'] = self._serialize.query("seasonal_field_id", seasonal_field_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 202 Accepted: job submitted; polling is handled by the begin_* wrapper.
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_cascade_delete_job_initial.metadata = {'url': '/seasonal-fields/cascade-delete/{jobId}'}  # type: ignore
+
    async def begin_create_cascade_delete_job(
        self,
        job_id: str,
        farmer_id: str,
        seasonal_field_id: str,
        **kwargs
    ) -> AsyncLROPoller["_models.CascadeDeleteJob"]:
        """Create a cascade delete job for specified seasonal field.

        :param job_id: Job ID supplied by end user.
        :type job_id: str
        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param seasonal_field_id: ID of the seasonalField to be deleted.
        :type seasonal_field_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncLROBasePolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either CascadeDeleteJob or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.CascadeDeleteJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # First call (no saved state): kick off the job. The cls=lambda keeps
            # the raw PipelineResponse so the poller can read LRO headers from it.
            raw_result = await self._create_cascade_delete_job_initial(
                job_id=job_id,
                farmer_id=farmer_id,
                seasonal_field_id=seasonal_field_id,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were consumed by the initial call and must not leak into
        # the polling pipeline runs below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }

        # 'final-state-via: location' tells the base poller to use the 'location'
        # mechanism to fetch the final result once polling completes.
        if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously saved poller instead of starting a new job.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_cascade_delete_job.metadata = {'url': '/seasonal-fields/cascade-delete/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_seasons_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_seasons_operations.py
new file mode 100644
index 000000000000..d1fd3efe9fbf
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_seasons_operations.py
@@ -0,0 +1,354 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
class SeasonsOperations:
    """SeasonsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client and (de)serializers are supplied by the service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        min_start_date_time: Optional[datetime.datetime] = None,
        max_start_date_time: Optional[datetime.datetime] = None,
        min_end_date_time: Optional[datetime.datetime] = None,
        max_end_date_time: Optional[datetime.datetime] = None,
        years: Optional[List[int]] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.SeasonListResponse"]:
        """Returns a paginated list of season resources.

        :param min_start_date_time: Minimum season start datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type min_start_date_time: ~datetime.datetime
        :param max_start_date_time: Maximum season start datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type max_start_date_time: ~datetime.datetime
        :param min_end_date_time: Minimum season end datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type min_end_date_time: ~datetime.datetime
        :param max_end_date_time: Maximum season end datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type max_end_date_time: ~datetime.datetime
        :param years: Years of the resource.
        :type years: list[int]
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SeasonListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.SeasonListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for either the first page (full query string)
            # or a continuation page (next_link already encodes the query).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters: only filters the caller actually set are sent.
                query_parameters = {}  # type: Dict[str, Any]
                if min_start_date_time is not None:
                    query_parameters['minStartDateTime'] = self._serialize.query("min_start_date_time", min_start_date_time, 'iso-8601')
                if max_start_date_time is not None:
                    query_parameters['maxStartDateTime'] = self._serialize.query("max_start_date_time", max_start_date_time, 'iso-8601')
                if min_end_date_time is not None:
                    query_parameters['minEndDateTime'] = self._serialize.query("min_end_date_time", min_end_date_time, 'iso-8601')
                if max_end_date_time is not None:
                    query_parameters['maxEndDateTime'] = self._serialize.query("max_end_date_time", max_end_date_time, 'iso-8601')
                if years is not None:
                    query_parameters['years'] = [self._serialize.query("years", q, 'int') if q is not None else '' for q in years]
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Split a page response into (link to the next page, items of this page).
            deserialized = self._deserialize('SeasonListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                # A caller-provided cls may transform the page's element list.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/seasons'}  # type: ignore

    async def get(
        self,
        season_id: str,
        **kwargs
    ) -> "_models.Season":
        """Gets a specified season resource.

        :param season_id: Id of the season.
        :type season_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Season, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.Season
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Season"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'seasonId': self._serialize.url("season_id", season_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('Season', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/seasons/{seasonId}'}  # type: ignore

    async def create_or_update(
        self,
        season_id: str,
        season: Optional["_models.Season"] = None,
        **kwargs
    ) -> "_models.Season":
        """Creates or updates a season resource.

        :param season_id: Id of the season resource.
        :type season_id: str
        :param season: Season resource payload to create or update.
        :type season: ~azure.farmbeats.models.Season
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Season, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.Season
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Season"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        # Sent as a PATCH with JSON merge-patch semantics (see request below).
        content_type = kwargs.pop("content_type", "application/merge-patch+json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'seasonId': self._serialize.url("season_id", season_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        if season is not None:
            body_content = self._serialize.body(season, 'Season')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = updated existing resource, 201 = created a new one.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if response.status_code == 200:
            deserialized = self._deserialize('Season', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('Season', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/seasons/{seasonId}'}  # type: ignore

    async def delete(
        self,
        season_id: str,
        **kwargs
    ) -> None:
        """Deletes a specified season resource.

        :param season_id: Id of the season.
        :type season_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'seasonId': self._serialize.url("season_id", season_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Successful delete is 204 No Content; there is no body to deserialize.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/seasons/{seasonId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_tillage_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_tillage_data_operations.py
new file mode 100644
index 000000000000..a4fa1f891e42
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_tillage_data_operations.py
@@ -0,0 +1,629 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class TillageDataOperations:
+ """TillageDataOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Wire up the pipeline client and (de)serializers supplied by the
        # service client; this class is not meant to be constructed directly.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
+
    def list_by_farmer_id(
        self,
        farmer_id: str,
        min_tillage_depth: Optional[float] = None,
        max_tillage_depth: Optional[float] = None,
        min_tillage_pressure: Optional[float] = None,
        max_tillage_pressure: Optional[float] = None,
        sources: Optional[List[str]] = None,
        associated_boundary_ids: Optional[List[str]] = None,
        operation_boundary_ids: Optional[List[str]] = None,
        min_operation_start_date_time: Optional[datetime.datetime] = None,
        max_operation_start_date_time: Optional[datetime.datetime] = None,
        min_operation_end_date_time: Optional[datetime.datetime] = None,
        max_operation_end_date_time: Optional[datetime.datetime] = None,
        min_operation_modified_date_time: Optional[datetime.datetime] = None,
        max_operation_modified_date_time: Optional[datetime.datetime] = None,
        min_area: Optional[float] = None,
        max_area: Optional[float] = None,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        **kwargs
    ) -> AsyncIterable["_models.TillageDataListResponse"]:
        """Returns a paginated list of tillage data resources under a particular farmer.

        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param min_tillage_depth: Minimum measured tillage depth (inclusive).
        :type min_tillage_depth: float
        :param max_tillage_depth: Maximum measured tillage depth (inclusive).
        :type max_tillage_depth: float
        :param min_tillage_pressure: Minimum pressure applied to a tillage implement (inclusive).
        :type min_tillage_pressure: float
        :param max_tillage_pressure: Maximum pressure applied to a tillage implement (inclusive).
        :type max_tillage_pressure: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either TillageDataListResponse or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.TillageDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.TillageDataListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for either the first page (full query string)
            # or a continuation page (next_link supplied by the service).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_farmer_id.metadata['url']  # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters: only filters the caller actually set are sent.
                query_parameters = {}  # type: Dict[str, Any]
                if min_tillage_depth is not None:
                    query_parameters['minTillageDepth'] = self._serialize.query("min_tillage_depth", min_tillage_depth, 'float')
                if max_tillage_depth is not None:
                    query_parameters['maxTillageDepth'] = self._serialize.query("max_tillage_depth", max_tillage_depth, 'float')
                if min_tillage_pressure is not None:
                    query_parameters['minTillagePressure'] = self._serialize.query("min_tillage_pressure", min_tillage_pressure, 'float')
                if max_tillage_pressure is not None:
                    query_parameters['maxTillagePressure'] = self._serialize.query("max_tillage_pressure", max_tillage_pressure, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                # Re-apply the farmerId path substitution to the continuation link.
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Split a page response into (link to the next page, items of this page).
            deserialized = self._deserialize('TillageDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                # A caller-provided cls may transform the page's element list.
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/tillage-data'}  # type: ignore
+
+ def list(
+ self,
+ min_tillage_depth: Optional[float] = None,
+ max_tillage_depth: Optional[float] = None,
+ min_tillage_pressure: Optional[float] = None,
+ max_tillage_pressure: Optional[float] = None,
+ sources: Optional[List[str]] = None,
+ associated_boundary_ids: Optional[List[str]] = None,
+ operation_boundary_ids: Optional[List[str]] = None,
+ min_operation_start_date_time: Optional[datetime.datetime] = None,
+ max_operation_start_date_time: Optional[datetime.datetime] = None,
+ min_operation_end_date_time: Optional[datetime.datetime] = None,
+ max_operation_end_date_time: Optional[datetime.datetime] = None,
+ min_operation_modified_date_time: Optional[datetime.datetime] = None,
+ max_operation_modified_date_time: Optional[datetime.datetime] = None,
+ min_area: Optional[float] = None,
+ max_area: Optional[float] = None,
+ ids: Optional[List[str]] = None,
+ names: Optional[List[str]] = None,
+ property_filters: Optional[List[str]] = None,
+ statuses: Optional[List[str]] = None,
+ min_created_date_time: Optional[datetime.datetime] = None,
+ max_created_date_time: Optional[datetime.datetime] = None,
+ min_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_last_modified_date_time: Optional[datetime.datetime] = None,
+ max_page_size: Optional[int] = 50,
+ skip_token: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["_models.TillageDataListResponse"]:
+ """Returns a paginated list of tillage data resources across all farmers.
+
+ :param min_tillage_depth: Minimum measured tillage depth (inclusive).
+ :type min_tillage_depth: float
+ :param max_tillage_depth: Maximum measured tillage depth (inclusive).
+ :type max_tillage_depth: float
+ :param min_tillage_pressure: Minimum pressure applied to a tillage implement (inclusive).
+ :type min_tillage_pressure: float
+ :param max_tillage_pressure: Maximum pressure applied to a tillage implement (inclusive).
+ :type max_tillage_pressure: float
+ :param sources: Sources of the operation data.
+ :type sources: list[str]
+ :param associated_boundary_ids: Boundary IDs associated with operation data.
+ :type associated_boundary_ids: list[str]
+ :param operation_boundary_ids: Operation boundary IDs associated with operation data.
+ :type operation_boundary_ids: list[str]
+ :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
+ format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_start_date_time: ~datetime.datetime
+ :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
+ format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_start_date_time: ~datetime.datetime
+ :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_end_date_time: ~datetime.datetime
+ :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_end_date_time: ~datetime.datetime
+ :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
+ sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_modified_date_time: ~datetime.datetime
+ :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
+ sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_modified_date_time: ~datetime.datetime
+ :param min_area: Minimum area for which operation was applied (inclusive).
+ :type min_area: float
+ :param max_area: Maximum area for which operation was applied (inclusive).
+ :type max_area: float
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either TillageDataListResponse or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.TillageDataListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.TillageDataListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if min_tillage_depth is not None:
+ query_parameters['minTillageDepth'] = self._serialize.query("min_tillage_depth", min_tillage_depth, 'float')
+ if max_tillage_depth is not None:
+ query_parameters['maxTillageDepth'] = self._serialize.query("max_tillage_depth", max_tillage_depth, 'float')
+ if min_tillage_pressure is not None:
+ query_parameters['minTillagePressure'] = self._serialize.query("min_tillage_pressure", min_tillage_pressure, 'float')
+ if max_tillage_pressure is not None:
+ query_parameters['maxTillagePressure'] = self._serialize.query("max_tillage_pressure", max_tillage_pressure, 'float')
+ if sources is not None:
+ query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
+ if associated_boundary_ids is not None:
+ query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
+ if operation_boundary_ids is not None:
+ query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
+ if min_operation_start_date_time is not None:
+ query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
+ if max_operation_start_date_time is not None:
+ query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
+ if min_operation_end_date_time is not None:
+ query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
+ if max_operation_end_date_time is not None:
+ query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
+ if min_operation_modified_date_time is not None:
+ query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
+ if max_operation_modified_date_time is not None:
+ query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
+ if min_area is not None:
+ query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
+ if max_area is not None:
+ query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('TillageDataListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/tillage-data'} # type: ignore
+
+    async def get(
+        self,
+        farmer_id: str,
+        tillage_data_id: str,
+        **kwargs
+    ) -> "_models.TillageData":
+        """Get a specified tillage data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param tillage_data_id: ID of the tillage data resource.
+        :type tillage_data_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: TillageData, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.TillageData
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.TillageData"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Caller-supplied mappings take precedence over the defaults above.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: both IDs are path parameters, URL-encoded by the serializer.
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'tillageDataId': self._serialize.url("tillage_data_id", tillage_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # failsafe_deserialize: a malformed error body must not mask the HTTP failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('TillageData', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/tillage-data/{tillageDataId}'}  # type: ignore
+
+    async def create_or_update(
+        self,
+        farmer_id: str,
+        tillage_data_id: str,
+        tillage_data: Optional["_models.TillageData"] = None,
+        **kwargs
+    ) -> "_models.TillageData":
+        """Creates or updates a tillage data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param tillage_data_id: ID of the tillage data resource.
+        :type tillage_data_id: str
+        :param tillage_data: Tillage data resource payload to create or update.
+        :type tillage_data: ~azure.farmbeats.models.TillageData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: TillageData, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.TillageData
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.TillageData"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Caller-supplied mappings take precedence over the defaults above.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # JSON merge-patch (RFC 7396): omitted fields are left unchanged, explicit
+        # nulls delete fields. The operation is sent as an HTTP PATCH below.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'tillageDataId': self._serialize.url("tillage_data_id", tillage_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # A None payload is sent as an empty body rather than JSON "null".
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if tillage_data is not None:
+            body_content = self._serialize.body(tillage_data, 'TillageData')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # failsafe_deserialize: a malformed error body must not mask the HTTP failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # 200 = updated existing resource, 201 = created new one; both carry the
+        # same TillageData payload.
+        if response.status_code == 200:
+            deserialized = self._deserialize('TillageData', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('TillageData', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/tillage-data/{tillageDataId}'}  # type: ignore
+
+    async def delete(
+        self,
+        farmer_id: str,
+        tillage_data_id: str,
+        **kwargs
+    ) -> None:
+        """Deletes a specified tillage data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param tillage_data_id: ID of the tillage data.
+        :type tillage_data_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Caller-supplied mappings take precedence over the defaults above.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'tillageDataId': self._serialize.url("tillage_data_id", tillage_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 No Content is the only success status; there is no response body,
+        # so the method returns None (or cls(...) when a custom callback is given).
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # failsafe_deserialize: a malformed error body must not mask the HTTP failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/tillage-data/{tillageDataId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_weather_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_weather_operations.py
new file mode 100644
index 000000000000..ea0b2fae1f09
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/aio/operations/_weather_operations.py
@@ -0,0 +1,494 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.polling.async_base_polling import AsyncLROBasePolling
+
+from ... import models as _models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WeatherOperations:
+ """WeatherOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        # Wired up by the generated service client; not meant for direct construction.
+        self._client = client          # pipeline-enabled HTTP client used to send requests
+        self._serialize = serializer   # msrest serializer for URL/query/header/body values
+        self._deserialize = deserializer  # msrest deserializer for response payloads
+        self._config = config          # client configuration (e.g. polling_interval for LROs)
+
+    def list(
+        self,
+        farmer_id: str,
+        boundary_id: str,
+        extension_id: str,
+        weather_data_type: str,
+        granularity: str,
+        start_date_time: Optional[datetime.datetime] = None,
+        end_date_time: Optional[datetime.datetime] = None,
+        max_page_size: Optional[int] = 50,
+        skip_token: Optional[str] = None,
+        **kwargs
+    ) -> AsyncIterable["_models.WeatherDataListResponse"]:
+        """Returns a paginated list of weather data.
+
+        :param farmer_id: Farmer ID.
+        :type farmer_id: str
+        :param boundary_id: Boundary ID.
+        :type boundary_id: str
+        :param extension_id: ID of the weather extension.
+        :type extension_id: str
+        :param weather_data_type: Type of weather data (forecast/historical).
+        :type weather_data_type: str
+        :param granularity: Granularity of weather data (daily/hourly).
+        :type granularity: str
+        :param start_date_time: Weather data start UTC date-time (inclusive), sample format:
+         yyyy-MM-ddTHH:mm:ssZ.
+        :type start_date_time: ~datetime.datetime
+        :param end_date_time: Weather data end UTC date-time (inclusive), sample format:
+         yyyy-MM-ddTHH:mm:ssZ.
+        :type end_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either WeatherDataListResponse or the result of cls(response)
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.farmbeats.models.WeatherDataListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.WeatherDataListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Caller-supplied mappings take precedence over the defaults above.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Builds the request for one page: the first page carries all query
+        # parameters; later pages reuse the service-provided next_link verbatim.
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters. Unlike the per-farmer operations, all IDs
+                # here are *query* parameters on the flat /weather endpoint.
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+                query_parameters['boundaryId'] = self._serialize.query("boundary_id", boundary_id, 'str')
+                # extensionId must match "<provider>.<name>" (client-side regex validation).
+                query_parameters['extensionId'] = self._serialize.query("extension_id", extension_id, 'str', pattern=r'^[A-za-z]{3,50}[.][A-za-z]{3,100}$')
+                query_parameters['weatherDataType'] = self._serialize.query("weather_data_type", weather_data_type, 'str', max_length=50, min_length=0)
+                query_parameters['granularity'] = self._serialize.query("granularity", granularity, 'str', max_length=50, min_length=0)
+                if start_date_time is not None:
+                    query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601')
+                if end_date_time is not None:
+                    query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Service-enforced bounds: 10 <= $maxPageSize <= 1000.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        # Deserializes one page and hands (next_link, items) back to the pager.
+        async def extract_data(pipeline_response):
+            deserialized = self._deserialize('WeatherDataListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, AsyncList(list_of_elem)
+
+        # Fetches a single page; non-200 responses are mapped to typed errors.
+        async def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # failsafe_deserialize: a malformed error body must not mask the HTTP failure.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return AsyncItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/weather'}  # type: ignore
+
+    async def get_data_ingestion_job_details(
+        self,
+        job_id: str,
+        **kwargs
+    ) -> "_models.WeatherDataIngestionJob":
+        """Get weather ingestion job.
+
+        :param job_id: Id of the job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: WeatherDataIngestionJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.WeatherDataIngestionJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.WeatherDataIngestionJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Caller-supplied mappings take precedence over the defaults above.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: jobId is a path parameter, URL-encoded by the serializer.
+        url = self.get_data_ingestion_job_details.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # failsafe_deserialize: a malformed error body must not mask the HTTP failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('WeatherDataIngestionJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_data_ingestion_job_details.metadata = {'url': '/weather/ingest-data/{jobId}'}  # type: ignore
+
+    async def _create_data_ingestion_job_initial(
+        self,
+        job_id: str,
+        job: Optional["_models.WeatherDataIngestionJob"] = None,
+        **kwargs
+    ) -> "_models.WeatherDataIngestionJob":
+        """Initial PUT of the create-data-ingestion-job long-running operation.
+
+        Private helper: callers should use ``begin_create_data_ingestion_job``,
+        which wraps this call in an LRO poller. A 202 Accepted response is the
+        only success status; polling then tracks the job to completion.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.WeatherDataIngestionJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Caller-supplied mappings take precedence over the defaults above.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_data_ingestion_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # A None payload is sent as an empty body rather than JSON "null".
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if job is not None:
+            body_content = self._serialize.body(job, 'WeatherDataIngestionJob')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # failsafe_deserialize: a malformed error body must not mask the HTTP failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('WeatherDataIngestionJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_data_ingestion_job_initial.metadata = {'url': '/weather/ingest-data/{jobId}'}  # type: ignore
+
+ async def begin_create_data_ingestion_job(
+ self,
+ job_id: str,
+ job: Optional["_models.WeatherDataIngestionJob"] = None,
+ **kwargs
+ ) -> AsyncLROPoller["_models.WeatherDataIngestionJob"]:
+ """Create a weather data ingestion job.
+
+ :param job_id: Job id supplied by user.
+ :type job_id: str
+ :param job: Job parameters supplied by user.
+ :type job: ~azure.farmbeats.models.WeatherDataIngestionJob
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncLROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either WeatherDataIngestionJob or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.WeatherDataIngestionJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ # NOTE(review): AutoRest-generated LRO wrapper; hand edits will be lost on regeneration.
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataIngestionJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ # Initial PUT; cls=lambda returns the raw PipelineResponse so the poller can read LRO headers.
+ raw_result = await self._create_data_ingestion_job_initial(
+ job_id=job_id,
+ job=job,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ # Deserialize the final polled response (honoring a user-supplied cls, if any).
+ deserialized = self._deserialize('WeatherDataIngestionJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ # 'final-state-via': 'location' — the final job resource is fetched from the Location header.
+ if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_data_ingestion_job.metadata = {'url': '/weather/ingest-data/{jobId}'} # type: ignore
+
+ async def get_data_delete_job_details(
+ self,
+ job_id: str,
+ **kwargs
+ ) -> "_models.WeatherDataDeleteJob":
+ """Get weather data delete job.
+
+ :param job_id: Id of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WeatherDataDeleteJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.WeatherDataDeleteJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataDeleteJob"]
+ # Map well-known auth/not-found/conflict statuses to typed azure-core exceptions;
+ # callers may extend/override via the 'error_map' kwarg.
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_data_delete_job_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # Only 200 is a success for this GET; anything else raises (typed where mapped).
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('WeatherDataDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_data_delete_job_details.metadata = {'url': '/weather/delete-data/{jobId}'} # type: ignore
+
+ async def _create_data_delete_job_initial(
+ self,
+ job_id: str,
+ job: Optional["_models.WeatherDataDeleteJob"] = None,
+ **kwargs
+ ) -> "_models.WeatherDataDeleteJob":
+ # Initial call of the create-data-delete-job LRO: PUT the (optional) job body to
+ # /weather/delete-data/{jobId}; the service is expected to answer 202 Accepted.
+ # Called by begin_create_data_delete_job with cls set to return the raw response.
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_data_delete_job_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ # Body is optional — serialize only when a job payload was supplied.
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if job is not None:
+ body_content = self._serialize.body(job, 'WeatherDataDeleteJob')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # 202 Accepted is the only success status for the LRO-initiating PUT.
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('WeatherDataDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_data_delete_job_initial.metadata = {'url': '/weather/delete-data/{jobId}'} # type: ignore
+
+ async def begin_create_data_delete_job(
+ self,
+ job_id: str,
+ job: Optional["_models.WeatherDataDeleteJob"] = None,
+ **kwargs
+ ) -> AsyncLROPoller["_models.WeatherDataDeleteJob"]:
+ """Create a weather data delete job.
+
+ :param job_id: Job Id supplied by end user.
+ :type job_id: str
+ :param job: Job parameters supplied by user.
+ :type job: ~azure.farmbeats.models.WeatherDataDeleteJob
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be AsyncLROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either WeatherDataDeleteJob or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure.farmbeats.models.WeatherDataDeleteJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ # NOTE(review): AutoRest-generated LRO wrapper; hand edits will be lost on regeneration.
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataDeleteJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ # Initial PUT; cls=lambda returns the raw PipelineResponse so the poller can read LRO headers.
+ raw_result = await self._create_data_delete_job_initial(
+ job_id=job_id,
+ job=job,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ # Deserialize the final polled response (honoring a user-supplied cls, if any).
+ deserialized = self._deserialize('WeatherDataDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ # 'final-state-via': 'location' — the final job resource is fetched from the Location header.
+ if polling is True: polling_method = AsyncLROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_data_delete_job.metadata = {'url': '/weather/delete-data/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/__init__.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/__init__.py
new file mode 100644
index 000000000000..8bacdc46b9e7
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/__init__.py
@@ -0,0 +1,196 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import ApplicationData
+ from ._models_py3 import ApplicationDataListResponse
+ from ._models_py3 import ApplicationProductDetail
+ from ._models_py3 import Attachment
+ from ._models_py3 import AttachmentListResponse
+ from ._models_py3 import Boundary
+ from ._models_py3 import BoundaryListResponse
+ from ._models_py3 import BoundaryOverlapResponse
+ from ._models_py3 import CascadeDeleteJob
+ from ._models_py3 import Crop
+ from ._models_py3 import CropListResponse
+ from ._models_py3 import CropVariety
+ from ._models_py3 import CropVarietyListResponse
+ from ._models_py3 import Error
+ from ._models_py3 import ErrorResponse
+ from ._models_py3 import Farm
+ from ._models_py3 import FarmListResponse
+ from ._models_py3 import FarmOperationDataIngestionJob
+ from ._models_py3 import Farmer
+ from ._models_py3 import FarmerListResponse
+ from ._models_py3 import Field
+ from ._models_py3 import FieldListResponse
+ from ._models_py3 import GeoJsonObject
+ from ._models_py3 import HarvestData
+ from ._models_py3 import HarvestDataListResponse
+ from ._models_py3 import HarvestProductDetail
+ from ._models_py3 import ImageFile
+ from ._models_py3 import ImageProcessingRasterizeJob
+ from ._models_py3 import InnerError
+ from ._models_py3 import Location
+ from ._models_py3 import Measure
+ from ._models_py3 import MultiPolygon
+ from ._models_py3 import OAuthConnectRequest
+ from ._models_py3 import OAuthProvider
+ from ._models_py3 import OAuthProviderListResponse
+ from ._models_py3 import OAuthToken
+ from ._models_py3 import OAuthTokenListResponse
+ from ._models_py3 import Paths1LxjoxzFarmersFarmeridAttachmentsAttachmentidPatchRequestbodyContentMultipartFormDataSchema
+ from ._models_py3 import PlantingData
+ from ._models_py3 import PlantingDataListResponse
+ from ._models_py3 import PlantingProductDetail
+ from ._models_py3 import Point
+ from ._models_py3 import Polygon
+ from ._models_py3 import SatelliteData
+ from ._models_py3 import SatelliteDataIngestionJob
+ from ._models_py3 import Scene
+ from ._models_py3 import SceneListResponse
+ from ._models_py3 import SearchBoundaryQuery
+ from ._models_py3 import Season
+ from ._models_py3 import SeasonListResponse
+ from ._models_py3 import SeasonalField
+ from ._models_py3 import SeasonalFieldListResponse
+ from ._models_py3 import TillageData
+ from ._models_py3 import TillageDataListResponse
+ from ._models_py3 import WeatherData
+ from ._models_py3 import WeatherDataDeleteJob
+ from ._models_py3 import WeatherDataIngestionJob
+ from ._models_py3 import WeatherDataListResponse
+except (SyntaxError, ImportError):
+ from ._models import ApplicationData # type: ignore
+ from ._models import ApplicationDataListResponse # type: ignore
+ from ._models import ApplicationProductDetail # type: ignore
+ from ._models import Attachment # type: ignore
+ from ._models import AttachmentListResponse # type: ignore
+ from ._models import Boundary # type: ignore
+ from ._models import BoundaryListResponse # type: ignore
+ from ._models import BoundaryOverlapResponse # type: ignore
+ from ._models import CascadeDeleteJob # type: ignore
+ from ._models import Crop # type: ignore
+ from ._models import CropListResponse # type: ignore
+ from ._models import CropVariety # type: ignore
+ from ._models import CropVarietyListResponse # type: ignore
+ from ._models import Error # type: ignore
+ from ._models import ErrorResponse # type: ignore
+ from ._models import Farm # type: ignore
+ from ._models import FarmListResponse # type: ignore
+ from ._models import FarmOperationDataIngestionJob # type: ignore
+ from ._models import Farmer # type: ignore
+ from ._models import FarmerListResponse # type: ignore
+ from ._models import Field # type: ignore
+ from ._models import FieldListResponse # type: ignore
+ from ._models import GeoJsonObject # type: ignore
+ from ._models import HarvestData # type: ignore
+ from ._models import HarvestDataListResponse # type: ignore
+ from ._models import HarvestProductDetail # type: ignore
+ from ._models import ImageFile # type: ignore
+ from ._models import ImageProcessingRasterizeJob # type: ignore
+ from ._models import InnerError # type: ignore
+ from ._models import Location # type: ignore
+ from ._models import Measure # type: ignore
+ from ._models import MultiPolygon # type: ignore
+ from ._models import OAuthConnectRequest # type: ignore
+ from ._models import OAuthProvider # type: ignore
+ from ._models import OAuthProviderListResponse # type: ignore
+ from ._models import OAuthToken # type: ignore
+ from ._models import OAuthTokenListResponse # type: ignore
+ from ._models import Paths1LxjoxzFarmersFarmeridAttachmentsAttachmentidPatchRequestbodyContentMultipartFormDataSchema # type: ignore
+ from ._models import PlantingData # type: ignore
+ from ._models import PlantingDataListResponse # type: ignore
+ from ._models import PlantingProductDetail # type: ignore
+ from ._models import Point # type: ignore
+ from ._models import Polygon # type: ignore
+ from ._models import SatelliteData # type: ignore
+ from ._models import SatelliteDataIngestionJob # type: ignore
+ from ._models import Scene # type: ignore
+ from ._models import SceneListResponse # type: ignore
+ from ._models import SearchBoundaryQuery # type: ignore
+ from ._models import Season # type: ignore
+ from ._models import SeasonListResponse # type: ignore
+ from ._models import SeasonalField # type: ignore
+ from ._models import SeasonalFieldListResponse # type: ignore
+ from ._models import TillageData # type: ignore
+ from ._models import TillageDataListResponse # type: ignore
+ from ._models import WeatherData # type: ignore
+ from ._models import WeatherDataDeleteJob # type: ignore
+ from ._models import WeatherDataIngestionJob # type: ignore
+ from ._models import WeatherDataListResponse # type: ignore
+
+from ._farm_beats_client_enums import (
+ DataProvider,
+ GeoJsonObjectType,
+ ImageFormat,
+ Source,
+)
+
+# Explicit public API of azure.farmbeats.models: all model classes, then the enums.
+__all__ = [
+ 'ApplicationData',
+ 'ApplicationDataListResponse',
+ 'ApplicationProductDetail',
+ 'Attachment',
+ 'AttachmentListResponse',
+ 'Boundary',
+ 'BoundaryListResponse',
+ 'BoundaryOverlapResponse',
+ 'CascadeDeleteJob',
+ 'Crop',
+ 'CropListResponse',
+ 'CropVariety',
+ 'CropVarietyListResponse',
+ 'Error',
+ 'ErrorResponse',
+ 'Farm',
+ 'FarmListResponse',
+ 'FarmOperationDataIngestionJob',
+ 'Farmer',
+ 'FarmerListResponse',
+ 'Field',
+ 'FieldListResponse',
+ 'GeoJsonObject',
+ 'HarvestData',
+ 'HarvestDataListResponse',
+ 'HarvestProductDetail',
+ 'ImageFile',
+ 'ImageProcessingRasterizeJob',
+ 'InnerError',
+ 'Location',
+ 'Measure',
+ 'MultiPolygon',
+ 'OAuthConnectRequest',
+ 'OAuthProvider',
+ 'OAuthProviderListResponse',
+ 'OAuthToken',
+ 'OAuthTokenListResponse',
+ 'Paths1LxjoxzFarmersFarmeridAttachmentsAttachmentidPatchRequestbodyContentMultipartFormDataSchema',
+ 'PlantingData',
+ 'PlantingDataListResponse',
+ 'PlantingProductDetail',
+ 'Point',
+ 'Polygon',
+ 'SatelliteData',
+ 'SatelliteDataIngestionJob',
+ 'Scene',
+ 'SceneListResponse',
+ 'SearchBoundaryQuery',
+ 'Season',
+ 'SeasonListResponse',
+ 'SeasonalField',
+ 'SeasonalFieldListResponse',
+ 'TillageData',
+ 'TillageDataListResponse',
+ 'WeatherData',
+ 'WeatherDataDeleteJob',
+ 'WeatherDataIngestionJob',
+ 'WeatherDataListResponse',
+ 'DataProvider',
+ 'GeoJsonObjectType',
+ 'ImageFormat',
+ 'Source',
+]
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_farm_beats_client_enums.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_farm_beats_client_enums.py
new file mode 100644
index 000000000000..71b152695c6b
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_farm_beats_client_enums.py
@@ -0,0 +1,51 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
+class _CaseInsensitiveEnumMeta(EnumMeta):
+ # Metaclass that makes enum member lookup case-insensitive, for both the
+ # Enum['name'] and Enum.name forms, by upper-casing the name before lookup.
+ def __getitem__(self, name):
+ return super().__getitem__(name.upper())
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+ """
+ try:
+ return cls._member_map_[name.upper()]
+ except KeyError:
+ # Preserve normal attribute-error semantics for unknown names.
+ raise AttributeError(name)
+
+
+class DataProvider(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Provider of satellite data.
+ """
+
+ # Only provider defined in this API version (2021-03-31-preview).
+ MICROSOFT = "Microsoft"
+
+class GeoJsonObjectType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """GeoJSON object type.
+ """
+
+ # Values match GeoJSON "type" member names.
+ POINT = "Point"
+ POLYGON = "Polygon"
+ MULTI_POLYGON = "MultiPolygon"
+
+class ImageFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Supported image formats for scene resource.
+ """
+
+ # Only format defined in this API version.
+ TIF = "TIF"
+
+class Source(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Source of satellite data.
+ """
+
+ # Wire value is "Sentinel_2_L2A"; the member name is auto-generated and differs slightly.
+ SENTINEL2_L2_A = "Sentinel_2_L2A"
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_models.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_models.py
new file mode 100644
index 000000000000..b35b9cc5f439
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_models.py
@@ -0,0 +1,3431 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class ApplicationData(msrest.serialization.Model):
+ """Schema of application data resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param application_product_details: Application product details.
+ :type application_product_details: list[~azure.farmbeats.models.ApplicationProductDetail]
+ :param avg_material: Schema for storing measurement reading and unit.
+ :type avg_material: ~azure.farmbeats.models.Measure
+ :param total_material: Schema for storing measurement reading and unit.
+ :type total_material: ~azure.farmbeats.models.Measure
+ :param area: Schema for storing measurement reading and unit.
+ :type area: ~azure.farmbeats.models.Measure
+ :param source: Source of the operation data.
+ :type source: str
+ :param operation_modified_date_time: Modified date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ Note: this will be specified by the source provider itself.
+ :type operation_modified_date_time: ~datetime.datetime
+ :param operation_start_date_time: Start date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type operation_start_date_time: ~datetime.datetime
+ :param operation_end_date_time: End date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type operation_end_date_time: ~datetime.datetime
+ :ivar attachments_link: Link for attachments.
+ :vartype attachments_link: str
+ :param associated_boundary_id: Optional boundary ID of the field for which operation was
+ applied.
+ :type associated_boundary_id: str
+ :param operation_boundary_id: Optional boundary ID of the actual area for which operation was
+ applied inside the specified field.
+ :type operation_boundary_id: str
+ :ivar farmer_id: Farmer ID which belongs to the operation data.
+ :vartype farmer_id: str
+ :ivar id: Unique resource ID.
+ :vartype id: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ :param status: Status of the resource.
+ :type status: str
+ :ivar created_date_time: Date-time when resource was created, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date-time when resource was last modified, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype modified_date_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ # Length constraints enforced at serialization time; 'readonly' fields are server-populated.
+ _validation = {
+ 'source': {'max_length': 100, 'min_length': 2},
+ 'attachments_link': {'readonly': True},
+ 'farmer_id': {'readonly': True},
+ 'id': {'readonly': True},
+ 'e_tag': {'readonly': True},
+ 'status': {'max_length': 100, 'min_length': 0},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ # Wire-format (camelCase) key and msrest serialization type for each attribute.
+ _attribute_map = {
+ 'application_product_details': {'key': 'applicationProductDetails', 'type': '[ApplicationProductDetail]'},
+ 'avg_material': {'key': 'avgMaterial', 'type': 'Measure'},
+ 'total_material': {'key': 'totalMaterial', 'type': 'Measure'},
+ 'area': {'key': 'area', 'type': 'Measure'},
+ 'source': {'key': 'source', 'type': 'str'},
+ 'operation_modified_date_time': {'key': 'operationModifiedDateTime', 'type': 'iso-8601'},
+ 'operation_start_date_time': {'key': 'operationStartDateTime', 'type': 'iso-8601'},
+ 'operation_end_date_time': {'key': 'operationEndDateTime', 'type': 'iso-8601'},
+ 'attachments_link': {'key': 'attachmentsLink', 'type': 'str'},
+ 'associated_boundary_id': {'key': 'associatedBoundaryId', 'type': 'str'},
+ 'operation_boundary_id': {'key': 'operationBoundaryId', 'type': 'str'},
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ApplicationData, self).__init__(**kwargs)
+ self.application_product_details = kwargs.get('application_product_details', None)
+ self.avg_material = kwargs.get('avg_material', None)
+ self.total_material = kwargs.get('total_material', None)
+ self.area = kwargs.get('area', None)
+ self.source = kwargs.get('source', None)
+ self.operation_modified_date_time = kwargs.get('operation_modified_date_time', None)
+ self.operation_start_date_time = kwargs.get('operation_start_date_time', None)
+ self.operation_end_date_time = kwargs.get('operation_end_date_time', None)
+ # readonly fields (see _validation) are always initialized to None client-side.
+ self.attachments_link = None
+ self.associated_boundary_id = kwargs.get('associated_boundary_id', None)
+ self.operation_boundary_id = kwargs.get('operation_boundary_id', None)
+ self.farmer_id = None
+ self.id = None
+ self.e_tag = None
+ self.status = kwargs.get('status', None)
+ self.created_date_time = None
+ self.modified_date_time = None
+ self.name = kwargs.get('name', None)
+ self.description = kwargs.get('description', None)
+ self.properties = kwargs.get('properties', None)
+
+
+class ApplicationDataListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.ApplicationData]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ # Wire keys: '$skipToken' is OData-style, unlike the plain camelCase of the others.
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ApplicationData]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ApplicationDataListResponse, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.skip_token = kwargs.get('skip_token', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class ApplicationProductDetail(msrest.serialization.Model):
+ """Schema of product used during application.
+
+ :param product_name: Name of the product applied.
+ :type product_name: str
+ :param is_carrier: A flag indicating whether product is a carrier for a tank mix.
+ :type is_carrier: bool
+ :param avg_material: Schema for storing measurement reading and unit.
+ :type avg_material: ~azure.farmbeats.models.Measure
+ :param total_material: Schema for storing measurement reading and unit.
+ :type total_material: ~azure.farmbeats.models.Measure
+ """
+
+ # Length constraint enforced at serialization time.
+ _validation = {
+ 'product_name': {'max_length': 100, 'min_length': 1},
+ }
+
+ # Wire-format (camelCase) key and msrest serialization type for each attribute.
+ _attribute_map = {
+ 'product_name': {'key': 'productName', 'type': 'str'},
+ 'is_carrier': {'key': 'isCarrier', 'type': 'bool'},
+ 'avg_material': {'key': 'avgMaterial', 'type': 'Measure'},
+ 'total_material': {'key': 'totalMaterial', 'type': 'Measure'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ApplicationProductDetail, self).__init__(**kwargs)
+ self.product_name = kwargs.get('product_name', None)
+ # NOTE: unlike the other optional fields, is_carrier defaults to False, not None.
+ self.is_carrier = kwargs.get('is_carrier', False)
+ self.avg_material = kwargs.get('avg_material', None)
+ self.total_material = kwargs.get('total_material', None)
+
+
+class Attachment(msrest.serialization.Model):
+ """Schema of attachment resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar farmer_id: Farmer id for this attachment.
+ :vartype farmer_id: str
+ :param resource_id: Associated Resource id for this attachment.
+ :type resource_id: str
+ :param resource_type: Associated Resource type for this attachment
+ i.e. Farmer, Farm, Field, SeasonalField, Boundary, FarmOperationApplicationData, HarvestData,
+ TillageData, PlantingData.
+ :type resource_type: str
+ :ivar original_file_name: Original File Name for this attachment.
+ :vartype original_file_name: str
+ :ivar id: Unique id.
+ :vartype id: str
+ :param status: Status of the resource.
+ :type status: str
+ :ivar created_date_time: Date when resource was created.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date when resource was last modified.
+ :vartype modified_date_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of resource.
+ :type description: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ """
+
+ # Length constraints enforced at serialization time; 'readonly' fields are server-populated.
+ _validation = {
+ 'farmer_id': {'readonly': True},
+ 'original_file_name': {'readonly': True},
+ 'id': {'readonly': True},
+ 'status': {'max_length': 100, 'min_length': 0},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ 'e_tag': {'readonly': True},
+ }
+
+ # Wire-format (camelCase) key and msrest serialization type for each attribute.
+ _attribute_map = {
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'original_file_name': {'key': 'originalFileName', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Attachment, self).__init__(**kwargs)
+ # readonly fields (see _validation) are always initialized to None client-side.
+ self.farmer_id = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.resource_type = kwargs.get('resource_type', None)
+ self.original_file_name = None
+ self.id = None
+ self.status = kwargs.get('status', None)
+ self.created_date_time = None
+ self.modified_date_time = None
+ self.name = kwargs.get('name', None)
+ self.description = kwargs.get('description', None)
+ self.e_tag = None
+
+
+class AttachmentListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.Attachment]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ # Wire keys: '$skipToken' is OData-style, unlike the plain camelCase of the others.
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Attachment]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AttachmentListResponse, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.skip_token = kwargs.get('skip_token', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
class Boundary(msrest.serialization.Model):
    """Boundary resource: a geometry attached to a field or seasonal field.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar farmer_id: Farmer Id.
    :vartype farmer_id: str
    :param parent_id: Id of the parent (field or seasonalField) this boundary belongs to.
    :type parent_id: str
    :param geometry: GeoJSON abstract class.
    :type geometry: ~azure.farmbeats.models.GeoJsonObject
    :param is_primary: Whether this boundary is the primary one.
    :type is_primary: bool
    :ivar acreage: Boundary area in acres.
    :vartype acreage: float
    :ivar parent_type: Type of the parent this boundary belongs to.
    :vartype parent_type: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used for optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Last-modification date-time, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'farmer_id': {'readonly': True},
        'acreage': {'readonly': True},
        'parent_type': {'readonly': True},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'parent_id': {'key': 'parentId', 'type': 'str'},
        'geometry': {'key': 'geometry', 'type': 'GeoJsonObject'},
        'is_primary': {'key': 'isPrimary', 'type': 'bool'},
        'acreage': {'key': 'acreage', 'type': 'float'},
        'parent_type': {'key': 'parentType', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(Boundary, self).__init__(**kwargs)
        # Server-populated attributes start as None and are ignored on requests.
        self.farmer_id = None
        self.acreage = None
        self.parent_type = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
        # Client-settable attributes; missing keywords default to None.
        self.parent_id = kwargs.get('parent_id')
        self.geometry = kwargs.get('geometry')
        self.is_primary = kwargs.get('is_primary')
        self.status = kwargs.get('status')
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class BoundaryListResponse(msrest.serialization.Model):
    """One page of Boundary results.

    Holds the objects for the current page together with continuation
    information used to retrieve the following page, if any.

    :param value: The boundaries contained in this page.
    :type value: list[~azure.farmbeats.models.Boundary]
    :param skip_token: Continuation token for the next page; ``None`` means
     there are no further pages.
    :type skip_token: str
    :param next_link: Absolute URI pointing at the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Boundary]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(BoundaryListResponse, self).__init__(**kwargs)
        # Every field is optional; a missing keyword leaves the attribute None.
        self.value = kwargs.get('value')
        self.skip_token = kwargs.get('skip_token')
        self.next_link = kwargs.get('next_link')
+
+
class BoundaryOverlapResponse(msrest.serialization.Model):
    """Result of a boundary-overlap computation between two boundaries.

    :param boundary_acreage: Acreage of the main boundary.
    :type boundary_acreage: float
    :param other_boundary_acreage: Acreage of the other boundary.
    :type other_boundary_acreage: float
    :param intersecting_acreage: Acreage of the intersecting region.
    :type intersecting_acreage: float
    """

    _attribute_map = {
        'boundary_acreage': {'key': 'boundaryAcreage', 'type': 'float'},
        'other_boundary_acreage': {'key': 'otherBoundaryAcreage', 'type': 'float'},
        'intersecting_acreage': {'key': 'intersectingAcreage', 'type': 'float'},
    }

    def __init__(self, **kwargs):
        super(BoundaryOverlapResponse, self).__init__(**kwargs)
        # All acreage figures are optional; absent keywords default to None.
        self.boundary_acreage = kwargs.get('boundary_acreage')
        self.other_boundary_acreage = kwargs.get('other_boundary_acreage')
        self.intersecting_acreage = kwargs.get('intersecting_acreage')
+
+
class CascadeDeleteJob(msrest.serialization.Model):
    """Cascade-delete job: deletes a resource and everything under it.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer Id.
    :type farmer_id: str
    :param resource_id: Required. The id of the resource to delete.
    :type resource_id: str
    :param resource_type: Required. The type of the resource to delete.
    :type resource_type: str
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job.
     Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message capturing more details of the job.
    :vartype message: str
    :ivar created_date_time: Job creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Date-time the job was last acted upon, sample
     format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'farmer_id': {'required': True},
        'resource_id': {'required': True},
        'resource_type': {'required': True},
        'id': {'readonly': True},
        'status': {'readonly': True},
        'duration_in_seconds': {'readonly': True},
        'message': {'readonly': True},
        'created_date_time': {'readonly': True},
        'last_action_date_time': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
        'message': {'key': 'message', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(CascadeDeleteJob, self).__init__(**kwargs)
        # Required keywords: bracket access deliberately raises KeyError when
        # a caller omits one (checked in the same order as declared).
        self.farmer_id = kwargs['farmer_id']
        self.resource_id = kwargs['resource_id']
        self.resource_type = kwargs['resource_type']
        # Server-populated attributes start as None and are ignored on requests.
        self.id = None
        self.status = None
        self.duration_in_seconds = None
        self.message = None
        self.created_date_time = None
        self.last_action_date_time = None
        self.start_time = None
        self.end_time = None
        # Optional client-settable attributes.
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class Crop(msrest.serialization.Model):
    """Crop resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param phenotype: Crop phenotype.
    :type phenotype: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used for optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Last-modification date-time, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'phenotype': {'max_length': 100, 'min_length': 0},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'phenotype': {'key': 'phenotype', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(Crop, self).__init__(**kwargs)
        # Server-populated attributes start as None and are ignored on requests.
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
        # Client-settable attributes; missing keywords default to None.
        self.phenotype = kwargs.get('phenotype')
        self.status = kwargs.get('status')
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class CropListResponse(msrest.serialization.Model):
    """One page of Crop results.

    Holds the objects for the current page together with continuation
    information used to retrieve the following page, if any.

    :param value: The crops contained in this page.
    :type value: list[~azure.farmbeats.models.Crop]
    :param skip_token: Continuation token for the next page; ``None`` means
     there are no further pages.
    :type skip_token: str
    :param next_link: Absolute URI pointing at the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Crop]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CropListResponse, self).__init__(**kwargs)
        # Every field is optional; a missing keyword leaves the attribute None.
        self.value = kwargs.get('value')
        self.skip_token = kwargs.get('skip_token')
        self.next_link = kwargs.get('next_link')
+
+
class CropVariety(msrest.serialization.Model):
    """Crop variety resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar crop_id: Id of the crop this variety belongs to.
    :vartype crop_id: str
    :param brand: CropVariety brand.
    :type brand: str
    :param product: CropVariety product.
    :type product: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used for optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Last-modification date-time, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'crop_id': {'readonly': True},
        'brand': {'max_length': 100, 'min_length': 0},
        'product': {'max_length': 100, 'min_length': 0},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'crop_id': {'key': 'cropId', 'type': 'str'},
        'brand': {'key': 'brand', 'type': 'str'},
        'product': {'key': 'product', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(CropVariety, self).__init__(**kwargs)
        # Server-populated attributes start as None and are ignored on requests.
        self.crop_id = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
        # Client-settable attributes; missing keywords default to None.
        self.brand = kwargs.get('brand')
        self.product = kwargs.get('product')
        self.status = kwargs.get('status')
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class CropVarietyListResponse(msrest.serialization.Model):
    """One page of CropVariety results.

    Holds the objects for the current page together with continuation
    information used to retrieve the following page, if any.

    :param value: The crop varieties contained in this page.
    :type value: list[~azure.farmbeats.models.CropVariety]
    :param skip_token: Continuation token for the next page; ``None`` means
     there are no further pages.
    :type skip_token: str
    :param next_link: Absolute URI pointing at the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[CropVariety]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CropVarietyListResponse, self).__init__(**kwargs)
        # Every field is optional; a missing keyword leaves the attribute None.
        self.value = kwargs.get('value')
        self.skip_token = kwargs.get('skip_token')
        self.next_link = kwargs.get('next_link')
+
+
class Error(msrest.serialization.Model):
    """An error from the Azure AgPlatform service.

    :param code: Server-defined set of error codes.
    :type code: str
    :param message: Human-readable representation of the error.
    :type message: str
    :param target: Target of the error.
    :type target: str
    :param details: Array of details about specific errors that led to this
     reported error.
    :type details: list[~azure.farmbeats.models.Error]
    :param innererror: Inner error containing a list of errors. See the
     ``InnerError`` reference documentation for details.
    :type innererror: ~azure.farmbeats.models.InnerError
    """

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[Error]'},
        'innererror': {'key': 'innererror', 'type': 'InnerError'},
    }

    def __init__(self, **kwargs):
        super(Error, self).__init__(**kwargs)
        # Every field is optional; a missing keyword leaves the attribute None.
        self.code = kwargs.get('code')
        self.message = kwargs.get('message')
        self.target = kwargs.get('target')
        self.details = kwargs.get('details')
        self.innererror = kwargs.get('innererror')
+
+
class ErrorResponse(msrest.serialization.Model):
    """An error response from the Azure AgPlatform service.

    See the ``ErrorResponse`` reference documentation for details.

    :param error: An error from the Azure AgPlatform service.
    :type error: ~azure.farmbeats.models.Error
    :param trace_id: Unique trace Id.
    :type trace_id: str
    """

    _attribute_map = {
        'error': {'key': 'error', 'type': 'Error'},
        'trace_id': {'key': 'traceId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ErrorResponse, self).__init__(**kwargs)
        # Both fields are optional; missing keywords default to None.
        self.error = kwargs.get('error')
        self.trace_id = kwargs.get('trace_id')
+
+
class Farm(msrest.serialization.Model):
    """Farm resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar farmer_id: Farmer Id.
    :vartype farmer_id: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used for optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Last-modification date-time, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'farmer_id': {'readonly': True},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(Farm, self).__init__(**kwargs)
        # Server-populated attributes start as None and are ignored on requests.
        self.farmer_id = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
        # Client-settable attributes; missing keywords default to None.
        self.status = kwargs.get('status')
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class Farmer(msrest.serialization.Model):
    """Farmer resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used for optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Last-modification date-time, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(Farmer, self).__init__(**kwargs)
        # Server-populated attributes start as None and are ignored on requests.
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
        # Client-settable attributes; missing keywords default to None.
        self.status = kwargs.get('status')
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class FarmerListResponse(msrest.serialization.Model):
    """One page of Farmer results.

    Holds the objects for the current page together with continuation
    information used to retrieve the following page, if any.

    :param value: The farmers contained in this page.
    :type value: list[~azure.farmbeats.models.Farmer]
    :param skip_token: Continuation token for the next page; ``None`` means
     there are no further pages.
    :type skip_token: str
    :param next_link: Absolute URI pointing at the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Farmer]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(FarmerListResponse, self).__init__(**kwargs)
        # Every field is optional; a missing keyword leaves the attribute None.
        self.value = kwargs.get('value')
        self.skip_token = kwargs.get('skip_token')
        self.next_link = kwargs.get('next_link')
+
+
class FarmListResponse(msrest.serialization.Model):
    """One page of Farm results.

    Holds the objects for the current page together with continuation
    information used to retrieve the following page, if any.

    :param value: The farms contained in this page.
    :type value: list[~azure.farmbeats.models.Farm]
    :param skip_token: Continuation token for the next page; ``None`` means
     there are no further pages.
    :type skip_token: str
    :param next_link: Absolute URI pointing at the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Farm]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(FarmListResponse, self).__init__(**kwargs)
        # Every field is optional; a missing keyword leaves the attribute None.
        self.value = kwargs.get('value')
        self.skip_token = kwargs.get('skip_token')
        self.next_link = kwargs.get('next_link')
+
+
class FarmOperationDataIngestionJob(msrest.serialization.Model):
    """Farm-operation data ingestion job.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer Id.
    :type farmer_id: str
    :param auth_provider_id: Required. Authentication provider Id.
    :type auth_provider_id: str
    :param operations: List of operation types for which data needs to be
     downloaded. Available values: AllOperations, Application, Planting,
     Harvest, Tillage.
    :type operations: list[str]
    :param start_year: Required. Start Year (Minimum = 2000, Maximum = CurrentYear).
    :type start_year: int
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job.
     Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message capturing more details of the job.
    :vartype message: str
    :ivar created_date_time: Job creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Date-time the job was last acted upon, sample
     format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'farmer_id': {'required': True},
        'auth_provider_id': {'required': True},
        'start_year': {'required': True},
        'id': {'readonly': True},
        'status': {'readonly': True},
        'duration_in_seconds': {'readonly': True},
        'message': {'readonly': True},
        'created_date_time': {'readonly': True},
        'last_action_date_time': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'auth_provider_id': {'key': 'authProviderId', 'type': 'str'},
        'operations': {'key': 'operations', 'type': '[str]'},
        'start_year': {'key': 'startYear', 'type': 'int'},
        'id': {'key': 'id', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
        'message': {'key': 'message', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(FarmOperationDataIngestionJob, self).__init__(**kwargs)
        # Required keywords: bracket access deliberately raises KeyError when
        # a caller omits one (checked in the same order as declared).
        self.farmer_id = kwargs['farmer_id']
        self.auth_provider_id = kwargs['auth_provider_id']
        self.operations = kwargs.get('operations')
        self.start_year = kwargs['start_year']
        # Server-populated attributes start as None and are ignored on requests.
        self.id = None
        self.status = None
        self.duration_in_seconds = None
        self.message = None
        self.created_date_time = None
        self.last_action_date_time = None
        self.start_time = None
        self.end_time = None
        # Optional client-settable attributes.
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class Field(msrest.serialization.Model):
    """Field resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param farm_id: Id of the associated Farm.
    :type farm_id: str
    :ivar farmer_id: Farmer Id.
    :vartype farmer_id: str
    :ivar primary_boundary_id: Primary boundary id.
    :vartype primary_boundary_id: str
    :ivar boundary_ids: Boundary Ids.
    :vartype boundary_ids: list[str]
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used for optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Creation date-time, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Last-modification date-time, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify the resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: User-defined key/value pairs attached to the resource.
     Keys are limited to 50 characters and values to 150 characters; at most
     25 pairs are allowed, with only string and numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'farmer_id': {'readonly': True},
        'primary_boundary_id': {'readonly': True},
        'boundary_ids': {'readonly': True, 'unique': True},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'farm_id': {'key': 'farmId', 'type': 'str'},
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'primary_boundary_id': {'key': 'primaryBoundaryId', 'type': 'str'},
        'boundary_ids': {'key': 'boundaryIds', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(Field, self).__init__(**kwargs)
        # Server-populated attributes start as None and are ignored on requests.
        self.farmer_id = None
        self.primary_boundary_id = None
        self.boundary_ids = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
        # Client-settable attributes; missing keywords default to None.
        self.farm_id = kwargs.get('farm_id')
        self.status = kwargs.get('status')
        self.name = kwargs.get('name')
        self.description = kwargs.get('description')
        self.properties = kwargs.get('properties')
+
+
class FieldListResponse(msrest.serialization.Model):
    """One page of ``Field`` results plus continuation metadata.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.Field]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Field]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """All three paging attributes are optional keyword arguments."""
        super(FieldListResponse, self).__init__(**kwargs)
        for attr in ('value', 'skip_token', 'next_link'):
            setattr(self, attr, kwargs.get(attr))
+
+
class GeoJsonObject(msrest.serialization.Model):
    """GeoJSON abstract base class.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: MultiPolygon, Point, Polygon.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. GeoJSON object type. Constant filled by server. Possible values include:
     "Point", "Polygon", "MultiPolygon".
    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
    """

    _validation = {
        'type': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
    }

    # Maps the JSON 'type' discriminator value to the concrete model class name,
    # used by msrest when deserializing polymorphic payloads.
    _subtype_map = {
        'type': {'MultiPolygon': 'MultiPolygon', 'Point': 'Point', 'Polygon': 'Polygon'}
    }

    def __init__(
        self,
        **kwargs
    ):
        super(GeoJsonObject, self).__init__(**kwargs)
        # The discriminator is assigned a constant in each concrete subclass.
        self.type = None  # type: Optional[str]
+
+
class HarvestData(msrest.serialization.Model):
    """Schema of harvest data resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param total_yield: Schema for storing measurement reading and unit.
    :type total_yield: ~azure.farmbeats.models.Measure
    :param avg_yield: Schema for storing measurement reading and unit.
    :type avg_yield: ~azure.farmbeats.models.Measure
    :param total_wet_mass: Schema for storing measurement reading and unit.
    :type total_wet_mass: ~azure.farmbeats.models.Measure
    :param avg_wet_mass: Schema for storing measurement reading and unit.
    :type avg_wet_mass: ~azure.farmbeats.models.Measure
    :param avg_moisture: Schema for storing measurement reading and unit.
    :type avg_moisture: ~azure.farmbeats.models.Measure
    :param avg_speed: Schema for storing measurement reading and unit.
    :type avg_speed: ~azure.farmbeats.models.Measure
    :param harvest_product_details: Harvest product details.
    :type harvest_product_details: list[~azure.farmbeats.models.HarvestProductDetail]
    :param area: Schema for storing measurement reading and unit.
    :type area: ~azure.farmbeats.models.Measure
    :param source: Source of the operation data.
    :type source: str
    :param operation_modified_date_time: Modified date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
     Note: this will be specified by the source provider itself.
    :type operation_modified_date_time: ~datetime.datetime
    :param operation_start_date_time: Start date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type operation_start_date_time: ~datetime.datetime
    :param operation_end_date_time: End date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type operation_end_date_time: ~datetime.datetime
    :ivar attachments_link: Link for attachments.
    :vartype attachments_link: str
    :param associated_boundary_id: Optional boundary ID of the field for which operation was
     applied.
    :type associated_boundary_id: str
    :param operation_boundary_id: Optional boundary ID of the actual area for which operation was
     applied inside the specified field.
    :type operation_boundary_id: str
    :ivar farmer_id: Farmer ID which belongs to the operation data.
    :vartype farmer_id: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: A collection of key value pairs that belongs to the resource.
     Each pair must not have a key greater than 50 characters
     and must not have a value greater than 150 characters.
     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
     numeral values are supported.
    :type properties: dict[str, object]
    """

    # Client-side constraints enforced before serialization; 'readonly'
    # attributes are server-populated and stripped from outgoing requests.
    _validation = {
        'source': {'max_length': 100, 'min_length': 2},
        'attachments_link': {'readonly': True},
        'farmer_id': {'readonly': True},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    # Maps Python attribute names to their JSON wire names and msrest types.
    _attribute_map = {
        'total_yield': {'key': 'totalYield', 'type': 'Measure'},
        'avg_yield': {'key': 'avgYield', 'type': 'Measure'},
        'total_wet_mass': {'key': 'totalWetMass', 'type': 'Measure'},
        'avg_wet_mass': {'key': 'avgWetMass', 'type': 'Measure'},
        'avg_moisture': {'key': 'avgMoisture', 'type': 'Measure'},
        'avg_speed': {'key': 'avgSpeed', 'type': 'Measure'},
        'harvest_product_details': {'key': 'harvestProductDetails', 'type': '[HarvestProductDetail]'},
        'area': {'key': 'area', 'type': 'Measure'},
        'source': {'key': 'source', 'type': 'str'},
        'operation_modified_date_time': {'key': 'operationModifiedDateTime', 'type': 'iso-8601'},
        'operation_start_date_time': {'key': 'operationStartDateTime', 'type': 'iso-8601'},
        'operation_end_date_time': {'key': 'operationEndDateTime', 'type': 'iso-8601'},
        'attachments_link': {'key': 'attachmentsLink', 'type': 'str'},
        'associated_boundary_id': {'key': 'associatedBoundaryId', 'type': 'str'},
        'operation_boundary_id': {'key': 'operationBoundaryId', 'type': 'str'},
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Initialize a HarvestData model; all parameters are optional keywords."""
        super(HarvestData, self).__init__(**kwargs)
        self.total_yield = kwargs.get('total_yield', None)
        self.avg_yield = kwargs.get('avg_yield', None)
        self.total_wet_mass = kwargs.get('total_wet_mass', None)
        self.avg_wet_mass = kwargs.get('avg_wet_mass', None)
        self.avg_moisture = kwargs.get('avg_moisture', None)
        self.avg_speed = kwargs.get('avg_speed', None)
        self.harvest_product_details = kwargs.get('harvest_product_details', None)
        self.area = kwargs.get('area', None)
        self.source = kwargs.get('source', None)
        self.operation_modified_date_time = kwargs.get('operation_modified_date_time', None)
        self.operation_start_date_time = kwargs.get('operation_start_date_time', None)
        self.operation_end_date_time = kwargs.get('operation_end_date_time', None)
        # Read-only: populated by the service on responses only.
        self.attachments_link = None
        self.associated_boundary_id = kwargs.get('associated_boundary_id', None)
        self.operation_boundary_id = kwargs.get('operation_boundary_id', None)
        # Read-only identity/concurrency fields managed by the service.
        self.farmer_id = None
        self.id = None
        self.e_tag = None
        self.status = kwargs.get('status', None)
        # Read-only timestamps managed by the service.
        self.created_date_time = None
        self.modified_date_time = None
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
+
+
class HarvestDataListResponse(msrest.serialization.Model):
    """One page of ``HarvestData`` results plus continuation metadata.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.HarvestData]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[HarvestData]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Every paging attribute is an optional keyword argument."""
        super(HarvestDataListResponse, self).__init__(**kwargs)
        self.value = kwargs.get('value')
        self.skip_token = kwargs.get('skip_token')
        self.next_link = kwargs.get('next_link')
+
+
class HarvestProductDetail(msrest.serialization.Model):
    """Details of a single product used during harvesting.

    :param product_name: Name of the product.
    :type product_name: str
    :param area: Schema for storing measurement reading and unit.
    :type area: ~azure.farmbeats.models.Measure
    :param total_yield: Schema for storing measurement reading and unit.
    :type total_yield: ~azure.farmbeats.models.Measure
    :param avg_yield: Schema for storing measurement reading and unit.
    :type avg_yield: ~azure.farmbeats.models.Measure
    :param avg_moisture: Schema for storing measurement reading and unit.
    :type avg_moisture: ~azure.farmbeats.models.Measure
    :param total_wet_mass: Schema for storing measurement reading and unit.
    :type total_wet_mass: ~azure.farmbeats.models.Measure
    :param avg_wet_mass: Schema for storing measurement reading and unit.
    :type avg_wet_mass: ~azure.farmbeats.models.Measure
    """

    _validation = {
        'product_name': {'max_length': 100, 'min_length': 1},
    }

    _attribute_map = {
        'product_name': {'key': 'productName', 'type': 'str'},
        'area': {'key': 'area', 'type': 'Measure'},
        'total_yield': {'key': 'totalYield', 'type': 'Measure'},
        'avg_yield': {'key': 'avgYield', 'type': 'Measure'},
        'avg_moisture': {'key': 'avgMoisture', 'type': 'Measure'},
        'total_wet_mass': {'key': 'totalWetMass', 'type': 'Measure'},
        'avg_wet_mass': {'key': 'avgWetMass', 'type': 'Measure'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Populate every attribute from its (optional) keyword argument."""
        super(HarvestProductDetail, self).__init__(**kwargs)
        for attr in (
            'product_name', 'area', 'total_yield', 'avg_yield',
            'avg_moisture', 'total_wet_mass', 'avg_wet_mass',
        ):
            setattr(self, attr, kwargs.get(attr))
+
+
class ImageFile(msrest.serialization.Model):
    """Schema of an image file resource.

    All required parameters must be populated in order to send to Azure.

    :param file_link: Link of the image file.
    :type file_link: str
    :param name: Required. Name of the image file.
    :type name: str
    :param image_format: Supported image formats for scene resource. Possible values include:
     "TIF".
    :type image_format: str or ~azure.farmbeats.models.ImageFormat
    :param resolution: Resolution of image file in meters.
    :type resolution: float
    """

    _validation = {
        'name': {'required': True},
    }

    _attribute_map = {
        'file_link': {'key': 'fileLink', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'image_format': {'key': 'imageFormat', 'type': 'str'},
        'resolution': {'key': 'resolution', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Build the model; a missing ``name`` raises ``KeyError``."""
        super(ImageFile, self).__init__(**kwargs)
        # 'name' is the only required field.
        self.name = kwargs['name']
        self.file_link = kwargs.get('file_link')
        self.image_format = kwargs.get('image_format')
        self.resolution = kwargs.get('resolution')
+
+
class ImageProcessingRasterizeJob(msrest.serialization.Model):
    """ImageProcessingRasterizeJob.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer Id.
    :type farmer_id: str
    :param shapefile_attachment_id: Required. Shapefile attachment Id.
    :type shapefile_attachment_id: str
    :param shapefile_column_names: Required. List of shapefile column names to create raster
     attachments.
    :type shapefile_column_names: list[str]
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job.
     Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message to capture more details of the job.
    :vartype message: str
    :ivar created_date_time: Job created at dateTime. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Job was last acted upon at dateTime. Sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: A collection of key value pairs that belongs to the resource.
     Each pair must not have a key greater than 50 characters
     and must not have a value greater than 150 characters.
     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
     numeral values are supported.
    :type properties: dict[str, object]
    """

    # Client-side constraints; 'readonly' entries are server-populated job
    # progress fields and are stripped from outgoing requests.
    _validation = {
        'farmer_id': {'required': True},
        'shapefile_attachment_id': {'required': True},
        'shapefile_column_names': {'required': True},
        'id': {'readonly': True},
        'status': {'readonly': True},
        'duration_in_seconds': {'readonly': True},
        'message': {'readonly': True},
        'created_date_time': {'readonly': True},
        'last_action_date_time': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    # Maps Python attribute names to their JSON wire names and msrest types.
    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'shapefile_attachment_id': {'key': 'shapefileAttachmentId', 'type': 'str'},
        'shapefile_column_names': {'key': 'shapefileColumnNames', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
        'message': {'key': 'message', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Initialize the job request; the three required keys raise ``KeyError`` if absent."""
        super(ImageProcessingRasterizeJob, self).__init__(**kwargs)
        # Required inputs for submitting the rasterize job.
        self.farmer_id = kwargs['farmer_id']
        self.shapefile_attachment_id = kwargs['shapefile_attachment_id']
        self.shapefile_column_names = kwargs['shapefile_column_names']
        # Read-only job-progress fields populated by the service.
        self.id = None
        self.status = None
        self.duration_in_seconds = None
        self.message = None
        self.created_date_time = None
        self.last_action_date_time = None
        self.start_time = None
        self.end_time = None
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
+
+
class InnerError(msrest.serialization.Model):
    """Inner error containing a list of errors.

    See the ``InnerError`` reference document for the shared Azure error schema.

    :param additional_properties: Unmatched properties from the message are deserialized to this
     collection.
    :type additional_properties: dict[str, object]
    :param code: Specific error code that was provided by the containing error.
    :type code: str
    :param innererror: Nested inner error, following this same schema recursively.
    :type innererror: ~azure.farmbeats.models.InnerError
    """

    _attribute_map = {
        # Empty key: msrest collects any unrecognized top-level JSON properties here.
        'additional_properties': {'key': '', 'type': '{object}'},
        'code': {'key': 'code', 'type': 'str'},
        'innererror': {'key': 'innererror', 'type': 'InnerError'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(InnerError, self).__init__(**kwargs)
        self.additional_properties = kwargs.get('additional_properties', None)
        self.code = kwargs.get('code', None)
        self.innererror = kwargs.get('innererror', None)
+
+
class Location(msrest.serialization.Model):
    """A geographic point expressed as a latitude/longitude pair.

    All required parameters must be populated in order to send to Azure.

    :param latitude: Required. Latitude of the location.
    :type latitude: float
    :param longitude: Required. Longitude of the location.
    :type longitude: float
    """

    _validation = {
        'latitude': {'required': True, 'maximum': 90, 'minimum': -90},
        'longitude': {'required': True, 'maximum': 180, 'minimum': -180},
    }

    _attribute_map = {
        'latitude': {'key': 'latitude', 'type': 'float'},
        'longitude': {'key': 'longitude', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Build the point; both coordinates are mandatory (``KeyError`` if absent)."""
        super(Location, self).__init__(**kwargs)
        lat = kwargs['latitude']
        lon = kwargs['longitude']
        self.latitude = lat
        self.longitude = lon
+
+
class Measure(msrest.serialization.Model):
    """A single measurement: a numeric reading together with its unit.

    :param unit: Data unit.
    :type unit: str
    :param value: Data value.
    :type value: float
    """

    _validation = {
        'unit': {'max_length': 50, 'min_length': 1},
    }

    _attribute_map = {
        'unit': {'key': 'unit', 'type': 'str'},
        'value': {'key': 'value', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Both attributes are optional keyword arguments."""
        super(Measure, self).__init__(**kwargs)
        self.unit = kwargs.get('unit')
        self.value = kwargs.get('value')
+
+
class MultiPolygon(GeoJsonObject):
    """MultiPolygon geometry.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. GeoJSON object type. Constant filled by server. Possible values include:
     "Point", "Polygon", "MultiPolygon".
    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
    :param coordinates: Required. Gets or sets Coordinates of GeoJSON Object.
     It must be an array of polygons, each polygon contains list of linear rings.
     For Polygons with more than one of these rings, the first MUST be the exterior ring,
     and any others MUST be interior rings.
    :type coordinates: list[list[list[list[float]]]]
    """

    _validation = {
        'type': {'required': True},
        'coordinates': {'required': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'coordinates': {'key': 'coordinates', 'type': '[[[[float]]]]'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Build the geometry; ``coordinates`` is mandatory (``KeyError`` if absent)."""
        super(MultiPolygon, self).__init__(**kwargs)
        # Fixed GeoJSON discriminator for this concrete geometry.
        self.type = 'MultiPolygon'  # type: str
        self.coordinates = kwargs['coordinates']
+
+
class OAuthConnectRequest(msrest.serialization.Model):
    """Parameters for starting an OAuth connection flow.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Id of the farmer.
    :type farmer_id: str
    :param o_auth_provider_id: Required. Id of the OAuthProvider.
    :type o_auth_provider_id: str
    :param user_redirect_link: Required. Link to redirect the user to, at the end of the oauth
     flow.
    :type user_redirect_link: str
    :param user_redirect_state: State to provide back when redirecting the user, at the end of the
     oauth flow.
    :type user_redirect_state: str
    """

    _validation = {
        'farmer_id': {'required': True},
        'o_auth_provider_id': {'required': True},
        'user_redirect_link': {'required': True, 'max_length': 1000, 'min_length': 0},
        'user_redirect_state': {'max_length': 200, 'min_length': 0},
    }

    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'o_auth_provider_id': {'key': 'oAuthProviderId', 'type': 'str'},
        'user_redirect_link': {'key': 'userRedirectLink', 'type': 'str'},
        'user_redirect_state': {'key': 'userRedirectState', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Build the request; the three required keys raise ``KeyError`` when missing."""
        super(OAuthConnectRequest, self).__init__(**kwargs)
        for required in ('farmer_id', 'o_auth_provider_id', 'user_redirect_link'):
            setattr(self, required, kwargs[required])
        self.user_redirect_state = kwargs.get('user_redirect_state')
+
+
class OAuthProvider(msrest.serialization.Model):
    """Schema of OAuth provider resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param app_id: OAuth App Id for given OAuth Provider.
    :type app_id: str
    :param app_secret: OAuth App secret for given Provider.
     Note: Won't be sent in response.
    :type app_secret: str
    :param api_key: OAuth Api key for given Provider.
     Note: currently Applicable to Climate provider. Won't be sent in response.
    :type api_key: str
    :param is_production_app: An optional flag to determine if the App is ready to be used for
     Production scenarios in the provider side or not. (Default value: false)
     Note: Currently applicable for JohnDeere.
    :type is_production_app: bool
    :ivar id: Unique OAuth provider ID.
    :vartype id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :ivar created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: A collection of key value pairs that belongs to the resource.
     Each pair must not have a key greater than 50 characters
     and must not have a value greater than 150 characters.
     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
     numeral values are supported.
    :type properties: dict[str, object]
    """

    # Client-side constraints; 'readonly' fields are server-populated and
    # stripped from outgoing requests.
    _validation = {
        'app_id': {'max_length': 200, 'min_length': 2},
        'app_secret': {'max_length': 200, 'min_length': 2},
        'api_key': {'max_length': 200, 'min_length': 2},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    # Maps Python attribute names to their JSON wire names and msrest types.
    _attribute_map = {
        'app_id': {'key': 'appId', 'type': 'str'},
        'app_secret': {'key': 'appSecret', 'type': 'str'},
        'api_key': {'key': 'apiKey', 'type': 'str'},
        'is_production_app': {'key': 'isProductionApp', 'type': 'bool'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Initialize an OAuthProvider model; all parameters are optional keywords."""
        super(OAuthProvider, self).__init__(**kwargs)
        self.app_id = kwargs.get('app_id', None)
        # Secret material: accepted on requests but never echoed in responses.
        self.app_secret = kwargs.get('app_secret', None)
        self.api_key = kwargs.get('api_key', None)
        # Defaults to False (non-production) when not supplied.
        self.is_production_app = kwargs.get('is_production_app', False)
        # Read-only fields populated by the service.
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
+
+
class OAuthProviderListResponse(msrest.serialization.Model):
    """One page of ``OAuthProvider`` results plus continuation metadata.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.OAuthProvider]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[OAuthProvider]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """All three paging attributes are optional keyword arguments."""
        super(OAuthProviderListResponse, self).__init__(**kwargs)
        for attr in ('value', 'skip_token', 'next_link'):
            setattr(self, attr, kwargs.get(attr))
+
+
class OAuthToken(msrest.serialization.Model):
    """Schema of OAuth token resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer ID for this OAuth config.
    :type farmer_id: str
    :param auth_provider_id: Required. ID of the OAuth provider resource containing app
     information.
    :type auth_provider_id: str
    :param is_valid: An optional flag indicating whether the token is a valid or expired (Default
     value: true).
    :type is_valid: bool
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :ivar created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    """

    _validation = {
        'farmer_id': {'required': True},
        'auth_provider_id': {'required': True},
        'e_tag': {'readonly': True},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
    }

    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'auth_provider_id': {'key': 'authProviderId', 'type': 'str'},
        'is_valid': {'key': 'isValid', 'type': 'bool'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Build the token record; ``farmer_id`` and ``auth_provider_id`` are mandatory."""
        super(OAuthToken, self).__init__(**kwargs)
        self.farmer_id = kwargs['farmer_id']
        self.auth_provider_id = kwargs['auth_provider_id']
        # Tokens are treated as valid unless explicitly flagged otherwise.
        self.is_valid = kwargs.get('is_valid', True)
        # Server-managed fields start empty and are filled on deserialization.
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class OAuthTokenListResponse(msrest.serialization.Model):
    """One page of ``OAuthToken`` results plus continuation metadata.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.OAuthToken]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[OAuthToken]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Every paging attribute is an optional keyword argument."""
        super(OAuthTokenListResponse, self).__init__(**kwargs)
        self.value = kwargs.get('value')
        self.skip_token = kwargs.get('skip_token')
        self.next_link = kwargs.get('next_link')
+
+
# NOTE: the class name below is a synthetic identifier generated by autorest
# for an inline multipart/form-data request-body schema; do not rename it.
class Paths1LxjoxzFarmersFarmeridAttachmentsAttachmentidPatchRequestbodyContentMultipartFormDataSchema(msrest.serialization.Model):
    """Multipart/form-data body for the attachment PATCH operation.

    :param file: File to be uploaded.
    :type file: IO
    :param farmer_id: Farmer id for this attachment.
    :type farmer_id: str
    :param resource_id: Associated Resource id for this attachment.
    :type resource_id: str
    :param resource_type: Associated Resource type for this attachment
     i.e. Farmer, Farm, Field, SeasonalField, Boundary, FarmOperationApplicationData, HarvestData,
     TillageData, PlantingData.
    :type resource_type: str
    :param original_file_name: Original File Name for this attachment.
    :type original_file_name: str
    :param id: Unique id.
    :type id: str
    :param status: Status of the resource.
    :type status: str
    :param created_date_time: Date when resource was created.
    :type created_date_time: str
    :param modified_date_time: Date when resource was last modified.
    :type modified_date_time: str
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of resource.
    :type description: str
    :param e_tag: The ETag value to implement optimistic concurrency.
    :type e_tag: str
    """

    # Wire names here are PascalCase (unlike the camelCase used elsewhere in
    # this file) because they name multipart form-data parts, not JSON fields.
    _attribute_map = {
        'file': {'key': 'file', 'type': 'IO'},
        'farmer_id': {'key': 'FarmerId', 'type': 'str'},
        'resource_id': {'key': 'ResourceId', 'type': 'str'},
        'resource_type': {'key': 'ResourceType', 'type': 'str'},
        'original_file_name': {'key': 'OriginalFileName', 'type': 'str'},
        'id': {'key': 'Id', 'type': 'str'},
        'status': {'key': 'Status', 'type': 'str'},
        'created_date_time': {'key': 'CreatedDateTime', 'type': 'str'},
        'modified_date_time': {'key': 'ModifiedDateTime', 'type': 'str'},
        'name': {'key': 'Name', 'type': 'str'},
        'description': {'key': 'Description', 'type': 'str'},
        'e_tag': {'key': 'ETag', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        """Initialize the form-data body; every part is an optional keyword."""
        super(Paths1LxjoxzFarmersFarmeridAttachmentsAttachmentidPatchRequestbodyContentMultipartFormDataSchema, self).__init__(**kwargs)
        self.file = kwargs.get('file', None)
        self.farmer_id = kwargs.get('farmer_id', None)
        self.resource_id = kwargs.get('resource_id', None)
        self.resource_type = kwargs.get('resource_type', None)
        self.original_file_name = kwargs.get('original_file_name', None)
        self.id = kwargs.get('id', None)
        self.status = kwargs.get('status', None)
        self.created_date_time = kwargs.get('created_date_time', None)
        self.modified_date_time = kwargs.get('modified_date_time', None)
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.e_tag = kwargs.get('e_tag', None)
+
+
class PlantingData(msrest.serialization.Model):
    """Planting operation data recorded against a boundary.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param avg_planting_rate: Average planting rate (measurement reading and unit).
    :type avg_planting_rate: ~azure.farmbeats.models.Measure
    :param total_material: Total material used (measurement reading and unit).
    :type total_material: ~azure.farmbeats.models.Measure
    :param avg_material: Average material used (measurement reading and unit).
    :type avg_material: ~azure.farmbeats.models.Measure
    :param planting_product_details: Details of the planted products.
    :type planting_product_details: list[~azure.farmbeats.models.PlantingProductDetail]
    :param area: Operation area (measurement reading and unit).
    :type area: ~azure.farmbeats.models.Measure
    :param source: Source of the operation data.
    :type source: str
    :param operation_modified_date_time: Date-time the source provider last modified the
     operation data, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :type operation_modified_date_time: ~datetime.datetime
    :param operation_start_date_time: Start date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type operation_start_date_time: ~datetime.datetime
    :param operation_end_date_time: End date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type operation_end_date_time: ~datetime.datetime
    :ivar attachments_link: Link for attachments.
    :vartype attachments_link: str
    :param associated_boundary_id: Optional boundary ID of the field the operation was
     applied to.
    :type associated_boundary_id: str
    :param operation_boundary_id: Optional boundary ID of the actual area operated on
     inside the specified field.
    :type operation_boundary_id: str
    :ivar farmer_id: Farmer ID the operation data belongs to.
    :vartype farmer_id: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time the resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time the resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key-value pairs belonging to the resource: at most 25 pairs,
     keys no longer than 50 characters, values no longer than 150 characters, and only
     string or numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "source": {"max_length": 100, "min_length": 2},
        "attachments_link": {"readonly": True},
        "farmer_id": {"readonly": True},
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "avg_planting_rate": {"key": "avgPlantingRate", "type": "Measure"},
        "total_material": {"key": "totalMaterial", "type": "Measure"},
        "avg_material": {"key": "avgMaterial", "type": "Measure"},
        "planting_product_details": {"key": "plantingProductDetails", "type": "[PlantingProductDetail]"},
        "area": {"key": "area", "type": "Measure"},
        "source": {"key": "source", "type": "str"},
        "operation_modified_date_time": {"key": "operationModifiedDateTime", "type": "iso-8601"},
        "operation_start_date_time": {"key": "operationStartDateTime", "type": "iso-8601"},
        "operation_end_date_time": {"key": "operationEndDateTime", "type": "iso-8601"},
        "attachments_link": {"key": "attachmentsLink", "type": "str"},
        "associated_boundary_id": {"key": "associatedBoundaryId", "type": "str"},
        "operation_boundary_id": {"key": "operationBoundaryId", "type": "str"},
        "farmer_id": {"key": "farmerId", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PlantingData, self).__init__(**kwargs)
        # Caller-supplied fields: default to None when not provided.
        for field in (
            "avg_planting_rate", "total_material", "avg_material",
            "planting_product_details", "area", "source",
            "operation_modified_date_time", "operation_start_date_time",
            "operation_end_date_time", "associated_boundary_id",
            "operation_boundary_id", "status", "name", "description",
            "properties",
        ):
            setattr(self, field, kwargs.get(field))
        # Server-populated (read-only) fields always start out as None.
        for field in (
            "attachments_link", "farmer_id", "id", "e_tag",
            "created_date_time", "modified_date_time",
        ):
            setattr(self, field, None)
+
+
class PlantingDataListResponse(msrest.serialization.Model):
    """One page of PlantingData results plus a link to the next page.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.PlantingData]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[PlantingData]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PlantingDataListResponse, self).__init__(**kwargs)
        # Every field is optional; missing ones default to None.
        for field in ("value", "skip_token", "next_link"):
            setattr(self, field, kwargs.get(field))
+
+
class PlantingProductDetail(msrest.serialization.Model):
    """Per-product detail for a planting operation.

    :param product_name: Name of the product.
    :type product_name: str
    :param area: Area covered (measurement reading and unit).
    :type area: ~azure.farmbeats.models.Measure
    :param total_material: Total material used (measurement reading and unit).
    :type total_material: ~azure.farmbeats.models.Measure
    :param avg_material: Average material used (measurement reading and unit).
    :type avg_material: ~azure.farmbeats.models.Measure
    """

    _attribute_map = {
        "product_name": {"key": "productName", "type": "str"},
        "area": {"key": "area", "type": "Measure"},
        "total_material": {"key": "totalMaterial", "type": "Measure"},
        "avg_material": {"key": "avgMaterial", "type": "Measure"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(PlantingProductDetail, self).__init__(**kwargs)
        # Every field is optional; missing ones default to None.
        for field in ("product_name", "area", "total_material", "avg_material"):
            setattr(self, field, kwargs.get(field))
+
+
class Point(GeoJsonObject):
    """Point geometry.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. GeoJSON object type; discriminator constant filled by the server.
     Possible values include: "Point", "Polygon", "MultiPolygon".
    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
    :param coordinates: Required. Coordinate of this point: an array of 2 or 3 elements
     for a 2D or 3D system.
    :type coordinates: list[float]
    """

    _validation = {
        "type": {"required": True},
        "coordinates": {"required": True},
    }

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "coordinates": {"key": "coordinates", "type": "[float]"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Point, self).__init__(**kwargs)
        # Fixed discriminator value for this GeoJSON subtype.
        self.type = 'Point'  # type: str
        # Required: a missing 'coordinates' raises KeyError.
        self.coordinates = kwargs['coordinates']
+
+
class Polygon(GeoJsonObject):
    """Polygon geometry.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. GeoJSON object type; discriminator constant filled by the server.
     Possible values include: "Point", "Polygon", "MultiPolygon".
    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
    :param coordinates: Required. An array of linear ring coordinate arrays. When more than
     one ring is present, the first MUST be the exterior ring and any others MUST be
     interior rings.
    :type coordinates: list[list[list[float]]]
    """

    _validation = {
        "type": {"required": True},
        "coordinates": {"required": True},
    }

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "coordinates": {"key": "coordinates", "type": "[[[float]]]"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Polygon, self).__init__(**kwargs)
        # Fixed discriminator value for this GeoJSON subtype.
        self.type = 'Polygon'  # type: str
        # Required: a missing 'coordinates' raises KeyError.
        self.coordinates = kwargs['coordinates']
+
+
class SatelliteData(msrest.serialization.Model):
    """Data model for a satellite ingestion job request.

    :param image_names: List of image names.
    :type image_names: list[str]
    :param image_formats: List of image formats. Available value: TIF.
    :type image_formats: list[str]
    :param image_resolutions: List of image resolutions in meters. Available values: 10, 20, 60.
    :type image_resolutions: list[float]
    """

    _attribute_map = {
        "image_names": {"key": "imageNames", "type": "[str]"},
        "image_formats": {"key": "imageFormats", "type": "[str]"},
        "image_resolutions": {"key": "imageResolutions", "type": "[float]"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SatelliteData, self).__init__(**kwargs)
        # Every field is optional; missing ones default to None.
        for field in ("image_names", "image_formats", "image_resolutions"):
            setattr(self, field, kwargs.get(field))
+
+
class SatelliteDataIngestionJob(msrest.serialization.Model):
    """A satellite data ingestion job.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer Id.
    :type farmer_id: str
    :param boundary_id: Required. Id of the boundary object for which satellite data is
     being fetched.
    :type boundary_id: str
    :param start_date_time: Required. Start Date.
    :type start_date_time: ~datetime.datetime
    :param end_date_time: Required. End Date.
    :type end_date_time: ~datetime.datetime
    :param provider: Provider of satellite data. Possible values include: "Microsoft".
    :type provider: str or ~azure.farmbeats.models.DataProvider
    :param source: Source of satellite data. Possible values include: "Sentinel_2_L2A".
    :type source: str or ~azure.farmbeats.models.Source
    :param data: Data model for the satellite ingestion job request.
    :type data: ~azure.farmbeats.models.SatelliteData
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job. Possible values: 'Waiting', 'Running', 'Succeeded',
     'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message with additional job details.
    :vartype message: str
    :ivar created_date_time: Job creation date-time. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Date-time the job was last acted upon. Sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key-value pairs belonging to the resource: at most 25 pairs,
     keys no longer than 50 characters, values no longer than 150 characters, and only
     string or numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "farmer_id": {"required": True},
        "boundary_id": {"required": True},
        "start_date_time": {"required": True},
        "end_date_time": {"required": True},
        "id": {"readonly": True},
        "status": {"readonly": True},
        "duration_in_seconds": {"readonly": True},
        "message": {"readonly": True},
        "created_date_time": {"readonly": True},
        "last_action_date_time": {"readonly": True},
        "start_time": {"readonly": True},
        "end_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "farmer_id": {"key": "farmerId", "type": "str"},
        "boundary_id": {"key": "boundaryId", "type": "str"},
        "start_date_time": {"key": "startDateTime", "type": "iso-8601"},
        "end_date_time": {"key": "endDateTime", "type": "iso-8601"},
        "provider": {"key": "provider", "type": "str"},
        "source": {"key": "source", "type": "str"},
        "data": {"key": "data", "type": "SatelliteData"},
        "id": {"key": "id", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "duration_in_seconds": {"key": "durationInSeconds", "type": "float"},
        "message": {"key": "message", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "last_action_date_time": {"key": "lastActionDateTime", "type": "iso-8601"},
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SatelliteDataIngestionJob, self).__init__(**kwargs)
        # Required inputs: omitting any of these raises KeyError.
        self.farmer_id = kwargs['farmer_id']
        self.boundary_id = kwargs['boundary_id']
        self.start_date_time = kwargs['start_date_time']
        self.end_date_time = kwargs['end_date_time']
        # Optional inputs default to None when not supplied.
        for field in ("provider", "source", "data", "name", "description", "properties"):
            setattr(self, field, kwargs.get(field))
        # Server-populated (read-only) fields always start out as None.
        for field in (
            "id", "status", "duration_in_seconds", "message",
            "created_date_time", "last_action_date_time",
            "start_time", "end_time",
        ):
            setattr(self, field, None)
+
+
class Scene(msrest.serialization.Model):
    """A satellite scene resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param scene_date_time: Date-time of the scene, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :type scene_date_time: ~datetime.datetime
    :param provider: Data provider of the scene.
    :type provider: str
    :param source: Data source of the scene.
    :type source: str
    :param image_files: Collection of image files.
    :type image_files: list[~azure.farmbeats.models.ImageFile]
    :param image_format: Supported image formats for scene resource. Possible values include:
     "TIF".
    :type image_format: str or ~azure.farmbeats.models.ImageFormat
    :param cloud_cover_percentage: Cloud cover percentage of the scene.
    :type cloud_cover_percentage: float
    :param dark_pixel_percentage: Dark pixel percentage of the scene.
    :type dark_pixel_percentage: float
    :param ndvi_median_value: Median NDVI of the scene.
    :type ndvi_median_value: float
    :param boundary_id: Boundary ID the scene belongs to.
    :type boundary_id: str
    :param farmer_id: Farmer ID the scene belongs to.
    :type farmer_id: str
    :param id: Unique scene resource ID.
    :type id: str
    :ivar e_tag: ETag value used to implement optimistic concurrency.
    :vartype e_tag: str
    """

    _validation = {
        "provider": {"max_length": 100, "min_length": 2},
        "source": {"max_length": 100, "min_length": 2},
        "cloud_cover_percentage": {"maximum": 100, "minimum": 0},
        "dark_pixel_percentage": {"maximum": 100, "minimum": 0},
        "ndvi_median_value": {"maximum": 1, "minimum": 0},
        "boundary_id": {"max_length": 100, "min_length": 2},
        "e_tag": {"readonly": True},
    }

    _attribute_map = {
        "scene_date_time": {"key": "sceneDateTime", "type": "iso-8601"},
        "provider": {"key": "provider", "type": "str"},
        "source": {"key": "source", "type": "str"},
        "image_files": {"key": "imageFiles", "type": "[ImageFile]"},
        "image_format": {"key": "imageFormat", "type": "str"},
        "cloud_cover_percentage": {"key": "cloudCoverPercentage", "type": "float"},
        "dark_pixel_percentage": {"key": "darkPixelPercentage", "type": "float"},
        "ndvi_median_value": {"key": "ndviMedianValue", "type": "float"},
        "boundary_id": {"key": "boundaryId", "type": "str"},
        "farmer_id": {"key": "farmerId", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Scene, self).__init__(**kwargs)
        # Caller-supplied fields: default to None when not provided.
        for field in (
            "scene_date_time", "provider", "source", "image_files",
            "image_format", "cloud_cover_percentage", "dark_pixel_percentage",
            "ndvi_median_value", "boundary_id", "farmer_id", "id",
        ):
            setattr(self, field, kwargs.get(field))
        # Server-populated (read-only) field.
        self.e_tag = None
+
+
class SceneListResponse(msrest.serialization.Model):
    """One page of Scene results plus a link to the next page.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.Scene]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Scene]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SceneListResponse, self).__init__(**kwargs)
        # Every field is optional; missing ones default to None.
        for field in ("value", "skip_token", "next_link"):
            setattr(self, field, kwargs.get(field))
+
+
class SearchBoundaryQuery(msrest.serialization.Model):
    """Query parameters for SearchAllBoundaries and SearchBoundaries.

    :param ids: Ids of the resource.
    :type ids: list[str]
    :param names: Names of the resource.
    :type names: list[str]
    :param property_filters: Filters on key-value pairs within the Properties object,
     e.g. "{testKey} eq {testValue}".
    :type property_filters: list[str]
    :param statuses: Statuses of the resource.
    :type statuses: list[str]
    :param min_created_date_time: Minimum creation date of resource (inclusive).
    :type min_created_date_time: ~datetime.datetime
    :param max_created_date_time: Maximum creation date of resource (inclusive).
    :type max_created_date_time: ~datetime.datetime
    :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
    :type min_last_modified_date_time: ~datetime.datetime
    :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
    :type max_last_modified_date_time: ~datetime.datetime
    :param max_page_size: Maximum number of items needed (inclusive).
     Minimum = 10, Maximum = 1000, Default value = 50.
    :type max_page_size: int
    :param skip_token: Skip token for getting next set of results.
    :type skip_token: str
    :param is_primary: Whether the boundary is primary.
    :type is_primary: bool
    :param parent_type: Type of the parent it belongs to.
    :type parent_type: str
    :param parent_ids: Parent Ids of the resource.
    :type parent_ids: list[str]
    :param min_acreage: Minimum acreage of the boundary (inclusive).
    :type min_acreage: float
    :param max_acreage: Maximum acreage of the boundary (inclusive).
    :type max_acreage: float
    :param intersects_with_geometry: GeoJSON geometry the boundary must intersect with.
    :type intersects_with_geometry: ~azure.farmbeats.models.GeoJsonObject
    """

    _validation = {
        "max_page_size": {"maximum": 1000, "minimum": 10},
    }

    _attribute_map = {
        "ids": {"key": "ids", "type": "[str]"},
        "names": {"key": "names", "type": "[str]"},
        "property_filters": {"key": "propertyFilters", "type": "[str]"},
        "statuses": {"key": "statuses", "type": "[str]"},
        "min_created_date_time": {"key": "minCreatedDateTime", "type": "iso-8601"},
        "max_created_date_time": {"key": "maxCreatedDateTime", "type": "iso-8601"},
        "min_last_modified_date_time": {"key": "minLastModifiedDateTime", "type": "iso-8601"},
        "max_last_modified_date_time": {"key": "maxLastModifiedDateTime", "type": "iso-8601"},
        "max_page_size": {"key": "$maxPageSize", "type": "int"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "is_primary": {"key": "isPrimary", "type": "bool"},
        "parent_type": {"key": "parentType", "type": "str"},
        "parent_ids": {"key": "parentIds", "type": "[str]"},
        "min_acreage": {"key": "minAcreage", "type": "float"},
        "max_acreage": {"key": "maxAcreage", "type": "float"},
        "intersects_with_geometry": {"key": "intersectsWithGeometry", "type": "GeoJsonObject"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SearchBoundaryQuery, self).__init__(**kwargs)
        # max_page_size is the only field with a non-None default (service default: 50).
        self.max_page_size = kwargs.get('max_page_size', 50)
        # All remaining fields default to None when not provided.
        for field in (
            "ids", "names", "property_filters", "statuses",
            "min_created_date_time", "max_created_date_time",
            "min_last_modified_date_time", "max_last_modified_date_time",
            "skip_token", "is_primary", "parent_type", "parent_ids",
            "min_acreage", "max_acreage", "intersects_with_geometry",
        ):
            setattr(self, field, kwargs.get(field))
+
+
class Season(msrest.serialization.Model):
    """A season resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param start_date_time: Season start datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :type start_date_time: ~datetime.datetime
    :param end_date_time: Season end datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :type end_date_time: ~datetime.datetime
    :param year: Season year.
    :type year: int
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time the resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time the resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key-value pairs belonging to the resource: at most 25 pairs,
     keys no longer than 50 characters, values no longer than 150 characters, and only
     string or numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "start_date_time": {"key": "startDateTime", "type": "iso-8601"},
        "end_date_time": {"key": "endDateTime", "type": "iso-8601"},
        "year": {"key": "year", "type": "int"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Season, self).__init__(**kwargs)
        # Caller-supplied fields: default to None when not provided.
        for field in (
            "start_date_time", "end_date_time", "year", "status",
            "name", "description", "properties",
        ):
            setattr(self, field, kwargs.get(field))
        # Server-populated (read-only) fields always start out as None.
        for field in ("id", "e_tag", "created_date_time", "modified_date_time"):
            setattr(self, field, None)
+
+
class SeasonalField(msrest.serialization.Model):
    """A seasonal field resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar farmer_id: Farmer Id.
    :vartype farmer_id: str
    :ivar primary_boundary_id: Primary boundary id.
    :vartype primary_boundary_id: str
    :ivar boundary_ids: Boundary Ids.
    :vartype boundary_ids: list[str]
    :param farm_id: Id of the associated Farm.
    :type farm_id: str
    :param field_id: Id of the associated Field.
    :type field_id: str
    :param season_id: Id of the season it belongs to.
    :type season_id: str
    :param crop_variety_ids: CropVariety ids.
    :type crop_variety_ids: list[str]
    :param crop_id: Id of the crop it belongs to.
    :type crop_id: str
    :param avg_yield_value: Average yield value of the seasonal field.
    :type avg_yield_value: float
    :param avg_yield_unit: Unit of the average yield value attribute.
    :type avg_yield_unit: str
    :param avg_seed_population_value: Average seed population value of the seasonal field.
    :type avg_seed_population_value: float
    :param avg_seed_population_unit: Unit of the average seed population value attribute.
    :type avg_seed_population_unit: str
    :param planting_date_time: Planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :type planting_date_time: ~datetime.datetime
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: ETag value used to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time the resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time the resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key-value pairs belonging to the resource: at most 25 pairs,
     keys no longer than 50 characters, values no longer than 150 characters, and only
     string or numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "farmer_id": {"readonly": True},
        "primary_boundary_id": {"readonly": True},
        "boundary_ids": {"readonly": True, "unique": True},
        "crop_variety_ids": {"unique": True},
        "avg_yield_unit": {"max_length": 32, "min_length": 2},
        "avg_seed_population_unit": {"max_length": 32, "min_length": 2},
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "farmer_id": {"key": "farmerId", "type": "str"},
        "primary_boundary_id": {"key": "primaryBoundaryId", "type": "str"},
        "boundary_ids": {"key": "boundaryIds", "type": "[str]"},
        "farm_id": {"key": "farmId", "type": "str"},
        "field_id": {"key": "fieldId", "type": "str"},
        "season_id": {"key": "seasonId", "type": "str"},
        "crop_variety_ids": {"key": "cropVarietyIds", "type": "[str]"},
        "crop_id": {"key": "cropId", "type": "str"},
        "avg_yield_value": {"key": "avgYieldValue", "type": "float"},
        "avg_yield_unit": {"key": "avgYieldUnit", "type": "str"},
        "avg_seed_population_value": {"key": "avgSeedPopulationValue", "type": "float"},
        "avg_seed_population_unit": {"key": "avgSeedPopulationUnit", "type": "str"},
        "planting_date_time": {"key": "plantingDateTime", "type": "iso-8601"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SeasonalField, self).__init__(**kwargs)
        # Caller-supplied fields: default to None when not provided.
        for field in (
            "farm_id", "field_id", "season_id", "crop_variety_ids", "crop_id",
            "avg_yield_value", "avg_yield_unit", "avg_seed_population_value",
            "avg_seed_population_unit", "planting_date_time", "status",
            "name", "description", "properties",
        ):
            setattr(self, field, kwargs.get(field))
        # Server-populated (read-only) fields always start out as None.
        for field in (
            "farmer_id", "primary_boundary_id", "boundary_ids",
            "id", "e_tag", "created_date_time", "modified_date_time",
        ):
            setattr(self, field, None)
+
+
class SeasonalFieldListResponse(msrest.serialization.Model):
    """One page of SeasonalField results plus a link to the next page.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.SeasonalField]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[SeasonalField]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SeasonalFieldListResponse, self).__init__(**kwargs)
        # Every field is optional; missing ones default to None.
        for field in ("value", "skip_token", "next_link"):
            setattr(self, field, kwargs.get(field))
+
+
class SeasonListResponse(msrest.serialization.Model):
    """One page of ``Season`` results, with continuation information.

    :param value: The seasons on this page.
    :type value: list[~azure.farmbeats.models.Season]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[Season]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SeasonListResponse, self).__init__(**kwargs)
        # All three attributes are optional keyword arguments.
        for attr in ('value', 'skip_token', 'next_link'):
            setattr(self, attr, kwargs.get(attr))
+
+
class TillageData(msrest.serialization.Model):
    """Schema of a tillage operation data resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param tillage_depth: Measurement reading and unit.
    :type tillage_depth: ~azure.farmbeats.models.Measure
    :param tillage_pressure: Measurement reading and unit.
    :type tillage_pressure: ~azure.farmbeats.models.Measure
    :param area: Measurement reading and unit.
    :type area: ~azure.farmbeats.models.Measure
    :param source: Source of the operation data.
    :type source: str
    :param operation_modified_date_time: Modified date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ. Note: this will be specified by the source provider itself.
    :type operation_modified_date_time: ~datetime.datetime
    :param operation_start_date_time: Start date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type operation_start_date_time: ~datetime.datetime
    :param operation_end_date_time: End date-time of the operation data, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type operation_end_date_time: ~datetime.datetime
    :ivar attachments_link: Link for attachments.
    :vartype attachments_link: str
    :param associated_boundary_id: Optional boundary ID of the field for which operation was
     applied.
    :type associated_boundary_id: str
    :param operation_boundary_id: Optional boundary ID of the actual area for which operation was
     applied inside the specified field.
    :type operation_boundary_id: str
    :ivar farmer_id: Farmer ID which belongs to the operation data.
    :vartype farmer_id: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: A collection of key value pairs that belongs to the resource.
     Each pair must not have a key greater than 50 characters and must not have a value greater
     than 150 characters. Note: A maximum of 25 key value pairs can be provided for a resource
     and only string and numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'source': {'max_length': 100, 'min_length': 2},
        'attachments_link': {'readonly': True},
        'farmer_id': {'readonly': True},
        'id': {'readonly': True},
        'e_tag': {'readonly': True},
        'status': {'max_length': 100, 'min_length': 0},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'tillage_depth': {'key': 'tillageDepth', 'type': 'Measure'},
        'tillage_pressure': {'key': 'tillagePressure', 'type': 'Measure'},
        'area': {'key': 'area', 'type': 'Measure'},
        'source': {'key': 'source', 'type': 'str'},
        'operation_modified_date_time': {'key': 'operationModifiedDateTime', 'type': 'iso-8601'},
        'operation_start_date_time': {'key': 'operationStartDateTime', 'type': 'iso-8601'},
        'operation_end_date_time': {'key': 'operationEndDateTime', 'type': 'iso-8601'},
        'attachments_link': {'key': 'attachmentsLink', 'type': 'str'},
        'associated_boundary_id': {'key': 'associatedBoundaryId', 'type': 'str'},
        'operation_boundary_id': {'key': 'operationBoundaryId', 'type': 'str'},
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(TillageData, self).__init__(**kwargs)
        # Client-writable attributes: taken from kwargs when provided.
        for writable in (
            'tillage_depth', 'tillage_pressure', 'area', 'source',
            'operation_modified_date_time', 'operation_start_date_time',
            'operation_end_date_time', 'associated_boundary_id',
            'operation_boundary_id', 'status', 'name', 'description',
            'properties',
        ):
            setattr(self, writable, kwargs.get(writable))
        # Server-populated (read-only) attributes always start as None.
        for readonly in (
            'attachments_link', 'farmer_id', 'id', 'e_tag',
            'created_date_time', 'modified_date_time',
        ):
            setattr(self, readonly, None)
+
+
class TillageDataListResponse(msrest.serialization.Model):
    """One page of ``TillageData`` results, with continuation information.

    :param value: The tillage data records on this page.
    :type value: list[~azure.farmbeats.models.TillageData]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[TillageData]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(TillageDataListResponse, self).__init__(**kwargs)
        # All three attributes are optional keyword arguments.
        for attr in ('value', 'skip_token', 'next_link'):
            setattr(self, attr, kwargs.get(attr))
+
+
class WeatherData(msrest.serialization.Model):
    """Schema of a single weather data record for a boundary.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer ID.
    :type farmer_id: str
    :param boundary_id: Required. Boundary ID.
    :type boundary_id: str
    :param extension_id: Required. ID of the weather extension.
    :type extension_id: str
    :param location: Required. Location model class.
    :type location: ~azure.farmbeats.models.Location
    :param date_time: Required. Date-time of the weather data, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :type date_time: ~datetime.datetime
    :param unit_system_code: Unit System like US/SI etc.
    :type unit_system_code: str
    :param extension_version: Required. Version of the weather data extension.
    :type extension_version: str
    :param weather_data_type: Required. Type of weather data (forecast/historical).
    :type weather_data_type: str
    :param granularity: Required. Granularity of weather data (daily/hourly).
    :type granularity: str
    :param cloud_cover: Measurement reading and unit.
    :type cloud_cover: ~azure.farmbeats.models.Measure
    :param dew_point: Measurement reading and unit.
    :type dew_point: ~azure.farmbeats.models.Measure
    :param growing_degree_day: Measurement reading and unit.
    :type growing_degree_day: ~azure.farmbeats.models.Measure
    :param precipitation: Measurement reading and unit.
    :type precipitation: ~azure.farmbeats.models.Measure
    :param pressure: Measurement reading and unit.
    :type pressure: ~azure.farmbeats.models.Measure
    :param relative_humidity: Measurement reading and unit.
    :type relative_humidity: ~azure.farmbeats.models.Measure
    :param soil_moisture: Measurement reading and unit.
    :type soil_moisture: ~azure.farmbeats.models.Measure
    :param soil_temperature: Measurement reading and unit.
    :type soil_temperature: ~azure.farmbeats.models.Measure
    :param temperature: Measurement reading and unit.
    :type temperature: ~azure.farmbeats.models.Measure
    :param visibility: Measurement reading and unit.
    :type visibility: ~azure.farmbeats.models.Measure
    :param wet_bulb_temperature: Measurement reading and unit.
    :type wet_bulb_temperature: ~azure.farmbeats.models.Measure
    :param wind_chill: Measurement reading and unit.
    :type wind_chill: ~azure.farmbeats.models.Measure
    :param wind_direction: Measurement reading and unit.
    :type wind_direction: ~azure.farmbeats.models.Measure
    :param wind_gust: Measurement reading and unit.
    :type wind_gust: ~azure.farmbeats.models.Measure
    :param wind_speed: Measurement reading and unit.
    :type wind_speed: ~azure.farmbeats.models.Measure
    :param id: Weather data ID.
    :type id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :ivar created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype modified_date_time: ~datetime.datetime
    :param properties: A collection of key value pairs that belongs to the resource.
     Each pair must not have a key greater than 50 characters and must not have a value greater
     than 150 characters. Note: A maximum of 25 key value pairs can be provided for a resource
     and only string and numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'farmer_id': {'required': True},
        'boundary_id': {'required': True},
        'extension_id': {'required': True},
        'location': {'required': True},
        'date_time': {'required': True},
        'extension_version': {'required': True},
        'weather_data_type': {'required': True},
        'granularity': {'required': True},
        'e_tag': {'readonly': True},
        'created_date_time': {'readonly': True},
        'modified_date_time': {'readonly': True},
    }

    _attribute_map = {
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'boundary_id': {'key': 'boundaryId', 'type': 'str'},
        'extension_id': {'key': 'extensionId', 'type': 'str'},
        'location': {'key': 'location', 'type': 'Location'},
        'date_time': {'key': 'dateTime', 'type': 'iso-8601'},
        'unit_system_code': {'key': 'unitSystemCode', 'type': 'str'},
        'extension_version': {'key': 'extensionVersion', 'type': 'str'},
        'weather_data_type': {'key': 'weatherDataType', 'type': 'str'},
        'granularity': {'key': 'granularity', 'type': 'str'},
        'cloud_cover': {'key': 'cloudCover', 'type': 'Measure'},
        'dew_point': {'key': 'dewPoint', 'type': 'Measure'},
        'growing_degree_day': {'key': 'growingDegreeDay', 'type': 'Measure'},
        'precipitation': {'key': 'precipitation', 'type': 'Measure'},
        'pressure': {'key': 'pressure', 'type': 'Measure'},
        'relative_humidity': {'key': 'relativeHumidity', 'type': 'Measure'},
        'soil_moisture': {'key': 'soilMoisture', 'type': 'Measure'},
        'soil_temperature': {'key': 'soilTemperature', 'type': 'Measure'},
        'temperature': {'key': 'temperature', 'type': 'Measure'},
        'visibility': {'key': 'visibility', 'type': 'Measure'},
        'wet_bulb_temperature': {'key': 'wetBulbTemperature', 'type': 'Measure'},
        'wind_chill': {'key': 'windChill', 'type': 'Measure'},
        'wind_direction': {'key': 'windDirection', 'type': 'Measure'},
        'wind_gust': {'key': 'windGust', 'type': 'Measure'},
        'wind_speed': {'key': 'windSpeed', 'type': 'Measure'},
        'id': {'key': 'id', 'type': 'str'},
        'e_tag': {'key': 'eTag', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(WeatherData, self).__init__(**kwargs)
        # Required arguments: a missing key raises KeyError, in declaration
        # order (same order as the original generated assignments).
        for required in (
            'farmer_id', 'boundary_id', 'extension_id', 'location',
            'date_time', 'extension_version', 'weather_data_type',
            'granularity',
        ):
            setattr(self, required, kwargs[required])
        # Optional arguments default to None.
        for optional in (
            'unit_system_code', 'cloud_cover', 'dew_point',
            'growing_degree_day', 'precipitation', 'pressure',
            'relative_humidity', 'soil_moisture', 'soil_temperature',
            'temperature', 'visibility', 'wet_bulb_temperature',
            'wind_chill', 'wind_direction', 'wind_gust', 'wind_speed',
            'id', 'properties',
        ):
            setattr(self, optional, kwargs.get(optional))
        # Server-populated (read-only) attributes.
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class WeatherDataDeleteJob(msrest.serialization.Model):
    """Schema of weather data delete job.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param extension_id: Required. ID of the extension to be used for the providerInput, e.g.
     DTN.ClearAg.
    :type extension_id: str
    :param farmer_id: Required. The ID of the farmer object for which weather data is being
     fetched.
    :type farmer_id: str
    :param boundary_id: Required. The ID of the boundary object for which weather data is being
     fetched.
    :type boundary_id: str
    :param weather_data_type: Type of weather data. Possible values include: 'forecast',
     'historical'.
    :type weather_data_type: str
    :param granularity: Granularity of weather data. Possible values include: 'daily', 'hourly'.
    :type granularity: str
    :param start_date_time: Weather data start UTC date-time (inclusive), sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type start_date_time: ~datetime.datetime
    :param end_date_time: Weather data end UTC date-time (inclusive), sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :type end_date_time: ~datetime.datetime
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job.
     Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message to capture more details of the job.
    :vartype message: str
    :ivar created_date_time: Job created at dateTime. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Job was last acted upon at dateTime. Sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: A collection of key value pairs that belongs to the resource.
     Each pair must not have a key greater than 50 characters
     and must not have a value greater than 150 characters.
     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
     numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        # NOTE(review): pattern fixed from '[A-za-z]' to '[A-Za-z]'. The 'A-z'
        # range also matched '[', '\\', ']', '^', '_' and '`', which contradicts
        # the documented id format (letters '.' letters, e.g. DTN.ClearAg).
        # If the service swagger intentionally allows those characters, this
        # should instead be fixed upstream in the swagger definition.
        'extension_id': {'required': True, 'max_length': 100, 'min_length': 2, 'pattern': r'^[A-Za-z]{3,50}[.][A-Za-z]{3,100}$'},
        'farmer_id': {'required': True},
        'boundary_id': {'required': True},
        'id': {'readonly': True},
        'status': {'readonly': True},
        'duration_in_seconds': {'readonly': True},
        'message': {'readonly': True},
        'created_date_time': {'readonly': True},
        'last_action_date_time': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'extension_id': {'key': 'extensionId', 'type': 'str'},
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'boundary_id': {'key': 'boundaryId', 'type': 'str'},
        'weather_data_type': {'key': 'weatherDataType', 'type': 'str'},
        'granularity': {'key': 'granularity', 'type': 'str'},
        'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'},
        'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'},
        'id': {'key': 'id', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
        'message': {'key': 'message', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(WeatherDataDeleteJob, self).__init__(**kwargs)
        # Required arguments: KeyError if missing.
        self.extension_id = kwargs['extension_id']
        self.farmer_id = kwargs['farmer_id']
        self.boundary_id = kwargs['boundary_id']
        # Optional arguments.
        self.weather_data_type = kwargs.get('weather_data_type', None)
        self.granularity = kwargs.get('granularity', None)
        self.start_date_time = kwargs.get('start_date_time', None)
        self.end_date_time = kwargs.get('end_date_time', None)
        # Server-populated (read-only) attributes.
        self.id = None
        self.status = None
        self.duration_in_seconds = None
        self.message = None
        self.created_date_time = None
        self.last_action_date_time = None
        self.start_time = None
        self.end_time = None
        # Optional resource metadata.
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
+
+
class WeatherDataIngestionJob(msrest.serialization.Model):
    """Schema of weather ingestion job.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param boundary_id: Required. The ID of the boundary object for which weather data is being
     fetched.
    :type boundary_id: str
    :param farmer_id: Required. The ID of the farmer object for which weather data is being
     fetched.
    :type farmer_id: str
    :param extension_id: Required. ID of the extension to be used for the providerInput, e.g.
     DTN.ClearAg.
    :type extension_id: str
    :param extension_api_name: Required. Extension api name to which request is to be made.
    :type extension_api_name: str
    :param extension_api_input: Required. Extension api input dictionary which would be used to
     feed request query/body/parameter information.
    :type extension_api_input: dict[str, object]
    :param extension_data_provider_app_id: App id of the weather data provider.
    :type extension_data_provider_app_id: str
    :param extension_data_provider_api_key: Api key of the weather data provider.
    :type extension_data_provider_api_key: str
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job.
     Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message to capture more details of the job.
    :vartype message: str
    :ivar created_date_time: Job created at dateTime. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Job was last acted upon at dateTime. Sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: A collection of key value pairs that belongs to the resource.
     Each pair must not have a key greater than 50 characters
     and must not have a value greater than 150 characters.
     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
     numeral values are supported.
    :type properties: dict[str, object]
    """

    _validation = {
        'boundary_id': {'required': True},
        'farmer_id': {'required': True},
        # NOTE(review): pattern fixed from '[A-za-z]' to '[A-Za-z]'. The 'A-z'
        # range also matched '[', '\\', ']', '^', '_' and '`', which contradicts
        # the documented id format (letters '.' letters, e.g. DTN.ClearAg).
        # If the service swagger intentionally allows those characters, this
        # should instead be fixed upstream in the swagger definition.
        'extension_id': {'required': True, 'max_length': 100, 'min_length': 2, 'pattern': r'^[A-Za-z]{3,50}[.][A-Za-z]{3,100}$'},
        'extension_api_name': {'required': True, 'max_length': 100, 'min_length': 2},
        'extension_api_input': {'required': True},
        'extension_data_provider_app_id': {'max_length': 200, 'min_length': 2},
        'extension_data_provider_api_key': {'max_length': 200, 'min_length': 2},
        'id': {'readonly': True},
        'status': {'readonly': True},
        'duration_in_seconds': {'readonly': True},
        'message': {'readonly': True},
        'created_date_time': {'readonly': True},
        'last_action_date_time': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'name': {'max_length': 100, 'min_length': 0},
        'description': {'max_length': 500, 'min_length': 0},
    }

    _attribute_map = {
        'boundary_id': {'key': 'boundaryId', 'type': 'str'},
        'farmer_id': {'key': 'farmerId', 'type': 'str'},
        'extension_id': {'key': 'extensionId', 'type': 'str'},
        'extension_api_name': {'key': 'extensionApiName', 'type': 'str'},
        'extension_api_input': {'key': 'extensionApiInput', 'type': '{object}'},
        'extension_data_provider_app_id': {'key': 'extensionDataProviderAppId', 'type': 'str'},
        'extension_data_provider_api_key': {'key': 'extensionDataProviderApiKey', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'status': {'key': 'status', 'type': 'str'},
        'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
        'message': {'key': 'message', 'type': 'str'},
        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
        'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(WeatherDataIngestionJob, self).__init__(**kwargs)
        # Required arguments: KeyError if missing.
        self.boundary_id = kwargs['boundary_id']
        self.farmer_id = kwargs['farmer_id']
        self.extension_id = kwargs['extension_id']
        self.extension_api_name = kwargs['extension_api_name']
        self.extension_api_input = kwargs['extension_api_input']
        # Optional provider credentials.
        self.extension_data_provider_app_id = kwargs.get('extension_data_provider_app_id', None)
        self.extension_data_provider_api_key = kwargs.get('extension_data_provider_api_key', None)
        # Server-populated (read-only) attributes.
        self.id = None
        self.status = None
        self.duration_in_seconds = None
        self.message = None
        self.created_date_time = None
        self.last_action_date_time = None
        self.start_time = None
        self.end_time = None
        # Optional resource metadata.
        self.name = kwargs.get('name', None)
        self.description = kwargs.get('description', None)
        self.properties = kwargs.get('properties', None)
+
+
class WeatherDataListResponse(msrest.serialization.Model):
    """One page of ``WeatherData`` results, with continuation information.

    :param value: The weather data records on this page.
    :type value: list[~azure.farmbeats.models.WeatherData]
    :param skip_token: Token used in retrieving the next page. If null, there are no additional
     pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
    :type next_link: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[WeatherData]'},
        'skip_token': {'key': '$skipToken', 'type': 'str'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(WeatherDataListResponse, self).__init__(**kwargs)
        # All three attributes are optional keyword arguments.
        for attr in ('value', 'skip_token', 'next_link'):
            setattr(self, attr, kwargs.get(attr))
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_models_py3.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_models_py3.py
new file mode 100644
index 000000000000..c28f3e429595
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/models/_models_py3.py
@@ -0,0 +1,3831 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, IO, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._farm_beats_client_enums import *
+
+
+class ApplicationData(msrest.serialization.Model):
+ """Schema of application data resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param application_product_details: Application product details.
+ :type application_product_details: list[~azure.farmbeats.models.ApplicationProductDetail]
+ :param avg_material: Schema for storing measurement reading and unit.
+ :type avg_material: ~azure.farmbeats.models.Measure
+ :param total_material: Schema for storing measurement reading and unit.
+ :type total_material: ~azure.farmbeats.models.Measure
+ :param area: Schema for storing measurement reading and unit.
+ :type area: ~azure.farmbeats.models.Measure
+ :param source: Source of the operation data.
+ :type source: str
+ :param operation_modified_date_time: Modified date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ Note: this will be specified by the source provider itself.
+ :type operation_modified_date_time: ~datetime.datetime
+ :param operation_start_date_time: Start date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type operation_start_date_time: ~datetime.datetime
+ :param operation_end_date_time: End date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type operation_end_date_time: ~datetime.datetime
+ :ivar attachments_link: Link for attachments.
+ :vartype attachments_link: str
+ :param associated_boundary_id: Optional boundary ID of the field for which operation was
+ applied.
+ :type associated_boundary_id: str
+ :param operation_boundary_id: Optional boundary ID of the actual area for which operation was
+ applied inside the specified field.
+ :type operation_boundary_id: str
+ :ivar farmer_id: Farmer ID which belongs to the operation data.
+ :vartype farmer_id: str
+ :ivar id: Unique resource ID.
+ :vartype id: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ :param status: Status of the resource.
+ :type status: str
+ :ivar created_date_time: Date-time when resource was created, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date-time when resource was last modified, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype modified_date_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {
+ 'source': {'max_length': 100, 'min_length': 2},
+ 'attachments_link': {'readonly': True},
+ 'farmer_id': {'readonly': True},
+ 'id': {'readonly': True},
+ 'e_tag': {'readonly': True},
+ 'status': {'max_length': 100, 'min_length': 0},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ _attribute_map = {
+ 'application_product_details': {'key': 'applicationProductDetails', 'type': '[ApplicationProductDetail]'},
+ 'avg_material': {'key': 'avgMaterial', 'type': 'Measure'},
+ 'total_material': {'key': 'totalMaterial', 'type': 'Measure'},
+ 'area': {'key': 'area', 'type': 'Measure'},
+ 'source': {'key': 'source', 'type': 'str'},
+ 'operation_modified_date_time': {'key': 'operationModifiedDateTime', 'type': 'iso-8601'},
+ 'operation_start_date_time': {'key': 'operationStartDateTime', 'type': 'iso-8601'},
+ 'operation_end_date_time': {'key': 'operationEndDateTime', 'type': 'iso-8601'},
+ 'attachments_link': {'key': 'attachmentsLink', 'type': 'str'},
+ 'associated_boundary_id': {'key': 'associatedBoundaryId', 'type': 'str'},
+ 'operation_boundary_id': {'key': 'operationBoundaryId', 'type': 'str'},
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ application_product_details: Optional[List["ApplicationProductDetail"]] = None,
+ avg_material: Optional["Measure"] = None,
+ total_material: Optional["Measure"] = None,
+ area: Optional["Measure"] = None,
+ source: Optional[str] = None,
+ operation_modified_date_time: Optional[datetime.datetime] = None,
+ operation_start_date_time: Optional[datetime.datetime] = None,
+ operation_end_date_time: Optional[datetime.datetime] = None,
+ associated_boundary_id: Optional[str] = None,
+ operation_boundary_id: Optional[str] = None,
+ status: Optional[str] = None,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(ApplicationData, self).__init__(**kwargs)
+ self.application_product_details = application_product_details
+ self.avg_material = avg_material
+ self.total_material = total_material
+ self.area = area
+ self.source = source
+ self.operation_modified_date_time = operation_modified_date_time
+ self.operation_start_date_time = operation_start_date_time
+ self.operation_end_date_time = operation_end_date_time
+ self.attachments_link = None
+ self.associated_boundary_id = associated_boundary_id
+ self.operation_boundary_id = operation_boundary_id
+ self.farmer_id = None
+ self.id = None
+ self.e_tag = None
+ self.status = status
+ self.created_date_time = None
+ self.modified_date_time = None
+ self.name = name
+ self.description = description
+ self.properties = properties
+
+
+class ApplicationDataListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.ApplicationData]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ApplicationData]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["ApplicationData"]] = None,
+ skip_token: Optional[str] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(ApplicationDataListResponse, self).__init__(**kwargs)
+ self.value = value
+ self.skip_token = skip_token
+ self.next_link = next_link
+
+
+class ApplicationProductDetail(msrest.serialization.Model):
+ """Schema of product used during application.
+
+ :param product_name: Name of the product applied.
+ :type product_name: str
+ :param is_carrier: A flag indicating whether product is a carrier for a tank mix.
+ :type is_carrier: bool
+ :param avg_material: Schema for storing measurement reading and unit.
+ :type avg_material: ~azure.farmbeats.models.Measure
+ :param total_material: Schema for storing measurement reading and unit.
+ :type total_material: ~azure.farmbeats.models.Measure
+ """
+
+ _validation = {
+ 'product_name': {'max_length': 100, 'min_length': 1},
+ }
+
+ _attribute_map = {
+ 'product_name': {'key': 'productName', 'type': 'str'},
+ 'is_carrier': {'key': 'isCarrier', 'type': 'bool'},
+ 'avg_material': {'key': 'avgMaterial', 'type': 'Measure'},
+ 'total_material': {'key': 'totalMaterial', 'type': 'Measure'},
+ }
+
+ def __init__(
+ self,
+ *,
+ product_name: Optional[str] = None,
+ is_carrier: Optional[bool] = False,
+ avg_material: Optional["Measure"] = None,
+ total_material: Optional["Measure"] = None,
+ **kwargs
+ ):
+ super(ApplicationProductDetail, self).__init__(**kwargs)
+ self.product_name = product_name
+ self.is_carrier = is_carrier
+ self.avg_material = avg_material
+ self.total_material = total_material
+
+
+class Attachment(msrest.serialization.Model):
+ """Schema of attachment resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar farmer_id: Farmer id for this attachment.
+ :vartype farmer_id: str
+ :param resource_id: Associated Resource id for this attachment.
+ :type resource_id: str
+ :param resource_type: Associated Resource type for this attachment
+ i.e. Farmer, Farm, Field, SeasonalField, Boundary, FarmOperationApplicationData, HarvestData,
+ TillageData, PlantingData.
+ :type resource_type: str
+ :ivar original_file_name: Original File Name for this attachment.
+ :vartype original_file_name: str
+ :ivar id: Unique id.
+ :vartype id: str
+ :param status: Status of the resource.
+ :type status: str
+ :ivar created_date_time: Date when resource was created.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date when resource was last modified.
+ :vartype modified_date_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of resource.
+ :type description: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ """
+
+ _validation = {
+ 'farmer_id': {'readonly': True},
+ 'original_file_name': {'readonly': True},
+ 'id': {'readonly': True},
+ 'status': {'max_length': 100, 'min_length': 0},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ 'e_tag': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'original_file_name': {'key': 'originalFileName', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ resource_id: Optional[str] = None,
+ resource_type: Optional[str] = None,
+ status: Optional[str] = None,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(Attachment, self).__init__(**kwargs)
+ self.farmer_id = None
+ self.resource_id = resource_id
+ self.resource_type = resource_type
+ self.original_file_name = None
+ self.id = None
+ self.status = status
+ self.created_date_time = None
+ self.modified_date_time = None
+ self.name = name
+ self.description = description
+ self.e_tag = None
+
+
+class AttachmentListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.Attachment]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Attachment]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Attachment"]] = None,
+ skip_token: Optional[str] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(AttachmentListResponse, self).__init__(**kwargs)
+ self.value = value
+ self.skip_token = skip_token
+ self.next_link = next_link
+
+
+class Boundary(msrest.serialization.Model):
+ """Schema of boundary resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar farmer_id: Farmer Id.
+ :vartype farmer_id: str
+ :param parent_id: Id of the parent (field or seasonalField) it belongs to.
+ :type parent_id: str
+ :param geometry: GeoJSON abstract class.
+ :type geometry: ~azure.farmbeats.models.GeoJsonObject
+ :param is_primary: Is the boundary primary.
+ :type is_primary: bool
+ :ivar acreage: Boundary area in acres.
+ :vartype acreage: float
+ :ivar parent_type: Type of the parent it belongs to.
+ :vartype parent_type: str
+ :ivar id: Unique resource ID.
+ :vartype id: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ :param status: Status of the resource.
+ :type status: str
+ :ivar created_date_time: Date-time when resource was created, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date-time when resource was last modified, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype modified_date_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {
+ 'farmer_id': {'readonly': True},
+ 'acreage': {'readonly': True},
+ 'parent_type': {'readonly': True},
+ 'id': {'readonly': True},
+ 'e_tag': {'readonly': True},
+ 'status': {'max_length': 100, 'min_length': 0},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ _attribute_map = {
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'parent_id': {'key': 'parentId', 'type': 'str'},
+ 'geometry': {'key': 'geometry', 'type': 'GeoJsonObject'},
+ 'is_primary': {'key': 'isPrimary', 'type': 'bool'},
+ 'acreage': {'key': 'acreage', 'type': 'float'},
+ 'parent_type': {'key': 'parentType', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ parent_id: Optional[str] = None,
+ geometry: Optional["GeoJsonObject"] = None,
+ is_primary: Optional[bool] = None,
+ status: Optional[str] = None,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(Boundary, self).__init__(**kwargs)
+ self.farmer_id = None
+ self.parent_id = parent_id
+ self.geometry = geometry
+ self.is_primary = is_primary
+ self.acreage = None
+ self.parent_type = None
+ self.id = None
+ self.e_tag = None
+ self.status = status
+ self.created_date_time = None
+ self.modified_date_time = None
+ self.name = name
+ self.description = description
+ self.properties = properties
+
+
+class BoundaryListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.Boundary]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Boundary]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Boundary"]] = None,
+ skip_token: Optional[str] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(BoundaryListResponse, self).__init__(**kwargs)
+ self.value = value
+ self.skip_token = skip_token
+ self.next_link = next_link
+
+
+class BoundaryOverlapResponse(msrest.serialization.Model):
+ """Schema of boundary overlap response.
+
+ :param boundary_acreage: Acreage of Main boundary.
+ :type boundary_acreage: float
+ :param other_boundary_acreage: Acreage of other boundary.
+ :type other_boundary_acreage: float
+ :param intersecting_acreage: Acreage of intersecting boundary.
+ :type intersecting_acreage: float
+ """
+
+ _attribute_map = {
+ 'boundary_acreage': {'key': 'boundaryAcreage', 'type': 'float'},
+ 'other_boundary_acreage': {'key': 'otherBoundaryAcreage', 'type': 'float'},
+ 'intersecting_acreage': {'key': 'intersectingAcreage', 'type': 'float'},
+ }
+
+ def __init__(
+ self,
+ *,
+ boundary_acreage: Optional[float] = None,
+ other_boundary_acreage: Optional[float] = None,
+ intersecting_acreage: Optional[float] = None,
+ **kwargs
+ ):
+ super(BoundaryOverlapResponse, self).__init__(**kwargs)
+ self.boundary_acreage = boundary_acreage
+ self.other_boundary_acreage = other_boundary_acreage
+ self.intersecting_acreage = intersecting_acreage
+
+
+class CascadeDeleteJob(msrest.serialization.Model):
+ """Schema of cascade delete job.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param farmer_id: Required. Farmer Id.
+ :type farmer_id: str
+ :param resource_id: Required. The id of the resource.
+ :type resource_id: str
+ :param resource_type: Required. The type of the resource.
+ :type resource_type: str
+ :ivar id: Unique job id.
+ :vartype id: str
+ :ivar status: Status of the job.
+ Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
+ :vartype status: str
+ :ivar duration_in_seconds: Duration of the job in seconds.
+ :vartype duration_in_seconds: float
+ :ivar message: Status message to capture more details of the job.
+ :vartype message: str
+ :ivar created_date_time: Job created at dateTime. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar last_action_date_time: Job was last acted upon at dateTime. Sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype last_action_date_time: ~datetime.datetime
+ :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype start_time: ~datetime.datetime
+ :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype end_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {
+ 'farmer_id': {'required': True},
+ 'resource_id': {'required': True},
+ 'resource_type': {'required': True},
+ 'id': {'readonly': True},
+ 'status': {'readonly': True},
+ 'duration_in_seconds': {'readonly': True},
+ 'message': {'readonly': True},
+ 'created_date_time': {'readonly': True},
+ 'last_action_date_time': {'readonly': True},
+ 'start_time': {'readonly': True},
+ 'end_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ _attribute_map = {
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'end_time': {'key': 'endTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ farmer_id: str,
+ resource_id: str,
+ resource_type: str,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(CascadeDeleteJob, self).__init__(**kwargs)
+ self.farmer_id = farmer_id
+ self.resource_id = resource_id
+ self.resource_type = resource_type
+ self.id = None
+ self.status = None
+ self.duration_in_seconds = None
+ self.message = None
+ self.created_date_time = None
+ self.last_action_date_time = None
+ self.start_time = None
+ self.end_time = None
+ self.name = name
+ self.description = description
+ self.properties = properties
+
+
+class Crop(msrest.serialization.Model):
+ """Schema of crop resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param phenotype: Crop phenotype.
+ :type phenotype: str
+ :ivar id: Unique resource ID.
+ :vartype id: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ :param status: Status of the resource.
+ :type status: str
+ :ivar created_date_time: Date-time when resource was created, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date-time when resource was last modified, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype modified_date_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {
+ 'phenotype': {'max_length': 100, 'min_length': 0},
+ 'id': {'readonly': True},
+ 'e_tag': {'readonly': True},
+ 'status': {'max_length': 100, 'min_length': 0},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ _attribute_map = {
+ 'phenotype': {'key': 'phenotype', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ phenotype: Optional[str] = None,
+ status: Optional[str] = None,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(Crop, self).__init__(**kwargs)
+ self.phenotype = phenotype
+ self.id = None
+ self.e_tag = None
+ self.status = status
+ self.created_date_time = None
+ self.modified_date_time = None
+ self.name = name
+ self.description = description
+ self.properties = properties
+
+
+class CropListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.Crop]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Crop]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Crop"]] = None,
+ skip_token: Optional[str] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(CropListResponse, self).__init__(**kwargs)
+ self.value = value
+ self.skip_token = skip_token
+ self.next_link = next_link
+
+
+class CropVariety(msrest.serialization.Model):
+ """Schema of crop variety resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar crop_id: Id of the crop it belongs to.
+ :vartype crop_id: str
+ :param brand: CropVariety Brand.
+ :type brand: str
+ :param product: CropVariety product.
+ :type product: str
+ :ivar id: Unique resource ID.
+ :vartype id: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ :param status: Status of the resource.
+ :type status: str
+ :ivar created_date_time: Date-time when resource was created, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date-time when resource was last modified, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype modified_date_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {
+ 'crop_id': {'readonly': True},
+ 'brand': {'max_length': 100, 'min_length': 0},
+ 'product': {'max_length': 100, 'min_length': 0},
+ 'id': {'readonly': True},
+ 'e_tag': {'readonly': True},
+ 'status': {'max_length': 100, 'min_length': 0},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ _attribute_map = {
+ 'crop_id': {'key': 'cropId', 'type': 'str'},
+ 'brand': {'key': 'brand', 'type': 'str'},
+ 'product': {'key': 'product', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ brand: Optional[str] = None,
+ product: Optional[str] = None,
+ status: Optional[str] = None,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(CropVariety, self).__init__(**kwargs)
+ self.crop_id = None
+ self.brand = brand
+ self.product = product
+ self.id = None
+ self.e_tag = None
+ self.status = status
+ self.created_date_time = None
+ self.modified_date_time = None
+ self.name = name
+ self.description = description
+ self.properties = properties
+
+
+class CropVarietyListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.CropVariety]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[CropVariety]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["CropVariety"]] = None,
+ skip_token: Optional[str] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(CropVarietyListResponse, self).__init__(**kwargs)
+ self.value = value
+ self.skip_token = skip_token
+ self.next_link = next_link
+
+
+class Error(msrest.serialization.Model):
+ """An error from the Azure AgPlatform service.
+
+ :param code: Server-defined set of error codes.
+ :type code: str
+ :param message: Human-readable representation of the error.
+ :type message: str
+ :param target: Target of the error.
+ :type target: str
+ :param details: Array of details about specific errors that led to this reported error.
+ :type details: list[~azure.farmbeats.models.Error]
+ :param innererror: Inner error containing a list of errors.
+
+ See the :class:`InnerError` reference
+ documentation for more details.
+ :type innererror: ~azure.farmbeats.models.InnerError
+ """
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'target': {'key': 'target', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[Error]'},
+ 'innererror': {'key': 'innererror', 'type': 'InnerError'},
+ }
+
+ def __init__(
+ self,
+ *,
+ code: Optional[str] = None,
+ message: Optional[str] = None,
+ target: Optional[str] = None,
+ details: Optional[List["Error"]] = None,
+ innererror: Optional["InnerError"] = None,
+ **kwargs
+ ):
+ super(Error, self).__init__(**kwargs)
+ self.code = code
+ self.message = message
+ self.target = target
+ self.details = details
+ self.innererror = innererror
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """An error response from the Azure AgPlatform service.
+
+ See the ErrorResponse reference documentation for more details.
+
+ :param error: An error from the Azure AgPlatform service.
+ :type error: ~azure.farmbeats.models.Error
+ :param trace_id: Unique trace Id.
+ :type trace_id: str
+ """
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'Error'},
+ 'trace_id': {'key': 'traceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ error: Optional["Error"] = None,
+ trace_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.error = error
+ self.trace_id = trace_id
+
+
class Farm(msrest.serialization.Model):
    """Schema of farm resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar farmer_id: Farmer Id.
    :vartype farmer_id: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time when resource was created (yyyy-MM-ddTHH:mm:ssZ).
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified (yyyy-MM-ddTHH:mm:ssZ).
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key/value pairs belonging to the resource. Each key is limited to 50
     characters and each value to 150 characters; at most 25 pairs, with only string and
     numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "farmer_id": {"readonly": True},
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "farmer_id": {"key": "farmerId", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        status: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Caller-supplied, writable fields.
        self.status = status
        self.name = name
        self.description = description
        self.properties = properties
        # Server-populated, read-only fields.
        self.farmer_id = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class Farmer(msrest.serialization.Model):
    """Schema of farmer resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time when resource was created (yyyy-MM-ddTHH:mm:ssZ).
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified (yyyy-MM-ddTHH:mm:ssZ).
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key/value pairs belonging to the resource. Each key is limited to 50
     characters and each value to 150 characters; at most 25 pairs, with only string and
     numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        status: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Caller-supplied, writable fields.
        self.status = status
        self.name = name
        self.description = description
        self.properties = properties
        # Server-populated, read-only fields.
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class FarmerListResponse(msrest.serialization.Model):
    """Paged response: a page of Farmer objects plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.Farmer]
    :param skip_token: Token used in retrieving the next page; None when there are no
     additional pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Farmer]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["Farmer"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
class FarmListResponse(msrest.serialization.Model):
    """Paged response: a page of Farm objects plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.Farm]
    :param skip_token: Token used in retrieving the next page; None when there are no
     additional pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Farm]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["Farm"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
class FarmOperationDataIngestionJob(msrest.serialization.Model):
    """Schema of farm operation data ingestion job.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer Id.
    :type farmer_id: str
    :param auth_provider_id: Required. Authentication provider Id.
    :type auth_provider_id: str
    :param operations: List of operation types for which data needs to be downloaded.
     Available values: AllOperations, Application, Planting, Harvest, Tillage.
    :type operations: list[str]
    :param start_year: Required. Start Year (Minimum = 2000, Maximum = CurrentYear).
    :type start_year: int
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job. Possible values: 'Waiting', 'Running', 'Succeeded',
     'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message to capture more details of the job.
    :vartype message: str
    :ivar created_date_time: Job created at dateTime (yyyy-MM-ddTHH:mm:ssZ).
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Job was last acted upon at dateTime (yyyy-MM-ddTHH:mm:ssZ).
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available (yyyy-MM-ddTHH:mm:ssZ).
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available (yyyy-MM-ddTHH:mm:ssZ).
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key/value pairs belonging to the resource. Each key is limited to 50
     characters and each value to 150 characters; at most 25 pairs, with only string and
     numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "farmer_id": {"required": True},
        "auth_provider_id": {"required": True},
        "start_year": {"required": True},
        "id": {"readonly": True},
        "status": {"readonly": True},
        "duration_in_seconds": {"readonly": True},
        "message": {"readonly": True},
        "created_date_time": {"readonly": True},
        "last_action_date_time": {"readonly": True},
        "start_time": {"readonly": True},
        "end_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "farmer_id": {"key": "farmerId", "type": "str"},
        "auth_provider_id": {"key": "authProviderId", "type": "str"},
        "operations": {"key": "operations", "type": "[str]"},
        "start_year": {"key": "startYear", "type": "int"},
        "id": {"key": "id", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "duration_in_seconds": {"key": "durationInSeconds", "type": "float"},
        "message": {"key": "message", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "last_action_date_time": {"key": "lastActionDateTime", "type": "iso-8601"},
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        farmer_id: str,
        auth_provider_id: str,
        start_year: int,
        operations: Optional[List[str]] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Caller-supplied job definition.
        self.farmer_id = farmer_id
        self.auth_provider_id = auth_provider_id
        self.operations = operations
        self.start_year = start_year
        self.name = name
        self.description = description
        self.properties = properties
        # Server-populated job state (read-only).
        self.id = None
        self.status = None
        self.duration_in_seconds = None
        self.message = None
        self.created_date_time = None
        self.last_action_date_time = None
        self.start_time = None
        self.end_time = None
+
+
class Field(msrest.serialization.Model):
    """Schema of field resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param farm_id: Id of the associated Farm.
    :type farm_id: str
    :ivar farmer_id: Farmer Id.
    :vartype farmer_id: str
    :ivar primary_boundary_id: Primary boundary id.
    :vartype primary_boundary_id: str
    :ivar boundary_ids: Boundary Ids.
    :vartype boundary_ids: list[str]
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time when resource was created (yyyy-MM-ddTHH:mm:ssZ).
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified (yyyy-MM-ddTHH:mm:ssZ).
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key/value pairs belonging to the resource. Each key is limited to 50
     characters and each value to 150 characters; at most 25 pairs, with only string and
     numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "farmer_id": {"readonly": True},
        "primary_boundary_id": {"readonly": True},
        "boundary_ids": {"readonly": True, "unique": True},
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "farm_id": {"key": "farmId", "type": "str"},
        "farmer_id": {"key": "farmerId", "type": "str"},
        "primary_boundary_id": {"key": "primaryBoundaryId", "type": "str"},
        "boundary_ids": {"key": "boundaryIds", "type": "[str]"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        farm_id: Optional[str] = None,
        status: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Caller-supplied, writable fields.
        self.farm_id = farm_id
        self.status = status
        self.name = name
        self.description = description
        self.properties = properties
        # Server-populated, read-only fields.
        self.farmer_id = None
        self.primary_boundary_id = None
        self.boundary_ids = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class FieldListResponse(msrest.serialization.Model):
    """Paged response: a page of Field objects plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.Field]
    :param skip_token: Token used in retrieving the next page; None when there are no
     additional pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Field]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["Field"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
class GeoJsonObject(msrest.serialization.Model):
    """GeoJSON abstract base class.

    You probably want to use the sub-classes and not this class directly. Known
    sub-classes are: MultiPolygon, Point, Polygon.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. GeoJSON object type. Constant filled by server. Possible values
     include: "Point", "Polygon", "MultiPolygon".
    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
    """

    _validation = {
        "type": {"required": True},
    }

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
    }

    # Discriminator map used by msrest polymorphic (de)serialization: the wire
    # value of "type" selects which concrete sub-class is instantiated.
    _subtype_map = {
        "type": {"MultiPolygon": "MultiPolygon", "Point": "Point", "Polygon": "Polygon"}
    }

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Sub-classes overwrite this with their own discriminator constant.
        self.type = None  # type: Optional[str]
+
+
class HarvestData(msrest.serialization.Model):
    """Schema of harvest data resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param total_yield: Schema for storing measurement reading and unit.
    :type total_yield: ~azure.farmbeats.models.Measure
    :param avg_yield: Schema for storing measurement reading and unit.
    :type avg_yield: ~azure.farmbeats.models.Measure
    :param total_wet_mass: Schema for storing measurement reading and unit.
    :type total_wet_mass: ~azure.farmbeats.models.Measure
    :param avg_wet_mass: Schema for storing measurement reading and unit.
    :type avg_wet_mass: ~azure.farmbeats.models.Measure
    :param avg_moisture: Schema for storing measurement reading and unit.
    :type avg_moisture: ~azure.farmbeats.models.Measure
    :param avg_speed: Schema for storing measurement reading and unit.
    :type avg_speed: ~azure.farmbeats.models.Measure
    :param harvest_product_details: Harvest product details.
    :type harvest_product_details: list[~azure.farmbeats.models.HarvestProductDetail]
    :param area: Schema for storing measurement reading and unit.
    :type area: ~azure.farmbeats.models.Measure
    :param source: Source of the operation data.
    :type source: str
    :param operation_modified_date_time: Modified date-time of the operation data
     (yyyy-MM-ddTHH:mm:ssZ). Note: this is specified by the source provider itself.
    :type operation_modified_date_time: ~datetime.datetime
    :param operation_start_date_time: Start date-time of the operation data
     (yyyy-MM-ddTHH:mm:ssZ).
    :type operation_start_date_time: ~datetime.datetime
    :param operation_end_date_time: End date-time of the operation data
     (yyyy-MM-ddTHH:mm:ssZ).
    :type operation_end_date_time: ~datetime.datetime
    :ivar attachments_link: Link for attachments.
    :vartype attachments_link: str
    :param associated_boundary_id: Optional boundary ID of the field for which operation was
     applied.
    :type associated_boundary_id: str
    :param operation_boundary_id: Optional boundary ID of the actual area for which operation
     was applied inside the specified field.
    :type operation_boundary_id: str
    :ivar farmer_id: Farmer ID which belongs to the operation data.
    :vartype farmer_id: str
    :ivar id: Unique resource ID.
    :vartype id: str
    :ivar e_tag: The ETag value to implement optimistic concurrency.
    :vartype e_tag: str
    :param status: Status of the resource.
    :type status: str
    :ivar created_date_time: Date-time when resource was created (yyyy-MM-ddTHH:mm:ssZ).
    :vartype created_date_time: ~datetime.datetime
    :ivar modified_date_time: Date-time when resource was last modified (yyyy-MM-ddTHH:mm:ssZ).
    :vartype modified_date_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key/value pairs belonging to the resource. Each key is limited to 50
     characters and each value to 150 characters; at most 25 pairs, with only string and
     numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "source": {"max_length": 100, "min_length": 2},
        "attachments_link": {"readonly": True},
        "farmer_id": {"readonly": True},
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "total_yield": {"key": "totalYield", "type": "Measure"},
        "avg_yield": {"key": "avgYield", "type": "Measure"},
        "total_wet_mass": {"key": "totalWetMass", "type": "Measure"},
        "avg_wet_mass": {"key": "avgWetMass", "type": "Measure"},
        "avg_moisture": {"key": "avgMoisture", "type": "Measure"},
        "avg_speed": {"key": "avgSpeed", "type": "Measure"},
        "harvest_product_details": {"key": "harvestProductDetails", "type": "[HarvestProductDetail]"},
        "area": {"key": "area", "type": "Measure"},
        "source": {"key": "source", "type": "str"},
        "operation_modified_date_time": {"key": "operationModifiedDateTime", "type": "iso-8601"},
        "operation_start_date_time": {"key": "operationStartDateTime", "type": "iso-8601"},
        "operation_end_date_time": {"key": "operationEndDateTime", "type": "iso-8601"},
        "attachments_link": {"key": "attachmentsLink", "type": "str"},
        "associated_boundary_id": {"key": "associatedBoundaryId", "type": "str"},
        "operation_boundary_id": {"key": "operationBoundaryId", "type": "str"},
        "farmer_id": {"key": "farmerId", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        total_yield: Optional["Measure"] = None,
        avg_yield: Optional["Measure"] = None,
        total_wet_mass: Optional["Measure"] = None,
        avg_wet_mass: Optional["Measure"] = None,
        avg_moisture: Optional["Measure"] = None,
        avg_speed: Optional["Measure"] = None,
        harvest_product_details: Optional[List["HarvestProductDetail"]] = None,
        area: Optional["Measure"] = None,
        source: Optional[str] = None,
        operation_modified_date_time: Optional[datetime.datetime] = None,
        operation_start_date_time: Optional[datetime.datetime] = None,
        operation_end_date_time: Optional[datetime.datetime] = None,
        associated_boundary_id: Optional[str] = None,
        operation_boundary_id: Optional[str] = None,
        status: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Caller-supplied measurements and operation metadata.
        self.total_yield = total_yield
        self.avg_yield = avg_yield
        self.total_wet_mass = total_wet_mass
        self.avg_wet_mass = avg_wet_mass
        self.avg_moisture = avg_moisture
        self.avg_speed = avg_speed
        self.harvest_product_details = harvest_product_details
        self.area = area
        self.source = source
        self.operation_modified_date_time = operation_modified_date_time
        self.operation_start_date_time = operation_start_date_time
        self.operation_end_date_time = operation_end_date_time
        self.associated_boundary_id = associated_boundary_id
        self.operation_boundary_id = operation_boundary_id
        self.status = status
        self.name = name
        self.description = description
        self.properties = properties
        # Server-populated, read-only fields.
        self.attachments_link = None
        self.farmer_id = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class HarvestDataListResponse(msrest.serialization.Model):
    """Paged response: a page of HarvestData objects plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.HarvestData]
    :param skip_token: Token used in retrieving the next page; None when there are no
     additional pages.
    :type skip_token: str
    :param next_link: Continuation link (absolute URI) to the next page of results.
    :type next_link: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[HarvestData]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["HarvestData"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
class HarvestProductDetail(msrest.serialization.Model):
    """Schema of product used during harvesting.

    :param product_name: Name of the product.
    :type product_name: str
    :param area: Schema for storing measurement reading and unit.
    :type area: ~azure.farmbeats.models.Measure
    :param total_yield: Schema for storing measurement reading and unit.
    :type total_yield: ~azure.farmbeats.models.Measure
    :param avg_yield: Schema for storing measurement reading and unit.
    :type avg_yield: ~azure.farmbeats.models.Measure
    :param avg_moisture: Schema for storing measurement reading and unit.
    :type avg_moisture: ~azure.farmbeats.models.Measure
    :param total_wet_mass: Schema for storing measurement reading and unit.
    :type total_wet_mass: ~azure.farmbeats.models.Measure
    :param avg_wet_mass: Schema for storing measurement reading and unit.
    :type avg_wet_mass: ~azure.farmbeats.models.Measure
    """

    _validation = {
        "product_name": {"max_length": 100, "min_length": 1},
    }

    _attribute_map = {
        "product_name": {"key": "productName", "type": "str"},
        "area": {"key": "area", "type": "Measure"},
        "total_yield": {"key": "totalYield", "type": "Measure"},
        "avg_yield": {"key": "avgYield", "type": "Measure"},
        "avg_moisture": {"key": "avgMoisture", "type": "Measure"},
        "total_wet_mass": {"key": "totalWetMass", "type": "Measure"},
        "avg_wet_mass": {"key": "avgWetMass", "type": "Measure"},
    }

    def __init__(
        self,
        *,
        product_name: Optional[str] = None,
        area: Optional["Measure"] = None,
        total_yield: Optional["Measure"] = None,
        avg_yield: Optional["Measure"] = None,
        avg_moisture: Optional["Measure"] = None,
        total_wet_mass: Optional["Measure"] = None,
        avg_wet_mass: Optional["Measure"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.product_name = product_name
        self.area = area
        self.total_yield = total_yield
        self.avg_yield = avg_yield
        self.avg_moisture = avg_moisture
        self.total_wet_mass = total_wet_mass
        self.avg_wet_mass = avg_wet_mass
+
+
class ImageFile(msrest.serialization.Model):
    """Schema of image file resource.

    All required parameters must be populated in order to send to Azure.

    :param file_link: Link of the image file.
    :type file_link: str
    :param name: Required. Name of the image file.
    :type name: str
    :param image_format: Supported image formats for scene resource. Possible values include:
     "TIF".
    :type image_format: str or ~azure.farmbeats.models.ImageFormat
    :param resolution: Resolution of image file in meters.
    :type resolution: float
    """

    _validation = {
        "name": {"required": True},
    }

    _attribute_map = {
        "file_link": {"key": "fileLink", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "image_format": {"key": "imageFormat", "type": "str"},
        "resolution": {"key": "resolution", "type": "float"},
    }

    def __init__(
        self,
        *,
        name: str,
        file_link: Optional[str] = None,
        image_format: Optional[Union[str, "ImageFormat"]] = None,
        resolution: Optional[float] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.name = name
        self.file_link = file_link
        self.image_format = image_format
        self.resolution = resolution
+
+
class ImageProcessingRasterizeJob(msrest.serialization.Model):
    """Job that rasterizes a shapefile attachment into raster attachments.

    Variables are only populated by the server, and will be ignored when sending a request.

    All required parameters must be populated in order to send to Azure.

    :param farmer_id: Required. Farmer Id.
    :type farmer_id: str
    :param shapefile_attachment_id: Required. Shapefile attachment Id.
    :type shapefile_attachment_id: str
    :param shapefile_column_names: Required. List of shapefile column names to create raster
     attachments.
    :type shapefile_column_names: list[str]
    :ivar id: Unique job id.
    :vartype id: str
    :ivar status: Status of the job. Possible values: 'Waiting', 'Running', 'Succeeded',
     'Failed', 'Cancelled'.
    :vartype status: str
    :ivar duration_in_seconds: Duration of the job in seconds.
    :vartype duration_in_seconds: float
    :ivar message: Status message to capture more details of the job.
    :vartype message: str
    :ivar created_date_time: Job created at dateTime (yyyy-MM-ddTHH:mm:ssZ).
    :vartype created_date_time: ~datetime.datetime
    :ivar last_action_date_time: Job was last acted upon at dateTime (yyyy-MM-ddTHH:mm:ssZ).
    :vartype last_action_date_time: ~datetime.datetime
    :ivar start_time: Job start time when available (yyyy-MM-ddTHH:mm:ssZ).
    :vartype start_time: ~datetime.datetime
    :ivar end_time: Job end time when available (yyyy-MM-ddTHH:mm:ssZ).
    :vartype end_time: ~datetime.datetime
    :param name: Name to identify resource.
    :type name: str
    :param description: Textual description of the resource.
    :type description: str
    :param properties: Key/value pairs belonging to the resource. Each key is limited to 50
     characters and each value to 150 characters; at most 25 pairs, with only string and
     numeral values supported.
    :type properties: dict[str, object]
    """

    _validation = {
        "farmer_id": {"required": True},
        "shapefile_attachment_id": {"required": True},
        "shapefile_column_names": {"required": True},
        "id": {"readonly": True},
        "status": {"readonly": True},
        "duration_in_seconds": {"readonly": True},
        "message": {"readonly": True},
        "created_date_time": {"readonly": True},
        "last_action_date_time": {"readonly": True},
        "start_time": {"readonly": True},
        "end_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "farmer_id": {"key": "farmerId", "type": "str"},
        "shapefile_attachment_id": {"key": "shapefileAttachmentId", "type": "str"},
        "shapefile_column_names": {"key": "shapefileColumnNames", "type": "[str]"},
        "id": {"key": "id", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "duration_in_seconds": {"key": "durationInSeconds", "type": "float"},
        "message": {"key": "message", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "last_action_date_time": {"key": "lastActionDateTime", "type": "iso-8601"},
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        farmer_id: str,
        shapefile_attachment_id: str,
        shapefile_column_names: List[str],
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Caller-supplied job definition.
        self.farmer_id = farmer_id
        self.shapefile_attachment_id = shapefile_attachment_id
        self.shapefile_column_names = shapefile_column_names
        self.name = name
        self.description = description
        self.properties = properties
        # Server-populated job state (read-only).
        self.id = None
        self.status = None
        self.duration_in_seconds = None
        self.message = None
        self.created_date_time = None
        self.last_action_date_time = None
        self.start_time = None
        self.end_time = None
+
+
class InnerError(msrest.serialization.Model):
    """Inner error containing list of errors. See the InnerError reference document for more
    details.

    :param additional_properties: Unmatched properties from the message are deserialized to this
     collection.
    :type additional_properties: dict[str, object]
    :param code: Specific error code that was provided by the containing error.
    :type code: str
    :param innererror: Inner error containing list of errors. See the InnerError reference
     document for more details.
    :type innererror: ~azure.farmbeats.models.InnerError
    """

    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'code': {'key': 'code', 'type': 'str'},
        'innererror': {'key': 'innererror', 'type': 'InnerError'},
    }

    def __init__(
        self,
        *,
        additional_properties: Optional[Dict[str, object]] = None,
        code: Optional[str] = None,
        innererror: Optional["InnerError"] = None,
        **kwargs
    ):
        super(InnerError, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.code = code
        self.innererror = innererror
+
+
+class Location(msrest.serialization.Model):
+    """Location model class.
+
+    A geographic point expressed as a latitude/longitude pair.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param latitude: Required. Latitude of the location, in the range [-90, 90].
+    :type latitude: float
+    :param longitude: Required. Longitude of the location, in the range [-180, 180].
+    :type longitude: float
+    """
+
+    _validation = {
+        # Range constraints are enforced client-side by msrest before sending.
+        'latitude': {'required': True, 'maximum': 90, 'minimum': -90},
+        'longitude': {'required': True, 'maximum': 180, 'minimum': -180},
+    }
+
+    _attribute_map = {
+        'latitude': {'key': 'latitude', 'type': 'float'},
+        'longitude': {'key': 'longitude', 'type': 'float'},
+    }
+
+    def __init__(
+        self,
+        *,
+        latitude: float,
+        longitude: float,
+        **kwargs
+    ):
+        super(Location, self).__init__(**kwargs)
+        self.latitude = latitude
+        self.longitude = longitude
+
+
+class Measure(msrest.serialization.Model):
+    """Schema for storing a measurement reading and its unit.
+
+    Both fields are optional; a value may be sent without a unit and vice versa.
+
+    :param unit: Data unit (1 to 50 characters when provided).
+    :type unit: str
+    :param value: Data value.
+    :type value: float
+    """
+
+    _validation = {
+        # 'unit' is optional, but when present must be a non-empty string of at most 50 chars.
+        'unit': {'max_length': 50, 'min_length': 1},
+    }
+
+    _attribute_map = {
+        'unit': {'key': 'unit', 'type': 'str'},
+        'value': {'key': 'value', 'type': 'float'},
+    }
+
+    def __init__(
+        self,
+        *,
+        unit: Optional[str] = None,
+        value: Optional[float] = None,
+        **kwargs
+    ):
+        super(Measure, self).__init__(**kwargs)
+        self.unit = unit
+        self.value = value
+
+
+class MultiPolygon(GeoJsonObject):
+    """MultiPolygon geometry.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. GeoJSON object type. Constant filled by server. Possible values include:
+     "Point", "Polygon", "MultiPolygon".
+    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
+    :param coordinates: Required. Gets or sets Coordinates of GeoJSON Object.
+     It must be an array of polygons, each polygon contains list of linear rings.
+     For Polygons with more than one of these rings, the first MUST be the exterior ring,
+     and any others MUST be interior rings.
+    :type coordinates: list[list[list[list[float]]]]
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'coordinates': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        # 4-level nesting: polygons -> rings -> positions -> floats.
+        'coordinates': {'key': 'coordinates', 'type': '[[[[float]]]]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        coordinates: List[List[List[List[float]]]],
+        **kwargs
+    ):
+        super(MultiPolygon, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed for this GeoJsonObject subtype.
+        self.type = 'MultiPolygon'  # type: str
+        self.coordinates = coordinates
+
+
+class OAuthConnectRequest(msrest.serialization.Model):
+    """Get OAuth config query parameters.
+
+    Request body used to start the OAuth consent flow for a farmer against a
+    configured OAuth provider.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param farmer_id: Required. Id of the farmer.
+    :type farmer_id: str
+    :param o_auth_provider_id: Required. Id of the OAuthProvider.
+    :type o_auth_provider_id: str
+    :param user_redirect_link: Required. Link to redirect the user to, at the end of the oauth
+     flow.
+    :type user_redirect_link: str
+    :param user_redirect_state: State to provide back when redirecting the user, at the end of the
+     oauth flow.
+    :type user_redirect_state: str
+    """
+
+    _validation = {
+        'farmer_id': {'required': True},
+        'o_auth_provider_id': {'required': True},
+        'user_redirect_link': {'required': True, 'max_length': 1000, 'min_length': 0},
+        # Optional, but limited to 200 characters when supplied.
+        'user_redirect_state': {'max_length': 200, 'min_length': 0},
+    }
+
+    _attribute_map = {
+        'farmer_id': {'key': 'farmerId', 'type': 'str'},
+        'o_auth_provider_id': {'key': 'oAuthProviderId', 'type': 'str'},
+        'user_redirect_link': {'key': 'userRedirectLink', 'type': 'str'},
+        'user_redirect_state': {'key': 'userRedirectState', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        farmer_id: str,
+        o_auth_provider_id: str,
+        user_redirect_link: str,
+        user_redirect_state: Optional[str] = None,
+        **kwargs
+    ):
+        super(OAuthConnectRequest, self).__init__(**kwargs)
+        self.farmer_id = farmer_id
+        self.o_auth_provider_id = o_auth_provider_id
+        self.user_redirect_link = user_redirect_link
+        self.user_redirect_state = user_redirect_state
+
+
+class OAuthProvider(msrest.serialization.Model):
+    """Schema of OAuth provider resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param app_id: OAuth App Id for given OAuth Provider.
+    :type app_id: str
+    :param app_secret: OAuth App secret for given Provider.
+     Note: Won't be sent in response.
+    :type app_secret: str
+    :param api_key: OAuth Api key for given Provider.
+     Note: currently Applicable to Climate provider. Won't be sent in response.
+    :type api_key: str
+    :param is_production_app: An optional flag to determine if the App is ready to be used for
+     Production scenarios in the provider side or not. (Default value: false)
+     Note: Currently applicable for JohnDeere.
+    :type is_production_app: bool
+    :ivar id: Unique OAuth provider ID.
+    :vartype id: str
+    :ivar e_tag: The ETag value to implement optimistic concurrency.
+    :vartype e_tag: str
+    :ivar created_date_time: Date-time when resource was created, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :vartype created_date_time: ~datetime.datetime
+    :ivar modified_date_time: Date-time when resource was last modified, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :vartype modified_date_time: ~datetime.datetime
+    :param name: Name to identify resource.
+    :type name: str
+    :param description: Textual description of the resource.
+    :type description: str
+    :param properties: A collection of key value pairs that belongs to the resource.
+     Each pair must not have a key greater than 50 characters
+     and must not have a value greater than 150 characters.
+     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+     numeral values are supported.
+    :type properties: dict[str, object]
+    """
+
+    _validation = {
+        'app_id': {'max_length': 200, 'min_length': 2},
+        'app_secret': {'max_length': 200, 'min_length': 2},
+        'api_key': {'max_length': 200, 'min_length': 2},
+        # 'readonly' fields are server-populated; msrest drops them from requests.
+        'id': {'readonly': True},
+        'e_tag': {'readonly': True},
+        'created_date_time': {'readonly': True},
+        'modified_date_time': {'readonly': True},
+        'name': {'max_length': 100, 'min_length': 0},
+        'description': {'max_length': 500, 'min_length': 0},
+    }
+
+    _attribute_map = {
+        'app_id': {'key': 'appId', 'type': 'str'},
+        'app_secret': {'key': 'appSecret', 'type': 'str'},
+        'api_key': {'key': 'apiKey', 'type': 'str'},
+        'is_production_app': {'key': 'isProductionApp', 'type': 'bool'},
+        'id': {'key': 'id', 'type': 'str'},
+        'e_tag': {'key': 'eTag', 'type': 'str'},
+        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        app_id: Optional[str] = None,
+        app_secret: Optional[str] = None,
+        api_key: Optional[str] = None,
+        is_production_app: Optional[bool] = False,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        properties: Optional[Dict[str, object]] = None,
+        **kwargs
+    ):
+        super(OAuthProvider, self).__init__(**kwargs)
+        self.app_id = app_id
+        self.app_secret = app_secret
+        self.api_key = api_key
+        self.is_production_app = is_production_app
+        # Server-populated fields start as None on the client.
+        self.id = None
+        self.e_tag = None
+        self.created_date_time = None
+        self.modified_date_time = None
+        self.name = name
+        self.description = description
+        self.properties = properties
+
+
+class OAuthProviderListResponse(msrest.serialization.Model):
+    """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+    :param value: List of requested objects.
+    :type value: list[~azure.farmbeats.models.OAuthProvider]
+    :param skip_token: Token used in retrieving the next page. If null, there are no additional
+     pages.
+    :type skip_token: str
+    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[OAuthProvider]'},
+        # Wire name is literally '$skipToken' (OData-style continuation token).
+        'skip_token': {'key': '$skipToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["OAuthProvider"]] = None,
+        skip_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(OAuthProviderListResponse, self).__init__(**kwargs)
+        self.value = value
+        self.skip_token = skip_token
+        self.next_link = next_link
+
+
+class OAuthToken(msrest.serialization.Model):
+    """Schema of OAuth token resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param farmer_id: Required. Farmer ID for this OAuth config.
+    :type farmer_id: str
+    :param auth_provider_id: Required. ID of the OAuth provider resource containing app
+     information.
+    :type auth_provider_id: str
+    :param is_valid: An optional flag indicating whether the token is a valid or expired (Default
+     value: true).
+    :type is_valid: bool
+    :ivar e_tag: The ETag value to implement optimistic concurrency.
+    :vartype e_tag: str
+    :ivar created_date_time: Date-time when resource was created, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :vartype created_date_time: ~datetime.datetime
+    :ivar modified_date_time: Date-time when resource was last modified, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :vartype modified_date_time: ~datetime.datetime
+    """
+
+    _validation = {
+        'farmer_id': {'required': True},
+        'auth_provider_id': {'required': True},
+        # Server-populated; excluded from serialized requests.
+        'e_tag': {'readonly': True},
+        'created_date_time': {'readonly': True},
+        'modified_date_time': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'farmer_id': {'key': 'farmerId', 'type': 'str'},
+        'auth_provider_id': {'key': 'authProviderId', 'type': 'str'},
+        'is_valid': {'key': 'isValid', 'type': 'bool'},
+        'e_tag': {'key': 'eTag', 'type': 'str'},
+        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        *,
+        farmer_id: str,
+        auth_provider_id: str,
+        is_valid: Optional[bool] = True,
+        **kwargs
+    ):
+        super(OAuthToken, self).__init__(**kwargs)
+        self.farmer_id = farmer_id
+        self.auth_provider_id = auth_provider_id
+        self.is_valid = is_valid
+        self.e_tag = None
+        self.created_date_time = None
+        self.modified_date_time = None
+
+
+class OAuthTokenListResponse(msrest.serialization.Model):
+    """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+    :param value: List of requested objects.
+    :type value: list[~azure.farmbeats.models.OAuthToken]
+    :param skip_token: Token used in retrieving the next page. If null, there are no additional
+     pages.
+    :type skip_token: str
+    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[OAuthToken]'},
+        # Wire name is literally '$skipToken' (OData-style continuation token).
+        'skip_token': {'key': '$skipToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["OAuthToken"]] = None,
+        skip_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(OAuthTokenListResponse, self).__init__(**kwargs)
+        self.value = value
+        self.skip_token = skip_token
+        self.next_link = next_link
+
+
+class Paths1LxjoxzFarmersFarmeridAttachmentsAttachmentidPatchRequestbodyContentMultipartFormDataSchema(msrest.serialization.Model):
+    """Multipart/form-data request body for PATCH /farmers/{farmerId}/attachments/{attachmentId}.
+
+    The class name is autorest-generated from the OpenAPI path of an inline
+    (unnamed) schema; the fields describe an attachment upload.
+
+    :param file: File to be uploaded.
+    :type file: IO
+    :param farmer_id: Farmer id for this attachment.
+    :type farmer_id: str
+    :param resource_id: Associated Resource id for this attachment.
+    :type resource_id: str
+    :param resource_type: Associated Resource type for this attachment
+     i.e. Farmer, Farm, Field, SeasonalField, Boundary, FarmOperationApplicationData, HarvestData,
+     TillageData, PlantingData.
+    :type resource_type: str
+    :param original_file_name: Original File Name for this attachment.
+    :type original_file_name: str
+    :param id: Unique id.
+    :type id: str
+    :param status: Status of the resource.
+    :type status: str
+    :param created_date_time: Date when resource was created.
+    :type created_date_time: str
+    :param modified_date_time: Date when resource was last modified.
+    :type modified_date_time: str
+    :param name: Name to identify resource.
+    :type name: str
+    :param description: Textual description of resource.
+    :type description: str
+    :param e_tag: The ETag value to implement optimistic concurrency.
+    :type e_tag: str
+    """
+
+    _attribute_map = {
+        'file': {'key': 'file', 'type': 'IO'},
+        # Note: form-field names are PascalCase here, unlike the camelCase JSON models.
+        'farmer_id': {'key': 'FarmerId', 'type': 'str'},
+        'resource_id': {'key': 'ResourceId', 'type': 'str'},
+        'resource_type': {'key': 'ResourceType', 'type': 'str'},
+        'original_file_name': {'key': 'OriginalFileName', 'type': 'str'},
+        'id': {'key': 'Id', 'type': 'str'},
+        'status': {'key': 'Status', 'type': 'str'},
+        'created_date_time': {'key': 'CreatedDateTime', 'type': 'str'},
+        'modified_date_time': {'key': 'ModifiedDateTime', 'type': 'str'},
+        'name': {'key': 'Name', 'type': 'str'},
+        'description': {'key': 'Description', 'type': 'str'},
+        'e_tag': {'key': 'ETag', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        file: Optional[IO] = None,
+        farmer_id: Optional[str] = None,
+        resource_id: Optional[str] = None,
+        resource_type: Optional[str] = None,
+        original_file_name: Optional[str] = None,
+        id: Optional[str] = None,
+        status: Optional[str] = None,
+        created_date_time: Optional[str] = None,
+        modified_date_time: Optional[str] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        e_tag: Optional[str] = None,
+        **kwargs
+    ):
+        super(Paths1LxjoxzFarmersFarmeridAttachmentsAttachmentidPatchRequestbodyContentMultipartFormDataSchema, self).__init__(**kwargs)
+        self.file = file
+        self.farmer_id = farmer_id
+        self.resource_id = resource_id
+        self.resource_type = resource_type
+        self.original_file_name = original_file_name
+        self.id = id
+        self.status = status
+        self.created_date_time = created_date_time
+        self.modified_date_time = modified_date_time
+        self.name = name
+        self.description = description
+        self.e_tag = e_tag
+
+
+class PlantingData(msrest.serialization.Model):
+    """Schema of planting data resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param avg_planting_rate: Schema for storing measurement reading and unit.
+    :type avg_planting_rate: ~azure.farmbeats.models.Measure
+    :param total_material: Schema for storing measurement reading and unit.
+    :type total_material: ~azure.farmbeats.models.Measure
+    :param avg_material: Schema for storing measurement reading and unit.
+    :type avg_material: ~azure.farmbeats.models.Measure
+    :param planting_product_details: Planting product details.
+    :type planting_product_details: list[~azure.farmbeats.models.PlantingProductDetail]
+    :param area: Schema for storing measurement reading and unit.
+    :type area: ~azure.farmbeats.models.Measure
+    :param source: Source of the operation data.
+    :type source: str
+    :param operation_modified_date_time: Modified date-time of the operation data, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+     Note: this will be specified by the source provider itself.
+    :type operation_modified_date_time: ~datetime.datetime
+    :param operation_start_date_time: Start date-time of the operation data, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :type operation_start_date_time: ~datetime.datetime
+    :param operation_end_date_time: End date-time of the operation data, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :type operation_end_date_time: ~datetime.datetime
+    :ivar attachments_link: Link for attachments.
+    :vartype attachments_link: str
+    :param associated_boundary_id: Optional boundary ID of the field for which operation was
+     applied.
+    :type associated_boundary_id: str
+    :param operation_boundary_id: Optional boundary ID of the actual area for which operation was
+     applied inside the specified field.
+    :type operation_boundary_id: str
+    :ivar farmer_id: Farmer ID which belongs to the operation data.
+    :vartype farmer_id: str
+    :ivar id: Unique resource ID.
+    :vartype id: str
+    :ivar e_tag: The ETag value to implement optimistic concurrency.
+    :vartype e_tag: str
+    :param status: Status of the resource.
+    :type status: str
+    :ivar created_date_time: Date-time when resource was created, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :vartype created_date_time: ~datetime.datetime
+    :ivar modified_date_time: Date-time when resource was last modified, sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :vartype modified_date_time: ~datetime.datetime
+    :param name: Name to identify resource.
+    :type name: str
+    :param description: Textual description of the resource.
+    :type description: str
+    :param properties: A collection of key value pairs that belongs to the resource.
+     Each pair must not have a key greater than 50 characters
+     and must not have a value greater than 150 characters.
+     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+     numeral values are supported.
+    :type properties: dict[str, object]
+    """
+
+    _validation = {
+        'source': {'max_length': 100, 'min_length': 2},
+        # 'readonly' fields are server-populated; msrest drops them from requests.
+        'attachments_link': {'readonly': True},
+        'farmer_id': {'readonly': True},
+        'id': {'readonly': True},
+        'e_tag': {'readonly': True},
+        'status': {'max_length': 100, 'min_length': 0},
+        'created_date_time': {'readonly': True},
+        'modified_date_time': {'readonly': True},
+        'name': {'max_length': 100, 'min_length': 0},
+        'description': {'max_length': 500, 'min_length': 0},
+    }
+
+    _attribute_map = {
+        'avg_planting_rate': {'key': 'avgPlantingRate', 'type': 'Measure'},
+        'total_material': {'key': 'totalMaterial', 'type': 'Measure'},
+        'avg_material': {'key': 'avgMaterial', 'type': 'Measure'},
+        'planting_product_details': {'key': 'plantingProductDetails', 'type': '[PlantingProductDetail]'},
+        'area': {'key': 'area', 'type': 'Measure'},
+        'source': {'key': 'source', 'type': 'str'},
+        'operation_modified_date_time': {'key': 'operationModifiedDateTime', 'type': 'iso-8601'},
+        'operation_start_date_time': {'key': 'operationStartDateTime', 'type': 'iso-8601'},
+        'operation_end_date_time': {'key': 'operationEndDateTime', 'type': 'iso-8601'},
+        'attachments_link': {'key': 'attachmentsLink', 'type': 'str'},
+        'associated_boundary_id': {'key': 'associatedBoundaryId', 'type': 'str'},
+        'operation_boundary_id': {'key': 'operationBoundaryId', 'type': 'str'},
+        'farmer_id': {'key': 'farmerId', 'type': 'str'},
+        'id': {'key': 'id', 'type': 'str'},
+        'e_tag': {'key': 'eTag', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+        'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        avg_planting_rate: Optional["Measure"] = None,
+        total_material: Optional["Measure"] = None,
+        avg_material: Optional["Measure"] = None,
+        planting_product_details: Optional[List["PlantingProductDetail"]] = None,
+        area: Optional["Measure"] = None,
+        source: Optional[str] = None,
+        operation_modified_date_time: Optional[datetime.datetime] = None,
+        operation_start_date_time: Optional[datetime.datetime] = None,
+        operation_end_date_time: Optional[datetime.datetime] = None,
+        associated_boundary_id: Optional[str] = None,
+        operation_boundary_id: Optional[str] = None,
+        status: Optional[str] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        properties: Optional[Dict[str, object]] = None,
+        **kwargs
+    ):
+        super(PlantingData, self).__init__(**kwargs)
+        self.avg_planting_rate = avg_planting_rate
+        self.total_material = total_material
+        self.avg_material = avg_material
+        self.planting_product_details = planting_product_details
+        self.area = area
+        self.source = source
+        self.operation_modified_date_time = operation_modified_date_time
+        self.operation_start_date_time = operation_start_date_time
+        self.operation_end_date_time = operation_end_date_time
+        # Server-populated fields start as None on the client.
+        self.attachments_link = None
+        self.associated_boundary_id = associated_boundary_id
+        self.operation_boundary_id = operation_boundary_id
+        self.farmer_id = None
+        self.id = None
+        self.e_tag = None
+        self.status = status
+        self.created_date_time = None
+        self.modified_date_time = None
+        self.name = name
+        self.description = description
+        self.properties = properties
+
+
+class PlantingDataListResponse(msrest.serialization.Model):
+    """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+    :param value: List of requested objects.
+    :type value: list[~azure.farmbeats.models.PlantingData]
+    :param skip_token: Token used in retrieving the next page. If null, there are no additional
+     pages.
+    :type skip_token: str
+    :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+    :type next_link: str
+    """
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[PlantingData]'},
+        # Wire name is literally '$skipToken' (OData-style continuation token).
+        'skip_token': {'key': '$skipToken', 'type': 'str'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: Optional[List["PlantingData"]] = None,
+        skip_token: Optional[str] = None,
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(PlantingDataListResponse, self).__init__(**kwargs)
+        self.value = value
+        self.skip_token = skip_token
+        self.next_link = next_link
+
+
+class PlantingProductDetail(msrest.serialization.Model):
+    """Schema for Planting product detail.
+
+    Per-product breakdown of a planting operation; all fields optional.
+
+    :param product_name: Name of the product.
+    :type product_name: str
+    :param area: Schema for storing measurement reading and unit.
+    :type area: ~azure.farmbeats.models.Measure
+    :param total_material: Schema for storing measurement reading and unit.
+    :type total_material: ~azure.farmbeats.models.Measure
+    :param avg_material: Schema for storing measurement reading and unit.
+    :type avg_material: ~azure.farmbeats.models.Measure
+    """
+
+    _attribute_map = {
+        'product_name': {'key': 'productName', 'type': 'str'},
+        'area': {'key': 'area', 'type': 'Measure'},
+        'total_material': {'key': 'totalMaterial', 'type': 'Measure'},
+        'avg_material': {'key': 'avgMaterial', 'type': 'Measure'},
+    }
+
+    def __init__(
+        self,
+        *,
+        product_name: Optional[str] = None,
+        area: Optional["Measure"] = None,
+        total_material: Optional["Measure"] = None,
+        avg_material: Optional["Measure"] = None,
+        **kwargs
+    ):
+        super(PlantingProductDetail, self).__init__(**kwargs)
+        self.product_name = product_name
+        self.area = area
+        self.total_material = total_material
+        self.avg_material = avg_material
+
+
+class Point(GeoJsonObject):
+    """Point geometry.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. GeoJSON object type. Constant filled by server. Possible values include:
+     "Point", "Polygon", "MultiPolygon".
+    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
+    :param coordinates: Required. Gets or sets the coordinate of this point.
+     It must be an array of 2 or 3 elements for a 2D or 3D system.
+    :type coordinates: list[float]
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'coordinates': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'coordinates': {'key': 'coordinates', 'type': '[float]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        coordinates: List[float],
+        **kwargs
+    ):
+        super(Point, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed for this GeoJsonObject subtype.
+        self.type = 'Point'  # type: str
+        self.coordinates = coordinates
+
+
+class Polygon(GeoJsonObject):
+    """Polygon geometry.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. GeoJSON object type. Constant filled by server. Possible values include:
+     "Point", "Polygon", "MultiPolygon".
+    :type type: str or ~azure.farmbeats.models.GeoJsonObjectType
+    :param coordinates: Required. Gets or sets coordinates of the GeoJSON Object.
+     It must be an array of linear ring coordinate arrays.
+     For Polygons with more than one of these rings, the first MUST be the exterior ring,
+     and any others MUST be interior rings.
+    :type coordinates: list[list[list[float]]]
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'coordinates': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        # 3-level nesting: rings -> positions -> floats.
+        'coordinates': {'key': 'coordinates', 'type': '[[[float]]]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        coordinates: List[List[List[float]]],
+        **kwargs
+    ):
+        super(Polygon, self).__init__(**kwargs)
+        # Polymorphic discriminator: fixed for this GeoJsonObject subtype.
+        self.type = 'Polygon'  # type: str
+        self.coordinates = coordinates
+
+
+class SatelliteData(msrest.serialization.Model):
+    """Data Model for SatelliteIngestionJobRequest.
+
+    Selects which images a satellite ingestion job should fetch; all fields
+    optional.
+
+    :param image_names: List of ImageNames.
+    :type image_names: list[str]
+    :param image_formats: List of ImageFormats. Available value: TIF.
+    :type image_formats: list[str]
+    :param image_resolutions: List of ImageResolutions in meters. Available values: 10, 20, 60.
+    :type image_resolutions: list[float]
+    """
+
+    _attribute_map = {
+        'image_names': {'key': 'imageNames', 'type': '[str]'},
+        'image_formats': {'key': 'imageFormats', 'type': '[str]'},
+        'image_resolutions': {'key': 'imageResolutions', 'type': '[float]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        image_names: Optional[List[str]] = None,
+        image_formats: Optional[List[str]] = None,
+        image_resolutions: Optional[List[float]] = None,
+        **kwargs
+    ):
+        super(SatelliteData, self).__init__(**kwargs)
+        self.image_names = image_names
+        self.image_formats = image_formats
+        self.image_resolutions = image_resolutions
+
+
+class SatelliteDataIngestionJob(msrest.serialization.Model):
+    """Schema of satellite data ingestion job.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param farmer_id: Required. Farmer Id.
+    :type farmer_id: str
+    :param boundary_id: Required. The id of the boundary object for which satellite data is being
+     fetched.
+    :type boundary_id: str
+    :param start_date_time: Required. Start Date.
+    :type start_date_time: ~datetime.datetime
+    :param end_date_time: Required. End Date.
+    :type end_date_time: ~datetime.datetime
+    :param provider: Provider of satellite data. Possible values include: "Microsoft".
+    :type provider: str or ~azure.farmbeats.models.DataProvider
+    :param source: Source of satellite data. Possible values include: "Sentinel_2_L2A".
+    :type source: str or ~azure.farmbeats.models.Source
+    :param data: Data Model for SatelliteIngestionJobRequest.
+    :type data: ~azure.farmbeats.models.SatelliteData
+    :ivar id: Unique job id.
+    :vartype id: str
+    :ivar status: Status of the job.
+     Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
+    :vartype status: str
+    :ivar duration_in_seconds: Duration of the job in seconds.
+    :vartype duration_in_seconds: float
+    :ivar message: Status message to capture more details of the job.
+    :vartype message: str
+    :ivar created_date_time: Job created at dateTime. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+    :vartype created_date_time: ~datetime.datetime
+    :ivar last_action_date_time: Job was last acted upon at dateTime. Sample format:
+     yyyy-MM-ddTHH:mm:ssZ.
+    :vartype last_action_date_time: ~datetime.datetime
+    :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+    :vartype start_time: ~datetime.datetime
+    :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+    :vartype end_time: ~datetime.datetime
+    :param name: Name to identify resource.
+    :type name: str
+    :param description: Textual description of the resource.
+    :type description: str
+    :param properties: A collection of key value pairs that belongs to the resource.
+     Each pair must not have a key greater than 50 characters
+     and must not have a value greater than 150 characters.
+     Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+     numeral values are supported.
+    :type properties: dict[str, object]
+    """
+
+    _validation = {
+        'farmer_id': {'required': True},
+        'boundary_id': {'required': True},
+        'start_date_time': {'required': True},
+        'end_date_time': {'required': True},
+        # 'readonly' fields are server-populated job status; msrest drops them from requests.
+        'id': {'readonly': True},
+        'status': {'readonly': True},
+        'duration_in_seconds': {'readonly': True},
+        'message': {'readonly': True},
+        'created_date_time': {'readonly': True},
+        'last_action_date_time': {'readonly': True},
+        'start_time': {'readonly': True},
+        'end_time': {'readonly': True},
+        'name': {'max_length': 100, 'min_length': 0},
+        'description': {'max_length': 500, 'min_length': 0},
+    }
+
+    _attribute_map = {
+        'farmer_id': {'key': 'farmerId', 'type': 'str'},
+        'boundary_id': {'key': 'boundaryId', 'type': 'str'},
+        'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'},
+        'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'},
+        'provider': {'key': 'provider', 'type': 'str'},
+        'source': {'key': 'source', 'type': 'str'},
+        'data': {'key': 'data', 'type': 'SatelliteData'},
+        'id': {'key': 'id', 'type': 'str'},
+        'status': {'key': 'status', 'type': 'str'},
+        'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
+        'message': {'key': 'message', 'type': 'str'},
+        'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+        'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
+        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
+        'name': {'key': 'name', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': '{object}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        farmer_id: str,
+        boundary_id: str,
+        start_date_time: datetime.datetime,
+        end_date_time: datetime.datetime,
+        provider: Optional[Union[str, "DataProvider"]] = None,
+        source: Optional[Union[str, "Source"]] = None,
+        data: Optional["SatelliteData"] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        properties: Optional[Dict[str, object]] = None,
+        **kwargs
+    ):
+        super(SatelliteDataIngestionJob, self).__init__(**kwargs)
+        self.farmer_id = farmer_id
+        self.boundary_id = boundary_id
+        self.start_date_time = start_date_time
+        self.end_date_time = end_date_time
+        self.provider = provider
+        self.source = source
+        self.data = data
+        # Server-populated job-status fields start as None on the client.
+        self.id = None
+        self.status = None
+        self.duration_in_seconds = None
+        self.message = None
+        self.created_date_time = None
+        self.last_action_date_time = None
+        self.start_time = None
+        self.end_time = None
+        self.name = name
+        self.description = description
+        self.properties = properties
+
+
class Scene(msrest.serialization.Model):
    """Schema of scene resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param ~datetime.datetime scene_date_time: Date-time of the scene, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :param str provider: Data provider of the scene.
    :param str source: Data source of the scene.
    :param image_files: Collection of image files.
    :type image_files: list[~azure.farmbeats.models.ImageFile]
    :param image_format: Supported image formats for scene resource. Possible values include: "TIF".
    :type image_format: str or ~azure.farmbeats.models.ImageFormat
    :param float cloud_cover_percentage: Cloud cover percentage of the scene.
    :param float dark_pixel_percentage: Dark pixel percentage of the scene.
    :param float ndvi_median_value: Median of NDVI of the scene.
    :param str boundary_id: Boundary ID which belongs to the scene.
    :param str farmer_id: Farmer ID which belongs to the scene.
    :param str id: Unique scene resource ID.
    :ivar str e_tag: The ETag value to implement optimistic concurrency.
    """

    # Server-side validation constraints enforced by msrest on serialization.
    _validation = {
        "provider": {"max_length": 100, "min_length": 2},
        "source": {"max_length": 100, "min_length": 2},
        "cloud_cover_percentage": {"maximum": 100, "minimum": 0},
        "dark_pixel_percentage": {"maximum": 100, "minimum": 0},
        "ndvi_median_value": {"maximum": 1, "minimum": 0},
        "boundary_id": {"max_length": 100, "min_length": 2},
        "e_tag": {"readonly": True},
    }

    # Python attribute -> wire name / msrest type mapping.
    _attribute_map = {
        "scene_date_time": {"key": "sceneDateTime", "type": "iso-8601"},
        "provider": {"key": "provider", "type": "str"},
        "source": {"key": "source", "type": "str"},
        "image_files": {"key": "imageFiles", "type": "[ImageFile]"},
        "image_format": {"key": "imageFormat", "type": "str"},
        "cloud_cover_percentage": {"key": "cloudCoverPercentage", "type": "float"},
        "dark_pixel_percentage": {"key": "darkPixelPercentage", "type": "float"},
        "ndvi_median_value": {"key": "ndviMedianValue", "type": "float"},
        "boundary_id": {"key": "boundaryId", "type": "str"},
        "farmer_id": {"key": "farmerId", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
    }

    def __init__(
        self,
        *,
        scene_date_time: Optional[datetime.datetime] = None,
        provider: Optional[str] = None,
        source: Optional[str] = None,
        image_files: Optional[List["ImageFile"]] = None,
        image_format: Optional[Union[str, "ImageFormat"]] = None,
        cloud_cover_percentage: Optional[float] = None,
        dark_pixel_percentage: Optional[float] = None,
        ndvi_median_value: Optional[float] = None,
        boundary_id: Optional[str] = None,
        farmer_id: Optional[str] = None,
        id: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Client-settable fields.
        self.scene_date_time = scene_date_time
        self.provider = provider
        self.source = source
        self.image_files = image_files
        self.image_format = image_format
        self.cloud_cover_percentage = cloud_cover_percentage
        self.dark_pixel_percentage = dark_pixel_percentage
        self.ndvi_median_value = ndvi_median_value
        self.boundary_id = boundary_id
        self.farmer_id = farmer_id
        self.id = id
        # Populated by the service; ignored on outgoing requests.
        self.e_tag = None
+
+
class SceneListResponse(msrest.serialization.Model):
    """One page of Scene results plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.Scene]
    :param str skip_token: Token used in retrieving the next page; null when no
     additional pages exist.
    :param str next_link: Continuation link (absolute URI) to the next page of
     results in the list.
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Scene]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["Scene"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
class SearchBoundaryQuery(msrest.serialization.Model):
    """SearchAllBoundaries and SearchBoundaries parameters.

    :param list[str] ids: Ids of the resource.
    :param list[str] names: Names of the resource.
    :param list[str] property_filters: Filters on key-value pairs within the
     Properties object, e.g. "{testKey} eq {testValue}".
    :param list[str] statuses: Statuses of the resource.
    :param ~datetime.datetime min_created_date_time: Minimum creation date of resource (inclusive).
    :param ~datetime.datetime max_created_date_time: Maximum creation date of resource (inclusive).
    :param ~datetime.datetime min_last_modified_date_time: Minimum last modified date of resource (inclusive).
    :param ~datetime.datetime max_last_modified_date_time: Maximum last modified date of resource (inclusive).
    :param int max_page_size: Maximum number of items needed (inclusive).
     Minimum = 10, Maximum = 1000, Default value = 50.
    :param str skip_token: Skip token for getting next set of results.
    :param bool is_primary: Is the boundary primary.
    :param str parent_type: Type of the parent it belongs to.
    :param list[str] parent_ids: Parent Ids of the resource.
    :param float min_acreage: Minimum acreage of the boundary (inclusive).
    :param float max_acreage: Maximum acreage of the boundary (inclusive).
    :param intersects_with_geometry: GeoJSON abstract class.
    :type intersects_with_geometry: ~azure.farmbeats.models.GeoJsonObject
    """

    _validation = {
        "max_page_size": {"maximum": 1000, "minimum": 10},
    }

    _attribute_map = {
        "ids": {"key": "ids", "type": "[str]"},
        "names": {"key": "names", "type": "[str]"},
        "property_filters": {"key": "propertyFilters", "type": "[str]"},
        "statuses": {"key": "statuses", "type": "[str]"},
        "min_created_date_time": {"key": "minCreatedDateTime", "type": "iso-8601"},
        "max_created_date_time": {"key": "maxCreatedDateTime", "type": "iso-8601"},
        "min_last_modified_date_time": {"key": "minLastModifiedDateTime", "type": "iso-8601"},
        "max_last_modified_date_time": {"key": "maxLastModifiedDateTime", "type": "iso-8601"},
        "max_page_size": {"key": "$maxPageSize", "type": "int"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "is_primary": {"key": "isPrimary", "type": "bool"},
        "parent_type": {"key": "parentType", "type": "str"},
        "parent_ids": {"key": "parentIds", "type": "[str]"},
        "min_acreage": {"key": "minAcreage", "type": "float"},
        "max_acreage": {"key": "maxAcreage", "type": "float"},
        "intersects_with_geometry": {"key": "intersectsWithGeometry", "type": "GeoJsonObject"},
    }

    def __init__(
        self,
        *,
        ids: Optional[List[str]] = None,
        names: Optional[List[str]] = None,
        property_filters: Optional[List[str]] = None,
        statuses: Optional[List[str]] = None,
        min_created_date_time: Optional[datetime.datetime] = None,
        max_created_date_time: Optional[datetime.datetime] = None,
        min_last_modified_date_time: Optional[datetime.datetime] = None,
        max_last_modified_date_time: Optional[datetime.datetime] = None,
        max_page_size: Optional[int] = 50,
        skip_token: Optional[str] = None,
        is_primary: Optional[bool] = None,
        parent_type: Optional[str] = None,
        parent_ids: Optional[List[str]] = None,
        min_acreage: Optional[float] = None,
        max_acreage: Optional[float] = None,
        intersects_with_geometry: Optional["GeoJsonObject"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Identity filters.
        self.ids = ids
        self.names = names
        self.property_filters = property_filters
        self.statuses = statuses
        # Date-range filters (all inclusive).
        self.min_created_date_time = min_created_date_time
        self.max_created_date_time = max_created_date_time
        self.min_last_modified_date_time = min_last_modified_date_time
        self.max_last_modified_date_time = max_last_modified_date_time
        # Paging controls.
        self.max_page_size = max_page_size
        self.skip_token = skip_token
        # Boundary-specific filters.
        self.is_primary = is_primary
        self.parent_type = parent_type
        self.parent_ids = parent_ids
        self.min_acreage = min_acreage
        self.max_acreage = max_acreage
        self.intersects_with_geometry = intersects_with_geometry
+
+
class Season(msrest.serialization.Model):
    """Schema of season resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param ~datetime.datetime start_date_time: Season start datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :param ~datetime.datetime end_date_time: Season end datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :param int year: Season year.
    :ivar str id: Unique resource ID.
    :ivar str e_tag: The ETag value to implement optimistic concurrency.
    :param str status: Status of the resource.
    :ivar ~datetime.datetime created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :ivar ~datetime.datetime modified_date_time: Date-time when resource was last modified, sample
     format: yyyy-MM-ddTHH:mm:ssZ.
    :param str name: Name to identify resource.
    :param str description: Textual description of the resource.
    :param dict[str, object] properties: A collection of key value pairs that belongs to the
     resource. Each pair must not have a key greater than 50 characters and must not have a value
     greater than 150 characters. Note: A maximum of 25 key value pairs can be provided for a
     resource and only string and numeral values are supported.
    """

    _validation = {
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "start_date_time": {"key": "startDateTime", "type": "iso-8601"},
        "end_date_time": {"key": "endDateTime", "type": "iso-8601"},
        "year": {"key": "year", "type": "int"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        start_date_time: Optional[datetime.datetime] = None,
        end_date_time: Optional[datetime.datetime] = None,
        year: Optional[int] = None,
        status: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Client-settable fields.
        self.start_date_time = start_date_time
        self.end_date_time = end_date_time
        self.year = year
        self.status = status
        self.name = name
        self.description = description
        self.properties = properties
        # Read-only fields populated by the service.
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class SeasonalField(msrest.serialization.Model):
    """Schema of seasonal field resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :ivar str farmer_id: Farmer Id.
    :ivar str primary_boundary_id: Primary boundary id.
    :ivar list[str] boundary_ids: Boundary Ids.
    :param str farm_id: Id of the associated Farm.
    :param str field_id: Id of the associated Field.
    :param str season_id: Id of the season it belongs to.
    :param list[str] crop_variety_ids: CropVariety ids.
    :param str crop_id: Id of the crop it belongs to.
    :param float avg_yield_value: Average yield value of the seasonal field.
    :param str avg_yield_unit: Unit of the average yield value attribute.
    :param float avg_seed_population_value: Average seed population value of the seasonal field.
    :param str avg_seed_population_unit: Unit of average seed population value attribute.
    :param ~datetime.datetime planting_date_time: Planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
    :ivar str id: Unique resource ID.
    :ivar str e_tag: The ETag value to implement optimistic concurrency.
    :param str status: Status of the resource.
    :ivar ~datetime.datetime created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :ivar ~datetime.datetime modified_date_time: Date-time when resource was last modified, sample
     format: yyyy-MM-ddTHH:mm:ssZ.
    :param str name: Name to identify resource.
    :param str description: Textual description of the resource.
    :param dict[str, object] properties: A collection of key value pairs that belongs to the
     resource. Each pair must not have a key greater than 50 characters and must not have a value
     greater than 150 characters. Note: A maximum of 25 key value pairs can be provided for a
     resource and only string and numeral values are supported.
    """

    _validation = {
        "farmer_id": {"readonly": True},
        "primary_boundary_id": {"readonly": True},
        "boundary_ids": {"readonly": True, "unique": True},
        "crop_variety_ids": {"unique": True},
        "avg_yield_unit": {"max_length": 32, "min_length": 2},
        "avg_seed_population_unit": {"max_length": 32, "min_length": 2},
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "farmer_id": {"key": "farmerId", "type": "str"},
        "primary_boundary_id": {"key": "primaryBoundaryId", "type": "str"},
        "boundary_ids": {"key": "boundaryIds", "type": "[str]"},
        "farm_id": {"key": "farmId", "type": "str"},
        "field_id": {"key": "fieldId", "type": "str"},
        "season_id": {"key": "seasonId", "type": "str"},
        "crop_variety_ids": {"key": "cropVarietyIds", "type": "[str]"},
        "crop_id": {"key": "cropId", "type": "str"},
        "avg_yield_value": {"key": "avgYieldValue", "type": "float"},
        "avg_yield_unit": {"key": "avgYieldUnit", "type": "str"},
        "avg_seed_population_value": {"key": "avgSeedPopulationValue", "type": "float"},
        "avg_seed_population_unit": {"key": "avgSeedPopulationUnit", "type": "str"},
        "planting_date_time": {"key": "plantingDateTime", "type": "iso-8601"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        farm_id: Optional[str] = None,
        field_id: Optional[str] = None,
        season_id: Optional[str] = None,
        crop_variety_ids: Optional[List[str]] = None,
        crop_id: Optional[str] = None,
        avg_yield_value: Optional[float] = None,
        avg_yield_unit: Optional[str] = None,
        avg_seed_population_value: Optional[float] = None,
        avg_seed_population_unit: Optional[str] = None,
        planting_date_time: Optional[datetime.datetime] = None,
        status: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Client-settable association and agronomy fields.
        self.farm_id = farm_id
        self.field_id = field_id
        self.season_id = season_id
        self.crop_variety_ids = crop_variety_ids
        self.crop_id = crop_id
        self.avg_yield_value = avg_yield_value
        self.avg_yield_unit = avg_yield_unit
        self.avg_seed_population_value = avg_seed_population_value
        self.avg_seed_population_unit = avg_seed_population_unit
        self.planting_date_time = planting_date_time
        self.status = status
        self.name = name
        self.description = description
        self.properties = properties
        # Read-only fields populated by the service.
        self.farmer_id = None
        self.primary_boundary_id = None
        self.boundary_ids = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class SeasonalFieldListResponse(msrest.serialization.Model):
    """One page of SeasonalField results plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.SeasonalField]
    :param str skip_token: Token used in retrieving the next page; null when no
     additional pages exist.
    :param str next_link: Continuation link (absolute URI) to the next page of
     results in the list.
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[SeasonalField]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["SeasonalField"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
class SeasonListResponse(msrest.serialization.Model):
    """One page of Season results plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.Season]
    :param str skip_token: Token used in retrieving the next page; null when no
     additional pages exist.
    :param str next_link: Continuation link (absolute URI) to the next page of
     results in the list.
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[Season]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["Season"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
class TillageData(msrest.serialization.Model):
    """Schema of tillage data resource.

    Variables are only populated by the server, and will be ignored when sending a request.

    :param tillage_depth: Schema for storing measurement reading and unit.
    :type tillage_depth: ~azure.farmbeats.models.Measure
    :param tillage_pressure: Schema for storing measurement reading and unit.
    :type tillage_pressure: ~azure.farmbeats.models.Measure
    :param area: Schema for storing measurement reading and unit.
    :type area: ~azure.farmbeats.models.Measure
    :param str source: Source of the operation data.
    :param ~datetime.datetime operation_modified_date_time: Modified date-time of the operation
     data, sample format: yyyy-MM-ddTHH:mm:ssZ. Note: this will be specified by the source provider
     itself.
    :param ~datetime.datetime operation_start_date_time: Start date-time of the operation data,
     sample format: yyyy-MM-ddTHH:mm:ssZ.
    :param ~datetime.datetime operation_end_date_time: End date-time of the operation data, sample
     format: yyyy-MM-ddTHH:mm:ssZ.
    :ivar str attachments_link: Link for attachments.
    :param str associated_boundary_id: Optional boundary ID of the field for which operation was
     applied.
    :param str operation_boundary_id: Optional boundary ID of the actual area for which operation
     was applied inside the specified field.
    :ivar str farmer_id: Farmer ID which belongs to the operation data.
    :ivar str id: Unique resource ID.
    :ivar str e_tag: The ETag value to implement optimistic concurrency.
    :param str status: Status of the resource.
    :ivar ~datetime.datetime created_date_time: Date-time when resource was created, sample format:
     yyyy-MM-ddTHH:mm:ssZ.
    :ivar ~datetime.datetime modified_date_time: Date-time when resource was last modified, sample
     format: yyyy-MM-ddTHH:mm:ssZ.
    :param str name: Name to identify resource.
    :param str description: Textual description of the resource.
    :param dict[str, object] properties: A collection of key value pairs that belongs to the
     resource. Each pair must not have a key greater than 50 characters and must not have a value
     greater than 150 characters. Note: A maximum of 25 key value pairs can be provided for a
     resource and only string and numeral values are supported.
    """

    _validation = {
        "source": {"max_length": 100, "min_length": 2},
        "attachments_link": {"readonly": True},
        "farmer_id": {"readonly": True},
        "id": {"readonly": True},
        "e_tag": {"readonly": True},
        "status": {"max_length": 100, "min_length": 0},
        "created_date_time": {"readonly": True},
        "modified_date_time": {"readonly": True},
        "name": {"max_length": 100, "min_length": 0},
        "description": {"max_length": 500, "min_length": 0},
    }

    _attribute_map = {
        "tillage_depth": {"key": "tillageDepth", "type": "Measure"},
        "tillage_pressure": {"key": "tillagePressure", "type": "Measure"},
        "area": {"key": "area", "type": "Measure"},
        "source": {"key": "source", "type": "str"},
        "operation_modified_date_time": {"key": "operationModifiedDateTime", "type": "iso-8601"},
        "operation_start_date_time": {"key": "operationStartDateTime", "type": "iso-8601"},
        "operation_end_date_time": {"key": "operationEndDateTime", "type": "iso-8601"},
        "attachments_link": {"key": "attachmentsLink", "type": "str"},
        "associated_boundary_id": {"key": "associatedBoundaryId", "type": "str"},
        "operation_boundary_id": {"key": "operationBoundaryId", "type": "str"},
        "farmer_id": {"key": "farmerId", "type": "str"},
        "id": {"key": "id", "type": "str"},
        "e_tag": {"key": "eTag", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "created_date_time": {"key": "createdDateTime", "type": "iso-8601"},
        "modified_date_time": {"key": "modifiedDateTime", "type": "iso-8601"},
        "name": {"key": "name", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "properties": {"key": "properties", "type": "{object}"},
    }

    def __init__(
        self,
        *,
        tillage_depth: Optional["Measure"] = None,
        tillage_pressure: Optional["Measure"] = None,
        area: Optional["Measure"] = None,
        source: Optional[str] = None,
        operation_modified_date_time: Optional[datetime.datetime] = None,
        operation_start_date_time: Optional[datetime.datetime] = None,
        operation_end_date_time: Optional[datetime.datetime] = None,
        associated_boundary_id: Optional[str] = None,
        operation_boundary_id: Optional[str] = None,
        status: Optional[str] = None,
        name: Optional[str] = None,
        description: Optional[str] = None,
        properties: Optional[Dict[str, object]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Client-settable measurement and operation fields.
        self.tillage_depth = tillage_depth
        self.tillage_pressure = tillage_pressure
        self.area = area
        self.source = source
        self.operation_modified_date_time = operation_modified_date_time
        self.operation_start_date_time = operation_start_date_time
        self.operation_end_date_time = operation_end_date_time
        self.associated_boundary_id = associated_boundary_id
        self.operation_boundary_id = operation_boundary_id
        self.status = status
        self.name = name
        self.description = description
        self.properties = properties
        # Read-only fields populated by the service.
        self.attachments_link = None
        self.farmer_id = None
        self.id = None
        self.e_tag = None
        self.created_date_time = None
        self.modified_date_time = None
+
+
class TillageDataListResponse(msrest.serialization.Model):
    """One page of TillageData results plus continuation information.

    :param value: List of requested objects.
    :type value: list[~azure.farmbeats.models.TillageData]
    :param str skip_token: Token used in retrieving the next page; null when no
     additional pages exist.
    :param str next_link: Continuation link (absolute URI) to the next page of
     results in the list.
    """

    _attribute_map = {
        "value": {"key": "value", "type": "[TillageData]"},
        "skip_token": {"key": "$skipToken", "type": "str"},
        "next_link": {"key": "nextLink", "type": "str"},
    }

    def __init__(
        self,
        *,
        value: Optional[List["TillageData"]] = None,
        skip_token: Optional[str] = None,
        next_link: Optional[str] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.value = value
        self.skip_token = skip_token
        self.next_link = next_link
+
+
+class WeatherData(msrest.serialization.Model):
+ """Schema of weather data.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param farmer_id: Required. Farmer ID.
+ :type farmer_id: str
+ :param boundary_id: Required. Boundary ID.
+ :type boundary_id: str
+ :param extension_id: Required. ID of the weather extension.
+ :type extension_id: str
+ :param location: Required. Location model class.
+ :type location: ~azure.farmbeats.models.Location
+ :param date_time: Required. Date-time of the weather data, sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :type date_time: ~datetime.datetime
+ :param unit_system_code: Unit System like US/SI etc.
+ :type unit_system_code: str
+ :param extension_version: Required. Version of the weather data extension.
+ :type extension_version: str
+ :param weather_data_type: Required. Type of weather data (forecast/historical).
+ :type weather_data_type: str
+ :param granularity: Required. Granularity of weather data (daily/hourly).
+ :type granularity: str
+ :param cloud_cover: Schema for storing measurement reading and unit.
+ :type cloud_cover: ~azure.farmbeats.models.Measure
+ :param dew_point: Schema for storing measurement reading and unit.
+ :type dew_point: ~azure.farmbeats.models.Measure
+ :param growing_degree_day: Schema for storing measurement reading and unit.
+ :type growing_degree_day: ~azure.farmbeats.models.Measure
+ :param precipitation: Schema for storing measurement reading and unit.
+ :type precipitation: ~azure.farmbeats.models.Measure
+ :param pressure: Schema for storing measurement reading and unit.
+ :type pressure: ~azure.farmbeats.models.Measure
+ :param relative_humidity: Schema for storing measurement reading and unit.
+ :type relative_humidity: ~azure.farmbeats.models.Measure
+ :param soil_moisture: Schema for storing measurement reading and unit.
+ :type soil_moisture: ~azure.farmbeats.models.Measure
+ :param soil_temperature: Schema for storing measurement reading and unit.
+ :type soil_temperature: ~azure.farmbeats.models.Measure
+ :param temperature: Schema for storing measurement reading and unit.
+ :type temperature: ~azure.farmbeats.models.Measure
+ :param visibility: Schema for storing measurement reading and unit.
+ :type visibility: ~azure.farmbeats.models.Measure
+ :param wet_bulb_temperature: Schema for storing measurement reading and unit.
+ :type wet_bulb_temperature: ~azure.farmbeats.models.Measure
+ :param wind_chill: Schema for storing measurement reading and unit.
+ :type wind_chill: ~azure.farmbeats.models.Measure
+ :param wind_direction: Schema for storing measurement reading and unit.
+ :type wind_direction: ~azure.farmbeats.models.Measure
+ :param wind_gust: Schema for storing measurement reading and unit.
+ :type wind_gust: ~azure.farmbeats.models.Measure
+ :param wind_speed: Schema for storing measurement reading and unit.
+ :type wind_speed: ~azure.farmbeats.models.Measure
+ :param id: Weather data ID.
+ :type id: str
+ :ivar e_tag: The ETag value to implement optimistic concurrency.
+ :vartype e_tag: str
+ :ivar created_date_time: Date-time when resource was created, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar modified_date_time: Date-time when resource was last modified, sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype modified_date_time: ~datetime.datetime
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {
+ 'farmer_id': {'required': True},
+ 'boundary_id': {'required': True},
+ 'extension_id': {'required': True},
+ 'location': {'required': True},
+ 'date_time': {'required': True},
+ 'extension_version': {'required': True},
+ 'weather_data_type': {'required': True},
+ 'granularity': {'required': True},
+ 'e_tag': {'readonly': True},
+ 'created_date_time': {'readonly': True},
+ 'modified_date_time': {'readonly': True},
+ }
+
+ _attribute_map = {  # python attribute -> (REST wire key, msrest serialization type)
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'boundary_id': {'key': 'boundaryId', 'type': 'str'},
+ 'extension_id': {'key': 'extensionId', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'Location'},
+ 'date_time': {'key': 'dateTime', 'type': 'iso-8601'},
+ 'unit_system_code': {'key': 'unitSystemCode', 'type': 'str'},
+ 'extension_version': {'key': 'extensionVersion', 'type': 'str'},
+ 'weather_data_type': {'key': 'weatherDataType', 'type': 'str'},
+ 'granularity': {'key': 'granularity', 'type': 'str'},
+ 'cloud_cover': {'key': 'cloudCover', 'type': 'Measure'},
+ 'dew_point': {'key': 'dewPoint', 'type': 'Measure'},
+ 'growing_degree_day': {'key': 'growingDegreeDay', 'type': 'Measure'},
+ 'precipitation': {'key': 'precipitation', 'type': 'Measure'},
+ 'pressure': {'key': 'pressure', 'type': 'Measure'},
+ 'relative_humidity': {'key': 'relativeHumidity', 'type': 'Measure'},
+ 'soil_moisture': {'key': 'soilMoisture', 'type': 'Measure'},
+ 'soil_temperature': {'key': 'soilTemperature', 'type': 'Measure'},
+ 'temperature': {'key': 'temperature', 'type': 'Measure'},
+ 'visibility': {'key': 'visibility', 'type': 'Measure'},
+ 'wet_bulb_temperature': {'key': 'wetBulbTemperature', 'type': 'Measure'},
+ 'wind_chill': {'key': 'windChill', 'type': 'Measure'},
+ 'wind_direction': {'key': 'windDirection', 'type': 'Measure'},
+ 'wind_gust': {'key': 'windGust', 'type': 'Measure'},
+ 'wind_speed': {'key': 'windSpeed', 'type': 'Measure'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'e_tag': {'key': 'eTag', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'modified_date_time': {'key': 'modifiedDateTime', 'type': 'iso-8601'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ farmer_id: str,
+ boundary_id: str,
+ extension_id: str,
+ location: "Location",
+ date_time: datetime.datetime,
+ extension_version: str,
+ weather_data_type: str,
+ granularity: str,
+ unit_system_code: Optional[str] = None,
+ cloud_cover: Optional["Measure"] = None,
+ dew_point: Optional["Measure"] = None,
+ growing_degree_day: Optional["Measure"] = None,
+ precipitation: Optional["Measure"] = None,
+ pressure: Optional["Measure"] = None,
+ relative_humidity: Optional["Measure"] = None,
+ soil_moisture: Optional["Measure"] = None,
+ soil_temperature: Optional["Measure"] = None,
+ temperature: Optional["Measure"] = None,
+ visibility: Optional["Measure"] = None,
+ wet_bulb_temperature: Optional["Measure"] = None,
+ wind_chill: Optional["Measure"] = None,
+ wind_direction: Optional["Measure"] = None,
+ wind_gust: Optional["Measure"] = None,
+ wind_speed: Optional["Measure"] = None,
+ id: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(WeatherData, self).__init__(**kwargs)
+ self.farmer_id = farmer_id
+ self.boundary_id = boundary_id
+ self.extension_id = extension_id
+ self.location = location
+ self.date_time = date_time
+ self.unit_system_code = unit_system_code
+ self.extension_version = extension_version
+ self.weather_data_type = weather_data_type
+ self.granularity = granularity
+ self.cloud_cover = cloud_cover
+ self.dew_point = dew_point
+ self.growing_degree_day = growing_degree_day
+ self.precipitation = precipitation
+ self.pressure = pressure
+ self.relative_humidity = relative_humidity
+ self.soil_moisture = soil_moisture
+ self.soil_temperature = soil_temperature
+ self.temperature = temperature
+ self.visibility = visibility
+ self.wet_bulb_temperature = wet_bulb_temperature
+ self.wind_chill = wind_chill
+ self.wind_direction = wind_direction
+ self.wind_gust = wind_gust
+ self.wind_speed = wind_speed
+ self.id = id
+ self.e_tag = None  # readonly: populated from service responses only
+ self.created_date_time = None  # readonly: populated from service responses only
+ self.modified_date_time = None  # readonly: populated from service responses only
+ self.properties = properties
+
+
+class WeatherDataDeleteJob(msrest.serialization.Model):
+ """Schema of weather data delete job.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param extension_id: Required. Id of the extension to be used for the providerInput. e.g.
+ DTN.ClearAg.
+ :type extension_id: str
+ :param farmer_id: Required. The id of the farmer object for which weather data is being
+ fetched.
+ :type farmer_id: str
+ :param boundary_id: Required. The id of the boundary object for which weather data is being
+ fetched.
+ :type boundary_id: str
+ :param weather_data_type: Type of weather data. Possible values include: 'forecast',
+ 'historical'.
+ :type weather_data_type: str
+ :param granularity: Granularity of weather data. Possible values include: 'daily', 'hourly'.
+ :type granularity: str
+ :param start_date_time: Weather data start UTC date-time (inclusive), sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type start_date_time: ~datetime.datetime
+ :param end_date_time: Weather data end UTC date-time (inclusive), sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type end_date_time: ~datetime.datetime
+ :ivar id: Unique job id.
+ :vartype id: str
+ :ivar status: Status of the job.
+ Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
+ :vartype status: str
+ :ivar duration_in_seconds: Duration of the job in seconds.
+ :vartype duration_in_seconds: float
+ :ivar message: Status message to capture more details of the job.
+ :vartype message: str
+ :ivar created_date_time: Job created at dateTime. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar last_action_date_time: Job was last acted upon at dateTime. Sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype last_action_date_time: ~datetime.datetime
+ :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype start_time: ~datetime.datetime
+ :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype end_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {  # msrest constraints; 'readonly' fields are server-populated and never sent
+ 'extension_id': {'required': True, 'max_length': 100, 'min_length': 2, 'pattern': r'^[A-za-z]{3,50}[.][A-za-z]{3,100}$'},  # NOTE(review): [A-za-z] also matches '[', '\', ']', '^', '_', '`' -- likely intended [A-Za-z]; pattern mirrors the service swagger, confirm before tightening
+ 'farmer_id': {'required': True},
+ 'boundary_id': {'required': True},
+ 'id': {'readonly': True},
+ 'status': {'readonly': True},
+ 'duration_in_seconds': {'readonly': True},
+ 'message': {'readonly': True},
+ 'created_date_time': {'readonly': True},
+ 'last_action_date_time': {'readonly': True},
+ 'start_time': {'readonly': True},
+ 'end_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ _attribute_map = {  # python attribute -> (REST wire key, msrest serialization type)
+ 'extension_id': {'key': 'extensionId', 'type': 'str'},
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'boundary_id': {'key': 'boundaryId', 'type': 'str'},
+ 'weather_data_type': {'key': 'weatherDataType', 'type': 'str'},
+ 'granularity': {'key': 'granularity', 'type': 'str'},
+ 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'},
+ 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'end_time': {'key': 'endTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ extension_id: str,
+ farmer_id: str,
+ boundary_id: str,
+ weather_data_type: Optional[str] = None,
+ granularity: Optional[str] = None,
+ start_date_time: Optional[datetime.datetime] = None,
+ end_date_time: Optional[datetime.datetime] = None,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(WeatherDataDeleteJob, self).__init__(**kwargs)
+ self.extension_id = extension_id
+ self.farmer_id = farmer_id
+ self.boundary_id = boundary_id
+ self.weather_data_type = weather_data_type
+ self.granularity = granularity
+ self.start_date_time = start_date_time
+ self.end_date_time = end_date_time
+ self.id = None  # readonly: populated from service responses only
+ self.status = None  # readonly: populated from service responses only
+ self.duration_in_seconds = None  # readonly
+ self.message = None  # readonly
+ self.created_date_time = None  # readonly
+ self.last_action_date_time = None  # readonly
+ self.start_time = None  # readonly
+ self.end_time = None  # readonly
+ self.name = name
+ self.description = description
+ self.properties = properties
+
+
+class WeatherDataIngestionJob(msrest.serialization.Model):
+ """Schema of weather ingestion job.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param boundary_id: Required. The id of the boundary object for which weather data is being
+ fetched.
+ :type boundary_id: str
+ :param farmer_id: Required. The id of the farmer object for which weather data is being
+ fetched.
+ :type farmer_id: str
+ :param extension_id: Required. Id of the extension to be used for the providerInput. e.g.
+ DTN.ClearAg.
+ :type extension_id: str
+ :param extension_api_name: Required. Extension api name to which request is to be made.
+ :type extension_api_name: str
+ :param extension_api_input: Required. Extension api input dictionary which would be used to
+ feed request query/body/parameter information.
+ :type extension_api_input: dict[str, object]
+ :param extension_data_provider_app_id: App id of the weather data provider.
+ :type extension_data_provider_app_id: str
+ :param extension_data_provider_api_key: Api key of the weather data provider.
+ :type extension_data_provider_api_key: str
+ :ivar id: Unique job id.
+ :vartype id: str
+ :ivar status: Status of the job.
+ Possible values: 'Waiting', 'Running', 'Succeeded', 'Failed', 'Cancelled'.
+ :vartype status: str
+ :ivar duration_in_seconds: Duration of the job in seconds.
+ :vartype duration_in_seconds: float
+ :ivar message: Status message to capture more details of the job.
+ :vartype message: str
+ :ivar created_date_time: Job created at dateTime. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype created_date_time: ~datetime.datetime
+ :ivar last_action_date_time: Job was last acted upon at dateTime. Sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :vartype last_action_date_time: ~datetime.datetime
+ :ivar start_time: Job start time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype start_time: ~datetime.datetime
+ :ivar end_time: Job end time when available. Sample format: yyyy-MM-ddTHH:mm:ssZ.
+ :vartype end_time: ~datetime.datetime
+ :param name: Name to identify resource.
+ :type name: str
+ :param description: Textual description of the resource.
+ :type description: str
+ :param properties: A collection of key value pairs that belongs to the resource.
+ Each pair must not have a key greater than 50 characters
+ and must not have a value greater than 150 characters.
+ Note: A maximum of 25 key value pairs can be provided for a resource and only string and
+ numeral values are supported.
+ :type properties: dict[str, object]
+ """
+
+ _validation = {  # msrest constraints; 'readonly' fields are server-populated and never sent
+ 'boundary_id': {'required': True},
+ 'farmer_id': {'required': True},
+ 'extension_id': {'required': True, 'max_length': 100, 'min_length': 2, 'pattern': r'^[A-za-z]{3,50}[.][A-za-z]{3,100}$'},  # NOTE(review): [A-za-z] also matches '[', '\', ']', '^', '_', '`' -- likely intended [A-Za-z]; pattern mirrors the service swagger, confirm before tightening
+ 'extension_api_name': {'required': True, 'max_length': 100, 'min_length': 2},
+ 'extension_api_input': {'required': True},
+ 'extension_data_provider_app_id': {'max_length': 200, 'min_length': 2},
+ 'extension_data_provider_api_key': {'max_length': 200, 'min_length': 2},
+ 'id': {'readonly': True},
+ 'status': {'readonly': True},
+ 'duration_in_seconds': {'readonly': True},
+ 'message': {'readonly': True},
+ 'created_date_time': {'readonly': True},
+ 'last_action_date_time': {'readonly': True},
+ 'start_time': {'readonly': True},
+ 'end_time': {'readonly': True},
+ 'name': {'max_length': 100, 'min_length': 0},
+ 'description': {'max_length': 500, 'min_length': 0},
+ }
+
+ _attribute_map = {  # python attribute -> (REST wire key, msrest serialization type)
+ 'boundary_id': {'key': 'boundaryId', 'type': 'str'},
+ 'farmer_id': {'key': 'farmerId', 'type': 'str'},
+ 'extension_id': {'key': 'extensionId', 'type': 'str'},
+ 'extension_api_name': {'key': 'extensionApiName', 'type': 'str'},
+ 'extension_api_input': {'key': 'extensionApiInput', 'type': '{object}'},
+ 'extension_data_provider_app_id': {'key': 'extensionDataProviderAppId', 'type': 'str'},
+ 'extension_data_provider_api_key': {'key': 'extensionDataProviderApiKey', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'duration_in_seconds': {'key': 'durationInSeconds', 'type': 'float'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'},
+ 'last_action_date_time': {'key': 'lastActionDateTime', 'type': 'iso-8601'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'end_time': {'key': 'endTime', 'type': 'iso-8601'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'properties': {'key': 'properties', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ boundary_id: str,
+ farmer_id: str,
+ extension_id: str,
+ extension_api_name: str,
+ extension_api_input: Dict[str, object],
+ extension_data_provider_app_id: Optional[str] = None,
+ extension_data_provider_api_key: Optional[str] = None,
+ name: Optional[str] = None,
+ description: Optional[str] = None,
+ properties: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(WeatherDataIngestionJob, self).__init__(**kwargs)
+ self.boundary_id = boundary_id
+ self.farmer_id = farmer_id
+ self.extension_id = extension_id
+ self.extension_api_name = extension_api_name
+ self.extension_api_input = extension_api_input
+ self.extension_data_provider_app_id = extension_data_provider_app_id
+ self.extension_data_provider_api_key = extension_data_provider_api_key  # NOTE(review): provider API key travels in the request body; confirm it is handled as a secret by callers
+ self.id = None  # readonly: populated from service responses only
+ self.status = None  # readonly
+ self.duration_in_seconds = None  # readonly
+ self.message = None  # readonly
+ self.created_date_time = None  # readonly
+ self.last_action_date_time = None  # readonly
+ self.start_time = None  # readonly
+ self.end_time = None  # readonly
+ self.name = name
+ self.description = description
+ self.properties = properties
+
+
+class WeatherDataListResponse(msrest.serialization.Model):
+ """Paged response contains list of requested objects and a URL link to get the next set of results.
+
+ :param value: List of requested objects.
+ :type value: list[~azure.farmbeats.models.WeatherData]
+ :param skip_token: Token used in retrieving the next page. If null, there are no additional
+ pages.
+ :type skip_token: str
+ :param next_link: Continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {  # python attribute -> (REST wire key, msrest serialization type)
+ 'value': {'key': 'value', 'type': '[WeatherData]'},
+ 'skip_token': {'key': '$skipToken', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WeatherData"]] = None,
+ skip_token: Optional[str] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(WeatherDataListResponse, self).__init__(**kwargs)
+ self.value = value
+ self.skip_token = skip_token
+ self.next_link = next_link
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/__init__.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/__init__.py
new file mode 100644
index 000000000000..ee0bef1b3236
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/__init__.py
@@ -0,0 +1,47 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._application_data_operations import ApplicationDataOperations
+from ._attachments_operations import AttachmentsOperations
+from ._boundaries_operations import BoundariesOperations
+from ._crops_operations import CropsOperations
+from ._crop_varieties_operations import CropVarietiesOperations
+from ._farmers_operations import FarmersOperations
+from ._farm_operations_operations import FarmOperationsOperations
+from ._farms_operations import FarmsOperations
+from ._fields_operations import FieldsOperations
+from ._harvest_data_operations import HarvestDataOperations
+from ._image_processing_operations import ImageProcessingOperations
+from ._oauth_providers_operations import OAuthProvidersOperations
+from ._oauth_tokens_operations import OAuthTokensOperations
+from ._planting_data_operations import PlantingDataOperations
+from ._scenes_operations import ScenesOperations
+from ._seasonal_fields_operations import SeasonalFieldsOperations
+from ._seasons_operations import SeasonsOperations
+from ._tillage_data_operations import TillageDataOperations
+from ._weather_operations import WeatherOperations
+
+__all__ = [  # public operation-group surface re-exported by the operations package
+ 'ApplicationDataOperations',
+ 'AttachmentsOperations',
+ 'BoundariesOperations',
+ 'CropsOperations',
+ 'CropVarietiesOperations',
+ 'FarmersOperations',
+ 'FarmOperationsOperations',
+ 'FarmsOperations',
+ 'FieldsOperations',
+ 'HarvestDataOperations',
+ 'ImageProcessingOperations',
+ 'OAuthProvidersOperations',
+ 'OAuthTokensOperations',
+ 'PlantingDataOperations',
+ 'ScenesOperations',
+ 'SeasonalFieldsOperations',
+ 'SeasonsOperations',
+ 'TillageDataOperations',
+ 'WeatherOperations',
+]
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_application_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_application_data_operations.py
new file mode 100644
index 000000000000..1ddf06b4d815
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_application_data_operations.py
@@ -0,0 +1,646 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ApplicationDataOperations(object):
+ """ApplicationDataOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client  # pipeline client shared with the parent service client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_farmer_id(
+ self,
+ farmer_id, # type: str
+ min_avg_material=None, # type: Optional[float]
+ max_avg_material=None, # type: Optional[float]
+ min_total_material=None, # type: Optional[float]
+ max_total_material=None, # type: Optional[float]
+ sources=None, # type: Optional[List[str]]
+ associated_boundary_ids=None, # type: Optional[List[str]]
+ operation_boundary_ids=None, # type: Optional[List[str]]
+ min_operation_start_date_time=None, # type: Optional[datetime.datetime]
+ max_operation_start_date_time=None, # type: Optional[datetime.datetime]
+ min_operation_end_date_time=None, # type: Optional[datetime.datetime]
+ max_operation_end_date_time=None, # type: Optional[datetime.datetime]
+ min_operation_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_operation_modified_date_time=None, # type: Optional[datetime.datetime]
+ min_area=None, # type: Optional[float]
+ max_area=None, # type: Optional[float]
+ ids=None, # type: Optional[List[str]]
+ names=None, # type: Optional[List[str]]
+ property_filters=None, # type: Optional[List[str]]
+ statuses=None, # type: Optional[List[str]]
+ min_created_date_time=None, # type: Optional[datetime.datetime]
+ max_created_date_time=None, # type: Optional[datetime.datetime]
+ min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_page_size=50, # type: Optional[int]
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["_models.ApplicationDataListResponse"]
+ """Returns a paginated list of application data resources under a particular farm.
+
+ :param farmer_id: ID of the associated farmer.
+ :type farmer_id: str
+ :param min_avg_material: Minimum average amount of material applied during the application
+ (inclusive).
+ :type min_avg_material: float
+ :param max_avg_material: Maximum average amount of material applied during the application
+ (inclusive).
+ :type max_avg_material: float
+ :param min_total_material: Minimum total amount of material applied during the application
+ (inclusive).
+ :type min_total_material: float
+ :param max_total_material: Maximum total amount of material applied during the application
+ (inclusive).
+ :type max_total_material: float
+ :param sources: Sources of the operation data.
+ :type sources: list[str]
+ :param associated_boundary_ids: Boundary IDs associated with operation data.
+ :type associated_boundary_ids: list[str]
+ :param operation_boundary_ids: Operation boundary IDs associated with operation data.
+ :type operation_boundary_ids: list[str]
+ :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
+ format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_start_date_time: ~datetime.datetime
+ :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
+ format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_start_date_time: ~datetime.datetime
+ :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_end_date_time: ~datetime.datetime
+ :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_end_date_time: ~datetime.datetime
+ :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
+ sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_modified_date_time: ~datetime.datetime
+ :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
+ sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_modified_date_time: ~datetime.datetime
+ :param min_area: Minimum area for which operation was applied (inclusive).
+ :type min_area: float
+ :param max_area: Maximum area for which operation was applied (inclusive).
+ :type max_area: float
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ e.g. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ApplicationDataListResponse or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.ApplicationDataListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationDataListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"  # service API version pinned by the generator
+ accept = "application/json"
+
+ def prepare_request(next_link=None):  # builds the first-page request, or a follow-up request from next_link
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:  # first page: build full URL and serialize every optional filter into the query string
+ # Construct URL
+ url = self.list_by_farmer_id.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if min_avg_material is not None:
+ query_parameters['minAvgMaterial'] = self._serialize.query("min_avg_material", min_avg_material, 'float')
+ if max_avg_material is not None:
+ query_parameters['maxAvgMaterial'] = self._serialize.query("max_avg_material", max_avg_material, 'float')
+ if min_total_material is not None:
+ query_parameters['minTotalMaterial'] = self._serialize.query("min_total_material", min_total_material, 'float')
+ if max_total_material is not None:
+ query_parameters['maxTotalMaterial'] = self._serialize.query("max_total_material", max_total_material, 'float')
+ if sources is not None:
+ query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
+ if associated_boundary_ids is not None:
+ query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
+ if operation_boundary_ids is not None:
+ query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
+ if min_operation_start_date_time is not None:
+ query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
+ if max_operation_start_date_time is not None:
+ query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
+ if min_operation_end_date_time is not None:
+ query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
+ if max_operation_end_date_time is not None:
+ query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
+ if min_operation_modified_date_time is not None:
+ query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
+ if max_operation_modified_date_time is not None:
+ query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
+ if min_area is not None:
+ query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
+ if max_area is not None:
+ query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)  # client-side bounds mirror the service contract (10..1000)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:  # subsequent pages: next_link already encodes the query; send no extra parameters
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):  # deserialize one page into (continuation token, item iterator)
+ deserialized = self._deserialize('ApplicationDataListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):  # fetch a page, mapping HTTP errors to typed exceptions
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/application-data'} # type: ignore
+
+    def list(
+        self,
+        min_avg_material=None, # type: Optional[float]
+        max_avg_material=None, # type: Optional[float]
+        min_total_material=None, # type: Optional[float]
+        max_total_material=None, # type: Optional[float]
+        sources=None, # type: Optional[List[str]]
+        associated_boundary_ids=None, # type: Optional[List[str]]
+        operation_boundary_ids=None, # type: Optional[List[str]]
+        min_operation_start_date_time=None, # type: Optional[datetime.datetime]
+        max_operation_start_date_time=None, # type: Optional[datetime.datetime]
+        min_operation_end_date_time=None, # type: Optional[datetime.datetime]
+        max_operation_end_date_time=None, # type: Optional[datetime.datetime]
+        min_operation_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_operation_modified_date_time=None, # type: Optional[datetime.datetime]
+        min_area=None, # type: Optional[float]
+        max_area=None, # type: Optional[float]
+        ids=None, # type: Optional[List[str]]
+        names=None, # type: Optional[List[str]]
+        property_filters=None, # type: Optional[List[str]]
+        statuses=None, # type: Optional[List[str]]
+        min_created_date_time=None, # type: Optional[datetime.datetime]
+        max_created_date_time=None, # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_page_size=50, # type: Optional[int]
+        skip_token=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.ApplicationDataListResponse"]
+        """Returns a paginated list of application data resources across all farmers.
+
+        :param min_avg_material: Minimum average amount of material applied during the application
+         (inclusive).
+        :type min_avg_material: float
+        :param max_avg_material: Maximum average amount of material applied during the application
+         (inclusive).
+        :type max_avg_material: float
+        :param min_total_material: Minimum total amount of material applied during the application
+         (inclusive).
+        :type min_total_material: float
+        :param max_total_material: Maximum total amount of material applied during the application
+         (inclusive).
+        :type max_total_material: float
+        :param sources: Sources of the operation data.
+        :type sources: list[str]
+        :param associated_boundary_ids: Boundary IDs associated with operation data.
+        :type associated_boundary_ids: list[str]
+        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
+        :type operation_boundary_ids: list[str]
+        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_start_date_time: ~datetime.datetime
+        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_start_date_time: ~datetime.datetime
+        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_end_date_time: ~datetime.datetime
+        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_end_date_time: ~datetime.datetime
+        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_modified_date_time: ~datetime.datetime
+        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_modified_date_time: ~datetime.datetime
+        :param min_area: Minimum area for which operation was applied (inclusive).
+        :type min_area: float
+        :param max_area: Maximum area for which operation was applied (inclusive).
+        :type max_area: float
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either ApplicationDataListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.ApplicationDataListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # 'cls' is the azure-core post-processing hook: when supplied it is applied
+        # to each page's element list in extract_data below.
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationDataListResponse"]
+        # Map well-known HTTP status codes to typed azure-core exceptions; callers
+        # may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the first-page request from the operation metadata URL, or a
+            # follow-up request from the service-supplied next_link (the next_link
+            # already embeds the query string, so filters are not re-serialized).
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url'] # type: ignore
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                if min_avg_material is not None:
+                    query_parameters['minAvgMaterial'] = self._serialize.query("min_avg_material", min_avg_material, 'float')
+                if max_avg_material is not None:
+                    query_parameters['maxAvgMaterial'] = self._serialize.query("max_avg_material", max_avg_material, 'float')
+                if min_total_material is not None:
+                    query_parameters['minTotalMaterial'] = self._serialize.query("min_total_material", min_total_material, 'float')
+                if max_total_material is not None:
+                    query_parameters['maxTotalMaterial'] = self._serialize.query("max_total_material", max_total_material, 'float')
+                if sources is not None:
+                    # List-valued filters are serialized element-wise; None elements
+                    # become empty strings rather than being dropped.
+                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
+                if associated_boundary_ids is not None:
+                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
+                if operation_boundary_ids is not None:
+                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
+                if min_operation_start_date_time is not None:
+                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
+                if max_operation_start_date_time is not None:
+                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
+                if min_operation_end_date_time is not None:
+                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
+                if max_operation_end_date_time is not None:
+                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
+                if min_operation_modified_date_time is not None:
+                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
+                if max_operation_modified_date_time is not None:
+                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
+                if min_area is not None:
+                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
+                if max_area is not None:
+                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Server-side bounds are enforced client-side by the serializer.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page; a falsy next_link signals the final page.
+            deserialized = self._deserialize('ApplicationDataListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Send the request for one page and raise on non-200 responses.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # failsafe_deserialize never raises on a malformed error body.
+                # NOTE: map_error raises the mapped typed exception itself, so the
+                # deserialized model is only attached for unmapped status codes.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/application-data'} # type: ignore
+
+    def get(
+        self,
+        farmer_id, # type: str
+        application_data_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.ApplicationData"
+        """Get a specified application data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param application_data_id: ID of the application data resource.
+        :type application_data_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ApplicationData, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.ApplicationData
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationData"]
+        # Typed exceptions for well-known statuses; extendable via 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'applicationDataId': self._serialize.url("application_data_id", application_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # map_error raises first for mapped statuses; otherwise raise a generic
+            # HttpResponseError carrying the (failsafe-)deserialized error body.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('ApplicationData', pipeline_response)
+
+        if cls:
+            # Caller-supplied hook receives (response, model, response_headers).
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/application-data/{applicationDataId}'} # type: ignore
+
+    def create_or_update(
+        self,
+        farmer_id, # type: str
+        application_data_id, # type: str
+        application_data=None, # type: Optional["_models.ApplicationData"]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.ApplicationData"
+        """Creates or updates an application data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param application_data_id: ID of the application data resource.
+        :type application_data_id: str
+        :param application_data: Application data resource payload to create or update.
+        :type application_data: ~azure.farmbeats.models.ApplicationData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ApplicationData, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.ApplicationData
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationData"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # JSON merge-patch semantics: omitted fields are left unchanged, explicit
+        # nulls clear them. Overridable via the 'content_type' kwarg.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'applicationDataId': self._serialize.url("application_data_id", application_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {} # type: Dict[str, Any]
+        if application_data is not None:
+            body_content = self._serialize.body(application_data, 'ApplicationData')
+        else:
+            # Payload is optional; an empty-bodied PATCH is still sent.
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # 200 = updated, 201 = created; both carry the same resource shape.
+        if response.status_code == 200:
+            deserialized = self._deserialize('ApplicationData', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('ApplicationData', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/application-data/{applicationDataId}'} # type: ignore
+
+    def delete(
+        self,
+        farmer_id, # type: str
+        application_data_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes a specified application data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param application_data_id: ID of the application data.
+        :type application_data_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'applicationDataId': self._serialize.url("application_data_id", application_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 204 No Content indicates a successful delete.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            # No body to deserialize; hook receives None as the model.
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/application-data/{applicationDataId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_attachments_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_attachments_operations.py
new file mode 100644
index 000000000000..87698882b768
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_attachments_operations.py
@@ -0,0 +1,473 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, IO, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class AttachmentsOperations(object):
+ """AttachmentsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client used to build and send HTTP requests.
+        self._client = client
+        # msrest Serializer/Deserializer pair for model <-> wire translation.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id, # type: str
+        resource_ids=None, # type: Optional[List[str]]
+        resource_types=None, # type: Optional[List[str]]
+        ids=None, # type: Optional[List[str]]
+        names=None, # type: Optional[List[str]]
+        property_filters=None, # type: Optional[List[str]]
+        statuses=None, # type: Optional[List[str]]
+        min_created_date_time=None, # type: Optional[datetime.datetime]
+        max_created_date_time=None, # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_page_size=50, # type: Optional[int]
+        skip_token=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.AttachmentListResponse"]
+        """Returns a paginated list of attachment resources under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param resource_ids: Resource Ids of the resource.
+        :type resource_ids: list[str]
+        :param resource_types: Resource Types of the resource.
+        :type resource_types: list[str]
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either AttachmentListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.AttachmentListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # 'cls' is the azure-core hook applied to each page's element list.
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.AttachmentListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page: build URL + query string from the method arguments.
+            # Subsequent pages: next_link carries the query string, but the
+            # farmerId path parameter is still re-formatted into the URL.
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {} # type: Dict[str, Any]
+                if resource_ids is not None:
+                    # List filters are serialized per element; None becomes ''.
+                    query_parameters['resourceIds'] = [self._serialize.query("resource_ids", q, 'str') if q is not None else '' for q in resource_ids]
+                if resource_types is not None:
+                    query_parameters['resourceTypes'] = [self._serialize.query("resource_types", q, 'str') if q is not None else '' for q in resource_types]
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page; a falsy next_link signals the final page.
+            deserialized = self._deserialize('AttachmentListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Send the request for one page and raise on non-200 responses.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE: map_error raises the mapped typed exception itself, so the
+                # deserialized model is only attached for unmapped status codes.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged lazily drives get_next/extract_data as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/attachments'} # type: ignore
+
+    def get(
+        self,
+        farmer_id, # type: str
+        attachment_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.Attachment"
+        """Gets a specified attachment resource under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param attachment_id: Id of the attachment.
+        :type attachment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Attachment, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Attachment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Attachment"]
+        # Typed exceptions for well-known statuses; extendable via 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # Mapped statuses raise typed exceptions; everything else raises a
+            # generic HttpResponseError carrying the deserialized error body.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('Attachment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}'} # type: ignore
+
+    def create_or_update(
+        self,
+        farmer_id, # type: str
+        attachment_id, # type: str
+        file=None, # type: Optional[IO]
+        farmer_id1=None, # type: Optional[str]
+        resource_id=None, # type: Optional[str]
+        resource_type=None, # type: Optional[str]
+        original_file_name=None, # type: Optional[str]
+        id=None, # type: Optional[str]
+        status=None, # type: Optional[str]
+        created_date_time=None, # type: Optional[str]
+        modified_date_time=None, # type: Optional[str]
+        name=None, # type: Optional[str]
+        description=None, # type: Optional[str]
+        e_tag=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.Attachment"
+        """Creates or updates an attachment resource under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer resource.
+        :type farmer_id: str
+        :param attachment_id: Id of the attachment resource.
+        :type attachment_id: str
+        :param file: File to be uploaded.
+        :type file: IO
+        :param farmer_id1: Farmer id for this attachment.
+        :type farmer_id1: str
+        :param resource_id: Associated Resource id for this attachment.
+        :type resource_id: str
+        :param resource_type: Associated Resource type for this attachment
+         i.e. Farmer, Farm, Field, SeasonalField, Boundary, FarmOperationApplicationData, HarvestData,
+         TillageData, PlantingData.
+        :type resource_type: str
+        :param original_file_name: Original File Name for this attachment.
+        :type original_file_name: str
+        :param id: Unique id.
+        :type id: str
+        :param status: Status of the resource.
+        :type status: str
+        :param created_date_time: Date when resource was created.
+        :type created_date_time: str
+        :param modified_date_time: Date when resource was last modified.
+        :type modified_date_time: str
+        :param name: Name to identify resource.
+        :type name: str
+        :param description: Textual description of resource.
+        :type description: str
+        :param e_tag: The ETag value to implement optimistic concurrency.
+        :type e_tag: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Attachment, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Attachment
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # NOTE: 'farmer_id1' is the generator's disambiguation of the form field
+        # 'FarmerId' colliding with the 'farmer_id' path parameter, and 'id'
+        # (shadowing the builtin) mirrors the wire field name — both are part of
+        # the generated public interface and must not be renamed.
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Attachment"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # Attachment payloads (file + metadata) are sent as multipart form data.
+        content_type = kwargs.pop("content_type", "multipart/form-data")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Construct form data
+        _form_content = {
+            'file': file,
+            'FarmerId': farmer_id1,
+            'ResourceId': resource_id,
+            'ResourceType': resource_type,
+            'OriginalFileName': original_file_name,
+            'Id': id,
+            'Status': status,
+            'CreatedDateTime': created_date_time,
+            'ModifiedDateTime': modified_date_time,
+            'Name': name,
+            'Description': description,
+            'ETag': e_tag,
+        }
+        request = self._client.patch(url, query_parameters, header_parameters, form_content=_form_content)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # 200 = updated, 201 = created; both carry the same resource shape.
+        if response.status_code == 200:
+            deserialized = self._deserialize('Attachment', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Attachment', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}'} # type: ignore
+
+    def delete(
+        self,
+        farmer_id, # type: str
+        attachment_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes a specified attachment resource under a particular farmer.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param attachment_id: Id of the attachment.
+        :type attachment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        # Default mapping of well-known HTTP status codes to azure-core exception
+        # types; callers may extend or override it via the 'error_map' keyword.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: fill the {farmerId}/{attachmentId} placeholders of the
+        # operation's URL template with the serialized path arguments.
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # The service signals success with 204 (No Content). Anything else is
+        # first mapped to a specific exception type when possible, otherwise
+        # raised as HttpResponseError carrying the deserialized ErrorResponse.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}'}  # type: ignore
+
+    def download(
+        self,
+        farmer_id, # type: str
+        attachment_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> IO
+        """Downloads and returns attachment as response for the given input filePath.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param attachment_id: Id of attachment to be downloaded.
+        :type attachment_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: IO, or the result of cls(response)
+        :rtype: IO
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[IO]
+        # Default status-code -> exception mapping, extendable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # The attachment payload comes back as an octet-stream; JSON is accepted
+        # as well so error bodies can be deserialized.
+        accept = "application/octet-stream, application/json"
+
+        # Construct URL from the operation's URL template and path arguments.
+        url = self.download.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'attachmentId': self._serialize.url("attachment_id", attachment_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        # stream=True keeps the body unread so it can be streamed to the caller
+        # instead of being buffered in memory.
+        pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # Hand back the raw download stream; the caller iterates/reads the bytes.
+        deserialized = response.stream_download(self._client._pipeline)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    download.metadata = {'url': '/farmers/{farmerId}/attachments/{attachmentId}/file'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_boundaries_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_boundaries_operations.py
new file mode 100644
index 000000000000..2c3276a90c51
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_boundaries_operations.py
@@ -0,0 +1,943 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class BoundariesOperations(object):
+ """BoundariesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id, # type: str
+        is_primary=None, # type: Optional[bool]
+        parent_type=None, # type: Optional[str]
+        parent_ids=None, # type: Optional[List[str]]
+        min_acreage=None, # type: Optional[float]
+        max_acreage=None, # type: Optional[float]
+        ids=None, # type: Optional[List[str]]
+        names=None, # type: Optional[List[str]]
+        property_filters=None, # type: Optional[List[str]]
+        statuses=None, # type: Optional[List[str]]
+        min_created_date_time=None, # type: Optional[datetime.datetime]
+        max_created_date_time=None, # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_page_size=50, # type: Optional[int]
+        skip_token=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.BoundaryListResponse"]
+        """Returns a paginated list of boundary resources under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param is_primary: Is the boundary primary.
+        :type is_primary: bool
+        :param parent_type: Type of the parent it belongs to.
+        :type parent_type: str
+        :param parent_ids: Parent Ids of the resource.
+        :type parent_ids: list[str]
+        :param min_acreage: Minimum acreage of the boundary (inclusive).
+        :type min_acreage: float
+        :param max_acreage: Maximum acreage of the boundary (inclusive).
+        :type max_acreage: float
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.BoundaryListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryListResponse"]
+        # Default status-code -> exception mapping, extendable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build one page request. The first page carries the full filter set
+            # as query parameters; subsequent pages follow the service-provided
+            # next_link, re-applying only the path arguments.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters. List-valued filters are serialized
+                # element-wise (one repeated query value per element); None
+                # elements become empty strings.
+                query_parameters = {}  # type: Dict[str, Any]
+                if is_primary is not None:
+                    query_parameters['isPrimary'] = self._serialize.query("is_primary", is_primary, 'bool')
+                if parent_type is not None:
+                    query_parameters['parentType'] = self._serialize.query("parent_type", parent_type, 'str')
+                if parent_ids is not None:
+                    query_parameters['parentIds'] = [self._serialize.query("parent_ids", q, 'str') if q is not None else '' for q in parent_ids]
+                if min_acreage is not None:
+                    query_parameters['minAcreage'] = self._serialize.query("min_acreage", min_acreage, 'float')
+                if max_acreage is not None:
+                    query_parameters['maxAcreage'] = self._serialize.query("max_acreage", max_acreage, 'float')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Server-enforced page-size bounds are validated client-side too.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link already contains the query string from the service.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, item iterator).
+            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Execute one page request, converting error payloads into exceptions.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/boundaries'}  # type: ignore
+
+    def search_by_farmer_id(
+        self,
+        farmer_id, # type: str
+        query=None, # type: Optional["_models.SearchBoundaryQuery"]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.BoundaryListResponse"]
+        """Search for boundaries by fields and intersecting geometry.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param query: Query filters.
+        :type query: ~azure.farmbeats.models.SearchBoundaryQuery
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.BoundaryListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryListResponse"]
+        # Default status-code -> exception mapping, extendable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        content_type = "application/json"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build one page request; the search filters travel in the JSON body.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: POST the serialized SearchBoundaryQuery.
+                # Construct URL
+                url = self.search_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                body_content_kwargs = {}  # type: Dict[str, Any]
+                if query is not None:
+                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
+                else:
+                    body_content = None
+                body_content_kwargs['content'] = body_content
+                request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+            else:
+                # NOTE(review): follow-up pages are issued as GET while re-sending
+                # the search body (generated paging behavior for POST operations) —
+                # confirm the service accepts a body on GET before changing this.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                body_content_kwargs = {}  # type: Dict[str, Any]
+                if query is not None:
+                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
+                else:
+                    body_content = None
+                body_content_kwargs['content'] = body_content
+                request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, item iterator).
+            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Execute one page request, converting error payloads into exceptions.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    search_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/boundaries'}  # type: ignore
+
+    def list(
+        self,
+        is_primary=None, # type: Optional[bool]
+        parent_type=None, # type: Optional[str]
+        parent_ids=None, # type: Optional[List[str]]
+        min_acreage=None, # type: Optional[float]
+        max_acreage=None, # type: Optional[float]
+        ids=None, # type: Optional[List[str]]
+        names=None, # type: Optional[List[str]]
+        property_filters=None, # type: Optional[List[str]]
+        statuses=None, # type: Optional[List[str]]
+        min_created_date_time=None, # type: Optional[datetime.datetime]
+        max_created_date_time=None, # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_page_size=50, # type: Optional[int]
+        skip_token=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.BoundaryListResponse"]
+        """Returns a paginated list of boundary resources across all farmers.
+
+        :param is_primary: Is the boundary primary.
+        :type is_primary: bool
+        :param parent_type: Type of the parent it belongs to.
+        :type parent_type: str
+        :param parent_ids: Parent Ids of the resource.
+        :type parent_ids: list[str]
+        :param min_acreage: Minimum acreage of the boundary (inclusive).
+        :type min_acreage: float
+        :param max_acreage: Maximum acreage of the boundary (inclusive).
+        :type max_acreage: float
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.BoundaryListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryListResponse"]
+        # Default status-code -> exception mapping, extendable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build one page request. The first page carries the full filter set;
+            # subsequent pages follow next_link verbatim (no path arguments here,
+            # since /boundaries is a top-level route).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters. List-valued filters are serialized
+                # element-wise; None elements become empty strings.
+                query_parameters = {}  # type: Dict[str, Any]
+                if is_primary is not None:
+                    query_parameters['isPrimary'] = self._serialize.query("is_primary", is_primary, 'bool')
+                if parent_type is not None:
+                    query_parameters['parentType'] = self._serialize.query("parent_type", parent_type, 'str')
+                if parent_ids is not None:
+                    query_parameters['parentIds'] = [self._serialize.query("parent_ids", q, 'str') if q is not None else '' for q in parent_ids]
+                if min_acreage is not None:
+                    query_parameters['minAcreage'] = self._serialize.query("min_acreage", min_acreage, 'float')
+                if max_acreage is not None:
+                    query_parameters['maxAcreage'] = self._serialize.query("max_acreage", max_acreage, 'float')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Server-enforced page-size bounds are validated client-side too.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link already contains the query string from the service.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, item iterator).
+            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Execute one page request, converting error payloads into exceptions.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/boundaries'}  # type: ignore
+
+    def search(
+        self,
+        query=None, # type: Optional["_models.SearchBoundaryQuery"]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.BoundaryListResponse"]
+        """Search for boundaries across all farmers by fields and intersecting geometry.
+
+        :param query: Query filters.
+        :type query: ~azure.farmbeats.models.SearchBoundaryQuery
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either BoundaryListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.BoundaryListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryListResponse"]
+        # Default status-code -> exception mapping, extendable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        content_type = "application/json"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build one page request; the search filters travel in the JSON body.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: POST the serialized SearchBoundaryQuery.
+                # Construct URL
+                url = self.search.metadata['url']  # type: ignore
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                body_content_kwargs = {}  # type: Dict[str, Any]
+                if query is not None:
+                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
+                else:
+                    body_content = None
+                body_content_kwargs['content'] = body_content
+                request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+            else:
+                # NOTE(review): follow-up pages are issued as GET while re-sending
+                # the search body (generated paging behavior for POST operations) —
+                # confirm the service accepts a body on GET before changing this.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                body_content_kwargs = {}  # type: Dict[str, Any]
+                if query is not None:
+                    body_content = self._serialize.body(query, 'SearchBoundaryQuery')
+                else:
+                    body_content = None
+                body_content_kwargs['content'] = body_content
+                request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, item iterator).
+            deserialized = self._deserialize('BoundaryListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Execute one page request, converting error payloads into exceptions.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    search.metadata = {'url': '/boundaries'}  # type: ignore
+
+ def get_cascade_delete_job_details(
+ self,
+ job_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.CascadeDeleteJob"
+ """Get cascade delete job for specified boundary.
+
+ :param job_id: Id of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CascadeDeleteJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_cascade_delete_job_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_cascade_delete_job_details.metadata = {'url': '/boundaries/cascade-delete/{jobId}'} # type: ignore
+
+    def _create_cascade_delete_job_initial(
+        self,
+        job_id, # type: str
+        farmer_id, # type: str
+        boundary_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.CascadeDeleteJob"
+        """Initial request of the cascade-delete long-running operation.
+
+        Issues the PUT that creates the job; the polling wrapper
+        (begin_create_cascade_delete_job) tracks it to completion.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param boundary_id: ID of the boundary to be deleted.
+        :type boundary_id: str
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+        # Default status-code -> exception mapping, extendable via 'error_map'.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: only jobId is a path argument; farmer and boundary ids
+        # are passed as query parameters below.
+        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+        query_parameters['boundaryId'] = self._serialize.query("boundary_id", boundary_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # The service accepts the job asynchronously: 202 is the only success code.
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_cascade_delete_job_initial.metadata = {'url': '/boundaries/cascade-delete/{jobId}'}  # type: ignore
+
+    def begin_create_cascade_delete_job(
+        self,
+        job_id, # type: str
+        farmer_id, # type: str
+        boundary_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> LROPoller["_models.CascadeDeleteJob"]
+        """Create a cascade delete job for specified boundary.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param boundary_id: ID of the boundary to be deleted.
+        :type boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be LROBasePolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either CascadeDeleteJob or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+        if cont_token is None:
+            # Fresh start: issue the initial PUT. cls=lambda returns the raw
+            # PipelineResponse so the poller can inspect status/headers itself.
+            raw_result = self._create_cascade_delete_job_initial(
+                job_id=job_id,
+                farmer_id=farmer_id,
+                boundary_id=boundary_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs apply only to the initial request, not to polling calls.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal polling response into the job model
+            # (or hand it to the caller-supplied cls callback).
+            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+
+        # The service reports the final state via the Location header
+        # ('final-state-via': 'location').
+        if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            # Resume a previously saved poller; no initial request is made.
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_cascade_delete_job.metadata = {'url': '/boundaries/cascade-delete/{jobId}'} # type: ignore
+
+    def get(
+        self,
+        farmer_id, # type: str
+        boundary_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.Boundary"
+        """Gets a specified boundary resource under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary.
+        :type boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Boundary, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Boundary
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Boundary"]
+        # Map well-known HTTP failures to typed azure-core exceptions; callers may
+        # extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the service error body; None on parse failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('Boundary', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}'} # type: ignore
+
+    def create_or_update(
+        self,
+        farmer_id, # type: str
+        boundary_id, # type: str
+        boundary=None, # type: Optional["_models.Boundary"]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.Boundary"
+        """Creates or updates a boundary resource.
+
+        Sent as an HTTP PATCH with ``application/merge-patch+json``: omitted
+        fields in the payload are left unchanged on the server.
+
+        :param farmer_id: Id of the farmer resource.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary resource.
+        :type boundary_id: str
+        :param boundary: Boundary resource payload to create or update.
+        :type boundary: ~azure.farmbeats.models.Boundary
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Boundary, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Boundary
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.Boundary"]
+        # Map well-known HTTP failures to typed azure-core exceptions; callers may
+        # extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # The body is optional: None sends an empty merge-patch.
+        body_content_kwargs = {} # type: Dict[str, Any]
+        if boundary is not None:
+            body_content = self._serialize.body(boundary, 'Boundary')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the service error body; None on parse failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # 200 = existing resource updated, 201 = new resource created;
+        # both carry the same Boundary payload.
+        if response.status_code == 200:
+            deserialized = self._deserialize('Boundary', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Boundary', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}'} # type: ignore
+
+    def delete(
+        self,
+        farmer_id, # type: str
+        boundary_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes a specified boundary resource under a particular farmer.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary.
+        :type boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType[None]
+        # Map well-known HTTP failures to typed azure-core exceptions; callers may
+        # extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 No Content is the only success status; nothing to deserialize.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the service error body; None on parse failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}'} # type: ignore
+
+    def get_overlap(
+        self,
+        farmer_id, # type: str
+        boundary_id, # type: str
+        other_farmer_id, # type: str
+        other_boundary_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.BoundaryOverlapResponse"
+        """Returns overlapping acreage between two boundary Ids.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param boundary_id: Id of the boundary.
+        :type boundary_id: str
+        :param other_farmer_id: FarmerId of the other field.
+        :type other_farmer_id: str
+        :param other_boundary_id: Id of the other boundary.
+        :type other_boundary_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: BoundaryOverlapResponse, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.BoundaryOverlapResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.BoundaryOverlapResponse"]
+        # Map well-known HTTP failures to typed azure-core exceptions; callers may
+        # extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL. The first boundary is addressed in the path; the
+        # comparison boundary is passed via query parameters below.
+        url = self.get_overlap.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'boundaryId': self._serialize.url("boundary_id", boundary_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['otherFarmerId'] = self._serialize.query("other_farmer_id", other_farmer_id, 'str')
+        query_parameters['otherBoundaryId'] = self._serialize.query("other_boundary_id", other_boundary_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the service error body; None on parse failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('BoundaryOverlapResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_overlap.metadata = {'url': '/farmers/{farmerId}/boundaries/{boundaryId}/overlap'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_crop_varieties_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_crop_varieties_operations.py
new file mode 100644
index 000000000000..2c90a9e39e4f
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_crop_varieties_operations.py
@@ -0,0 +1,506 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3)
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class CropVarietiesOperations(object):
+ """CropVarietiesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+    def list_by_crop_id(
+        self,
+        crop_id, # type: str
+        crop_ids=None, # type: Optional[List[str]]
+        brands=None, # type: Optional[List[str]]
+        products=None, # type: Optional[List[str]]
+        ids=None, # type: Optional[List[str]]
+        names=None, # type: Optional[List[str]]
+        property_filters=None, # type: Optional[List[str]]
+        statuses=None, # type: Optional[List[str]]
+        min_created_date_time=None, # type: Optional[datetime.datetime]
+        max_created_date_time=None, # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_page_size=50, # type: Optional[int]
+        skip_token=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.CropVarietyListResponse"]
+        """Returns a paginated list of crop variety resources under a particular crop.
+
+        :param crop_id: Id of the associated crop.
+        :type crop_id: str
+        :param crop_ids: CropIds of the resource.
+        :type crop_ids: list[str]
+        :param brands: Brands of the resource.
+        :type brands: list[str]
+        :param products: Products of the resource.
+        :type products: list[str]
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either CropVarietyListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.CropVarietyListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVarietyListResponse"]
+        # Map well-known HTTP failures to typed azure-core exceptions; callers may
+        # extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the request for either the first page (no next_link) or a
+            # continuation page (next_link already carries the query string).
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_crop_id.metadata['url'] # type: ignore
+                path_format_arguments = {
+                    'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters. Optional filters are only added when
+                # supplied; list-valued filters become repeated query entries.
+                query_parameters = {} # type: Dict[str, Any]
+                if crop_ids is not None:
+                    query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
+                if brands is not None:
+                    query_parameters['brands'] = [self._serialize.query("brands", q, 'str') if q is not None else '' for q in brands]
+                if products is not None:
+                    query_parameters['products'] = [self._serialize.query("products", q, 'str') if q is not None else '' for q in products]
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Service-enforced page-size bounds (10..1000) are validated client-side.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Continuation: use next_link as-is (empty query_parameters);
+                # path arguments are still expanded in case the link is templated.
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                path_format_arguments = {
+                    'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Yield (continuation token, iterator of page items) for ItemPaged.
+            deserialized = self._deserialize('CropVarietyListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Fetch one page and validate the HTTP status before paging on.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Best-effort parse of the service error body; None on parse failure.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_crop_id.metadata = {'url': '/crops/{cropId}/crop-varieties'} # type: ignore
+
+    def list(
+        self,
+        crop_ids=None, # type: Optional[List[str]]
+        brands=None, # type: Optional[List[str]]
+        products=None, # type: Optional[List[str]]
+        ids=None, # type: Optional[List[str]]
+        names=None, # type: Optional[List[str]]
+        property_filters=None, # type: Optional[List[str]]
+        statuses=None, # type: Optional[List[str]]
+        min_created_date_time=None, # type: Optional[datetime.datetime]
+        max_created_date_time=None, # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+        max_page_size=50, # type: Optional[int]
+        skip_token=None, # type: Optional[str]
+        **kwargs # type: Any
+    ):
+        # type: (...) -> Iterable["_models.CropVarietyListResponse"]
+        """Returns a paginated list of crop variety resources across all crops.
+
+        :param crop_ids: CropIds of the resource.
+        :type crop_ids: list[str]
+        :param brands: Brands of the resource.
+        :type brands: list[str]
+        :param products: Products of the resource.
+        :type products: list[str]
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either CropVarietyListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.CropVarietyListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVarietyListResponse"]
+        # Map well-known HTTP failures to typed azure-core exceptions; callers may
+        # extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the request for either the first page (no next_link) or a
+            # continuation page (next_link already carries the query string).
+            # Construct headers
+            header_parameters = {} # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL (no path parameters for this top-level listing).
+                url = self.list.metadata['url'] # type: ignore
+                # Construct parameters. Optional filters are only added when
+                # supplied; list-valued filters become repeated query entries.
+                query_parameters = {} # type: Dict[str, Any]
+                if crop_ids is not None:
+                    query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
+                if brands is not None:
+                    query_parameters['brands'] = [self._serialize.query("brands", q, 'str') if q is not None else '' for q in brands]
+                if products is not None:
+                    query_parameters['products'] = [self._serialize.query("products", q, 'str') if q is not None else '' for q in products]
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Service-enforced page-size bounds (10..1000) are validated client-side.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Continuation: use next_link as-is (empty query_parameters).
+                url = next_link
+                query_parameters = {} # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Yield (continuation token, iterator of page items) for ItemPaged.
+            deserialized = self._deserialize('CropVarietyListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Fetch one page and validate the HTTP status before paging on.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # Best-effort parse of the service error body; None on parse failure.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/crop-varieties'} # type: ignore
+
+    def get(
+        self,
+        crop_id, # type: str
+        crop_variety_id, # type: str
+        **kwargs # type: Any
+    ):
+        # type: (...) -> "_models.CropVariety"
+        """Gets a specified crop variety resource under a particular crop.
+
+        :param crop_id: Id of the associated crop.
+        :type crop_id: str
+        :param crop_variety_id: Id of the crop variety.
+        :type crop_variety_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CropVariety, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CropVariety
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVariety"]
+        # Map well-known HTTP failures to typed azure-core exceptions; callers may
+        # extend the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url'] # type: ignore
+        path_format_arguments = {
+            'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+            'cropVarietyId': self._serialize.url("crop_variety_id", crop_variety_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {} # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {} # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            # Best-effort parse of the service error body; None on parse failure.
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CropVariety', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/crops/{cropId}/crop-varieties/{cropVarietyId}'} # type: ignore
+
+ def create_or_update(
+ self,
+ crop_id, # type: str
+ crop_variety_id, # type: str
+ crop_variety=None, # type: Optional["_models.CropVariety"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.CropVariety"
+ """Creates or updates a crop variety resource.
+
+ :param crop_id: Id of the crop resource.
+ :type crop_id: str
+ :param crop_variety_id: Id of the crop variety resource.
+ :type crop_variety_id: str
+ :param crop_variety: Crop variety resource payload to create or update.
+ :type crop_variety: ~azure.farmbeats.models.CropVariety
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CropVariety, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.CropVariety
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CropVariety"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json") # sent as PATCH with JSON merge-patch semantics
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ 'cropVarietyId': self._serialize.url("crop_variety_id", crop_variety_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if crop_variety is not None: # payload is optional; an omitted payload is sent as an empty body
+ body_content = self._serialize.body(crop_variety, 'CropVariety')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]: # 200/201 are the only expected success codes
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200: # both success codes carry a CropVariety body
+ deserialized = self._deserialize('CropVariety', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('CropVariety', pipeline_response)
+
+ if cls: # give a caller-supplied callback the raw response as well
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/crops/{cropId}/crop-varieties/{cropVarietyId}'} # type: ignore
+
+ def delete(
+ self,
+ crop_id, # type: str
+ crop_variety_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes a specified crop variety resource under a particular crop.
+
+ :param crop_id: Id of the crop.
+ :type crop_id: str
+ :param crop_variety_id: Id of the crop variety.
+ :type crop_variety_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ 'cropVarietyId': self._serialize.url("crop_variety_id", crop_variety_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]: # 204 No Content is the only expected success status
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls: # successful delete has no body; pass None to the callback
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/crops/{cropId}/crop-varieties/{cropVarietyId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_crops_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_crops_operations.py
new file mode 100644
index 000000000000..8745997f4a36
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_crops_operations.py
@@ -0,0 +1,342 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class CropsOperations(object):
+ """CropsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client # pipeline client that builds and sends the HTTP requests
+ self._serialize = serializer # object model serializer (URL/query/header/body encoding)
+ self._deserialize = deserializer # object model deserializer for response payloads
+ self._config = config # service client configuration
+
+ def list(
+ self,
+ phenotypes=None, # type: Optional[List[str]]
+ ids=None, # type: Optional[List[str]]
+ names=None, # type: Optional[List[str]]
+ property_filters=None, # type: Optional[List[str]]
+ statuses=None, # type: Optional[List[str]]
+ min_created_date_time=None, # type: Optional[datetime.datetime]
+ max_created_date_time=None, # type: Optional[datetime.datetime]
+ min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_page_size=50, # type: Optional[int]
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["_models.CropListResponse"]
+ """Returns a paginated list of crop resources.
+
+ :param phenotypes: Crop phenotypes of the resource.
+ :type phenotypes: list[str]
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either CropListResponse or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.CropListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CropListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None): # builds either the first-page or a continuation request
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link: # first page: construct the URL plus all filter query parameters
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if phenotypes is not None:
+ query_parameters['phenotypes'] = [self._serialize.query("phenotypes", q, 'str') if q is not None else '' for q in phenotypes]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10) # client-side validation of service limits
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else: # continuation: next_link is already a fully-formed URL
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response): # -> (continuation token or None, iterator of page items)
+ deserialized = self._deserialize('CropListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls: # cls transforms the element list of each page
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None): # fetches one page and validates the response status
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]: # 200 is the only expected success status for a page
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/crops'} # type: ignore
+
+ def get(
+ self,
+ crop_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.Crop"
+ """Gets a specified crop resource.
+
+ :param crop_id: Id of the crop.
+ :type crop_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Crop, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.Crop
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.Crop"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]: # 200 is the only expected success status
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('Crop', pipeline_response)
+
+ if cls: # give a caller-supplied callback the raw response as well
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/crops/{cropId}'} # type: ignore
+
+ def create_or_update(
+ self,
+ crop_id, # type: str
+ crop=None, # type: Optional["_models.Crop"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.Crop"
+ """Creates or updates a crop resource.
+
+ :param crop_id: Id of the crop resource.
+ :type crop_id: str
+ :param crop: Crop resource payload to create or update.
+ :type crop: ~azure.farmbeats.models.Crop
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Crop, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.Crop
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.Crop"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json") # sent as PATCH with JSON merge-patch semantics
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if crop is not None: # payload is optional; an omitted payload is sent as an empty body
+ body_content = self._serialize.body(crop, 'Crop')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]: # 200/201 are the only expected success codes
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200: # both success codes carry a Crop body
+ deserialized = self._deserialize('Crop', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Crop', pipeline_response)
+
+ if cls: # give a caller-supplied callback the raw response as well
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/crops/{cropId}'} # type: ignore
+
+ def delete(
+ self,
+ crop_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes Crop for given crop id.
+
+ :param crop_id: Id of crop to be deleted.
+ :type crop_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'cropId': self._serialize.url("crop_id", crop_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]: # 204 No Content is the only expected success status
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls: # successful delete has no body; pass None to the callback
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/crops/{cropId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farm_operations_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farm_operations_operations.py
new file mode 100644
index 000000000000..3ba1af5511ef
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farm_operations_operations.py
@@ -0,0 +1,220 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class FarmOperationsOperations(object):
+ """FarmOperationsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client # pipeline client that builds and sends the HTTP requests
+ self._serialize = serializer # object model serializer (URL/query/header/body encoding)
+ self._deserialize = deserializer # object model deserializer for response payloads
+ self._config = config # service client configuration (supplies polling_interval)
+
+ def _create_data_ingestion_job_initial( # internal: submits the initial PUT of the LRO; use begin_create_data_ingestion_job
+ self,
+ job_id, # type: str
+ job=None, # type: Optional["_models.FarmOperationDataIngestionJob"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.FarmOperationDataIngestionJob"
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.FarmOperationDataIngestionJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/json") # full JSON body (not merge-patch) for job creation
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_data_ingestion_job_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if job is not None: # payload is optional; an omitted payload is sent as an empty body
+ body_content = self._serialize.body(job, 'FarmOperationDataIngestionJob')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]: # job is accepted asynchronously; 202 still carries a job body
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('FarmOperationDataIngestionJob', pipeline_response)
+
+ if cls: # give a caller-supplied callback the raw response as well
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_data_ingestion_job_initial.metadata = {'url': '/farm-operations/ingest-data/{jobId}'} # type: ignore
+
+ def begin_create_data_ingestion_job(
+ self,
+ job_id, # type: str
+ job=None, # type: Optional["_models.FarmOperationDataIngestionJob"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["_models.FarmOperationDataIngestionJob"]
+ """Create a farm operation data ingestion job.
+
+ :param job_id: Job Id supplied by user.
+ :type job_id: str
+ :param job: Job parameters supplied by user.
+ :type job: ~azure.farmbeats.models.FarmOperationDataIngestionJob
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be LROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either FarmOperationDataIngestionJob or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.FarmOperationDataIngestionJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.FarmOperationDataIngestionJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None: # fresh operation: submit the initial request now
+ raw_result = self._create_data_ingestion_job_initial(
+ job_id=job_id,
+ job=job,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response): # deserializes the terminal polling response
+ deserialized = self._deserialize('FarmOperationDataIngestionJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) # final resource fetched via the Location header
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token: # rehydrate an existing poller from its saved token
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_data_ingestion_job.metadata = {'url': '/farm-operations/ingest-data/{jobId}'} # type: ignore
+
+ def get_data_ingestion_job_details(
+ self,
+ job_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.FarmOperationDataIngestionJob"
+ """Get a farm operation data ingestion job.
+
+ :param job_id: Id of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: FarmOperationDataIngestionJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.FarmOperationDataIngestionJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.FarmOperationDataIngestionJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {})) # caller may extend/override the status-to-exception mapping
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_data_ingestion_job_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]: # 200 is the only expected success status
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('FarmOperationDataIngestionJob', pipeline_response)
+
+ if cls: # give a caller-supplied callback the raw response as well
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_data_ingestion_job_details.metadata = {'url': '/farm-operations/ingest-data/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farmers_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farmers_operations.py
new file mode 100644
index 000000000000..4f3d3f57cd83
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farmers_operations.py
@@ -0,0 +1,508 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class FarmersOperations(object):
+    """FarmersOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.farmbeats.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client used to build, format, and send HTTP requests.
+        self._client = client
+        # Serializer/deserializer pair used for request bodies, query/header
+        # parameters, and response models.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.FarmerListResponse"]
+        """Returns a paginated list of farmer resources.
+
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either FarmerListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.FarmerListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FarmerListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code-to-exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Builds either the first-page request (full query string) or a
+            # follow-up request from the service-provided next_link.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                if ids is not None:
+                    # None entries in list filters are sent as empty strings rather than dropped.
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Client-side validation mirrors the documented server bounds (10..1000).
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link is a fully-formed URL from the service; no query
+                # parameters are re-applied.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Returns (continuation token, iterator of page elements) as ItemPaged expects.
+            deserialized = self._deserialize('FarmerListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): the error model is deserialized before map_error, so the
+                # mapped 401/404/409 exceptions are raised without the model attached;
+                # only the fallback HttpResponseError carries it. This differs from the
+                # non-paged operations in this file — confirm intended with the generator.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/farmers'}  # type: ignore
+
+    def get(
+        self,
+        farmer_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Farmer"
+        """Gets a specified farmer resource.
+
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Farmer, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Farmer
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Farmer"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code-to-exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only success status for this operation.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('Farmer', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        farmer_id,  # type: str
+        farmer=None,  # type: Optional["_models.Farmer"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Farmer"
+        """Creates or updates a farmer resource.
+
+        :param farmer_id: Id of the farmer resource.
+        :type farmer_id: str
+        :param farmer: Farmer resource payload to create or update.
+        :type farmer: ~azure.farmbeats.models.Farmer
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Farmer, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Farmer
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Farmer"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # Request body uses JSON merge-patch media type (RFC 7396); overridable per call.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        # The payload is optional; an absent farmer results in an empty request body.
+        if farmer is not None:
+            body_content = self._serialize.body(farmer, 'Farmer')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # Both branches deserialize the same Farmer shape; presumably 200 = updated
+        # existing resource, 201 = newly created — confirm against service docs.
+        if response.status_code == 200:
+            deserialized = self._deserialize('Farmer', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Farmer', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}'}  # type: ignore
+
+    def delete(
+        self,
+        farmer_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes a specified farmer resource.
+
+        :param farmer_id: Id of farmer to be deleted.
+        :type farmer_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 No Content is the only success status; there is no body to deserialize.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}'}  # type: ignore
+
+    def get_cascade_delete_job_details(
+        self,
+        job_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.CascadeDeleteJob"
+        """Get a cascade delete job for specified farmer.
+
+        :param job_id: Id of the job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_cascade_delete_job_details.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 200 is the only success status for this operation.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_cascade_delete_job_details.metadata = {'url': '/farmers/cascade-delete/{jobId}'}  # type: ignore
+
+    def _create_cascade_delete_job_initial(
+        self,
+        job_id,  # type: str
+        farmer_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.CascadeDeleteJob"
+        # Initial PUT for the long-running cascade-delete operation; called by
+        # begin_create_cascade_delete_job, which handles polling to completion.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        # Note: farmerId is sent as a query parameter, not as part of the path.
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 202 Accepted is the only success status; the body carries the initial job state.
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_cascade_delete_job_initial.metadata = {'url': '/farmers/cascade-delete/{jobId}'}  # type: ignore
+
+    def begin_create_cascade_delete_job(
+        self,
+        job_id,  # type: str
+        farmer_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["_models.CascadeDeleteJob"]
+        """Create a cascade delete job for specified farmer.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the farmer to be deleted.
+        :type farmer_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be LROBasePolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either CascadeDeleteJob or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        if cont_token is None:
+            # No saved state: issue the initial PUT. The cls lambda returns the raw
+            # PipelineResponse so the poller can drive polling from it.
+            raw_result = self._create_cascade_delete_job_initial(
+                job_id=job_id,
+                farmer_id=farmer_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # Drop per-request kwargs so they are not re-applied on polling requests.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserializes the terminal response into the job model (or cls result).
+            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+
+        # 'final-state-via: location' tells the poller to fetch the final resource
+        # from the Location header once the operation completes.
+        if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            # Rehydrate an existing poller from its saved continuation token.
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_cascade_delete_job.metadata = {'url': '/farmers/cascade-delete/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farms_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farms_operations.py
new file mode 100644
index 000000000000..419bf9c68d3a
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_farms_operations.py
@@ -0,0 +1,653 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class FarmsOperations(object):
+    """FarmsOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.farmbeats.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client used to build, format, and send HTTP requests.
+        self._client = client
+        # Serializer/deserializer pair used for request bodies, query/header
+        # parameters, and response models.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id,  # type: str
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.FarmListResponse"]
+        """Returns a paginated list of farm resources under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either FarmListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.FarmListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FarmListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code-to-exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Builds either the first-page request (full query string) or a
+            # follow-up request from the service-provided next_link.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                if ids is not None:
+                    # None entries in list filters are sent as empty strings rather than dropped.
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Client-side validation mirrors the documented server bounds (10..1000).
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                # Unlike the top-level list operations, next_link is run through
+                # format_url again in case it still contains the {farmerId} placeholder.
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Returns (continuation token, iterator of page elements) as ItemPaged expects.
+            deserialized = self._deserialize('FarmListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): the error model is deserialized before map_error, so the
+                # mapped 401/404/409 exceptions are raised without the model attached;
+                # only the fallback HttpResponseError carries it.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/farms'}  # type: ignore
+
+ def list(
+ self,
+ ids=None, # type: Optional[List[str]]
+ names=None, # type: Optional[List[str]]
+ property_filters=None, # type: Optional[List[str]]
+ statuses=None, # type: Optional[List[str]]
+ min_created_date_time=None, # type: Optional[datetime.datetime]
+ max_created_date_time=None, # type: Optional[datetime.datetime]
+ min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_page_size=50, # type: Optional[int]
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["_models.FarmListResponse"]
+ """Returns a paginated list of farm resources across all farmers.
+
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either FarmListResponse or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.FarmListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.FarmListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('FarmListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/farms'} # type: ignore
+
+    def get(
+        self,
+        farmer_id,  # type: str
+        farm_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Farm"
+        """Gets a specified farm resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param farm_id: ID of the farm resource.
+        :type farm_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Farm, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Farm
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional caller-supplied hook that transforms the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Farm"]
+        # Map well-known HTTP status codes to azure-core exception types;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL from the operation's metadata template, filling in
+        # the serialized path parameters.
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'farmId': self._serialize.url("farm_id", farm_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            # Raise the mapped exception for known status codes; otherwise
+            # raise a generic HttpResponseError carrying the deserialized
+            # service error body as its model.
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('Farm', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/farms/{farmId}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        farmer_id,  # type: str
+        farm_id,  # type: str
+        farm=None,  # type: Optional["_models.Farm"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Farm"
+        """Creates or updates a farm resource under a particular farmer.
+
+        Sent as an HTTP PATCH with ``application/merge-patch+json``, so a
+        partial ``farm`` payload updates only the supplied fields.
+
+        :param farmer_id: Id of the associated farmer resource.
+        :type farmer_id: str
+        :param farm_id: Id of the farm resource.
+        :type farm_id: str
+        :param farm: Farm resource payload to create or update.
+        :type farm: ~azure.farmbeats.models.Farm
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Farm, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Farm
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional caller-supplied hook that transforms the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Farm"]
+        # Map well-known HTTP status codes to azure-core exception types;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # Merge-patch semantics by default; callers may override via kwarg.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'farmId': self._serialize.url("farm_id", farm_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # Body is optional: an absent payload is sent as an empty body.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if farm is not None:
+            body_content = self._serialize.body(farm, 'Farm')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # 200 (updated) and 201 (created) both carry a Farm body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('Farm', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Farm', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/farms/{farmId}'}  # type: ignore
+
+    def delete(
+        self,
+        farmer_id,  # type: str
+        farm_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes a specified farm resource under a particular farmer.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param farm_id: Id of the farm.
+        :type farm_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional caller-supplied hook that transforms the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        # Map well-known HTTP status codes to azure-core exception types;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'farmId': self._serialize.url("farm_id", farm_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 No Content is the only success status; the response has no body.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/farms/{farmId}'}  # type: ignore
+
+    def get_cascade_delete_job_details(
+        self,
+        job_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.CascadeDeleteJob"
+        """Get a cascade delete job for specified farm.
+
+        :param job_id: Id of the job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional caller-supplied hook that transforms the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        # Map well-known HTTP status codes to azure-core exception types;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_cascade_delete_job_details.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_cascade_delete_job_details.metadata = {'url': '/farms/cascade-delete/{jobId}'}  # type: ignore
+
+    def _create_cascade_delete_job_initial(
+        self,
+        job_id,  # type: str
+        farmer_id,  # type: str
+        farm_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.CascadeDeleteJob"
+        """Issue the initial PUT of the cascade-delete long-running operation.
+
+        Called by :meth:`begin_create_cascade_delete_job`; expects HTTP 202
+        (Accepted) and returns the created job resource. The farmer and farm
+        to delete are passed as query parameters, not in the body.
+        """
+        # Optional caller-supplied hook that transforms the raw response
+        # (the LRO wrapper uses it to capture the raw pipeline response).
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+        query_parameters['farmId'] = self._serialize.query("farm_id", farm_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_cascade_delete_job_initial.metadata = {'url': '/farms/cascade-delete/{jobId}'}  # type: ignore
+
+    def begin_create_cascade_delete_job(
+        self,
+        job_id,  # type: str
+        farmer_id,  # type: str
+        farm_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["_models.CascadeDeleteJob"]
+        """Create a cascade delete job for specified farm.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param farm_id: ID of the farm to be deleted.
+        :type farm_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be LROBasePolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either CascadeDeleteJob or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only send the initial PUT when starting fresh; with a continuation
+        # token the poller resumes from saved state instead (see below).
+        if cont_token is None:
+            raw_result = self._create_cascade_delete_job_initial(
+                job_id=job_id,
+                farmer_id=farmer_id,
+                farm_id=farm_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call and must not leak
+        # into the polling requests.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal response of the LRO into the job model.
+            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+
+        # polling=True -> default strategy: poll the 'location' URL for the
+        # final state; polling=False -> single-shot; otherwise a caller-
+        # provided PollingMethod is used as-is.
+        if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_cascade_delete_job.metadata = {'url': '/farms/cascade-delete/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_fields_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_fields_operations.py
new file mode 100644
index 000000000000..cf0a66975d07
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_fields_operations.py
@@ -0,0 +1,663 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class FieldsOperations(object):
+ """FieldsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client used to build and send HTTP requests.
+        self._client = client
+        # msrest-style serializer/deserializer for request and response bodies.
+        self._serialize = serializer
+        self._deserialize = deserializer
+        # Service client configuration (endpoint, polling interval, etc.).
+        self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id,  # type: str
+        farm_ids=None,  # type: Optional[List[str]]
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.FieldListResponse"]
+        """Returns a paginated list of field resources under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param farm_ids: Farm Ids of the resource.
+        :type farm_ids: list[str]
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either FieldListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.FieldListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional caller-supplied hook applied to each page's item list.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FieldListResponse"]
+        # Map well-known HTTP status codes to azure-core exception types;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build either the first-page request (full query string) or a
+            # follow-up request to the service-provided next_link.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters: only filters the caller actually
+                # supplied are serialized; list filters become repeated
+                # query parameters (None elements serialize to '').
+                query_parameters = {}  # type: Dict[str, Any]
+                if farm_ids is not None:
+                    query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link already embeds the continuation query string,
+                # but the farmerId path parameter is re-applied to it.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Return (continuation token, iterator of this page's items).
+            deserialized = self._deserialize('FieldListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Fetch one page; called lazily by ItemPaged as iteration advances.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): for mapped status codes map_error raises
+                # before the deserialized error model is attached — this
+                # ordering matches the generator's output.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/fields'}  # type: ignore
+
+    def list(
+        self,
+        farm_ids=None,  # type: Optional[List[str]]
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.FieldListResponse"]
+        """Returns a paginated list of field resources across all farmers.
+
+        :param farm_ids: Farm Ids of the resource.
+        :type farm_ids: list[str]
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either FieldListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.FieldListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional caller-supplied hook applied to each page's item list.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.FieldListResponse"]
+        # Map well-known HTTP status codes to azure-core exception types;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build either the first-page request (full query string) or a
+            # follow-up request to the service-provided next_link.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters: only filters the caller actually
+                # supplied are serialized; list filters become repeated
+                # query parameters (None elements serialize to '').
+                query_parameters = {}  # type: Dict[str, Any]
+                if farm_ids is not None:
+                    query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # next_link already embeds the continuation query string.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Return (continuation token, iterator of this page's items).
+            deserialized = self._deserialize('FieldListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Fetch one page; called lazily by ItemPaged as iteration advances.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/fields'}  # type: ignore
+
+    def get(
+        self,
+        farmer_id,  # type: str
+        field_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Field"
+        """Gets a specified field resource under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param field_id: Id of the field.
+        :type field_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Field, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Field
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        # Optional caller-supplied hook that transforms the raw response.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Field"]
+        # Map well-known HTTP status codes to azure-core exception types;
+        # callers may extend/override the mapping via the 'error_map' kwarg.
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'fieldId': self._serialize.url("field_id", field_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('Field', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/fields/{fieldId}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        farmer_id,  # type: str
+        field_id,  # type: str
+        field=None,  # type: Optional["_models.Field"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.Field"
+        """Creates or Updates a field resource under a particular farmer.
+
+        Sent as an HTTP PATCH with ``application/merge-patch+json`` content, so a partial
+        payload updates an existing field and a full payload can create a new one.
+
+        :param farmer_id: Id of the associated farmer resource.
+        :type farmer_id: str
+        :param field_id: Id of the field resource.
+        :type field_id: str
+        :param field: Field resource payload to create or update.
+        :type field: ~azure.farmbeats.models.Field
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Field, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.Field
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Field"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # Merge-patch content type: absent fields are left untouched on the service side.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'fieldId': self._serialize.url("field_id", field_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if field is not None:
+            body_content = self._serialize.body(field, 'Field')
+        else:
+            # A None payload is still sent (empty body) rather than omitted.
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # Both success codes carry a Field payload: 200 = updated, 201 = created.
+        if response.status_code == 200:
+            deserialized = self._deserialize('Field', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('Field', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/fields/{fieldId}'}  # type: ignore
+
+    def delete(
+        self,
+        farmer_id,  # type: str
+        field_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes a specified field resource under a particular farmer.
+
+        :param farmer_id: Id of the farmer.
+        :type farmer_id: str
+        :param field_id: Id of the field.
+        :type field_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'fieldId': self._serialize.url("field_id", field_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Service returns 204 No Content on successful deletion; there is no body.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/fields/{fieldId}'}  # type: ignore
+
+    def get_cascade_delete_job_details(
+        self,
+        job_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.CascadeDeleteJob"
+        """Get a cascade delete job for specified field.
+
+        :param job_id: Id of the job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: CascadeDeleteJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_cascade_delete_job_details.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # Returns the current job-status document for the cascade delete operation.
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_cascade_delete_job_details.metadata = {'url': '/fields/cascade-delete/{jobId}'}  # type: ignore
+
+    def _create_cascade_delete_job_initial(
+        self,
+        job_id,  # type: str
+        farmer_id,  # type: str
+        field_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.CascadeDeleteJob"
+        """Initial request of the create-cascade-delete-job LRO.
+
+        Issues the PUT that submits the job; the service acknowledges with
+        202 Accepted and an initial CascadeDeleteJob status document. Polling
+        to completion is handled by :meth:`begin_create_cascade_delete_job`.
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        # Note: farmerId/fieldId identify the deletion target and travel as query
+        # parameters; only the job id is part of the path.
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+        query_parameters['fieldId'] = self._serialize.query("field_id", field_id, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_cascade_delete_job_initial.metadata = {'url': '/fields/cascade-delete/{jobId}'}  # type: ignore
+
+    def begin_create_cascade_delete_job(
+        self,
+        job_id,  # type: str
+        farmer_id,  # type: str
+        field_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["_models.CascadeDeleteJob"]
+        """Create a cascade delete job for specified field.
+
+        :param job_id: Job ID supplied by end user.
+        :type job_id: str
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param field_id: ID of the field to be deleted.
+        :type field_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be LROBasePolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either CascadeDeleteJob or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # Only issue the initial PUT when not resuming from a saved poller state;
+        # cls=lambda keeps the raw pipeline_response for the poller to consume.
+        if cont_token is None:
+            raw_result = self._create_cascade_delete_job_initial(
+                job_id=job_id,
+                farmer_id=farmer_id,
+                field_id=field_id,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        # These kwargs were consumed by the initial call; drop them so the polling
+        # requests below are not sent with stale values.
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            # Deserialize the terminal job-status document once polling finishes.
+            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+
+        # Final resource is fetched via the Location header ('final-state-via': 'location').
+        if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_cascade_delete_job.metadata = {'url': '/fields/cascade-delete/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_harvest_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_harvest_data_operations.py
new file mode 100644
index 000000000000..6c3c146e3137
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_harvest_data_operations.py
@@ -0,0 +1,718 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class HarvestDataOperations(object):
+ """HarvestDataOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Wired up by the generated service client; not meant for direct construction.
+        self._client = client          # pipeline client that builds and sends requests
+        self._serialize = serializer   # msrest Serializer for params/headers/bodies
+        self._deserialize = deserializer  # msrest Deserializer for response models
+        self._config = config          # client configuration (endpoint, polling_interval, ...)
+
+    def list_by_farmer_id(
+        self,
+        farmer_id,  # type: str
+        min_total_yield=None,  # type: Optional[float]
+        max_total_yield=None,  # type: Optional[float]
+        min_avg_yield=None,  # type: Optional[float]
+        max_avg_yield=None,  # type: Optional[float]
+        min_total_wet_mass=None,  # type: Optional[float]
+        max_total_wet_mass=None,  # type: Optional[float]
+        min_avg_wet_mass=None,  # type: Optional[float]
+        max_avg_wet_mass=None,  # type: Optional[float]
+        min_avg_moisture=None,  # type: Optional[float]
+        max_avg_moisture=None,  # type: Optional[float]
+        min_avg_speed=None,  # type: Optional[float]
+        max_avg_speed=None,  # type: Optional[float]
+        sources=None,  # type: Optional[List[str]]
+        associated_boundary_ids=None,  # type: Optional[List[str]]
+        operation_boundary_ids=None,  # type: Optional[List[str]]
+        min_operation_start_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_start_date_time=None,  # type: Optional[datetime.datetime]
+        min_operation_end_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_end_date_time=None,  # type: Optional[datetime.datetime]
+        min_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
+        min_area=None,  # type: Optional[float]
+        max_area=None,  # type: Optional[float]
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.HarvestDataListResponse"]
+        """Returns a paginated list of harvest data resources under a particular farm.
+
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param min_total_yield: Minimum Yield value (inclusive).
+        :type min_total_yield: float
+        :param max_total_yield: Maximum Yield value (inclusive).
+        :type max_total_yield: float
+        :param min_avg_yield: Minimum AvgYield value (inclusive).
+        :type min_avg_yield: float
+        :param max_avg_yield: Maximum AvgYield value (inclusive).
+        :type max_avg_yield: float
+        :param min_total_wet_mass: Minimum Total WetMass value (inclusive).
+        :type min_total_wet_mass: float
+        :param max_total_wet_mass: Maximum Total WetMass value (inclusive).
+        :type max_total_wet_mass: float
+        :param min_avg_wet_mass: Minimum AvgWetMass value (inclusive).
+        :type min_avg_wet_mass: float
+        :param max_avg_wet_mass: Maximum AvgWetMass value (inclusive).
+        :type max_avg_wet_mass: float
+        :param min_avg_moisture: Minimum AvgMoisture value (inclusive).
+        :type min_avg_moisture: float
+        :param max_avg_moisture: Maximum AvgMoisture value (inclusive).
+        :type max_avg_moisture: float
+        :param min_avg_speed: Minimum AvgSpeed value (inclusive).
+        :type min_avg_speed: float
+        :param max_avg_speed: Maximum AvgSpeed value (inclusive).
+        :type max_avg_speed: float
+        :param sources: Sources of the operation data.
+        :type sources: list[str]
+        :param associated_boundary_ids: Boundary IDs associated with operation data.
+        :type associated_boundary_ids: list[str]
+        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
+        :type operation_boundary_ids: list[str]
+        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_start_date_time: ~datetime.datetime
+        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_start_date_time: ~datetime.datetime
+        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_end_date_time: ~datetime.datetime
+        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_end_date_time: ~datetime.datetime
+        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_modified_date_time: ~datetime.datetime
+        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_modified_date_time: ~datetime.datetime
+        :param min_area: Minimum area for which operation was applied (inclusive).
+        :type min_area: float
+        :param max_area: Maximum area for which operation was applied (inclusive).
+        :type max_area: float
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         e.g. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either HarvestDataListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.HarvestDataListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.HarvestDataListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Builds the GET request for either the first page (full filter set)
+            # or a continuation page (next_link already encodes the query).
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                # Each filter is optional; only non-None values are added to the query.
+                query_parameters = {}  # type: Dict[str, Any]
+                if min_total_yield is not None:
+                    query_parameters['minTotalYield'] = self._serialize.query("min_total_yield", min_total_yield, 'float')
+                if max_total_yield is not None:
+                    query_parameters['maxTotalYield'] = self._serialize.query("max_total_yield", max_total_yield, 'float')
+                if min_avg_yield is not None:
+                    query_parameters['minAvgYield'] = self._serialize.query("min_avg_yield", min_avg_yield, 'float')
+                if max_avg_yield is not None:
+                    query_parameters['maxAvgYield'] = self._serialize.query("max_avg_yield", max_avg_yield, 'float')
+                if min_total_wet_mass is not None:
+                    query_parameters['minTotalWetMass'] = self._serialize.query("min_total_wet_mass", min_total_wet_mass, 'float')
+                if max_total_wet_mass is not None:
+                    query_parameters['maxTotalWetMass'] = self._serialize.query("max_total_wet_mass", max_total_wet_mass, 'float')
+                if min_avg_wet_mass is not None:
+                    query_parameters['minAvgWetMass'] = self._serialize.query("min_avg_wet_mass", min_avg_wet_mass, 'float')
+                if max_avg_wet_mass is not None:
+                    query_parameters['maxAvgWetMass'] = self._serialize.query("max_avg_wet_mass", max_avg_wet_mass, 'float')
+                if min_avg_moisture is not None:
+                    query_parameters['minAvgMoisture'] = self._serialize.query("min_avg_moisture", min_avg_moisture, 'float')
+                if max_avg_moisture is not None:
+                    query_parameters['maxAvgMoisture'] = self._serialize.query("max_avg_moisture", max_avg_moisture, 'float')
+                if min_avg_speed is not None:
+                    query_parameters['minAvgSpeed'] = self._serialize.query("min_avg_speed", min_avg_speed, 'float')
+                if max_avg_speed is not None:
+                    query_parameters['maxAvgSpeed'] = self._serialize.query("max_avg_speed", max_avg_speed, 'float')
+                # List-valued filters are serialized element-wise (None elements become '').
+                if sources is not None:
+                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
+                if associated_boundary_ids is not None:
+                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
+                if operation_boundary_ids is not None:
+                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
+                if min_operation_start_date_time is not None:
+                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
+                if max_operation_start_date_time is not None:
+                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
+                if min_operation_end_date_time is not None:
+                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
+                if max_operation_end_date_time is not None:
+                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
+                if min_operation_modified_date_time is not None:
+                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
+                if max_operation_modified_date_time is not None:
+                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
+                if min_area is not None:
+                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
+                if max_area is not None:
+                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Serializer enforces the documented 10..1000 range.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Splits a page response into (continuation link, iterator of items).
+            deserialized = self._deserialize('HarvestDataListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            # Fetches one page, raising on any non-200 status.
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # Lazy iterator: pages are requested on demand as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/harvest-data'}  # type: ignore
+
+ def list(
+ self,
+ min_total_yield=None, # type: Optional[float]
+ max_total_yield=None, # type: Optional[float]
+ min_avg_yield=None, # type: Optional[float]
+ max_avg_yield=None, # type: Optional[float]
+ min_total_wet_mass=None, # type: Optional[float]
+ max_total_wet_mass=None, # type: Optional[float]
+ min_avg_wet_mass=None, # type: Optional[float]
+ max_avg_wet_mass=None, # type: Optional[float]
+ min_avg_moisture=None, # type: Optional[float]
+ max_avg_moisture=None, # type: Optional[float]
+ min_avg_speed=None, # type: Optional[float]
+ max_avg_speed=None, # type: Optional[float]
+ sources=None, # type: Optional[List[str]]
+ associated_boundary_ids=None, # type: Optional[List[str]]
+ operation_boundary_ids=None, # type: Optional[List[str]]
+ min_operation_start_date_time=None, # type: Optional[datetime.datetime]
+ max_operation_start_date_time=None, # type: Optional[datetime.datetime]
+ min_operation_end_date_time=None, # type: Optional[datetime.datetime]
+ max_operation_end_date_time=None, # type: Optional[datetime.datetime]
+ min_operation_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_operation_modified_date_time=None, # type: Optional[datetime.datetime]
+ min_area=None, # type: Optional[float]
+ max_area=None, # type: Optional[float]
+ ids=None, # type: Optional[List[str]]
+ names=None, # type: Optional[List[str]]
+ property_filters=None, # type: Optional[List[str]]
+ statuses=None, # type: Optional[List[str]]
+ min_created_date_time=None, # type: Optional[datetime.datetime]
+ max_created_date_time=None, # type: Optional[datetime.datetime]
+ min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_page_size=50, # type: Optional[int]
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["_models.HarvestDataListResponse"]
+ """Returns a paginated list of harvest data resources across all farmers.
+
+ :param min_total_yield: Minimum Yield value(inclusive).
+ :type min_total_yield: float
+ :param max_total_yield: Maximum Yield value (inclusive).
+ :type max_total_yield: float
+ :param min_avg_yield: Minimum AvgYield value(inclusive).
+ :type min_avg_yield: float
+ :param max_avg_yield: Maximum AvgYield value (inclusive).
+ :type max_avg_yield: float
+ :param min_total_wet_mass: Minimum Total WetMass value(inclusive).
+ :type min_total_wet_mass: float
+ :param max_total_wet_mass: Maximum Total WetMass value (inclusive).
+ :type max_total_wet_mass: float
+ :param min_avg_wet_mass: Minimum AvgWetMass value(inclusive).
+ :type min_avg_wet_mass: float
+ :param max_avg_wet_mass: Maximum AvgWetMass value (inclusive).
+ :type max_avg_wet_mass: float
+ :param min_avg_moisture: Minimum AvgMoisture value(inclusive).
+ :type min_avg_moisture: float
+ :param max_avg_moisture: Maximum AvgMoisture value (inclusive).
+ :type max_avg_moisture: float
+ :param min_avg_speed: Minimum AvgSpeed value(inclusive).
+ :type min_avg_speed: float
+ :param max_avg_speed: Maximum AvgSpeed value (inclusive).
+ :type max_avg_speed: float
+ :param sources: Sources of the operation data.
+ :type sources: list[str]
+ :param associated_boundary_ids: Boundary IDs associated with operation data.
+ :type associated_boundary_ids: list[str]
+ :param operation_boundary_ids: Operation boundary IDs associated with operation data.
+ :type operation_boundary_ids: list[str]
+ :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
+ format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_start_date_time: ~datetime.datetime
+ :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
+ format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_start_date_time: ~datetime.datetime
+ :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_end_date_time: ~datetime.datetime
+ :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
+ yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_end_date_time: ~datetime.datetime
+ :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
+ sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type min_operation_modified_date_time: ~datetime.datetime
+ :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
+ sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+ :type max_operation_modified_date_time: ~datetime.datetime
+ :param min_area: Minimum area for which operation was applied (inclusive).
+ :type min_area: float
+ :param max_area: Maximum area for which operation was applied (inclusive).
+ :type max_area: float
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either HarvestDataListResponse or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.HarvestDataListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.HarvestDataListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ if min_total_yield is not None:
+ query_parameters['minTotalYield'] = self._serialize.query("min_total_yield", min_total_yield, 'float')
+ if max_total_yield is not None:
+ query_parameters['maxTotalYield'] = self._serialize.query("max_total_yield", max_total_yield, 'float')
+ if min_avg_yield is not None:
+ query_parameters['minAvgYield'] = self._serialize.query("min_avg_yield", min_avg_yield, 'float')
+ if max_avg_yield is not None:
+ query_parameters['maxAvgYield'] = self._serialize.query("max_avg_yield", max_avg_yield, 'float')
+ if min_total_wet_mass is not None:
+ query_parameters['minTotalWetMass'] = self._serialize.query("min_total_wet_mass", min_total_wet_mass, 'float')
+ if max_total_wet_mass is not None:
+ query_parameters['maxTotalWetMass'] = self._serialize.query("max_total_wet_mass", max_total_wet_mass, 'float')
+ if min_avg_wet_mass is not None:
+ query_parameters['minAvgWetMass'] = self._serialize.query("min_avg_wet_mass", min_avg_wet_mass, 'float')
+ if max_avg_wet_mass is not None:
+ query_parameters['maxAvgWetMass'] = self._serialize.query("max_avg_wet_mass", max_avg_wet_mass, 'float')
+ if min_avg_moisture is not None:
+ query_parameters['minAvgMoisture'] = self._serialize.query("min_avg_moisture", min_avg_moisture, 'float')
+ if max_avg_moisture is not None:
+ query_parameters['maxAvgMoisture'] = self._serialize.query("max_avg_moisture", max_avg_moisture, 'float')
+ if min_avg_speed is not None:
+ query_parameters['minAvgSpeed'] = self._serialize.query("min_avg_speed", min_avg_speed, 'float')
+ if max_avg_speed is not None:
+ query_parameters['maxAvgSpeed'] = self._serialize.query("max_avg_speed", max_avg_speed, 'float')
+ if sources is not None:
+ query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
+ if associated_boundary_ids is not None:
+ query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
+ if operation_boundary_ids is not None:
+ query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
+ if min_operation_start_date_time is not None:
+ query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
+ if max_operation_start_date_time is not None:
+ query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
+ if min_operation_end_date_time is not None:
+ query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
+ if max_operation_end_date_time is not None:
+ query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
+ if min_operation_modified_date_time is not None:
+ query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
+ if max_operation_modified_date_time is not None:
+ query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
+ if min_area is not None:
+ query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
+ if max_area is not None:
+ query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('HarvestDataListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/harvest-data'} # type: ignore
+
+ def get(
+ self,
+ farmer_id, # type: str
+ harvest_data_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.HarvestData"
+ """Get a specified harvest data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param harvest_data_id: ID of the harvest data resource.
+ :type harvest_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: HarvestData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.HarvestData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.HarvestData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'harvestDataId': self._serialize.url("harvest_data_id", harvest_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('HarvestData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/farmers/{farmerId}/harvest-data/{harvestDataId}'} # type: ignore
+
+ def create_or_update(
+ self,
+ farmer_id, # type: str
+ harvest_data_id, # type: str
+ harvest_data=None, # type: Optional["_models.HarvestData"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.HarvestData"
+ """Creates or updates harvest data resource under a particular farmer.
+
+ :param farmer_id: ID of the farmer.
+ :type farmer_id: str
+ :param harvest_data_id: ID of the harvest data resource.
+ :type harvest_data_id: str
+ :param harvest_data: Harvest data resource payload to create or update.
+ :type harvest_data: ~azure.farmbeats.models.HarvestData
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: HarvestData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.HarvestData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.HarvestData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'harvestDataId': self._serialize.url("harvest_data_id", harvest_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if harvest_data is not None:
+ body_content = self._serialize.body(harvest_data, 'HarvestData')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('HarvestData', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('HarvestData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/farmers/{farmerId}/harvest-data/{harvestDataId}'} # type: ignore
+
+ def delete(
+ self,
+ farmer_id, # type: str
+ harvest_data_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes a specified harvest data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param harvest_data_id: ID of the harvest data.
+ :type harvest_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'harvestDataId': self._serialize.url("harvest_data_id", harvest_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/farmers/{farmerId}/harvest-data/{harvestDataId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_image_processing_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_image_processing_operations.py
new file mode 100644
index 000000000000..aa559388f2e9
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_image_processing_operations.py
@@ -0,0 +1,218 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
class ImageProcessingOperations(object):
    """ImageProcessingOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.farmbeats.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def _create_rasterize_job_initial(
        self,
        job_id,  # type: str
        job=None,  # type: Optional["_models.ImageProcessingRasterizeJob"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ImageProcessingRasterizeJob"
        """Send the initial PUT that submits the rasterize job.

        Internal helper for :meth:`begin_create_rasterize_job`. The service
        accepts the job with ``202`` and completes it asynchronously; polling
        is handled by the caller.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ImageProcessingRasterizeJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_rasterize_job_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # A missing job payload is sent as an empty request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        if job is not None:
            body_content = self._serialize.body(job, 'ImageProcessingRasterizeJob')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 202 Accepted is the only success status for the initial request.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('ImageProcessingRasterizeJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_rasterize_job_initial.metadata = {'url': '/image-processing/rasterize/{jobId}'}  # type: ignore

    def begin_create_rasterize_job(
        self,
        job_id,  # type: str
        job=None,  # type: Optional["_models.ImageProcessingRasterizeJob"]
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.ImageProcessingRasterizeJob"]
        """Create a ImageProcessing Rasterize job.

        :param job_id: JobId provided by user.
        :type job_id: str
        :param job: Job parameters supplied by user.
        :type job: ~azure.farmbeats.models.ImageProcessingRasterizeJob
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be LROBasePolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either ImageProcessingRasterizeJob or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.ImageProcessingRasterizeJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ImageProcessingRasterizeJob"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # When resuming from a continuation token the initial request has
        # already been made, so it is skipped here.
        if cont_token is None:
            raw_result = self._create_rasterize_job_initial(
                job_id=job_id,
                job=job,
                cls=lambda x,y,z: x,
                **kwargs
            )

        # These kwargs were consumed by the initial call and must not be
        # forwarded to the polling method.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the final response once the operation completes.
            deserialized = self._deserialize('ImageProcessingRasterizeJob', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }

        # polling=True -> poll the Location header until the job reaches a
        # terminal state; polling=False -> return immediately without polling;
        # otherwise use the caller-supplied polling strategy.
        if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_rasterize_job.metadata = {'url': '/image-processing/rasterize/{jobId}'}  # type: ignore

    def get_rasterize_job(
        self,
        job_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.ImageProcessingRasterizeJob"
        """Get ImageProcessing Rasterize job's details.

        :param job_id: Id of the job.
        :type job_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ImageProcessingRasterizeJob, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.ImageProcessingRasterizeJob
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ImageProcessingRasterizeJob"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get_rasterize_job.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response)

        deserialized = self._deserialize('ImageProcessingRasterizeJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_rasterize_job.metadata = {'url': '/image-processing/rasterize/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_oauth_providers_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_oauth_providers_operations.py
new file mode 100644
index 000000000000..36b003ba59a6
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_oauth_providers_operations.py
@@ -0,0 +1,337 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class OAuthProvidersOperations(object):
+ """OAuthProvidersOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ # Alias so callers can reach the generated model classes through the operation group.
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ # Pipeline client plus (de)serializers and config are injected by the service client.
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ ids=None, # type: Optional[List[str]]
+ names=None, # type: Optional[List[str]]
+ property_filters=None, # type: Optional[List[str]]
+ statuses=None, # type: Optional[List[str]]
+ min_created_date_time=None, # type: Optional[datetime.datetime]
+ max_created_date_time=None, # type: Optional[datetime.datetime]
+ min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_page_size=50, # type: Optional[int]
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["_models.OAuthProviderListResponse"]
+ """Returns a paginated list of oauthProvider resources.
+
+ :param ids: Ids of the resource.
+ :type ids: list[str]
+ :param names: Names of the resource.
+ :type names: list[str]
+ :param property_filters: Filters on key-value pairs within the Properties object.
+ eg. "{testKey} eq {testValue}".
+ :type property_filters: list[str]
+ :param statuses: Statuses of the resource.
+ :type statuses: list[str]
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OAuthProviderListResponse or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.OAuthProviderListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthProviderListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Callers may extend or override the default status-code -> exception mapping.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Build the HTTP request for one page. Continuation pages use the
+ # service-returned next_link verbatim (it already embeds all query parameters).
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ # List-valued filters are sent as repeated query parameters; None
+ # elements are serialized as empty strings rather than dropped.
+ if ids is not None:
+ query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+ if names is not None:
+ query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+ if property_filters is not None:
+ query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+ if statuses is not None:
+ query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ # Client-side range validation (10..1000) before the request is sent.
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ # Turn one page into (continuation token, item iterator) for ItemPaged.
+ deserialized = self._deserialize('OAuthProviderListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ # Custom cls hook is applied per page, to the list of elements.
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ # Error body parsed best-effort so a typed model can be attached.
+ # NOTE(review): if the status code is in error_map, map_error raises
+ # first and the parsed model is not attached — confirm this is intended.
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/oauth/providers'} # type: ignore
+
+ def get(
+ self,
+ oauth_provider_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.OAuthProvider"
+ """Get a specified oauthProvider resource.
+
+ :param oauth_provider_id: ID of the oauthProvider resource.
+ :type oauth_provider_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: OAuthProvider, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.OAuthProvider
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthProvider"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Callers may extend or override the default status-code -> exception mapping.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'oauthProviderId': self._serialize.url("oauth_provider_id", oauth_provider_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ # Mapped status codes raise their specific exception; anything else
+ # raises HttpResponseError carrying the best-effort parsed error body.
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('OAuthProvider', pipeline_response)
+
+ if cls:
+ # Custom cls hook receives the raw response, the model, and response headers.
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/oauth/providers/{oauthProviderId}'} # type: ignore
+
+ def create_or_update(
+ self,
+ oauth_provider_id, # type: str
+ oauth_provider=None, # type: Optional["_models.OAuthProvider"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.OAuthProvider"
+ """Creates or updates an oauthProvider resource.
+
+ :param oauth_provider_id: ID of oauthProvider resource.
+ :type oauth_provider_id: str
+ :param oauth_provider: OauthProvider resource payload to create or update.
+ :type oauth_provider: ~azure.farmbeats.models.OAuthProvider
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: OAuthProvider, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.OAuthProvider
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthProvider"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ # Sent as a PATCH with JSON merge-patch semantics by default; callers may override.
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'oauthProviderId': self._serialize.url("oauth_provider_id", oauth_provider_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ # The payload is optional; a request with no body is still valid.
+ if oauth_provider is not None:
+ body_content = self._serialize.body(oauth_provider, 'OAuthProvider')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ # 200 (updated) and 201 (created) both carry the same resource shape.
+ if response.status_code == 200:
+ deserialized = self._deserialize('OAuthProvider', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('OAuthProvider', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/oauth/providers/{oauthProviderId}'} # type: ignore
+
+ def delete(
+ self,
+ oauth_provider_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes an specified oauthProvider resource.
+
+ :param oauth_provider_id: ID of oauthProvider.
+ :type oauth_provider_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'oauthProviderId': self._serialize.url("oauth_provider_id", oauth_provider_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ # Only 204 No Content indicates a successful delete for this endpoint.
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/oauth/providers/{oauthProviderId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_oauth_tokens_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_oauth_tokens_operations.py
new file mode 100644
index 000000000000..7f5e211329d7
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_oauth_tokens_operations.py
@@ -0,0 +1,390 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class OAuthTokensOperations(object):
+ """OAuthTokensOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ # Alias so callers can reach the generated model classes through the operation group.
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ # Pipeline client plus (de)serializers and config are injected by the service client.
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ auth_provider_ids=None, # type: Optional[List[str]]
+ farmer_ids=None, # type: Optional[List[str]]
+ is_valid=None, # type: Optional[bool]
+ min_created_date_time=None, # type: Optional[datetime.datetime]
+ max_created_date_time=None, # type: Optional[datetime.datetime]
+ min_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_last_modified_date_time=None, # type: Optional[datetime.datetime]
+ max_page_size=50, # type: Optional[int]
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["_models.OAuthTokenListResponse"]
+ """Returns a list of OAuthToken documents.
+
+ :param auth_provider_ids: Name of AuthProvider.
+ :type auth_provider_ids: list[str]
+ :param farmer_ids: List of farmers.
+ :type farmer_ids: list[str]
+ :param is_valid: If the token object is valid.
+ :type is_valid: bool
+ :param min_created_date_time: Minimum creation date of resource (inclusive).
+ :type min_created_date_time: ~datetime.datetime
+ :param max_created_date_time: Maximum creation date of resource (inclusive).
+ :type max_created_date_time: ~datetime.datetime
+ :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+ :type min_last_modified_date_time: ~datetime.datetime
+ :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+ :type max_last_modified_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OAuthTokenListResponse or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.OAuthTokenListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.OAuthTokenListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ # Callers may extend or override the default status-code -> exception mapping.
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Build the HTTP request for one page. Continuation pages use the
+ # service-returned next_link verbatim (it already embeds all query parameters).
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ # List-valued filters are sent as repeated query parameters; None
+ # elements are serialized as empty strings rather than dropped.
+ if auth_provider_ids is not None:
+ query_parameters['authProviderIds'] = [self._serialize.query("auth_provider_ids", q, 'str') if q is not None else '' for q in auth_provider_ids]
+ if farmer_ids is not None:
+ query_parameters['farmerIds'] = [self._serialize.query("farmer_ids", q, 'str') if q is not None else '' for q in farmer_ids]
+ if is_valid is not None:
+ query_parameters['isValid'] = self._serialize.query("is_valid", is_valid, 'bool')
+ if min_created_date_time is not None:
+ query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+ if max_created_date_time is not None:
+ query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+ if min_last_modified_date_time is not None:
+ query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+ if max_last_modified_date_time is not None:
+ query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+ if max_page_size is not None:
+ # Client-side range validation (10..1000) before the request is sent.
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ # Turn one page into (continuation token, item iterator) for ItemPaged.
+ deserialized = self._deserialize('OAuthTokenListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ # Custom cls hook is applied per page, to the list of elements.
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ # Error body parsed best-effort so a typed model can be attached.
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/oauth/tokens'} # type: ignore
+
+ def get_o_auth_connection_link(
+ self,
+ connect_request=None, # type: Optional["_models.OAuthConnectRequest"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> str
+ """Returns Connection link needed in the OAuth flow.
+
+ :param connect_request: OAuth Connect Request.
+ :type connect_request: ~azure.farmbeats.models.OAuthConnectRequest
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: str, or the result of cls(response)
+ :rtype: str
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[str]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ # POST to the ':connect' action on the tokens collection; no path parameters.
+ url = self.get_o_auth_connection_link.metadata['url'] # type: ignore
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ # The request payload is optional; a POST with no body is still valid.
+ if connect_request is not None:
+ body_content = self._serialize.body(connect_request, 'OAuthConnectRequest')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ # The service returns the connection link as a bare JSON string.
+ deserialized = self._deserialize('str', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_o_auth_connection_link.metadata = {'url': '/oauth/tokens/:connect'} # type: ignore
+
+ def get_cascade_delete_job_details(
+ self,
+ job_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.CascadeDeleteJob"
+ """Get remove job for OAuth token.
+
+ :param job_id: Id of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: CascadeDeleteJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.CascadeDeleteJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_cascade_delete_job_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ # Returns the job document including its current status/progress fields.
+ deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_cascade_delete_job_details.metadata = {'url': '/oauth/tokens/remove/{jobId}'} # type: ignore
+
+ def _create_cascade_delete_job_initial(
+ self,
+ job_id, # type: str
+ farmer_id, # type: str
+ oauth_provider_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.CascadeDeleteJob"
+ # Initial request of the long-running operation: PUT the job resource and
+ # expect 202 Accepted; polling is handled by begin_create_cascade_delete_job.
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_cascade_delete_job_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ # The job inputs travel as required query parameters; there is no request body.
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+ query_parameters['oauthProviderId'] = self._serialize.query("oauth_provider_id", oauth_provider_id, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_cascade_delete_job_initial.metadata = {'url': '/oauth/tokens/remove/{jobId}'} # type: ignore
+
+ def begin_create_cascade_delete_job(
+ self,
+ job_id, # type: str
+ farmer_id, # type: str
+ oauth_provider_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["_models.CascadeDeleteJob"]
+ """Create remove job for OAuth token.
+
+ :param job_id: Job Id supplied by end user.
+ :type job_id: str
+ :param farmer_id: Id of the farmer.
+ :type farmer_id: str
+ :param oauth_provider_id: Id of the OAuthProvider.
+ :type oauth_provider_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be LROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either CascadeDeleteJob or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.CascadeDeleteJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.CascadeDeleteJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ # With a continuation token the initial request is skipped and the poller
+ # is rehydrated from the saved state instead.
+ if cont_token is None:
+ raw_result = self._create_cascade_delete_job_initial(
+ job_id=job_id,
+ farmer_id=farmer_id,
+ oauth_provider_id=oauth_provider_id,
+ cls=lambda x,y,z: x, # keep the raw PipelineResponse for the poller
+ **kwargs
+ )
+
+ # These kwargs applied only to the initial request; drop them before polling.
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ # Final deserialization once the LRO reaches a terminal state.
+ deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ # Default strategy polls the operation and fetches the final resource via
+ # the Location header ('final-state-via': 'location').
+ if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_cascade_delete_job.metadata = {'url': '/oauth/tokens/remove/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_planting_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_planting_data_operations.py
new file mode 100644
index 000000000000..6a7e06ad153c
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_planting_data_operations.py
@@ -0,0 +1,658 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PlantingDataOperations(object):
+    """PlantingDataOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.farmbeats.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    # Alias so callers can reach the operation-group models via ``client.planting_data.models``.
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # All four collaborators are injected by the generated service client;
+        # this class never constructs its own pipeline or (de)serializers.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id,  # type: str
+        min_avg_planting_rate=None,  # type: Optional[float]
+        max_avg_planting_rate=None,  # type: Optional[float]
+        min_total_material=None,  # type: Optional[float]
+        max_total_material=None,  # type: Optional[float]
+        min_avg_material=None,  # type: Optional[float]
+        max_avg_material=None,  # type: Optional[float]
+        sources=None,  # type: Optional[List[str]]
+        associated_boundary_ids=None,  # type: Optional[List[str]]
+        operation_boundary_ids=None,  # type: Optional[List[str]]
+        min_operation_start_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_start_date_time=None,  # type: Optional[datetime.datetime]
+        min_operation_end_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_end_date_time=None,  # type: Optional[datetime.datetime]
+        min_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
+        min_area=None,  # type: Optional[float]
+        max_area=None,  # type: Optional[float]
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PlantingDataListResponse"]
+        """Returns a paginated list of planting data resources under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param min_avg_planting_rate: Minimum AvgPlantingRate value (inclusive).
+        :type min_avg_planting_rate: float
+        :param max_avg_planting_rate: Maximum AvgPlantingRate value (inclusive).
+        :type max_avg_planting_rate: float
+        :param min_total_material: Minimum TotalMaterial value (inclusive).
+        :type min_total_material: float
+        :param max_total_material: Maximum TotalMaterial value (inclusive).
+        :type max_total_material: float
+        :param min_avg_material: Minimum AvgMaterial value (inclusive).
+        :type min_avg_material: float
+        :param max_avg_material: Maximum AvgMaterial value (inclusive).
+        :type max_avg_material: float
+        :param sources: Sources of the operation data.
+        :type sources: list[str]
+        :param associated_boundary_ids: Boundary IDs associated with operation data.
+        :type associated_boundary_ids: list[str]
+        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
+        :type operation_boundary_ids: list[str]
+        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_start_date_time: ~datetime.datetime
+        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_start_date_time: ~datetime.datetime
+        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_end_date_time: ~datetime.datetime
+        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_end_date_time: ~datetime.datetime
+        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_modified_date_time: ~datetime.datetime
+        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_modified_date_time: ~datetime.datetime
+        :param min_area: Minimum area for which operation was applied (inclusive).
+        :type min_area: float
+        :param max_area: Maximum area for which operation was applied (inclusive).
+        :type max_area: float
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PlantingDataListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.PlantingDataListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PlantingDataListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code -> exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Build the GET request for the first page (service route plus all
+            # filter query parameters) or, when next_link is given, for a
+            # follow-up page; the next link returned by the service already
+            # embeds the query string, so only headers are re-attached then.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters; each optional filter is sent only when supplied.
+                query_parameters = {}  # type: Dict[str, Any]
+                if min_avg_planting_rate is not None:
+                    query_parameters['minAvgPlantingRate'] = self._serialize.query("min_avg_planting_rate", min_avg_planting_rate, 'float')
+                if max_avg_planting_rate is not None:
+                    query_parameters['maxAvgPlantingRate'] = self._serialize.query("max_avg_planting_rate", max_avg_planting_rate, 'float')
+                if min_total_material is not None:
+                    query_parameters['minTotalMaterial'] = self._serialize.query("min_total_material", min_total_material, 'float')
+                if max_total_material is not None:
+                    query_parameters['maxTotalMaterial'] = self._serialize.query("max_total_material", max_total_material, 'float')
+                if min_avg_material is not None:
+                    query_parameters['minAvgMaterial'] = self._serialize.query("min_avg_material", min_avg_material, 'float')
+                if max_avg_material is not None:
+                    query_parameters['maxAvgMaterial'] = self._serialize.query("max_avg_material", max_avg_material, 'float')
+                # List-valued filters serialize each element; None elements become ''.
+                if sources is not None:
+                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
+                if associated_boundary_ids is not None:
+                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
+                if operation_boundary_ids is not None:
+                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
+                if min_operation_start_date_time is not None:
+                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
+                if max_operation_start_date_time is not None:
+                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
+                if min_operation_end_date_time is not None:
+                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
+                if max_operation_end_date_time is not None:
+                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
+                if min_operation_modified_date_time is not None:
+                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
+                if max_operation_modified_date_time is not None:
+                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
+                if min_area is not None:
+                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
+                if max_area is not None:
+                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    # Serializer enforces the documented 10..1000 bounds client-side.
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, item iterator).
+            deserialized = self._deserialize('PlantingDataListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                # Optional caller-supplied hook transforms the raw page items.
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # failsafe_deserialize never raises on a malformed error body;
+                # map_error raises the mapped azure-core exception for known codes first.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/planting-data'}  # type: ignore
+
+    def list(
+        self,
+        min_avg_planting_rate=None,  # type: Optional[float]
+        max_avg_planting_rate=None,  # type: Optional[float]
+        min_total_material=None,  # type: Optional[float]
+        max_total_material=None,  # type: Optional[float]
+        min_avg_material=None,  # type: Optional[float]
+        max_avg_material=None,  # type: Optional[float]
+        sources=None,  # type: Optional[List[str]]
+        associated_boundary_ids=None,  # type: Optional[List[str]]
+        operation_boundary_ids=None,  # type: Optional[List[str]]
+        min_operation_start_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_start_date_time=None,  # type: Optional[datetime.datetime]
+        min_operation_end_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_end_date_time=None,  # type: Optional[datetime.datetime]
+        min_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
+        min_area=None,  # type: Optional[float]
+        max_area=None,  # type: Optional[float]
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.PlantingDataListResponse"]
+        """Returns a paginated list of planting data resources across all farmers.
+
+        :param min_avg_planting_rate: Minimum AvgPlantingRate value (inclusive).
+        :type min_avg_planting_rate: float
+        :param max_avg_planting_rate: Maximum AvgPlantingRate value (inclusive).
+        :type max_avg_planting_rate: float
+        :param min_total_material: Minimum TotalMaterial value (inclusive).
+        :type min_total_material: float
+        :param max_total_material: Maximum TotalMaterial value (inclusive).
+        :type max_total_material: float
+        :param min_avg_material: Minimum AvgMaterial value (inclusive).
+        :type min_avg_material: float
+        :param max_avg_material: Maximum AvgMaterial value (inclusive).
+        :type max_avg_material: float
+        :param sources: Sources of the operation data.
+        :type sources: list[str]
+        :param associated_boundary_ids: Boundary IDs associated with operation data.
+        :type associated_boundary_ids: list[str]
+        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
+        :type operation_boundary_ids: list[str]
+        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_start_date_time: ~datetime.datetime
+        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
+         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_start_date_time: ~datetime.datetime
+        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_end_date_time: ~datetime.datetime
+        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
+         yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_end_date_time: ~datetime.datetime
+        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type min_operation_modified_date_time: ~datetime.datetime
+        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
+         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
+        :type max_operation_modified_date_time: ~datetime.datetime
+        :param min_area: Minimum area for which operation was applied (inclusive).
+        :type min_area: float
+        :param max_area: Maximum area for which operation was applied (inclusive).
+        :type max_area: float
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either PlantingDataListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.PlantingDataListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PlantingDataListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code -> exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # First page: service route plus filter query parameters.
+            # Subsequent pages: the opaque next_link already carries the query,
+            # so only the Accept header is re-attached.
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters; each optional filter is sent only when supplied.
+                query_parameters = {}  # type: Dict[str, Any]
+                if min_avg_planting_rate is not None:
+                    query_parameters['minAvgPlantingRate'] = self._serialize.query("min_avg_planting_rate", min_avg_planting_rate, 'float')
+                if max_avg_planting_rate is not None:
+                    query_parameters['maxAvgPlantingRate'] = self._serialize.query("max_avg_planting_rate", max_avg_planting_rate, 'float')
+                if min_total_material is not None:
+                    query_parameters['minTotalMaterial'] = self._serialize.query("min_total_material", min_total_material, 'float')
+                if max_total_material is not None:
+                    query_parameters['maxTotalMaterial'] = self._serialize.query("max_total_material", max_total_material, 'float')
+                if min_avg_material is not None:
+                    query_parameters['minAvgMaterial'] = self._serialize.query("min_avg_material", min_avg_material, 'float')
+                if max_avg_material is not None:
+                    query_parameters['maxAvgMaterial'] = self._serialize.query("max_avg_material", max_avg_material, 'float')
+                # List-valued filters serialize each element; None elements become ''.
+                if sources is not None:
+                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
+                if associated_boundary_ids is not None:
+                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
+                if operation_boundary_ids is not None:
+                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
+                if min_operation_start_date_time is not None:
+                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
+                if max_operation_start_date_time is not None:
+                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
+                if min_operation_end_date_time is not None:
+                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
+                if max_operation_end_date_time is not None:
+                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
+                if min_operation_modified_date_time is not None:
+                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
+                if max_operation_modified_date_time is not None:
+                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
+                if min_area is not None:
+                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
+                if max_area is not None:
+                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    # Serializer enforces the documented 10..1000 bounds client-side.
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            # Deserialize one page and return (continuation token, item iterator).
+            deserialized = self._deserialize('PlantingDataListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                # Optional caller-supplied hook transforms the raw page items.
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # failsafe_deserialize never raises on a malformed error body;
+                # map_error raises the mapped azure-core exception for known codes first.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        # ItemPaged drives get_next/extract_data lazily as the caller iterates.
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/planting-data'}  # type: ignore
+
+    def get(
+        self,
+        farmer_id,  # type: str
+        planting_data_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.PlantingData"
+        """Get a specified planting data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param planting_data_id: ID of the planting data resource.
+        :type planting_data_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PlantingData, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.PlantingData
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PlantingData"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code -> exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: both path segments are URL-serialized/escaped.
+        url = self.get.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'plantingDataId': self._serialize.url("planting_data_id", planting_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # Only 200 is a success for this operation.
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('PlantingData', pipeline_response)
+
+        if cls:
+            # Caller-supplied hook receives (raw response, model, response headers).
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/farmers/{farmerId}/planting-data/{plantingDataId}'}  # type: ignore
+
+    def create_or_update(
+        self,
+        farmer_id,  # type: str
+        planting_data_id,  # type: str
+        planting_data=None,  # type: Optional["_models.PlantingData"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.PlantingData"
+        """Creates or updates a planting data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer.
+        :type farmer_id: str
+        :param planting_data_id: ID of the planting data resource.
+        :type planting_data_id: str
+        :param planting_data: Planting data resource payload to create or update.
+        :type planting_data: ~azure.farmbeats.models.PlantingData
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: PlantingData, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.PlantingData
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PlantingData"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code -> exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        # The operation is a PATCH with JSON merge-patch semantics by default.
+        content_type = kwargs.pop("content_type", "application/merge-patch+json")
+        accept = "application/json"
+
+        # Construct URL: both path segments are URL-serialized/escaped.
+        url = self.create_or_update.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'plantingDataId': self._serialize.url("planting_data_id", planting_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        # A None payload is sent as an empty body rather than serialized.
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if planting_data is not None:
+            body_content = self._serialize.body(planting_data, 'PlantingData')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        # 200 (updated) and 201 (created) both carry the resulting PlantingData body.
+        if response.status_code == 200:
+            deserialized = self._deserialize('PlantingData', pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize('PlantingData', pipeline_response)
+
+        if cls:
+            # Caller-supplied hook receives (raw response, model, response headers).
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_or_update.metadata = {'url': '/farmers/{farmerId}/planting-data/{plantingDataId}'}  # type: ignore
+
+    def delete(
+        self,
+        farmer_id,  # type: str
+        planting_data_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Deletes a specified planting data resource under a particular farmer.
+
+        :param farmer_id: ID of the associated farmer resource.
+        :type farmer_id: str
+        :param planting_data_id: ID of the planting data.
+        :type planting_data_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None, or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        # Callers may extend/override the default status-code -> exception mapping.
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL: both path segments are URL-serialized/escaped.
+        url = self.delete.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+            'plantingDataId': self._serialize.url("planting_data_id", planting_data_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        # 204 No Content is the only success status; there is no body to deserialize.
+        if response.status_code not in [204]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        if cls:
+            # Caller-supplied hook receives (raw response, None, response headers).
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/farmers/{farmerId}/planting-data/{plantingDataId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_scenes_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_scenes_operations.py
new file mode 100644
index 000000000000..c0da259c5958
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_scenes_operations.py
@@ -0,0 +1,404 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, IO, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ScenesOperations(object):
+    """ScenesOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that
+    instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.farmbeats.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Pipeline client, (de)serializers and config are supplied by the
+        # generated service client that owns this operation group.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        farmer_id,  # type: str
+        boundary_id,  # type: str
+        provider="Microsoft",  # type: str
+        source="Sentinel_2_L2A",  # type: Optional[str]
+        start_date_time=None,  # type: Optional[datetime.datetime]
+        end_date_time=None,  # type: Optional[datetime.datetime]
+        max_cloud_coverage_percentage=100,  # type: Optional[float]
+        max_dark_pixel_coverage_percentage=100,  # type: Optional[float]
+        image_names=None,  # type: Optional[List[str]]
+        image_resolutions=None,  # type: Optional[List[float]]
+        image_formats=None,  # type: Optional[List[str]]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.SceneListResponse"]
+        """Returns a paginated list of scene resources.
+
+        :param farmer_id: FarmerId.
+        :type farmer_id: str
+        :param boundary_id: BoundaryId.
+        :type boundary_id: str
+        :param provider: Provider name of scene data.
+        :type provider: str
+        :param source: Source name of scene data, default value Sentinel_2_L2A (Sentinel 2 L2A).
+        :type source: str
+        :param start_date_time: Scene start UTC datetime (inclusive), sample format:
+         yyyy-MM-ddThh:mm:ssZ.
+        :type start_date_time: ~datetime.datetime
+        :param end_date_time: Scene end UTC datetime (inclusive), sample format: yyyy-MM-ddThh:mm:ssZ.
+        :type end_date_time: ~datetime.datetime
+        :param max_cloud_coverage_percentage: Filter scenes with cloud coverage percentage less than
+         max value. Range [0 to 100.0].
+        :type max_cloud_coverage_percentage: float
+        :param max_dark_pixel_coverage_percentage: Filter scenes with dark pixel coverage percentage
+         less than max value. Range [0 to 100.0].
+        :type max_dark_pixel_coverage_percentage: float
+        :param image_names: List of image names to be filtered.
+        :type image_names: list[str]
+        :param image_resolutions: List of image resolutions in meters to be filtered.
+        :type image_resolutions: list[float]
+        :param image_formats: List of image formats to be filtered.
+        :type image_formats: list[str]
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either SceneListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.SceneListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SceneListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: build the full query string from the method arguments.
+                # Construct URL
+                url = self.list.metadata['url']  # type: ignore
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                query_parameters['provider'] = self._serialize.query("provider", provider, 'str')
+                query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+                query_parameters['boundaryId'] = self._serialize.query("boundary_id", boundary_id, 'str')
+                if source is not None:
+                    query_parameters['source'] = self._serialize.query("source", source, 'str')
+                if start_date_time is not None:
+                    query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601')
+                if end_date_time is not None:
+                    query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601')
+                if max_cloud_coverage_percentage is not None:
+                    query_parameters['maxCloudCoveragePercentage'] = self._serialize.query("max_cloud_coverage_percentage", max_cloud_coverage_percentage, 'float', maximum=100, minimum=0)
+                if max_dark_pixel_coverage_percentage is not None:
+                    query_parameters['maxDarkPixelCoveragePercentage'] = self._serialize.query("max_dark_pixel_coverage_percentage", max_dark_pixel_coverage_percentage, 'float', maximum=100, minimum=0)
+                if image_names is not None:
+                    query_parameters['imageNames'] = [self._serialize.query("image_names", q, 'str') if q is not None else '' for q in image_names]
+                if image_resolutions is not None:
+                    query_parameters['imageResolutions'] = [self._serialize.query("image_resolutions", q, 'float') if q is not None else '' for q in image_resolutions]
+                if image_formats is not None:
+                    query_parameters['imageFormats'] = [self._serialize.query("image_formats", q, 'str') if q is not None else '' for q in image_formats]
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: the service-provided next_link already carries the query.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('SceneListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): the error body is deserialized before map_error here, while
+                # non-paged operations in this file call map_error first; presumably equivalent
+                # since map_error raises only for mapped status codes — confirm against the generator.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list.metadata = {'url': '/scenes'}  # type: ignore
+
+    def _create_satellite_data_ingestion_job_initial(
+        self,
+        job_id,  # type: str
+        job=None,  # type: Optional["_models.SatelliteDataIngestionJob"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.SatelliteDataIngestionJob"
+        # Initial PUT of the long-running satellite ingestion job; only 202 is a
+        # success status.  Used by begin_create_satellite_data_ingestion_job below.
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SatelliteDataIngestionJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._create_satellite_data_ingestion_job_initial.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if job is not None:
+            body_content = self._serialize.body(job, 'SatelliteDataIngestionJob')
+        else:
+            body_content = None
+        body_content_kwargs['content'] = body_content
+        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('SatelliteDataIngestionJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    _create_satellite_data_ingestion_job_initial.metadata = {'url': '/scenes/satellite/ingest-data/{jobId}'}  # type: ignore
+
+    def begin_create_satellite_data_ingestion_job(
+        self,
+        job_id,  # type: str
+        job=None,  # type: Optional["_models.SatelliteDataIngestionJob"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller["_models.SatelliteDataIngestionJob"]
+        """Create a satellite data ingestion job.
+
+        :param job_id: JobId provided by user.
+        :type job_id: str
+        :param job: Job parameters supplied by user.
+        :type job: ~azure.farmbeats.models.SatelliteDataIngestionJob
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be LROBasePolling.
+         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns either SatelliteDataIngestionJob or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.SatelliteDataIngestionJob]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SatelliteDataIngestionJob"]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        # A continuation token skips the initial PUT; raw_result is only defined (and
+        # only needed) on the fresh-start path taken when cont_token is None.
+        if cont_token is None:
+            raw_result = self._create_satellite_data_ingestion_job_initial(
+                job_id=job_id,
+                job=job,
+                cls=lambda x,y,z: x,
+                **kwargs
+            )
+
+        kwargs.pop('error_map', None)
+        kwargs.pop('content_type', None)
+
+        def get_long_running_output(pipeline_response):
+            deserialized = self._deserialize('SatelliteDataIngestionJob', pipeline_response)
+
+            if cls:
+                return cls(pipeline_response, deserialized, {})
+            return deserialized
+
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+
+        if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        if cont_token:
+            return LROPoller.from_continuation_token(
+                polling_method=polling_method,
+                continuation_token=cont_token,
+                client=self._client,
+                deserialization_callback=get_long_running_output
+            )
+        else:
+            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_create_satellite_data_ingestion_job.metadata = {'url': '/scenes/satellite/ingest-data/{jobId}'}  # type: ignore
+
+    def get_satellite_data_ingestion_job_details(
+        self,
+        job_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "_models.SatelliteDataIngestionJob"
+        """Get a satellite data ingestion job.
+
+        :param job_id: Id of the job.
+        :type job_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: SatelliteDataIngestionJob, or the result of cls(response)
+        :rtype: ~azure.farmbeats.models.SatelliteDataIngestionJob
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SatelliteDataIngestionJob"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        # Construct URL
+        url = self.get_satellite_data_ingestion_job_details.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'jobId': self._serialize.url("job_id", job_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = self._deserialize('SatelliteDataIngestionJob', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_satellite_data_ingestion_job_details.metadata = {'url': '/scenes/satellite/ingest-data/{jobId}'}  # type: ignore
+
+    def download(
+        self,
+        file_path,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> IO
+        """Downloads and returns file Stream as response for the given input filePath.
+
+        :param file_path: cloud storage path of scene file.
+        :type file_path: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: IO, or the result of cls(response)
+        :rtype: IO
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[IO]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/octet-stream, application/json"
+
+        # Construct URL
+        url = self.download.metadata['url']  # type: ignore
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['filePath'] = self._serialize.query("file_path", file_path, 'str')
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+        request = self._client.get(url, query_parameters, header_parameters)
+        # stream=True keeps the body un-read; it is surfaced below through
+        # stream_download rather than loaded into memory.
+        pipeline_response = self._client._pipeline.run(request, stream=True, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            raise HttpResponseError(response=response, model=error)
+
+        deserialized = response.stream_download(self._client._pipeline)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    download.metadata = {'url': '/scenes/downloadFiles'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_seasonal_fields_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_seasonal_fields_operations.py
new file mode 100644
index 000000000000..620fd4c42c34
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_seasonal_fields_operations.py
@@ -0,0 +1,787 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class SeasonalFieldsOperations(object):
+ """SeasonalFieldsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+    def __init__(self, client, config, serializer, deserializer):
+        # Keep references to the shared pipeline client, (de)serializers and
+        # configuration supplied by the generated service client.
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list_by_farmer_id(
+        self,
+        farmer_id,  # type: str
+        farm_ids=None,  # type: Optional[List[str]]
+        field_ids=None,  # type: Optional[List[str]]
+        season_ids=None,  # type: Optional[List[str]]
+        crop_variety_ids=None,  # type: Optional[List[str]]
+        crop_ids=None,  # type: Optional[List[str]]
+        min_avg_yield_value=None,  # type: Optional[float]
+        max_avg_yield_value=None,  # type: Optional[float]
+        avg_yield_unit=None,  # type: Optional[str]
+        min_avg_seed_population_value=None,  # type: Optional[float]
+        max_avg_seed_population_value=None,  # type: Optional[float]
+        avg_seed_population_unit=None,  # type: Optional[str]
+        min_planting_date_time=None,  # type: Optional[datetime.datetime]
+        max_planting_date_time=None,  # type: Optional[datetime.datetime]
+        ids=None,  # type: Optional[List[str]]
+        names=None,  # type: Optional[List[str]]
+        property_filters=None,  # type: Optional[List[str]]
+        statuses=None,  # type: Optional[List[str]]
+        min_created_date_time=None,  # type: Optional[datetime.datetime]
+        max_created_date_time=None,  # type: Optional[datetime.datetime]
+        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
+        max_page_size=50,  # type: Optional[int]
+        skip_token=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> Iterable["_models.SeasonalFieldListResponse"]
+        """Returns a paginated list of seasonal field resources under a particular farmer.
+
+        :param farmer_id: Id of the associated farmer.
+        :type farmer_id: str
+        :param farm_ids: Farm Ids of the resource.
+        :type farm_ids: list[str]
+        :param field_ids: Field Ids of the resource.
+        :type field_ids: list[str]
+        :param season_ids: Season Ids of the resource.
+        :type season_ids: list[str]
+        :param crop_variety_ids: CropVarietyIds of the resource.
+        :type crop_variety_ids: list[str]
+        :param crop_ids: Ids of the crop it belongs to.
+        :type crop_ids: list[str]
+        :param min_avg_yield_value: Minimum average yield value of the seasonal field(inclusive).
+        :type min_avg_yield_value: float
+        :param max_avg_yield_value: Maximum average yield value of the seasonal field(inclusive).
+        :type max_avg_yield_value: float
+        :param avg_yield_unit: Unit of the average yield value attribute.
+        :type avg_yield_unit: str
+        :param min_avg_seed_population_value: Minimum average seed population value of the seasonal
+         field(inclusive).
+        :type min_avg_seed_population_value: float
+        :param max_avg_seed_population_value: Maximum average seed population value of the seasonal
+         field(inclusive).
+        :type max_avg_seed_population_value: float
+        :param avg_seed_population_unit: Unit of average seed population value attribute.
+        :type avg_seed_population_unit: str
+        :param min_planting_date_time: Minimum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
+        :type min_planting_date_time: ~datetime.datetime
+        :param max_planting_date_time: Maximum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
+        :type max_planting_date_time: ~datetime.datetime
+        :param ids: Ids of the resource.
+        :type ids: list[str]
+        :param names: Names of the resource.
+        :type names: list[str]
+        :param property_filters: Filters on key-value pairs within the Properties object.
+         eg. "{testKey} eq {testValue}".
+        :type property_filters: list[str]
+        :param statuses: Statuses of the resource.
+        :type statuses: list[str]
+        :param min_created_date_time: Minimum creation date of resource (inclusive).
+        :type min_created_date_time: ~datetime.datetime
+        :param max_created_date_time: Maximum creation date of resource (inclusive).
+        :type max_created_date_time: ~datetime.datetime
+        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
+        :type min_last_modified_date_time: ~datetime.datetime
+        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
+        :type max_last_modified_date_time: ~datetime.datetime
+        :param max_page_size: Maximum number of items needed (inclusive).
+         Minimum = 10, Maximum = 1000, Default value = 50.
+        :type max_page_size: int
+        :param skip_token: Skip token for getting next set of results.
+        :type skip_token: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator like instance of either SeasonalFieldListResponse or the result of cls(response)
+        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.SeasonalFieldListResponse]
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalFieldListResponse"]
+        error_map = {
+            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+        }
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2021-03-31-preview"
+        accept = "application/json"
+
+        def prepare_request(next_link=None):
+            # Construct headers
+            header_parameters = {}  # type: Dict[str, Any]
+            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+            if not next_link:
+                # First page: expand the {farmerId} path segment and serialize every
+                # optional filter into the query string.
+                # Construct URL
+                url = self.list_by_farmer_id.metadata['url']  # type: ignore
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                # Construct parameters
+                query_parameters = {}  # type: Dict[str, Any]
+                if farm_ids is not None:
+                    query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
+                if field_ids is not None:
+                    query_parameters['fieldIds'] = [self._serialize.query("field_ids", q, 'str') if q is not None else '' for q in field_ids]
+                if season_ids is not None:
+                    query_parameters['seasonIds'] = [self._serialize.query("season_ids", q, 'str') if q is not None else '' for q in season_ids]
+                if crop_variety_ids is not None:
+                    query_parameters['cropVarietyIds'] = [self._serialize.query("crop_variety_ids", q, 'str') if q is not None else '' for q in crop_variety_ids]
+                if crop_ids is not None:
+                    query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
+                if min_avg_yield_value is not None:
+                    query_parameters['minAvgYieldValue'] = self._serialize.query("min_avg_yield_value", min_avg_yield_value, 'float')
+                if max_avg_yield_value is not None:
+                    query_parameters['maxAvgYieldValue'] = self._serialize.query("max_avg_yield_value", max_avg_yield_value, 'float')
+                if avg_yield_unit is not None:
+                    query_parameters['avgYieldUnit'] = self._serialize.query("avg_yield_unit", avg_yield_unit, 'str')
+                if min_avg_seed_population_value is not None:
+                    query_parameters['minAvgSeedPopulationValue'] = self._serialize.query("min_avg_seed_population_value", min_avg_seed_population_value, 'float')
+                if max_avg_seed_population_value is not None:
+                    query_parameters['maxAvgSeedPopulationValue'] = self._serialize.query("max_avg_seed_population_value", max_avg_seed_population_value, 'float')
+                if avg_seed_population_unit is not None:
+                    query_parameters['avgSeedPopulationUnit'] = self._serialize.query("avg_seed_population_unit", avg_seed_population_unit, 'str')
+                if min_planting_date_time is not None:
+                    query_parameters['minPlantingDateTime'] = self._serialize.query("min_planting_date_time", min_planting_date_time, 'iso-8601')
+                if max_planting_date_time is not None:
+                    query_parameters['maxPlantingDateTime'] = self._serialize.query("max_planting_date_time", max_planting_date_time, 'iso-8601')
+                if ids is not None:
+                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
+                if names is not None:
+                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
+                if property_filters is not None:
+                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
+                if statuses is not None:
+                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
+                if min_created_date_time is not None:
+                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
+                if max_created_date_time is not None:
+                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
+                if min_last_modified_date_time is not None:
+                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
+                if max_last_modified_date_time is not None:
+                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
+                if max_page_size is not None:
+                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+                if skip_token is not None:
+                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+                request = self._client.get(url, query_parameters, header_parameters)
+            else:
+                # Subsequent pages: next_link carries the query; the {farmerId} path
+                # placeholder is still re-formatted into it here.
+                url = next_link
+                query_parameters = {}  # type: Dict[str, Any]
+                path_format_arguments = {
+                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+                }
+                url = self._client.format_url(url, **path_format_arguments)
+                request = self._client.get(url, query_parameters, header_parameters)
+            return request
+
+        def extract_data(pipeline_response):
+            deserialized = self._deserialize('SeasonalFieldListResponse', pipeline_response)
+            list_of_elem = deserialized.value
+            if cls:
+                list_of_elem = cls(list_of_elem)
+            return deserialized.next_link or None, iter(list_of_elem)
+
+        def get_next(next_link=None):
+            request = prepare_request(next_link)
+
+            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+            response = pipeline_response.http_response
+
+            if response.status_code not in [200]:
+                # NOTE(review): error body deserialized before map_error (non-paged ops do the
+                # reverse); presumably equivalent since map_error raises only for mapped codes.
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+                map_error(status_code=response.status_code, response=response, error_map=error_map)
+                raise HttpResponseError(response=response, model=error)
+
+            return pipeline_response
+
+        return ItemPaged(
+            get_next, extract_data
+        )
+    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/seasonal-fields'}  # type: ignore
+
    def list(
        self,
        farm_ids=None,  # type: Optional[List[str]]
        field_ids=None,  # type: Optional[List[str]]
        season_ids=None,  # type: Optional[List[str]]
        crop_variety_ids=None,  # type: Optional[List[str]]
        crop_ids=None,  # type: Optional[List[str]]
        min_avg_yield_value=None,  # type: Optional[float]
        max_avg_yield_value=None,  # type: Optional[float]
        avg_yield_unit=None,  # type: Optional[str]
        min_avg_seed_population_value=None,  # type: Optional[float]
        max_avg_seed_population_value=None,  # type: Optional[float]
        avg_seed_population_unit=None,  # type: Optional[str]
        min_planting_date_time=None,  # type: Optional[datetime.datetime]
        max_planting_date_time=None,  # type: Optional[datetime.datetime]
        ids=None,  # type: Optional[List[str]]
        names=None,  # type: Optional[List[str]]
        property_filters=None,  # type: Optional[List[str]]
        statuses=None,  # type: Optional[List[str]]
        min_created_date_time=None,  # type: Optional[datetime.datetime]
        max_created_date_time=None,  # type: Optional[datetime.datetime]
        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_page_size=50,  # type: Optional[int]
        skip_token=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.SeasonalFieldListResponse"]
        """Returns a paginated list of seasonal field resources across all farmers.

        :param farm_ids: Farm Ids of the resource.
        :type farm_ids: list[str]
        :param field_ids: Field Ids of the resource.
        :type field_ids: list[str]
        :param season_ids: Season Ids of the resource.
        :type season_ids: list[str]
        :param crop_variety_ids: CropVarietyIds of the resource.
        :type crop_variety_ids: list[str]
        :param crop_ids: Ids of the crop it belongs to.
        :type crop_ids: list[str]
        :param min_avg_yield_value: Minimum average yield value of the seasonal field(inclusive).
        :type min_avg_yield_value: float
        :param max_avg_yield_value: Maximum average yield value of the seasonal field(inclusive).
        :type max_avg_yield_value: float
        :param avg_yield_unit: Unit of the average yield value attribute.
        :type avg_yield_unit: str
        :param min_avg_seed_population_value: Minimum average seed population value of the seasonal
         field(inclusive).
        :type min_avg_seed_population_value: float
        :param max_avg_seed_population_value: Maximum average seed population value of the seasonal
         field(inclusive).
        :type max_avg_seed_population_value: float
        :param avg_seed_population_unit: Unit of average seed population value attribute.
        :type avg_seed_population_unit: str
        :param min_planting_date_time: Minimum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type min_planting_date_time: ~datetime.datetime
        :param max_planting_date_time: Maximum planting datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type max_planting_date_time: ~datetime.datetime
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SeasonalFieldListResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.SeasonalFieldListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalFieldListResponse"]
        # Map well-known status codes to typed azure-core exceptions; callers can
        # extend or override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for one page. The first page is constructed from
            # the operation URL plus every non-None filter; subsequent pages use the
            # service-provided next_link verbatim (it already embeds the query string).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters
                # List-valued filters are serialized element-wise; None elements are
                # sent as empty strings so positional ordering is preserved.
                query_parameters = {}  # type: Dict[str, Any]
                if farm_ids is not None:
                    query_parameters['farmIds'] = [self._serialize.query("farm_ids", q, 'str') if q is not None else '' for q in farm_ids]
                if field_ids is not None:
                    query_parameters['fieldIds'] = [self._serialize.query("field_ids", q, 'str') if q is not None else '' for q in field_ids]
                if season_ids is not None:
                    query_parameters['seasonIds'] = [self._serialize.query("season_ids", q, 'str') if q is not None else '' for q in season_ids]
                if crop_variety_ids is not None:
                    query_parameters['cropVarietyIds'] = [self._serialize.query("crop_variety_ids", q, 'str') if q is not None else '' for q in crop_variety_ids]
                if crop_ids is not None:
                    query_parameters['cropIds'] = [self._serialize.query("crop_ids", q, 'str') if q is not None else '' for q in crop_ids]
                if min_avg_yield_value is not None:
                    query_parameters['minAvgYieldValue'] = self._serialize.query("min_avg_yield_value", min_avg_yield_value, 'float')
                if max_avg_yield_value is not None:
                    query_parameters['maxAvgYieldValue'] = self._serialize.query("max_avg_yield_value", max_avg_yield_value, 'float')
                if avg_yield_unit is not None:
                    query_parameters['avgYieldUnit'] = self._serialize.query("avg_yield_unit", avg_yield_unit, 'str')
                if min_avg_seed_population_value is not None:
                    query_parameters['minAvgSeedPopulationValue'] = self._serialize.query("min_avg_seed_population_value", min_avg_seed_population_value, 'float')
                if max_avg_seed_population_value is not None:
                    query_parameters['maxAvgSeedPopulationValue'] = self._serialize.query("max_avg_seed_population_value", max_avg_seed_population_value, 'float')
                if avg_seed_population_unit is not None:
                    query_parameters['avgSeedPopulationUnit'] = self._serialize.query("avg_seed_population_unit", avg_seed_population_unit, 'str')
                if min_planting_date_time is not None:
                    query_parameters['minPlantingDateTime'] = self._serialize.query("min_planting_date_time", min_planting_date_time, 'iso-8601')
                if max_planting_date_time is not None:
                    query_parameters['maxPlantingDateTime'] = self._serialize.query("max_planting_date_time", max_planting_date_time, 'iso-8601')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Service-enforced bounds (10..1000) are validated client-side too.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, item iterator)
            # in the shape ItemPaged expects.
            deserialized = self._deserialize('SeasonalFieldListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page; raise a typed error (with the parsed ErrorResponse
            # attached as the model) on any non-200 status.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/seasonal-fields'}  # type: ignore
+
    def get(
        self,
        farmer_id,  # type: str
        seasonal_field_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.SeasonalField"
        """Gets a specified seasonal field resource under a particular farmer.

        Issues a GET against ``/farmers/{farmerId}/seasonal-fields/{seasonalFieldId}``
        and deserializes the 200 response body into a ``SeasonalField`` model.

        :param farmer_id: Id of the associated farmer.
        :type farmer_id: str
        :param seasonal_field_id: Id of the seasonal field.
        :type seasonal_field_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SeasonalField, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.SeasonalField
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalField"]
        # Map well-known status codes to typed azure-core exceptions; callers can
        # extend or override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
            'seasonalFieldId': self._serialize.url("seasonal_field_id", seasonal_field_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only success status; anything else becomes a typed error,
        # with the parsed ErrorResponse attached as the exception model.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('SeasonalField', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/farmers/{farmerId}/seasonal-fields/{seasonalFieldId}'}  # type: ignore
+
    def create_or_update(
        self,
        farmer_id,  # type: str
        seasonal_field_id,  # type: str
        seasonal_field=None,  # type: Optional["_models.SeasonalField"]
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.SeasonalField"
        """Creates or Updates a seasonal field resource under a particular farmer.

        Sends a PATCH with ``application/merge-patch+json`` content, so omitted
        fields in the payload are left untouched on an existing resource.
        A 201 response indicates the resource was created; 200 indicates an update.

        :param farmer_id: Id of the associated farmer resource.
        :type farmer_id: str
        :param seasonal_field_id: Id of the seasonal field resource.
        :type seasonal_field_id: str
        :param seasonal_field: Seasonal field resource payload to create or update.
        :type seasonal_field: ~azure.farmbeats.models.SeasonalField
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SeasonalField, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.SeasonalField
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonalField"]
        # Map well-known status codes to typed azure-core exceptions; callers can
        # extend or override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        # Merge-patch semantics by default; callers may override via 'content_type'.
        content_type = kwargs.pop("content_type", "application/merge-patch+json")
        accept = "application/json"

        # Construct URL
        url = self.create_or_update.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
            'seasonalFieldId': self._serialize.url("seasonal_field_id", seasonal_field_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # The body is optional: a None payload is sent as an empty-body PATCH.
        body_content_kwargs = {}  # type: Dict[str, Any]
        if seasonal_field is not None:
            body_content = self._serialize.body(seasonal_field, 'SeasonalField')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        # Both success codes carry the same SeasonalField schema in the body.
        if response.status_code == 200:
            deserialized = self._deserialize('SeasonalField', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('SeasonalField', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create_or_update.metadata = {'url': '/farmers/{farmerId}/seasonal-fields/{seasonalFieldId}'}  # type: ignore
+
    def delete(
        self,
        farmer_id,  # type: str
        seasonal_field_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Deletes a specified seasonal-field resource under a particular farmer.

        A 204 (No Content) response is the only success status; the method
        returns ``None`` unless a custom ``cls`` callback is supplied.

        :param farmer_id: Id of the farmer.
        :type farmer_id: str
        :param seasonal_field_id: Id of the seasonal field.
        :type seasonal_field_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None, or the result of cls(response)
        :rtype: None
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map well-known status codes to typed azure-core exceptions; callers can
        # extend or override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.delete.metadata['url']  # type: ignore
        path_format_arguments = {
            'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
            'seasonalFieldId': self._serialize.url("seasonal_field_id", seasonal_field_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {'url': '/farmers/{farmerId}/seasonal-fields/{seasonalFieldId}'}  # type: ignore
+
    def get_cascade_delete_job_details(
        self,
        job_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.CascadeDeleteJob"
        """Get cascade delete job for specified seasonal field.

        Fetches the current state of a cascade-delete job previously started via
        :meth:`begin_create_cascade_delete_job`.

        :param job_id: Id of the job.
        :type job_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CascadeDeleteJob, or the result of cls(response)
        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
        # Map well-known status codes to typed azure-core exceptions; callers can
        # extend or override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self.get_cascade_delete_job_details.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get_cascade_delete_job_details.metadata = {'url': '/seasonal-fields/cascade-delete/{jobId}'}  # type: ignore
+
    def _create_cascade_delete_job_initial(
        self,
        job_id,  # type: str
        farmer_id,  # type: str
        seasonal_field_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.CascadeDeleteJob"
        """Send the initial PUT that starts a cascade-delete job (LRO first leg).

        Private helper invoked by :meth:`begin_create_cascade_delete_job`.
        The farmer and seasonal-field ids travel as query parameters; a 202
        (Accepted) response carries the initial ``CascadeDeleteJob`` state that
        the poller then tracks.

        :param job_id: Job ID supplied by the caller; becomes part of the URL path.
        :type job_id: str
        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param seasonal_field_id: ID of the seasonal field to be deleted.
        :type seasonal_field_id: str
        :return: The initial CascadeDeleteJob state, or the result of cls(response).
        :rtype: ~azure.farmbeats.models.CascadeDeleteJob
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
        # Map well-known status codes to typed azure-core exceptions; callers can
        # extend or override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        # Construct URL
        url = self._create_cascade_delete_job_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
        query_parameters['seasonalFieldId'] = self._serialize.query("seasonal_field_id", seasonal_field_id, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.put(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 202 Accepted is the only success status for starting the job.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error)

        deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_cascade_delete_job_initial.metadata = {'url': '/seasonal-fields/cascade-delete/{jobId}'}  # type: ignore
+
    def begin_create_cascade_delete_job(
        self,
        job_id,  # type: str
        farmer_id,  # type: str
        seasonal_field_id,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.CascadeDeleteJob"]
        """Create a cascade delete job for specified seasonal field.

        :param job_id: Job ID supplied by end user.
        :type job_id: str
        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param seasonal_field_id: ID of the seasonalField to be deleted.
        :type seasonal_field_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be LROBasePolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either CascadeDeleteJob or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.CascadeDeleteJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CascadeDeleteJob"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only send the initial PUT when we are NOT resuming from a saved poller
        # state; a continuation token means the job was already started.
        if cont_token is None:
            raw_result = self._create_cascade_delete_job_initial(
                job_id=job_id,
                farmer_id=farmer_id,
                seasonal_field_id=seasonal_field_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )

        # These kwargs were consumed by the initial call; drop them so they are
        # not re-applied on every poll request.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response into the final job state.
            deserialized = self._deserialize('CascadeDeleteJob', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'jobId': self._serialize.url("job_id", job_id, 'str'),
        }

        # polling=True -> standard LRO polling that resolves the final state via
        # the Location header; polling=False -> no polling; otherwise the caller
        # supplied their own PollingMethod instance.
        if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_cascade_delete_job.metadata = {'url': '/seasonal-fields/cascade-delete/{jobId}'}  # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_seasons_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_seasons_operations.py
new file mode 100644
index 000000000000..06198c99dacb
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_seasons_operations.py
@@ -0,0 +1,362 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class SeasonsOperations(object):
+ """SeasonsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
    def __init__(self, client, config, serializer, deserializer):
        # Collaborators are supplied by the generated service client; see the
        # class docstring for parameter descriptions. This class should not be
        # instantiated directly.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
+
    def list(
        self,
        min_start_date_time=None,  # type: Optional[datetime.datetime]
        max_start_date_time=None,  # type: Optional[datetime.datetime]
        min_end_date_time=None,  # type: Optional[datetime.datetime]
        max_end_date_time=None,  # type: Optional[datetime.datetime]
        years=None,  # type: Optional[List[int]]
        ids=None,  # type: Optional[List[str]]
        names=None,  # type: Optional[List[str]]
        property_filters=None,  # type: Optional[List[str]]
        statuses=None,  # type: Optional[List[str]]
        min_created_date_time=None,  # type: Optional[datetime.datetime]
        max_created_date_time=None,  # type: Optional[datetime.datetime]
        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_page_size=50,  # type: Optional[int]
        skip_token=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.SeasonListResponse"]
        """Returns a paginated list of season resources.

        :param min_start_date_time: Minimum season start datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type min_start_date_time: ~datetime.datetime
        :param max_start_date_time: Maximum season start datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type max_start_date_time: ~datetime.datetime
        :param min_end_date_time: Minimum season end datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type min_end_date_time: ~datetime.datetime
        :param max_end_date_time: Maximum season end datetime, sample format: yyyy-MM-ddTHH:mm:ssZ.
        :type max_end_date_time: ~datetime.datetime
        :param years: Years of the resource.
        :type years: list[int]
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SeasonListResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.SeasonListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SeasonListResponse"]
        # Map well-known status codes to typed azure-core exceptions; callers can
        # extend or override the mapping through the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request for one page. The first page is constructed from
            # the operation URL plus every non-None filter; subsequent pages use the
            # service-provided next_link verbatim (it already embeds the query string).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters
                # List-valued filters are serialized element-wise; None elements are
                # sent as empty strings so positional ordering is preserved.
                query_parameters = {}  # type: Dict[str, Any]
                if min_start_date_time is not None:
                    query_parameters['minStartDateTime'] = self._serialize.query("min_start_date_time", min_start_date_time, 'iso-8601')
                if max_start_date_time is not None:
                    query_parameters['maxStartDateTime'] = self._serialize.query("max_start_date_time", max_start_date_time, 'iso-8601')
                if min_end_date_time is not None:
                    query_parameters['minEndDateTime'] = self._serialize.query("min_end_date_time", min_end_date_time, 'iso-8601')
                if max_end_date_time is not None:
                    query_parameters['maxEndDateTime'] = self._serialize.query("max_end_date_time", max_end_date_time, 'iso-8601')
                if years is not None:
                    query_parameters['years'] = [self._serialize.query("years", q, 'int') if q is not None else '' for q in years]
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Service-enforced bounds (10..1000) are validated client-side too.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, item iterator)
            # in the shape ItemPaged expects.
            deserialized = self._deserialize('SeasonListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetch one page; raise a typed error (with the parsed ErrorResponse
            # attached as the model) on any non-200 status.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/seasons'}  # type: ignore
+
+ def get(
+ self,
+ season_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.Season"
+ """Gets a specified season resource.
+
+ :param season_id: Id of the season.
+ :type season_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Season, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.Season
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.Season"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'seasonId': self._serialize.url("season_id", season_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('Season', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/seasons/{seasonId}'} # type: ignore
+
+ def create_or_update(
+ self,
+ season_id, # type: str
+ season=None, # type: Optional["_models.Season"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.Season"
+ """Creates or updates a season resource.
+
+ :param season_id: Id of the season resource.
+ :type season_id: str
+ :param season: Season resource payload to create or update.
+ :type season: ~azure.farmbeats.models.Season
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Season, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.Season
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.Season"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'seasonId': self._serialize.url("season_id", season_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if season is not None:
+ body_content = self._serialize.body(season, 'Season')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('Season', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Season', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/seasons/{seasonId}'} # type: ignore
+
+ def delete(
+ self,
+ season_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes a specified season resource.
+
+ :param season_id: Id of the season.
+ :type season_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'seasonId': self._serialize.url("season_id", season_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/seasons/{seasonId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_tillage_data_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_tillage_data_operations.py
new file mode 100644
index 000000000000..a9a4c3d402a2
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_tillage_data_operations.py
@@ -0,0 +1,638 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class TillageDataOperations(object):
+ """TillageDataOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
    def list_by_farmer_id(
        self,
        farmer_id,  # type: str
        min_tillage_depth=None,  # type: Optional[float]
        max_tillage_depth=None,  # type: Optional[float]
        min_tillage_pressure=None,  # type: Optional[float]
        max_tillage_pressure=None,  # type: Optional[float]
        sources=None,  # type: Optional[List[str]]
        associated_boundary_ids=None,  # type: Optional[List[str]]
        operation_boundary_ids=None,  # type: Optional[List[str]]
        min_operation_start_date_time=None,  # type: Optional[datetime.datetime]
        max_operation_start_date_time=None,  # type: Optional[datetime.datetime]
        min_operation_end_date_time=None,  # type: Optional[datetime.datetime]
        max_operation_end_date_time=None,  # type: Optional[datetime.datetime]
        min_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
        min_area=None,  # type: Optional[float]
        max_area=None,  # type: Optional[float]
        ids=None,  # type: Optional[List[str]]
        names=None,  # type: Optional[List[str]]
        property_filters=None,  # type: Optional[List[str]]
        statuses=None,  # type: Optional[List[str]]
        min_created_date_time=None,  # type: Optional[datetime.datetime]
        max_created_date_time=None,  # type: Optional[datetime.datetime]
        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_page_size=50,  # type: Optional[int]
        skip_token=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.TillageDataListResponse"]
        """Returns a paginated list of tillage data resources under a particular farm.

        :param farmer_id: ID of the associated farmer.
        :type farmer_id: str
        :param min_tillage_depth: Minimum measured tillage depth (inclusive).
        :type min_tillage_depth: float
        :param max_tillage_depth: Maximum measured tillage depth (inclusive).
        :type max_tillage_depth: float
        :param min_tillage_pressure: Minimum pressure applied to a tillage implement (inclusive).
        :type min_tillage_pressure: float
        :param max_tillage_pressure: Maximum pressure applied to a tillage implement (inclusive).
        :type max_tillage_pressure: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either TillageDataListResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.TillageDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.TillageDataListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds the request for one page. The first page is assembled from
            # the URL template plus the filter arguments captured from the
            # enclosing scope; later pages reuse the service-supplied next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_farmer_id.metadata['url']  # type: ignore
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                # Only filters the caller actually supplied are serialized into
                # the query string; list-valued filters become repeated keys.
                query_parameters = {}  # type: Dict[str, Any]
                if min_tillage_depth is not None:
                    query_parameters['minTillageDepth'] = self._serialize.query("min_tillage_depth", min_tillage_depth, 'float')
                if max_tillage_depth is not None:
                    query_parameters['maxTillageDepth'] = self._serialize.query("max_tillage_depth", max_tillage_depth, 'float')
                if min_tillage_pressure is not None:
                    query_parameters['minTillagePressure'] = self._serialize.query("min_tillage_pressure", min_tillage_pressure, 'float')
                if max_tillage_pressure is not None:
                    query_parameters['maxTillagePressure'] = self._serialize.query("max_tillage_pressure", max_tillage_pressure, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Client-side validation mirrors the service contract: 10 <= $maxPageSize <= 1000.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already carries the full query string; only the path
                # parameter needs to be re-applied before the GET is issued.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                path_format_arguments = {
                    'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserializes one page and returns (link to next page or None,
            # iterator over this page's items) as the ItemPaged protocol expects.
            deserialized = self._deserialize('TillageDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetches one page through the pipeline, mapping known HTTP error
            # codes to typed azure.core exceptions before raising.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_farmer_id.metadata = {'url': '/farmers/{farmerId}/tillage-data'}  # type: ignore
+
    def list(
        self,
        min_tillage_depth=None,  # type: Optional[float]
        max_tillage_depth=None,  # type: Optional[float]
        min_tillage_pressure=None,  # type: Optional[float]
        max_tillage_pressure=None,  # type: Optional[float]
        sources=None,  # type: Optional[List[str]]
        associated_boundary_ids=None,  # type: Optional[List[str]]
        operation_boundary_ids=None,  # type: Optional[List[str]]
        min_operation_start_date_time=None,  # type: Optional[datetime.datetime]
        max_operation_start_date_time=None,  # type: Optional[datetime.datetime]
        min_operation_end_date_time=None,  # type: Optional[datetime.datetime]
        max_operation_end_date_time=None,  # type: Optional[datetime.datetime]
        min_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_operation_modified_date_time=None,  # type: Optional[datetime.datetime]
        min_area=None,  # type: Optional[float]
        max_area=None,  # type: Optional[float]
        ids=None,  # type: Optional[List[str]]
        names=None,  # type: Optional[List[str]]
        property_filters=None,  # type: Optional[List[str]]
        statuses=None,  # type: Optional[List[str]]
        min_created_date_time=None,  # type: Optional[datetime.datetime]
        max_created_date_time=None,  # type: Optional[datetime.datetime]
        min_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_last_modified_date_time=None,  # type: Optional[datetime.datetime]
        max_page_size=50,  # type: Optional[int]
        skip_token=None,  # type: Optional[str]
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.TillageDataListResponse"]
        """Returns a paginated list of tillage data resources across all farmers.

        :param min_tillage_depth: Minimum measured tillage depth (inclusive).
        :type min_tillage_depth: float
        :param max_tillage_depth: Maximum measured tillage depth (inclusive).
        :type max_tillage_depth: float
        :param min_tillage_pressure: Minimum pressure applied to a tillage implement (inclusive).
        :type min_tillage_pressure: float
        :param max_tillage_pressure: Maximum pressure applied to a tillage implement (inclusive).
        :type max_tillage_pressure: float
        :param sources: Sources of the operation data.
        :type sources: list[str]
        :param associated_boundary_ids: Boundary IDs associated with operation data.
        :type associated_boundary_ids: list[str]
        :param operation_boundary_ids: Operation boundary IDs associated with operation data.
        :type operation_boundary_ids: list[str]
        :param min_operation_start_date_time: Minimum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_start_date_time: ~datetime.datetime
        :param max_operation_start_date_time: Maximum start date-time of the operation data, sample
         format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_start_date_time: ~datetime.datetime
        :param min_operation_end_date_time: Minimum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_end_date_time: ~datetime.datetime
        :param max_operation_end_date_time: Maximum end date-time of the operation data, sample format:
         yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_end_date_time: ~datetime.datetime
        :param min_operation_modified_date_time: Minimum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type min_operation_modified_date_time: ~datetime.datetime
        :param max_operation_modified_date_time: Maximum modified date-time of the operation data,
         sample format: yyyy-MM-ddTHH:mm:ssZ (inclusive).
        :type max_operation_modified_date_time: ~datetime.datetime
        :param min_area: Minimum area for which operation was applied (inclusive).
        :type min_area: float
        :param max_area: Maximum area for which operation was applied (inclusive).
        :type max_area: float
        :param ids: Ids of the resource.
        :type ids: list[str]
        :param names: Names of the resource.
        :type names: list[str]
        :param property_filters: Filters on key-value pairs within the Properties object.
         eg. "{testKey} eq {testValue}".
        :type property_filters: list[str]
        :param statuses: Statuses of the resource.
        :type statuses: list[str]
        :param min_created_date_time: Minimum creation date of resource (inclusive).
        :type min_created_date_time: ~datetime.datetime
        :param max_created_date_time: Maximum creation date of resource (inclusive).
        :type max_created_date_time: ~datetime.datetime
        :param min_last_modified_date_time: Minimum last modified date of resource (inclusive).
        :type min_last_modified_date_time: ~datetime.datetime
        :param max_last_modified_date_time: Maximum last modified date of resource (inclusive).
        :type max_last_modified_date_time: ~datetime.datetime
        :param max_page_size: Maximum number of items needed (inclusive).
         Minimum = 10, Maximum = 1000, Default value = 50.
        :type max_page_size: int
        :param skip_token: Skip token for getting next set of results.
        :type skip_token: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either TillageDataListResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.TillageDataListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.TillageDataListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-03-31-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Builds the request for one page. Unlike list_by_farmer_id, this
            # operation has no path parameters, so subsequent pages use the
            # service-supplied next_link verbatim.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters
                # Only filters the caller actually supplied are serialized into
                # the query string; list-valued filters become repeated keys.
                query_parameters = {}  # type: Dict[str, Any]
                if min_tillage_depth is not None:
                    query_parameters['minTillageDepth'] = self._serialize.query("min_tillage_depth", min_tillage_depth, 'float')
                if max_tillage_depth is not None:
                    query_parameters['maxTillageDepth'] = self._serialize.query("max_tillage_depth", max_tillage_depth, 'float')
                if min_tillage_pressure is not None:
                    query_parameters['minTillagePressure'] = self._serialize.query("min_tillage_pressure", min_tillage_pressure, 'float')
                if max_tillage_pressure is not None:
                    query_parameters['maxTillagePressure'] = self._serialize.query("max_tillage_pressure", max_tillage_pressure, 'float')
                if sources is not None:
                    query_parameters['sources'] = [self._serialize.query("sources", q, 'str') if q is not None else '' for q in sources]
                if associated_boundary_ids is not None:
                    query_parameters['associatedBoundaryIds'] = [self._serialize.query("associated_boundary_ids", q, 'str') if q is not None else '' for q in associated_boundary_ids]
                if operation_boundary_ids is not None:
                    query_parameters['operationBoundaryIds'] = [self._serialize.query("operation_boundary_ids", q, 'str') if q is not None else '' for q in operation_boundary_ids]
                if min_operation_start_date_time is not None:
                    query_parameters['minOperationStartDateTime'] = self._serialize.query("min_operation_start_date_time", min_operation_start_date_time, 'iso-8601')
                if max_operation_start_date_time is not None:
                    query_parameters['maxOperationStartDateTime'] = self._serialize.query("max_operation_start_date_time", max_operation_start_date_time, 'iso-8601')
                if min_operation_end_date_time is not None:
                    query_parameters['minOperationEndDateTime'] = self._serialize.query("min_operation_end_date_time", min_operation_end_date_time, 'iso-8601')
                if max_operation_end_date_time is not None:
                    query_parameters['maxOperationEndDateTime'] = self._serialize.query("max_operation_end_date_time", max_operation_end_date_time, 'iso-8601')
                if min_operation_modified_date_time is not None:
                    query_parameters['minOperationModifiedDateTime'] = self._serialize.query("min_operation_modified_date_time", min_operation_modified_date_time, 'iso-8601')
                if max_operation_modified_date_time is not None:
                    query_parameters['maxOperationModifiedDateTime'] = self._serialize.query("max_operation_modified_date_time", max_operation_modified_date_time, 'iso-8601')
                if min_area is not None:
                    query_parameters['minArea'] = self._serialize.query("min_area", min_area, 'float')
                if max_area is not None:
                    query_parameters['maxArea'] = self._serialize.query("max_area", max_area, 'float')
                if ids is not None:
                    query_parameters['ids'] = [self._serialize.query("ids", q, 'str') if q is not None else '' for q in ids]
                if names is not None:
                    query_parameters['names'] = [self._serialize.query("names", q, 'str') if q is not None else '' for q in names]
                if property_filters is not None:
                    query_parameters['propertyFilters'] = [self._serialize.query("property_filters", q, 'str') if q is not None else '' for q in property_filters]
                if statuses is not None:
                    query_parameters['statuses'] = [self._serialize.query("statuses", q, 'str') if q is not None else '' for q in statuses]
                if min_created_date_time is not None:
                    query_parameters['minCreatedDateTime'] = self._serialize.query("min_created_date_time", min_created_date_time, 'iso-8601')
                if max_created_date_time is not None:
                    query_parameters['maxCreatedDateTime'] = self._serialize.query("max_created_date_time", max_created_date_time, 'iso-8601')
                if min_last_modified_date_time is not None:
                    query_parameters['minLastModifiedDateTime'] = self._serialize.query("min_last_modified_date_time", min_last_modified_date_time, 'iso-8601')
                if max_last_modified_date_time is not None:
                    query_parameters['maxLastModifiedDateTime'] = self._serialize.query("max_last_modified_date_time", max_last_modified_date_time, 'iso-8601')
                if max_page_size is not None:
                    # Client-side validation mirrors the service contract: 10 <= $maxPageSize <= 1000.
                    query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
                if skip_token is not None:
                    query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            # Deserializes one page and returns (link to next page or None,
            # iterator over this page's items) as the ItemPaged protocol expects.
            deserialized = self._deserialize('TillageDataListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            # Fetches one page through the pipeline, mapping known HTTP error
            # codes to typed azure.core exceptions before raising.
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/tillage-data'}  # type: ignore
+
+ def get(
+ self,
+ farmer_id, # type: str
+ tillage_data_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.TillageData"
+ """Get a specified tillage data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param tillage_data_id: ID of the tillage data resource.
+ :type tillage_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TillageData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.TillageData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.TillageData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'tillageDataId': self._serialize.url("tillage_data_id", tillage_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('TillageData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/farmers/{farmerId}/tillage-data/{tillageDataId}'} # type: ignore
+
+ def create_or_update(
+ self,
+ farmer_id, # type: str
+ tillage_data_id, # type: str
+ tillage_data=None, # type: Optional["_models.TillageData"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.TillageData"
+ """Creates or updates a tillage data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer.
+ :type farmer_id: str
+ :param tillage_data_id: ID of the tillage data resource.
+ :type tillage_data_id: str
+ :param tillage_data: Tillage data resource payload to create or update.
+ :type tillage_data: ~azure.farmbeats.models.TillageData
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: TillageData, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.TillageData
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.TillageData"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/merge-patch+json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'tillageDataId': self._serialize.url("tillage_data_id", tillage_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if tillage_data is not None:
+ body_content = self._serialize.body(tillage_data, 'TillageData')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if response.status_code == 200:
+ deserialized = self._deserialize('TillageData', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('TillageData', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/farmers/{farmerId}/tillage-data/{tillageDataId}'} # type: ignore
+
+ def delete(
+ self,
+ farmer_id, # type: str
+ tillage_data_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes a specified tillage data resource under a particular farmer.
+
+ :param farmer_id: ID of the associated farmer resource.
+ :type farmer_id: str
+ :param tillage_data_id: ID of the tillage data.
+ :type tillage_data_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'farmerId': self._serialize.url("farmer_id", farmer_id, 'str'),
+ 'tillageDataId': self._serialize.url("tillage_data_id", tillage_data_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/farmers/{farmerId}/tillage-data/{tillageDataId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_weather_operations.py b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_weather_operations.py
new file mode 100644
index 000000000000..b62fda89bb22
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/operations/_weather_operations.py
@@ -0,0 +1,505 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.1.3, generator: {generator})
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+import datetime
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.core.polling.base_polling import LROBasePolling
+
+from .. import models as _models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WeatherOperations(object):
+ """WeatherOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.farmbeats.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = _models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ farmer_id, # type: str
+ boundary_id, # type: str
+ extension_id, # type: str
+ weather_data_type, # type: str
+ granularity, # type: str
+ start_date_time=None, # type: Optional[datetime.datetime]
+ end_date_time=None, # type: Optional[datetime.datetime]
+ max_page_size=50, # type: Optional[int]
+ skip_token=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["_models.WeatherDataListResponse"]
+ """Returns a paginated list of weather data.
+
+ :param farmer_id: Farmer ID.
+ :type farmer_id: str
+ :param boundary_id: Boundary ID.
+ :type boundary_id: str
+ :param extension_id: ID of the weather extension.
+ :type extension_id: str
+ :param weather_data_type: Type of weather data (forecast/historical).
+ :type weather_data_type: str
+ :param granularity: Granularity of weather data (daily/hourly).
+ :type granularity: str
+ :param start_date_time: Weather data start UTC date-time (inclusive), sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type start_date_time: ~datetime.datetime
+ :param end_date_time: Weather data end UTC date-time (inclusive), sample format:
+ yyyy-MM-ddTHH:mm:ssZ.
+ :type end_date_time: ~datetime.datetime
+ :param max_page_size: Maximum number of items needed (inclusive).
+ Minimum = 10, Maximum = 1000, Default value = 50.
+ :type max_page_size: int
+ :param skip_token: Skip token for getting next set of results.
+ :type skip_token: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WeatherDataListResponse or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.farmbeats.models.WeatherDataListResponse]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataListResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['farmerId'] = self._serialize.query("farmer_id", farmer_id, 'str')
+ query_parameters['boundaryId'] = self._serialize.query("boundary_id", boundary_id, 'str')
+ query_parameters['extensionId'] = self._serialize.query("extension_id", extension_id, 'str', pattern=r'^[A-za-z]{3,50}[.][A-za-z]{3,100}$')
+ query_parameters['weatherDataType'] = self._serialize.query("weather_data_type", weather_data_type, 'str', max_length=50, min_length=0)
+ query_parameters['granularity'] = self._serialize.query("granularity", granularity, 'str', max_length=50, min_length=0)
+ if start_date_time is not None:
+ query_parameters['startDateTime'] = self._serialize.query("start_date_time", start_date_time, 'iso-8601')
+ if end_date_time is not None:
+ query_parameters['endDateTime'] = self._serialize.query("end_date_time", end_date_time, 'iso-8601')
+ if max_page_size is not None:
+ query_parameters['$maxPageSize'] = self._serialize.query("max_page_size", max_page_size, 'int', maximum=1000, minimum=10)
+ if skip_token is not None:
+ query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str')
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('WeatherDataListResponse', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/weather'} # type: ignore
+
+ def get_data_ingestion_job_details(
+ self,
+ job_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.WeatherDataIngestionJob"
+ """Get weather ingestion job.
+
+ :param job_id: ID of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WeatherDataIngestionJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.WeatherDataIngestionJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataIngestionJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_data_ingestion_job_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('WeatherDataIngestionJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_data_ingestion_job_details.metadata = {'url': '/weather/ingest-data/{jobId}'} # type: ignore
+
+ def _create_data_ingestion_job_initial(
+ self,
+ job_id, # type: str
+ job=None, # type: Optional["_models.WeatherDataIngestionJob"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.WeatherDataIngestionJob"
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataIngestionJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_data_ingestion_job_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if job is not None:
+ body_content = self._serialize.body(job, 'WeatherDataIngestionJob')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('WeatherDataIngestionJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_data_ingestion_job_initial.metadata = {'url': '/weather/ingest-data/{jobId}'} # type: ignore
+
+ def begin_create_data_ingestion_job(
+ self,
+ job_id, # type: str
+ job=None, # type: Optional["_models.WeatherDataIngestionJob"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["_models.WeatherDataIngestionJob"]
+ """Create a weather data ingestion job.
+
+ :param job_id: Job id supplied by user.
+ :type job_id: str
+ :param job: Job parameters supplied by user.
+ :type job: ~azure.farmbeats.models.WeatherDataIngestionJob
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be LROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either WeatherDataIngestionJob or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.WeatherDataIngestionJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataIngestionJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_data_ingestion_job_initial(
+ job_id=job_id,
+ job=job,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('WeatherDataIngestionJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_data_ingestion_job.metadata = {'url': '/weather/ingest-data/{jobId}'} # type: ignore
+
+ def get_data_delete_job_details(
+ self,
+ job_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.WeatherDataDeleteJob"
+ """Get weather data delete job.
+
+ :param job_id: ID of the job.
+ :type job_id: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WeatherDataDeleteJob, or the result of cls(response)
+ :rtype: ~azure.farmbeats.models.WeatherDataDeleteJob
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get_data_delete_job_details.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('WeatherDataDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_data_delete_job_details.metadata = {'url': '/weather/delete-data/{jobId}'} # type: ignore
+
+ def _create_data_delete_job_initial(
+ self,
+ job_id, # type: str
+ job=None, # type: Optional["_models.WeatherDataDeleteJob"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "_models.WeatherDataDeleteJob"
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataDeleteJob"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-03-31-preview"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_data_delete_job_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if job is not None:
+ body_content = self._serialize.body(job, 'WeatherDataDeleteJob')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error)
+
+ deserialized = self._deserialize('WeatherDataDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_data_delete_job_initial.metadata = {'url': '/weather/delete-data/{jobId}'} # type: ignore
+
+ def begin_create_data_delete_job(
+ self,
+ job_id, # type: str
+ job=None, # type: Optional["_models.WeatherDataDeleteJob"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["_models.WeatherDataDeleteJob"]
+ """Create a weather data delete job.
+
+ :param job_id: Job Id supplied by end user.
+ :type job_id: str
+ :param job: Job parameters supplied by user.
+ :type job: ~azure.farmbeats.models.WeatherDataDeleteJob
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be LROBasePolling.
+ Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either WeatherDataDeleteJob or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.farmbeats.models.WeatherDataDeleteJob]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["_models.WeatherDataDeleteJob"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_data_delete_job_initial(
+ job_id=job_id,
+ job=job,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('WeatherDataDeleteJob', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'jobId': self._serialize.url("job_id", job_id, 'str'),
+ }
+
+ if polling is True: polling_method = LROBasePolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_data_delete_job.metadata = {'url': '/weather/delete-data/{jobId}'} # type: ignore
diff --git a/sdk/farmbeats/azure-farmbeats/azure/farmbeats/py.typed b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/py.typed
new file mode 100644
index 000000000000..e5aff4f83af8
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/azure/farmbeats/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/sdk/farmbeats/azure-farmbeats/dev_requirements.txt b/sdk/farmbeats/azure-farmbeats/dev_requirements.txt
new file mode 100644
index 000000000000..4b833f6edece
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/dev_requirements.txt
@@ -0,0 +1,4 @@
+-e ../../../tools/azure-devtools
+-e ../../../tools/azure-sdk-tools
+-e ../../core/azure-core
+-e ../../identity/azure-identity
diff --git a/sdk/farmbeats/azure-farmbeats/generate-client.sh b/sdk/farmbeats/azure-farmbeats/generate-client.sh
new file mode 100644
index 000000000000..76bcfa5f96de
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/generate-client.sh
@@ -0,0 +1,9 @@
+autorest --python \
+ --input-file=swagger.json \
+ --namespace=azure.farmbeats \
+ --add-credential \
+ --output-folder=azure \
+ --title=FarmBeatsClient \
+ --clear-output-folder \
+ --credential-scope=https://farmbeats-dogfood.azure.net/.default \
+ --package-version=1.0.0b1
\ No newline at end of file
diff --git a/sdk/farmbeats/azure-farmbeats/setup.py b/sdk/farmbeats/azure-farmbeats/setup.py
new file mode 100644
index 000000000000..cd3e5de1c6f8
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/setup.py
@@ -0,0 +1,18 @@
+from setuptools import find_packages, setup
+
+setup(
+ name="azure-farmbeats",
+ version="1.0.0b1",
+ description="Azure Farmbeats Client Library v0",
+ packages=find_packages(),
+ author="Microsoft Corporation",
+ license="MIT License",
+ install_requires=[
+ "azure-identity<2.0.0,>=1.5.0",
+ "azure-core<2.0.0,>=1.12.0",
+ "requests<3.0.0,>=2.23.0",
+ "urllib3<2.0.0,>=1.25.8",
+ "msrest>=0.6.21",
+ "aiohttp<4.0.0,>=3.7.4"
+ ],
+)
diff --git a/sdk/farmbeats/azure-farmbeats/test_async.py b/sdk/farmbeats/azure-farmbeats/test_async.py
new file mode 100644
index 000000000000..329765bf55d2
--- /dev/null
+++ b/sdk/farmbeats/azure-farmbeats/test_async.py
@@ -0,0 +1,205 @@
+# Adding here just as a reference. Will remove before merge.
+
+
+
+from azure.core.exceptions import HttpResponseError
+from azure.identity.aio import ClientSecretCredential
+from azure.farmbeats.aio import FarmBeatsClient
+from azure.farmbeats.models import Farmer, Boundary, Polygon, SatelliteDataIngestionJob, SatelliteData
+import asyncio
+
+from datetime import datetime, timedelta
+from pytz import utc
+from urllib.parse import unquote, urlparse, parse_qs
+import random
+from pathlib import Path
+import os
+
+# sample
+credential = ClientSecretCredential(
+ tenant_id="",
+ client_id="",
+ client_secret="",
+ authority="https://login.windows-ppe.net"
+)
+
+credential_scopes = ["https://farmbeats-dogfood.azure.net/.default"]
+
+client = FarmBeatsClient(
+ base_url="https://.farmbeats-dogfood.azure.net",
+ credential=credential,
+ credential_scopes=credential_scopes,
+ logging_enable=True
+)
+
+# Define constants for this script
+farmer_id = "agadhika-farmer"
+boundary_id = "agadhika-boundary"
+job_id_prefix = "agadhika-job"
+start_date_time = datetime(2020, 1, 1, tzinfo=utc)
+end_date_time = datetime(2020, 1, 31, tzinfo=utc)
+data_root_dir = "./asyncio_test_downloads"
+
+# Utility functions
+def print_error(exception):
+ print("Error:")
+ try:
+ pprint(exception.model.as_dict())
+ except:
+ print(exception.response.body())
+ print("Couldn't print error info")
+
+
+def parse_file_path_from_file_link(file_link):
+ return parse_qs(urlparse(file_link).query)['filePath'][0]
+
+
+async def aiter_to_list(aiter):
+ l = list()
+ async for obj in aiter:
+ l.append(obj)
+ return l
+
+
+# Get the default event loop
+loop = asyncio.get_event_loop()
+
+# Ensure farmer
+try:
+ print(
+ f"Create/updating farmer with id {farmer_id}... ", end="", flush=True)
+ farmer = loop.run_until_complete(
+ client.farmers.create_or_update(
+ farmer_id=farmer_id,
+ farmer=Farmer()
+ )
+ )
+ print("Done")
+ print(farmer.as_dict())
+except HttpResponseError as e:
+ print("Ooops... here's the error:")
+ print_error(e)
+
+
+# Ensure boundary
+try:
+ print(
+ f"Checking if boundary with id {boundary_id} exists... ", end="", flush=True)
+ boundary = loop.run_until_complete(
+ client.boundaries.get(
+ farmer_id=farmer_id,
+ boundary_id=boundary_id
+ )
+ )
+ if boundary:
+ print("Exists")
+ else:
+ print("Boundary doesn't exist... Creating... ", end="", flush=True)
+
+ boundary = loop.run_until_complete(
+ client.boundaries.create_or_update(
+ farmer_id=farmer_id,
+ boundary_id=boundary_id,
+ boundary=Boundary(
+ description="Created by SDK",
+ geometry=Polygon(
+ coordinates=[
+ [
+ [79.27057921886444, 18.042507660177698],
+ [79.26899135112762, 18.040135849620704],
+ [79.27113711833954, 18.03927382882835],
+ [79.27248358726501, 18.041069275656195],
+ [79.27057921886444, 18.042507660177698]
+ ]
+ ]
+ )
+ )
+ )
+ )
+ print("Created")
+
+ print(boundary.as_dict())
+except HttpResponseError as e:
+ print("Ooops... here's the error:")
+ print_error(e)
+
+
+# Queue satellite job and wait for completion
+try:
+ job_id = f"{job_id_prefix}-{random.randint(0, 1000)}"
+ print(f"Queuing satellite job {job_id}... ", end="", flush=True)
+ satellite_job_poller = loop.run_until_complete(
+ client.scenes.begin_create_satellite_data_ingestion_job(
+ job_id=job_id,
+ job=SatelliteDataIngestionJob(
+ farmer_id=farmer_id,
+ boundary_id=boundary_id,
+ start_date_time=start_date_time,
+ end_date_time=end_date_time,
+ data=SatelliteData(
+ image_names=[
+ "LAI"
+ ]
+ )
+ ),
+ polling=True
+ )
+ )
+ print("Queued... Waiting for completion... ", end="", flush=True)
+ satellite_job_result = loop.run_until_complete(
+ satellite_job_poller.result()
+ )
+ print("Done")
+ print(satellite_job_result.as_dict())
+except HttpResponseError as e:
+ print_error(e)
+ raise
+
+
+# Get scenes
+try:
+ print("Getting scenes list... ", end="", flush=True)
+ scenes_aiter = client.scenes.list(
+ boundary.farmer_id,
+ boundary.id,
+ start_date_time=start_date_time,
+ end_date_time=end_date_time,
+ )
+ scenes = loop.run_until_complete(
+ aiter_to_list(scenes_aiter)
+ )
+ print("Done")
+except HttpResponseError as e:
+ print_error(e)
+ raise
+
+# Set up async functions to parallel download
+async def download_image(client, file_link, root_dir, semaphore):
+ file_path = parse_file_path_from_file_link(file_link)
+ out_path = Path(os.path.join(root_dir, file_path))
+ out_path.parent.mkdir(parents=True, exist_ok=True)
+ async with semaphore:
+ print(f"Async downloading image {file_path}... ")
+ with open(out_path, 'wb') as tif_file:
+ file_stream = await client.scenes.download(file_path)
+ async for bits in file_stream:
+ tif_file.write(bits)
+ return str(out_path)
+
+files_to_download = list()
+for scene in scenes:
+ for image_file in scene.image_files:
+ files_to_download.append(image_file.file_link)
+
+download_semaphore = asyncio.Semaphore(2)
+
+all_downloads = asyncio.gather(
+ *[download_image(client, file_link, data_root_dir, download_semaphore) for file_link in files_to_download])
+
+loop.run_until_complete(all_downloads)
+
+print("Downloads done")
+
+loop.run_until_complete(credential.close())
+loop.run_until_complete(client.close())
+loop.close()
diff --git a/sdk/farmbeats/ci.yml b/sdk/farmbeats/ci.yml
new file mode 100644
index 000000000000..9da4ee9ee1be
--- /dev/null
+++ b/sdk/farmbeats/ci.yml
@@ -0,0 +1,39 @@
+trigger:
+ branches:
+ include:
+ - master
+ - main
+ - hotfix/*
+ - release/*
+ - restapi*
+ paths:
+ include:
+ - sdk/farmbeats/
+ - sdk/core/
+ - tools/
+ - eng/
+
+pr:
+ branches:
+ include:
+ - master
+ - main
+ - feature/*
+ - hotfix/*
+ - release/*
+ - restapi*
+ paths:
+ include:
+ - sdk/farmbeats/
+ - sdk/core/
+ - tools/
+ - eng/
+
+extends:
+ template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml
+ parameters:
+ ServiceDirectory: farmbeats
+ Artifacts:
+ - name: azure-farmbeats
+ safeName: azurefarmbeats
+