diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 7170e7172e..a2112c5688 100644 --- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1051,6 +1051,11 @@ def test_{{ method_name }}_raw_page_lro(): {% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when lro and client streaming are supported. #} {% if not method.client_streaming %} +{# NOTE: This guard is added to avoid generating duplicate tests for methods which are tested elsewhere. As we implement each of the API methods + # in `macro::call_success_test`, its case will be removed from the condition below. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2143): Remove the test `test_{{ method_name }}_rest` from here once the linked issue is resolved. +#} +{% if method.server_streaming or method.lro or method.extended_lro or method.paged_result_field %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -1185,6 +1190,7 @@ def test_{{ method_name }}_rest(request_type): json_return_value = json_format.MessageToJson(return_value) {% else %} {% if method.output.ident.is_proto_plus_type %} + # Convert return value to protobuf type return_value = {{ method.output.ident }}.pb(return_value) {% endif %} @@ -1249,6 +1255,7 @@ def test_{{ method_name }}_rest(request_type): {% endfor %} {% endif %} +{% endif %}{# if method.server_streaming or method.lro or method.extended_lro or method.paged_result_field #} def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1953,6 +1960,7 @@ def test_unsupported_parameter_rest_asyncio(): {{ rest_method_not_implemented_error(service, method, transport, is_async) }} {% else %} {{ bad_request_test(service, method, transport, is_async) }} +{{ call_success_test(service, method, transport, is_async) }} {% endif %}{# is_rest_unsupported_method(method, is_async) == 'False' and method.http_options #} {% endfor %} {{ initialize_client_with_transport_test(service, transport, is_async) }} @@ -2056,3 +2064,206 @@ def test_initialize_client_w_{{transport_name}}(): {% endif %}{# if 'rest' in transport #} {% endmacro %} + +{# call_success_test generates tests for rest methods + # when they make a successful request. + # NOTE: Currently, this macro does not support the following method + # types: [method.server_streaming, method.lro, method.extended_lro, method.paged_result_field]. + # As support is added for the above methods, the relevant guard can be removed from within the macro. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2142): Clean up `rest_required_tests` as we add support for each of the method types mentioned above. 
+#} +{% macro call_success_test(service, method, transport, is_async) %} +{% if 'rest' in transport %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = method.name|snake_case %} +{# NOTE: set method_output to method.extended_lro.operation_type for the following method types: + # (method.extended_lro and not full_extended_lro) +#} +{% set method_output = method.output %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2143): Update the guard below as we add support for each method, and keep it in sync with the guard in + # `rest_required_tests`, which should be the exact opposite. Remove it once we have all the methods supported in async rest transport that are supported in sync rest transport. + #} +{% if not (method.server_streaming or method.lro or method.extended_lro or method.paged_result_field)%} +{{async_decorator}} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +{{async_prefix}}def test_{{method_name}}_{{transport_name}}_call_success(request_type): + {% if transport_name == 'rest_asyncio' %} + if not HAS_GOOGLE_AUTH_AIO: + {# TODO(https://github.com/googleapis/google-auth-library-python/pull/1577): Update the version of google-auth once the linked PR is merged. #} + pytest.skip("google-auth > 2.x.x is required for async rest transport.") + + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = {{ method.input.ident }}.meta.fields["{{ field.name }}"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["{{ field.name }}"][field])): + del request_init["{{ field.name }}"][field][i][subfield] + else: + del request_init["{{ field.name }}"][field][subfield] + {% endif %} + {% endfor %} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.extended_lro %} + return_value = {{ method.extended_lro.operation_type.ident }}( + {% for field in method.extended_lro.operation_type.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} + {% endfor %} + ) + {% else %} + return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} + {% endfor %} + ) + {% endif %}{# method.void #} + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% else %}{# method.void #} + {% if method.output.ident.is_proto_plus_type %} + + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) + {% endif %}{# method.output.ident.is_proto_plus_type #} + json_return_value = json_format.MessageToJson(return_value) + {% endif %}{# method.void #} + {% if is_async %} + response_value.read = mock.AsyncMock(return_value=b'{}') + {% else %}{# is_async #} + response_value.content = json_return_value.encode('UTF-8') + {% endif %} + req.return_value = response_value + response = {{ await_prefix }}client.{{ method_name }}(request) + + # Establish that the response is the type that we expect. 
+ {% if method.void %} + assert response is None + {% else %} + assert isinstance(response, {{ method.client_output.ident }}) + {% for field in method_output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + {% if field.repeated %} + for index in range(len(response.{{ field.name }})): + assert math.isclose( + response.{{ field.name }}[index], + {{ field.mock_value }}[index], + rel_tol=1e-6, + ) + {% else %}{# field.repeated #} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% endif %}{# field.repeated #} + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else %} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif %}{# field.field_pb.type in [1, 2] #} + {% endif %}{# not field.oneof or field.proto3_optional #} + {% endfor %}{# field in method_output.fields.values() | rejectattr('message') #} + {% endif %}{# method.void #} + +{% endif %}{# if not (method.server_streaming or method.lro or method.extended_lro or method.paged_result_field) #} +{% endif %}{# if 'rest' in transport #} +{% endmacro %} diff --git a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 83f384392d..951289dd2b 100755 --- a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -10137,6 +10137,7 @@ def test_list_assets_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = asset_service.ListAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -10417,40 +10418,6 @@ def test_list_assets_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) -def test_batch_get_assets_history_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_get_assets_history(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) - def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10604,50 +10571,6 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): post.assert_called_once() -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) -def test_create_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_feed(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - def test_create_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10857,50 +10780,6 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_feed(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - def test_get_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11105,40 +10984,6 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) -def test_list_feeds_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_feeds(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.ListFeedsResponse) - def test_list_feeds_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11343,50 +11188,6 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) -def test_update_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_feed(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - def test_update_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11587,37 +11388,6 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_feed(request) - - # Establish that the response is the type that we expect. 
- assert response is None - def test_delete_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11836,6 +11606,7 @@ def test_search_all_resources_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = asset_service.SearchAllResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -12144,6 +11915,7 @@ def test_search_all_iam_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -12426,42 +12198,6 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) -def test_analyze_iam_policy_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True - def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12790,40 +12526,6 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): post.assert_called_once() -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) -def test_analyze_move_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeMoveResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeMoveResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_move(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeMoveResponse) - def test_analyze_move_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12988,44 +12690,6 @@ def test_analyze_move_rest_interceptors(null_interceptor): post.assert_called_once() -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) -def test_query_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.QueryAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.query_assets(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' - assert response.done is True - def test_query_assets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13178,112 +12842,6 @@ def test_query_assets_rest_interceptors(null_interceptor): post.assert_called_once() -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) -def test_create_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - def test_create_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13506,48 +13064,6 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) -def test_get_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - def test_get_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13776,6 +13292,7 @@ def test_list_saved_queries_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -14056,135 +13573,29 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) -def test_update_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_update_saved_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Determine if the message type is proto-plus or protobuf - test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"] + # Ensure method has been cached + assert client._transport.update_saved_query in client._transport._wrapped_methods - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - -def test_update_saved_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_saved_query in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc - - request = {} - client.update_saved_query(request) + request = {} + client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -14366,37 +13777,6 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) -def test_delete_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_saved_query(request) - - # Establish that the response is the type that we expect. - assert response is None - def test_delete_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14591,40 +13971,6 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) -def test_batch_get_effective_iam_policies_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_get_effective_iam_policies(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) - def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14813,6 +14159,7 @@ def test_analyze_org_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -15132,6 +14479,7 @@ def test_analyze_org_policy_governed_containers_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -15451,6 +14799,7 @@ def test_analyze_org_policy_governed_assets_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -15933,34 +15282,48 @@ def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.Ba client.batch_get_assets_history(request) -def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetAssetsHistoryRequest, + dict, +]) +def test_batch_get_assets_history_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding request_init = {'parent': 'sample1/sample2'} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.BatchGetAssetsHistoryResponse( + ) + # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - client.create_feed(request) + response = client.batch_get_assets_history(request) + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) -def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): + +def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {'parent': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -15972,37 +15335,61 @@ def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_feed(request) + client.create_feed(request) -def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.CreateFeedRequest, + dict, +]) +def test_create_feed_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding request_init = {'parent': 'sample1/sample2'} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - client.list_feeds(request) + response = client.create_feed(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] -def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): +def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request_init = {'name': 'sample1/sample2/feeds/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16014,37 +15401,61 @@ def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.update_feed(request) + client.get_feed(request) -def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.GetFeedRequest, + dict, +]) +def test_get_feed_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding request_init = {'name': 'sample1/sample2/feeds/sample3'} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - client.delete_feed(request) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 -def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_feed(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {'parent': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16056,37 +15467,51 @@ def test_search_all_resources_rest_bad_request(request_type=asset_service.Search response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.search_all_resources(request) + client.list_feeds(request) -def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.ListFeedsRequest, + dict, +]) +def test_list_feeds_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {'parent': 'sample1/sample2'} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse( + ) + # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - client.search_all_iam_policies(request) + response = client.list_feeds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.ListFeedsResponse) -def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): +def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16098,37 +15523,61 @@ def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeI response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.analyze_iam_policy(request) + client.update_feed(request) -def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateFeedRequest, + dict, +]) +def test_update_feed_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + # Wrap the value into a proper Response obj response_value = mock.Mock() - json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - client.analyze_iam_policy_longrunning(request) + response = client.update_feed(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] -def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): +def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} + request_init = {'name': 'sample1/sample2/feeds/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16140,37 +15589,47 @@ def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveReq response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.analyze_move(request) + client.delete_feed(request) -def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteFeedRequest, + dict, +]) +def test_delete_feed_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'name': 'sample1/sample2/feeds/sample3'} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + # Wrap the value into a proper Response obj response_value = mock.Mock() + response_value.status_code = 200 json_return_value = '' - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() + response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value - client.query_assets(request) + response = client.delete_feed(request) + + # Establish that the response is the type that we expect. + assert response is None -def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): +def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16182,16 +15641,16 @@ def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSa response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.create_saved_query(request) + client.search_all_resources(request) -def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest): +def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16203,16 +15662,16 @@ def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQue response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.get_saved_query(request) + client.search_all_iam_policies(request) -def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest): +def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16224,16 +15683,53 @@ def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSave response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.list_saved_queries(request) + client.analyze_iam_policy(request) -def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyRequest, + dict, +]) +def test_analyze_iam_policy_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) + assert response.fully_explored is True + + +def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16245,16 +15741,16 @@ def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSa response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.update_saved_query(request) + client.analyze_iam_policy_longrunning(request) -def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest): +def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request_init = {'resource': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -16266,16 +15762,51 @@ def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSa response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.delete_saved_query(request) + client.analyze_move(request) -def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeMoveRequest, + dict, +]) +def test_analyze_move_rest_call_success(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} + request_init = {'resource': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeMoveResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeMoveResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_move(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeMoveResponse) + + +def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -16287,7 +15818,495 @@ def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_se response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value - client.batch_get_effective_iam_policies(request) + client.query_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.QueryAssetsRequest, + dict, +]) +def test_query_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.QueryAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.query_assets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True + + +def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_saved_query(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateSavedQueryRequest, + dict, +]) +def test_create_saved_query_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["saved_query"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["saved_query"][field])): + del request_init["saved_query"][field][i][subfield] + else: + del request_init["saved_query"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_saved_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_saved_query(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetSavedQueryRequest, + dict, +]) +def test_get_saved_query_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_saved_query(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_saved_queries(request) + + +def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_saved_query(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateSavedQueryRequest, + dict, +]) +def test_update_saved_query_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["saved_query"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["saved_query"][field])): + del request_init["saved_query"][field][i][subfield] + else: + del request_init["saved_query"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_saved_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_saved_query(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteSavedQueryRequest, + dict, +]) +def test_delete_saved_query_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_saved_query(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.batch_get_effective_iam_policies(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetEffectiveIamPoliciesRequest, + dict, +]) +def test_batch_get_effective_iam_policies_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_get_effective_iam_policies(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) def test_analyze_org_policies_rest_bad_request(request_type=asset_service.AnalyzeOrgPoliciesRequest): diff --git a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 35c5979347..1f0642e879 100755 --- a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -2291,42 +2291,6 @@ async def test_sign_jwt_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - common.GenerateAccessTokenRequest, - dict, -]) -def test_generate_access_token_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.GenerateAccessTokenResponse( - access_token='access_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.GenerateAccessTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.generate_access_token(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' - def test_generate_access_token_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2542,42 +2506,6 @@ def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - common.GenerateIdTokenRequest, - dict, -]) -def test_generate_id_token_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.GenerateIdTokenResponse( - token='token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.GenerateIdTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.generate_id_token(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' - def test_generate_id_token_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2793,44 +2721,6 @@ def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - common.SignBlobRequest, - dict, -]) -def test_sign_blob_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.SignBlobResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.sign_blob(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' - assert response.signed_blob == b'signed_blob_blob' - def test_sign_blob_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3044,44 +2934,6 @@ def test_sign_blob_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - common.SignJwtRequest, - dict, -]) -def test_sign_jwt_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.SignJwtResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.sign_jwt(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' - def test_sign_jwt_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3440,6 +3292,43 @@ def test_generate_access_token_rest_bad_request(request_type=common.GenerateAcce client.generate_access_token(request) +@pytest.mark.parametrize("request_type", [ + common.GenerateAccessTokenRequest, + dict, +]) +def test_generate_access_token_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateAccessTokenResponse( + access_token='access_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_access_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateAccessTokenResponse) + assert response.access_token == 'access_token_value' + + def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenRequest): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3461,6 +3350,43 @@ def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenR client.generate_id_token(request) +@pytest.mark.parametrize("request_type", [ + common.GenerateIdTokenRequest, + dict, +]) +def test_generate_id_token_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = common.GenerateIdTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_id_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateIdTokenResponse) + assert response.token == 'token_value' + + def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3482,6 +3408,45 @@ def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): client.sign_blob(request) +@pytest.mark.parametrize("request_type", [ + common.SignBlobRequest, + dict, +]) +def test_sign_blob_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.sign_blob(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common.SignBlobResponse) + assert response.key_id == 'key_id_value' + assert response.signed_blob == b'signed_blob_blob' + + def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3503,6 +3468,45 @@ def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): client.sign_jwt(request) +@pytest.mark.parametrize("request_type", [ + common.SignJwtRequest, + dict, +]) +def test_sign_jwt_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.sign_jwt(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common.SignJwtResponse) + assert response.key_id == 'key_id_value' + assert response.signed_jwt == 'signed_jwt_value' + + def test_initialize_client_w_rest(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 2f79055e2e..45b2f7c670 100755 --- a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -8203,50 +8203,6 @@ async def test_update_google_channel_config_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetTriggerRequest, - dict, -]) -def test_get_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' - def test_get_trigger_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8476,6 +8432,7 @@ def test_list_triggers_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = eventarc.ListTriggersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -9648,53 +9605,6 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelRequest, - dict, -]) -def test_get_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - pubsub_topic='pubsub_topic_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_channel(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, channel.Channel) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.provider == 'provider_value' - assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' - assert response.crypto_key_name == 'crypto_key_name_value' - def test_get_channel_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9924,6 +9834,7 @@ def test_list_channels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = eventarc.ListChannelsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -11092,44 +11003,6 @@ def test_delete_channel_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetProviderRequest, - dict, -]) -def test_get_provider_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = discovery.Provider( - name='name_value', - display_name='display_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_provider(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, discovery.Provider) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - def test_get_provider_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11359,6 +11232,7 @@ def test_list_providers_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = eventarc.ListProvidersResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -11640,48 +11514,6 @@ def test_list_providers_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelConnectionRequest, - dict, -]) -def test_get_channel_connection_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_channel_connection(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' - def test_get_channel_connection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11911,6 +11743,7 @@ def test_list_channel_connections_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -12744,44 +12577,6 @@ def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest') ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetGoogleChannelConfigRequest, - dict, -]) -def test_get_google_channel_config_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_google_channel_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' - def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12986,134 +12781,32 @@ def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest') ) -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateGoogleChannelConfigRequest, - dict, -]) -def test_update_google_channel_config_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] + # Ensure method has been cached + assert client._transport.update_google_channel_config in client._transport._wrapped_methods - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + request = {} + client.update_google_channel_config(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["google_channel_config"][field])): - del request_init["google_channel_config"][field][i][subfield] - else: - del request_init["google_channel_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_google_channel_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' - -def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_google_channel_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc - - request = {} - client.update_google_channel_config(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 client.update_google_channel_config(request) @@ -13437,6 +13130,51 @@ def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): client.get_trigger(request) +@pytest.mark.parametrize("request_type", [ + eventarc.GetTriggerRequest, + dict, +]) +def test_get_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_trigger(request) + + # Establish that the response is the type that we expect. 
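For orientation, the `_use_cached_wrapped_rpc` tests re-indented in the hunk above all check the same invariant: `wrap_method` runs only while the client is constructed, the wrapped callable is cached in `_transport._wrapped_methods`, and later calls dispatch through that cache. A minimal standalone sketch of that check, assuming the `eventarc_v1` import paths this generated test module already uses (the sketch's function name and the bare `{}` requests are illustrative, not generated code):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.eventarc_v1.services.eventarc import EventarcClient


def test_update_google_channel_config_cached_wrapped_rpc_sketch():
    # Wrapping should only happen while the client is being constructed.
    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
        client = EventarcClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport="rest",
        )
        assert wrapper_fn.call_count > 0
        wrapper_fn.reset_mock()

        # The transport caches one wrapped callable per method ...
        method = client._transport.update_google_channel_config
        assert method in client._transport._wrapped_methods

        # ... so swapping the cached entry intercepts every later call.
        mock_rpc = mock.Mock()
        mock_rpc.return_value.name = "foo"
        client._transport._wrapped_methods[method] = mock_rpc

        client.update_google_channel_config({})
        assert mock_rpc.call_count == 1

        # A second call reuses the cached wrapper instead of rebuilding it.
        client.update_google_channel_config({})
        assert mock_rpc.call_count == 2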
+ assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' + assert response.etag == 'etag_value' + + def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13542,6 +13280,54 @@ def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): client.get_channel(request) +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelRequest, + dict, +]) +def test_get_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + pubsub_topic='pubsub_topic_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_channel(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13647,6 +13433,45 @@ def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest) client.get_provider(request) +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
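Each new `*_rest_call_success` test above follows one shape: patch the transport session's `request` method, return a fake 200 response whose body is the JSON encoding of the expected proto-plus message, and assert that the client deserializes it into a correctly typed, correctly populated object. A condensed sketch of that shape for `GetTrigger`, again assuming the imports already present in this generated module (the test name is illustrative):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.protobuf import json_format
from google.cloud.eventarc_v1.services.eventarc import EventarcClient
from google.cloud.eventarc_v1.types import eventarc, trigger


def test_get_trigger_rest_call_success_sketch():
    client = EventarcClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = eventarc.GetTriggerRequest(
        name="projects/sample1/locations/sample2/triggers/sample3",
    )

    # Patch the underlying requests session so no real HTTP call is made.
    with mock.patch.object(type(client.transport._session), "request") as req:
        return_value = trigger.Trigger(name="name_value")

        # Fake a successful response whose body is the JSON-encoded protobuf.
        response_value = mock.Mock()
        response_value.status_code = 200
        response_value.content = json_format.MessageToJson(
            trigger.Trigger.pb(return_value)
        ).encode("UTF-8")
        req.return_value = response_value

        response = client.get_trigger(request)

    # The REST transport deserializes the payload back into the proto-plus type.
    assert isinstance(response, trigger.Trigger)
    assert response.name == "name_value"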
+ return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_provider(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13689,6 +13514,49 @@ def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChanne client.get_channel_connection(request) +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelConnectionRequest, + dict, +]) +def test_get_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_channel_connection(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListChannelConnectionsRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13773,6 +13641,45 @@ def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoo client.get_google_channel_config(request) +@pytest.mark.parametrize("request_type", [ + eventarc.GetGoogleChannelConfigRequest, + dict, +]) +def test_get_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_google_channel_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + def test_update_google_channel_config_rest_bad_request(request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13794,6 +13701,109 @@ def test_update_google_channel_config_rest_bad_request(request_type=eventarc.Upd client.update_google_channel_config(request) +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, +]) +def test_update_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_channel_config"][field])): + del request_init["google_channel_config"][field][i][subfield] + else: + del request_init["google_channel_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_google_channel_config(request) + + # Establish that the response is the type that we expect. 
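The long preamble in `test_update_google_channel_config_rest_call_success` above exists only to keep the sample request usable across dependency versions: any nested key that the installed message definition does not declare is pruned from the request dict before the request object is built. A simplified, hypothetical restatement of that pruning step on plain dicts (the generated code derives the `(field, subfield)` pairs by introspecting proto-plus `meta.fields` or the protobuf `DESCRIPTOR`; the helper name and sample values below are made up):

# Hypothetical helper; the generated test inlines this logic instead.
def prune_unknown_subfields(message_dict, runtime_nested_fields):
    """Drop (field, subfield) keys that the runtime dependency does not declare."""
    for field, value in message_dict.items():
        # Only message-typed values (a dict, or a repeated field of dicts) have subfields.
        items = value if isinstance(value, list) else [value]
        for item in items:
            if not hasattr(item, "keys"):
                continue
            for subfield in list(item.keys()):
                if (field, subfield) not in runtime_nested_fields:
                    del item[subfield]


sample = {
    "name": "projects/sample1/locations/sample2/googleChannelConfig",
    "update_time": {"seconds": 751, "nanos": 543, "not_in_runtime": 1},
}
runtime_nested_fields = [("update_time", "seconds"), ("update_time", "nanos")]
prune_unknown_subfields(sample, runtime_nested_fields)
assert "not_in_runtime" not in sample["update_time"]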
+ assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + def test_initialize_client_w_rest(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 093a4b385b..2080273609 100755 --- a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -5167,6 +5167,7 @@ def test_list_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type return_value = cloud_redis.ListInstancesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) @@ -5448,94 +5449,6 @@ def test_list_instances_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) -def test_get_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' - assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' - assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' - assert response.tier == cloud_redis.Instance.Tier.BASIC - assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' - assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING - assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION - assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' - assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] - def test_get_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5740,42 +5653,6 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceAuthStringRequest, - dict, -]) -def test_get_instance_auth_string_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloud_redis.InstanceAuthString( - auth_string='auth_string_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.InstanceAuthString.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_instance_auth_string(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_redis.InstanceAuthString) - assert response.auth_string == 'auth_string_value' - def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8193,6 +8070,95 @@ def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceReque client.get_instance(request) +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +def test_get_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8214,6 +8180,43 @@ def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetI client.get_instance_auth_string(request) +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +def test_get_instance_auth_string_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance_auth_string(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(),