diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0245dd69..894fb6bc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -14,5 +14,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf - -trigger ci diff --git a/google/cloud/language_v1/gapic_metadata.json b/google/cloud/language_v1/gapic_metadata.json index 64d3c3e4..e475aad9 100644 --- a/google/cloud/language_v1/gapic_metadata.json +++ b/google/cloud/language_v1/gapic_metadata.json @@ -76,6 +76,41 @@ ] } } + }, + "rest": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + } + } } } } diff --git a/google/cloud/language_v1/services/language_service/client.py b/google/cloud/language_v1/services/language_service/client.py index 276e9489..e97f3240 100644 --- a/google/cloud/language_v1/services/language_service/client.py +++ b/google/cloud/language_v1/services/language_service/client.py @@ -51,6 +51,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, LanguageServiceTransport from .transports.grpc import LanguageServiceGrpcTransport from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .transports.rest import LanguageServiceRestTransport class LanguageServiceClientMeta(type): @@ -66,6 +67,7 @@ class LanguageServiceClientMeta(type): ) # type: Dict[str, Type[LanguageServiceTransport]] _transport_registry["grpc"] = LanguageServiceGrpcTransport _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport + _transport_registry["rest"] 
= LanguageServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/language_v1/services/language_service/transports/__init__.py b/google/cloud/language_v1/services/language_service/transports/__init__.py index 5ec2196c..12a453a7 100644 --- a/google/cloud/language_v1/services/language_service/transports/__init__.py +++ b/google/cloud/language_v1/services/language_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import LanguageServiceTransport from .grpc import LanguageServiceGrpcTransport from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .rest import LanguageServiceRestInterceptor, LanguageServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] _transport_registry["grpc"] = LanguageServiceGrpcTransport _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport +_transport_registry["rest"] = LanguageServiceRestTransport __all__ = ( "LanguageServiceTransport", "LanguageServiceGrpcTransport", "LanguageServiceGrpcAsyncIOTransport", + "LanguageServiceRestTransport", + "LanguageServiceRestInterceptor", ) diff --git a/google/cloud/language_v1/services/language_service/transports/rest.py b/google/cloud/language_v1/services/language_service/transports/rest.py new file mode 100644 index 00000000..e9b0585f --- /dev/null +++ b/google/cloud/language_v1/services/language_service/transports/rest.py @@ -0,0 +1,1023 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.language_v1.types import language_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import LanguageServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LanguageServiceRestInterceptor: + """Interceptor for LanguageService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LanguageServiceRestTransport. + + .. 
code-block:: python + class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): + def pre_analyze_entities(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entities(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_entity_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entity_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_syntax(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_syntax(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_annotate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_annotate_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_classify_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_classify_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) + client = LanguageServiceClient(transport=transport) + + + """ + + def pre_analyze_entities( + self, + request: language_service.AnalyzeEntitiesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_entities + + Override in a subclass to manipulate 
the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entities( + self, response: language_service.AnalyzeEntitiesResponse + ) -> language_service.AnalyzeEntitiesResponse: + """Post-rpc interceptor for analyze_entities + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_analyze_entity_sentiment( + self, + request: language_service.AnalyzeEntitySentimentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entity_sentiment( + self, response: language_service.AnalyzeEntitySentimentResponse + ) -> language_service.AnalyzeEntitySentimentResponse: + """Post-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_analyze_sentiment( + self, + request: language_service.AnalyzeSentimentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. 
+ """ + return request, metadata + + def post_analyze_sentiment( + self, response: language_service.AnalyzeSentimentResponse + ) -> language_service.AnalyzeSentimentResponse: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_analyze_syntax( + self, + request: language_service.AnalyzeSyntaxRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_syntax( + self, response: language_service.AnalyzeSyntaxResponse + ) -> language_service.AnalyzeSyntaxResponse: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_annotate_text( + self, + request: language_service.AnnotateTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_annotate_text( + self, response: language_service.AnnotateTextResponse + ) -> language_service.AnnotateTextResponse: + """Post-rpc interceptor for annotate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. 
+ """ + return response + + def pre_classify_text( + self, + request: language_service.ClassifyTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for classify_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_classify_text( + self, response: language_service.ClassifyTextResponse + ) -> language_service.ClassifyTextResponse: + """Post-rpc interceptor for classify_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LanguageServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LanguageServiceRestInterceptor + + +class LanguageServiceRestTransport(LanguageServiceTransport): + """REST backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "language.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[LanguageServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or LanguageServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _AnalyzeEntities(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntities") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnalyzeEntitiesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitiesResponse: + r"""Call the analyze entities method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitiesRequest): + The request object. The entity analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.language_service.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/documents:analyzeEntities", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_entities( + request, metadata + ) + pb_request = language_service.AnalyzeEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitiesResponse() + pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entities(resp) + return resp + + class _AnalyzeEntitySentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntitySentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnalyzeEntitySentimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Call the analyze entity sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitySentimentRequest): + The request object. The entity-level sentiment analysis + request message. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/documents:analyzeEntitySentiment", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_entity_sentiment( + request, metadata + ) + pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitySentimentResponse() + pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entity_sentiment(resp) + return resp + + class _AnalyzeSentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnalyzeSentimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Call the analyze sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeSentimentRequest): + The request object. The sentiment analysis request + message. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/documents:analyzeSentiment", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_sentiment( + request, metadata + ) + pb_request = language_service.AnalyzeSentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSentimentResponse() + pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_sentiment(resp) + return resp + + class _AnalyzeSyntax(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSyntax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnalyzeSyntaxRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Call the analyze syntax method over HTTP. + + Args: + request (~.language_service.AnalyzeSyntaxRequest): + The request object. The syntax analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/documents:analyzeSyntax", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) + pb_request = language_service.AnalyzeSyntaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSyntaxResponse() + pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_syntax(resp) + return resp + + class _AnnotateText(LanguageServiceRestStub): + def __hash__(self): + return hash("AnnotateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnnotateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""Call the annotate text method over HTTP. + + Args: + request (~.language_service.AnnotateTextRequest): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/documents:annotateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_annotate_text(request, metadata) + pb_request = language_service.AnnotateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnnotateTextResponse() + pb_resp = language_service.AnnotateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_text(resp) + return resp + + class _ClassifyText(LanguageServiceRestStub): + def __hash__(self): + return hash("ClassifyText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.ClassifyTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Call the classify text method over HTTP. + + Args: + request (~.language_service.ClassifyTextRequest): + The request object. The document classification request + message. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/documents:classifyText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_classify_text(request, metadata) + pb_request = language_service.ClassifyTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ClassifyTextResponse() + pb_resp = language_service.ClassifyTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_classify_text(resp) + return resp + + @property + def analyze_entities( + self, + ) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_entity_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_syntax( + self, + ) -> Callable[ + [language_service.AnalyzeSyntaxRequest], language_service.AnalyzeSyntaxResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore + + @property + def annotate_text( + self, + ) -> Callable[ + [language_service.AnnotateTextRequest], language_service.AnnotateTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def classify_text( + self, + ) -> Callable[ + [language_service.ClassifyTextRequest], language_service.ClassifyTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("LanguageServiceRestTransport",) diff --git a/google/cloud/language_v1beta2/gapic_metadata.json b/google/cloud/language_v1beta2/gapic_metadata.json index dbb6d13e..fca8f442 100644 --- a/google/cloud/language_v1beta2/gapic_metadata.json +++ b/google/cloud/language_v1beta2/gapic_metadata.json @@ -76,6 +76,41 @@ ] } } + }, + "rest": { + "libraryClient": "LanguageServiceClient", + "rpcs": { + "AnalyzeEntities": { + "methods": [ + "analyze_entities" + ] + }, + "AnalyzeEntitySentiment": { + "methods": [ + "analyze_entity_sentiment" + ] + }, + "AnalyzeSentiment": { + "methods": [ + "analyze_sentiment" + ] + }, + "AnalyzeSyntax": { + "methods": [ + "analyze_syntax" + ] + }, + "AnnotateText": { + "methods": [ + "annotate_text" + ] + }, + "ClassifyText": { + "methods": [ + "classify_text" + ] + } + } } } } diff --git a/google/cloud/language_v1beta2/services/language_service/client.py b/google/cloud/language_v1beta2/services/language_service/client.py index 10d44fa5..613c8c88 100644 --- a/google/cloud/language_v1beta2/services/language_service/client.py +++ b/google/cloud/language_v1beta2/services/language_service/client.py @@ -51,6 +51,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, LanguageServiceTransport from .transports.grpc import LanguageServiceGrpcTransport from .transports.grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .transports.rest import LanguageServiceRestTransport class LanguageServiceClientMeta(type): @@ -66,6 +67,7 @@ class LanguageServiceClientMeta(type): ) # type: Dict[str, Type[LanguageServiceTransport]] _transport_registry["grpc"] = LanguageServiceGrpcTransport _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport + _transport_registry["rest"] = 
LanguageServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/language_v1beta2/services/language_service/transports/__init__.py b/google/cloud/language_v1beta2/services/language_service/transports/__init__.py index 5ec2196c..12a453a7 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/__init__.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/__init__.py @@ -19,14 +19,18 @@ from .base import LanguageServiceTransport from .grpc import LanguageServiceGrpcTransport from .grpc_asyncio import LanguageServiceGrpcAsyncIOTransport +from .rest import LanguageServiceRestInterceptor, LanguageServiceRestTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[LanguageServiceTransport]] _transport_registry["grpc"] = LanguageServiceGrpcTransport _transport_registry["grpc_asyncio"] = LanguageServiceGrpcAsyncIOTransport +_transport_registry["rest"] = LanguageServiceRestTransport __all__ = ( "LanguageServiceTransport", "LanguageServiceGrpcTransport", "LanguageServiceGrpcAsyncIOTransport", + "LanguageServiceRestTransport", + "LanguageServiceRestInterceptor", ) diff --git a/google/cloud/language_v1beta2/services/language_service/transports/rest.py b/google/cloud/language_v1beta2/services/language_service/transports/rest.py new file mode 100644 index 00000000..6ab75ba5 --- /dev/null +++ b/google/cloud/language_v1beta2/services/language_service/transports/rest.py @@ -0,0 +1,1023 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.language_v1beta2.types import language_service + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import LanguageServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LanguageServiceRestInterceptor: + """Interceptor for LanguageService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LanguageServiceRestTransport. + + .. code-block:: python + class MyCustomLanguageServiceInterceptor(LanguageServiceRestInterceptor): + def pre_analyze_entities(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entities(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_entity_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_entity_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_sentiment(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_sentiment(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_syntax(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_syntax(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_annotate_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_annotate_text(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_classify_text(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_classify_text(self, response): + logging.log(f"Received response: {response}") + return response + + transport = LanguageServiceRestTransport(interceptor=MyCustomLanguageServiceInterceptor()) + client = LanguageServiceClient(transport=transport) 
+ + + """ + + def pre_analyze_entities( + self, + request: language_service.AnalyzeEntitiesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnalyzeEntitiesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_entities + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entities( + self, response: language_service.AnalyzeEntitiesResponse + ) -> language_service.AnalyzeEntitiesResponse: + """Post-rpc interceptor for analyze_entities + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_analyze_entity_sentiment( + self, + request: language_service.AnalyzeEntitySentimentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + language_service.AnalyzeEntitySentimentRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_entity_sentiment( + self, response: language_service.AnalyzeEntitySentimentResponse + ) -> language_service.AnalyzeEntitySentimentResponse: + """Post-rpc interceptor for analyze_entity_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_analyze_sentiment( + self, + request: language_service.AnalyzeSentimentRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnalyzeSentimentRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. 
+ """ + return request, metadata + + def post_analyze_sentiment( + self, response: language_service.AnalyzeSentimentResponse + ) -> language_service.AnalyzeSentimentResponse: + """Post-rpc interceptor for analyze_sentiment + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_analyze_syntax( + self, + request: language_service.AnalyzeSyntaxRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnalyzeSyntaxRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_analyze_syntax( + self, response: language_service.AnalyzeSyntaxResponse + ) -> language_service.AnalyzeSyntaxResponse: + """Post-rpc interceptor for analyze_syntax + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + def pre_annotate_text( + self, + request: language_service.AnnotateTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.AnnotateTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for annotate_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_annotate_text( + self, response: language_service.AnnotateTextResponse + ) -> language_service.AnnotateTextResponse: + """Post-rpc interceptor for annotate_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. 
+ """ + return response + + def pre_classify_text( + self, + request: language_service.ClassifyTextRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[language_service.ClassifyTextRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for classify_text + + Override in a subclass to manipulate the request or metadata + before they are sent to the LanguageService server. + """ + return request, metadata + + def post_classify_text( + self, response: language_service.ClassifyTextResponse + ) -> language_service.ClassifyTextResponse: + """Post-rpc interceptor for classify_text + + Override in a subclass to manipulate the response + after it is returned by the LanguageService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LanguageServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: LanguageServiceRestInterceptor + + +class LanguageServiceRestTransport(LanguageServiceTransport): + """REST backend transport for LanguageService. + + Provides text analysis operations such as sentiment analysis + and entity recognition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "language.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[LanguageServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or LanguageServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _AnalyzeEntities(LanguageServiceRestStub):
+        def __hash__(self):
+            return hash("AnalyzeEntities")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: language_service.AnalyzeEntitiesRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> language_service.AnalyzeEntitiesResponse:
+            r"""Call the analyze entities method over HTTP.
+
+            Args:
+                request (~.language_service.AnalyzeEntitiesRequest):
+                    The request object. The entity analysis request message.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+ + Returns: + ~.language_service.AnalyzeEntitiesResponse: + The entity analysis response message. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/documents:analyzeEntities", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_entities( + request, metadata + ) + pb_request = language_service.AnalyzeEntitiesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitiesResponse() + pb_resp = language_service.AnalyzeEntitiesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entities(resp) + return resp + + class _AnalyzeEntitySentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeEntitySentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnalyzeEntitySentimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeEntitySentimentResponse: + r"""Call the analyze entity sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeEntitySentimentRequest): + The request object. The entity-level sentiment analysis + request message. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeEntitySentimentResponse: + The entity-level sentiment analysis + response message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/documents:analyzeEntitySentiment", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_entity_sentiment( + request, metadata + ) + pb_request = language_service.AnalyzeEntitySentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeEntitySentimentResponse() + pb_resp = language_service.AnalyzeEntitySentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_entity_sentiment(resp) + return resp + + class _AnalyzeSentiment(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSentiment") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnalyzeSentimentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSentimentResponse: + r"""Call the analyze sentiment method over HTTP. + + Args: + request (~.language_service.AnalyzeSentimentRequest): + The request object. The sentiment analysis request + message. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSentimentResponse: + The sentiment analysis response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/documents:analyzeSentiment", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_sentiment( + request, metadata + ) + pb_request = language_service.AnalyzeSentimentRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSentimentResponse() + pb_resp = language_service.AnalyzeSentimentResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_sentiment(resp) + return resp + + class _AnalyzeSyntax(LanguageServiceRestStub): + def __hash__(self): + return hash("AnalyzeSyntax") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnalyzeSyntaxRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnalyzeSyntaxResponse: + r"""Call the analyze syntax method over HTTP. + + Args: + request (~.language_service.AnalyzeSyntaxRequest): + The request object. The syntax analysis request message. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnalyzeSyntaxResponse: + The syntax analysis response message. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/documents:analyzeSyntax", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_analyze_syntax(request, metadata) + pb_request = language_service.AnalyzeSyntaxRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnalyzeSyntaxResponse() + pb_resp = language_service.AnalyzeSyntaxResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_syntax(resp) + return resp + + class _AnnotateText(LanguageServiceRestStub): + def __hash__(self): + return hash("AnnotateText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.AnnotateTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.AnnotateTextResponse: + r"""Call the annotate text method over HTTP. + + Args: + request (~.language_service.AnnotateTextRequest): + The request object. The request message for the text + annotation API, which can perform + multiple analysis types (sentiment, + entities, and syntax) in one call. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.AnnotateTextResponse: + The text annotations response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/documents:annotateText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_annotate_text(request, metadata) + pb_request = language_service.AnnotateTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.AnnotateTextResponse() + pb_resp = language_service.AnnotateTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_annotate_text(resp) + return resp + + class _ClassifyText(LanguageServiceRestStub): + def __hash__(self): + return hash("ClassifyText") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: language_service.ClassifyTextRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> language_service.ClassifyTextResponse: + r"""Call the classify text method over HTTP. + + Args: + request (~.language_service.ClassifyTextRequest): + The request object. The document classification request + message. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.language_service.ClassifyTextResponse: + The document classification response + message. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta2/documents:classifyText", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_classify_text(request, metadata) + pb_request = language_service.ClassifyTextRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = language_service.ClassifyTextResponse() + pb_resp = language_service.ClassifyTextResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_classify_text(resp) + return resp + + @property + def analyze_entities( + self, + ) -> Callable[ + [language_service.AnalyzeEntitiesRequest], + language_service.AnalyzeEntitiesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeEntities(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_entity_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeEntitySentimentRequest], + language_service.AnalyzeEntitySentimentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeEntitySentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_sentiment( + self, + ) -> Callable[ + [language_service.AnalyzeSentimentRequest], + language_service.AnalyzeSentimentResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSentiment(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_syntax( + self, + ) -> Callable[ + [language_service.AnalyzeSyntaxRequest], language_service.AnalyzeSyntaxResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeSyntax(self._session, self._host, self._interceptor) # type: ignore + + @property + def annotate_text( + self, + ) -> Callable[ + [language_service.AnnotateTextRequest], language_service.AnnotateTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnnotateText(self._session, self._host, self._interceptor) # type: ignore + + @property + def classify_text( + self, + ) -> Callable[ + [language_service.ClassifyTextRequest], language_service.ClassifyTextResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ClassifyText(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("LanguageServiceRestTransport",) diff --git a/tests/unit/gapic/language_v1/test_language_service.py b/tests/unit/gapic/language_v1/test_language_service.py index 2e656801..4904c456 100644 --- a/tests/unit/gapic/language_v1/test_language_service.py +++ b/tests/unit/gapic/language_v1/test_language_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,11 +33,14 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.language_v1.services.language_service import ( LanguageServiceAsyncClient, @@ -94,6 +99,7 @@ def test__get_default_mtls_endpoint(): [ (LanguageServiceClient, "grpc"), (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), ], ) def test_language_service_client_from_service_account_info( @@ -109,7 +115,11 @@ def test_language_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("language.googleapis.com:443") + assert client.transport._host == ( + "language.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com" + ) 
@pytest.mark.parametrize( @@ -117,6 +127,7 @@ def test_language_service_client_from_service_account_info( [ (transports.LanguageServiceGrpcTransport, "grpc"), (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LanguageServiceRestTransport, "rest"), ], ) def test_language_service_client_service_account_always_use_jwt( @@ -142,6 +153,7 @@ def test_language_service_client_service_account_always_use_jwt( [ (LanguageServiceClient, "grpc"), (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), ], ) def test_language_service_client_from_service_account_file( @@ -164,13 +176,18 @@ def test_language_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("language.googleapis.com:443") + assert client.transport._host == ( + "language.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com" + ) def test_language_service_client_get_transport_class(): transport = LanguageServiceClient.get_transport_class() available_transports = [ transports.LanguageServiceGrpcTransport, + transports.LanguageServiceRestTransport, ] assert transport in available_transports @@ -187,6 +204,7 @@ def test_language_service_client_get_transport_class(): transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -342,6 +360,18 @@ def test_language_service_client_client_options( "grpc_asyncio", "false", ), + ( + LanguageServiceClient, + transports.LanguageServiceRestTransport, + "rest", + "true", + ), + ( + LanguageServiceClient, + transports.LanguageServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -541,6 +571,7 @@ def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class) transports.LanguageServiceGrpcAsyncIOTransport, 
"grpc_asyncio", ), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), ], ) def test_language_service_client_client_options_scopes( @@ -581,6 +612,7 @@ def test_language_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), ], ) def test_language_service_client_client_options_credentials_file( @@ -1871,186 +1903,1809 @@ async def test_annotate_text_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LanguageServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnalyzeSentimentRequest, + dict, + ], +) +def test_analyze_sentiment_rest(request_type): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse( + language="language_value", ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_sentiment(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == "language_value" + + +def test_analyze_sentiment_rest_required_fields( + request_type=language_service.AnalyzeSentimentRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.LanguageServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_sentiment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_sentiment_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + unset_fields = transport.analyze_sentiment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) - # It is an error to provide scopes and a transport instance. 
- transport = transports.LanguageServiceGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_sentiment_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_sentiment" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeSentimentRequest.pb( + language_service.AnalyzeSentimentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnalyzeSentimentResponse.to_json( + language_service.AnalyzeSentimentResponse() ) + request = language_service.AnalyzeSentimentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeSentimentResponse() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.LanguageServiceGrpcTransport( + client.analyze_sentiment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_sentiment_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeSentimentRequest +): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = LanguageServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_sentiment(request) + + +def test_analyze_sentiment_rest_flattened(): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.LanguageServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/documents:analyzeSentiment" % client.transport._host, args[1] + ) + + +def test_analyze_sentiment_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_analyze_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + language_service.AnalyzeEntitiesRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = LanguageServiceClient.get_transport_class(transport_name)( +def test_analyze_entities_rest(request_type): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_entities(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == "language_value" + + +def test_analyze_entities_rest_required_fields( + request_type=language_service.AnalyzeEntitiesRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.LanguageServiceGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_entities(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_entities_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.analyze_entities._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) -def test_language_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_entities_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_entities" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_entities" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeEntitiesRequest.pb( + language_service.AnalyzeEntitiesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json( + language_service.AnalyzeEntitiesResponse() ) + request = language_service.AnalyzeEntitiesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeEntitiesResponse() -def test_language_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.analyze_entities( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "analyze_sentiment", - "analyze_entities", - "analyze_entity_sentiment", - "analyze_syntax", - "classify_text", - "annotate_text", + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_entities_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeEntitiesRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entities(request) -def test_language_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - 
default_scopes=( - "https://www.googleapis.com/auth/cloud-language", - "https://www.googleapis.com/auth/cloud-platform", - ), +def test_analyze_entities_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/documents:analyzeEntities" % client.transport._host, args[1] + ) + + +def test_analyze_entities_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnalyzeEntitySentimentRequest, + dict, + ], +) +def test_analyze_entity_sentiment_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_entity_sentiment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == "language_value" + + +def test_analyze_entity_sentiment_rest_required_fields( + request_type=language_service.AnalyzeEntitySentimentRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_entity_sentiment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_entity_sentiment_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeEntitySentimentRequest.pb( + 
language_service.AnalyzeEntitySentimentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + language_service.AnalyzeEntitySentimentResponse.to_json( + language_service.AnalyzeEntitySentimentResponse() + ) + ) + + request = language_service.AnalyzeEntitySentimentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeEntitySentimentResponse() + + client.analyze_entity_sentiment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_entity_sentiment_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeEntitySentimentRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entity_sentiment(request) + + +def test_analyze_entity_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_entity_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/documents:analyzeEntitySentiment" % client.transport._host, args[1] + ) + + +def test_analyze_entity_sentiment_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entity_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnalyzeSyntaxRequest, + dict, + ], +) +def test_analyze_syntax_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_syntax(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == "language_value" + + +def test_analyze_syntax_rest_required_fields( + request_type=language_service.AnalyzeSyntaxRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_syntax(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_syntax_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.analyze_syntax._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_syntax_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_syntax" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_syntax" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeSyntaxRequest.pb( + language_service.AnalyzeSyntaxRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json( + language_service.AnalyzeSyntaxResponse() + ) + + request = language_service.AnalyzeSyntaxRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeSyntaxResponse() + + client.analyze_syntax( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_syntax_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeSyntaxRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_syntax(request) + + +def test_analyze_syntax_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSyntaxResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_syntax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/documents:analyzeSyntax" % client.transport._host, args[1] + ) + + +def test_analyze_syntax_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_syntax_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.ClassifyTextRequest, + dict, + ], +) +def test_classify_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.classify_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_rest_required_fields( + request_type=language_service.ClassifyTextRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.classify_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_classify_text_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.classify_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_classify_text_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_classify_text" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_classify_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.ClassifyTextRequest.pb( + language_service.ClassifyTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.ClassifyTextResponse.to_json( + language_service.ClassifyTextResponse() + ) + + request = language_service.ClassifyTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.ClassifyTextResponse() + + client.classify_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_classify_text_rest_bad_request( + transport: str = "rest", request_type=language_service.ClassifyTextRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.classify_text(request) + + +def test_classify_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.ClassifyTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.classify_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/documents:classifyText" % client.transport._host, args[1] + ) + + +def test_classify_text_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.classify_text( + language_service.ClassifyTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + +def test_classify_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnnotateTextRequest, + dict, + ], +) +def test_annotate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.annotate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnnotateTextResponse) + assert response.language == "language_value" + + +def test_annotate_text_rest_required_fields( + request_type=language_service.AnnotateTextRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.annotate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_annotate_text_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.annotate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "document", + "features", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_annotate_text_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_annotate_text" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_annotate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnnotateTextRequest.pb( + language_service.AnnotateTextRequest() + ) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnnotateTextResponse.to_json( + language_service.AnnotateTextResponse() + ) + + request = language_service.AnnotateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnnotateTextResponse() + + client.annotate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_annotate_text_rest_bad_request( + transport: str = "rest", request_type=language_service.AnnotateTextRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_text(request) + + +def test_annotate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnnotateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.annotate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/documents:annotateText" % client.transport._host, args[1] + ) + + +def test_annotate_text_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_annotate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LanguageServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LanguageServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LanguageServiceGrpcTransport, + ) + + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "analyze_sentiment", + "analyze_entities", + "analyze_entity_sentiment", + "analyze_syntax", + "classify_text", + "annotate_text", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_language_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.language_v1.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = 
transports.LanguageServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), quota_project_id="octopus", ) @@ -2109,6 +3764,7 @@ def test_language_service_transport_auth_adc(transport_class): [ transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, ], ) def test_language_service_transport_auth_gdch_credentials(transport_class): @@ -2209,11 +3865,23 @@ def test_language_service_grpc_transport_client_cert_source_for_mtls(transport_c ) +def test_language_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LanguageServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_language_service_host_no_port(transport_name): @@ -2224,7 +3892,11 @@ def test_language_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("language.googleapis.com:443") + assert client.transport._host == ( + "language.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com" + ) @pytest.mark.parametrize( @@ -2232,6 +3904,7 @@ def test_language_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_language_service_host_with_port(transport_name): @@ -2242,7 +3915,48 @@ def test_language_service_host_with_port(transport_name): ), 
transport=transport_name, ) - assert client.transport._host == ("language.googleapis.com:8000") + assert client.transport._host == ( + "language.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_language_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LanguageServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LanguageServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.analyze_sentiment._session + session2 = client2.transport.analyze_sentiment._session + assert session1 != session2 + session1 = client1.transport.analyze_entities._session + session2 = client2.transport.analyze_entities._session + assert session1 != session2 + session1 = client1.transport.analyze_entity_sentiment._session + session2 = client2.transport.analyze_entity_sentiment._session + assert session1 != session2 + session1 = client1.transport.analyze_syntax._session + session2 = client2.transport.analyze_syntax._session + assert session1 != session2 + session1 = client1.transport.classify_text._session + session2 = client2.transport.classify_text._session + assert session1 != session2 + session1 = client1.transport.annotate_text._session + session2 = client2.transport.annotate_text._session + assert session1 != session2 def test_language_service_grpc_transport_channel(): @@ -2513,6 +4227,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2530,6 +4245,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/language_v1beta2/test_language_service.py 
b/tests/unit/gapic/language_v1beta2/test_language_service.py index ae913b69..809ce0e0 100644 --- a/tests/unit/gapic/language_v1beta2/test_language_service.py +++ b/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -31,11 +33,14 @@ from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account +from google.protobuf import json_format import grpc from grpc.experimental import aio from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.language_v1beta2.services.language_service import ( LanguageServiceAsyncClient, @@ -94,6 +99,7 @@ def test__get_default_mtls_endpoint(): [ (LanguageServiceClient, "grpc"), (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), ], ) def test_language_service_client_from_service_account_info( @@ -109,7 +115,11 @@ def test_language_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("language.googleapis.com:443") + assert client.transport._host == ( + "language.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com" + ) @pytest.mark.parametrize( @@ -117,6 +127,7 @@ def test_language_service_client_from_service_account_info( [ (transports.LanguageServiceGrpcTransport, "grpc"), (transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LanguageServiceRestTransport, "rest"), ], ) def test_language_service_client_service_account_always_use_jwt( 
@@ -142,6 +153,7 @@ def test_language_service_client_service_account_always_use_jwt( [ (LanguageServiceClient, "grpc"), (LanguageServiceAsyncClient, "grpc_asyncio"), + (LanguageServiceClient, "rest"), ], ) def test_language_service_client_from_service_account_file( @@ -164,13 +176,18 @@ def test_language_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("language.googleapis.com:443") + assert client.transport._host == ( + "language.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com" + ) def test_language_service_client_get_transport_class(): transport = LanguageServiceClient.get_transport_class() available_transports = [ transports.LanguageServiceGrpcTransport, + transports.LanguageServiceRestTransport, ] assert transport in available_transports @@ -187,6 +204,7 @@ def test_language_service_client_get_transport_class(): transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -342,6 +360,18 @@ def test_language_service_client_client_options( "grpc_asyncio", "false", ), + ( + LanguageServiceClient, + transports.LanguageServiceRestTransport, + "rest", + "true", + ), + ( + LanguageServiceClient, + transports.LanguageServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -541,6 +571,7 @@ def test_language_service_client_get_mtls_endpoint_and_cert_source(client_class) transports.LanguageServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest"), ], ) def test_language_service_client_client_options_scopes( @@ -581,6 +612,7 @@ def test_language_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (LanguageServiceClient, transports.LanguageServiceRestTransport, "rest", None), ], ) def 
test_language_service_client_client_options_credentials_file( @@ -1871,186 +1903,1810 @@ async def test_annotate_text_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.LanguageServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnalyzeSentimentRequest, + dict, + ], +) +def test_analyze_sentiment_rest(request_type): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse( + language="language_value", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.LanguageServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_sentiment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnalyzeSentimentResponse) + assert response.language == "language_value" + + +def test_analyze_sentiment_rest_required_fields( + request_type=language_service.AnalyzeSentimentRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.LanguageServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_sentiment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_sentiment_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + unset_fields = transport.analyze_sentiment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) - # It is an error to provide scopes and a transport instance. 
- transport = transports.LanguageServiceGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_sentiment_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = LanguageServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_sentiment" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_sentiment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeSentimentRequest.pb( + language_service.AnalyzeSentimentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnalyzeSentimentResponse.to_json( + language_service.AnalyzeSentimentResponse() ) + request = language_service.AnalyzeSentimentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeSentimentResponse() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.LanguageServiceGrpcTransport( + client.analyze_sentiment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_sentiment_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeSentimentRequest +): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = LanguageServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.LanguageServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_sentiment(request) + + +def test_analyze_sentiment_rest_flattened(): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.LanguageServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSentimentResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/documents:analyzeSentiment" % client.transport._host, args[1] + ) + + +def test_analyze_sentiment_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_sentiment( + language_service.AnalyzeSentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.LanguageServiceGrpcTransport, - transports.LanguageServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_analyze_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + language_service.AnalyzeEntitiesRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = LanguageServiceClient.get_transport_class(transport_name)( +def test_analyze_entities_rest(request_type): + client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_entities(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnalyzeEntitiesResponse) + assert response.language == "language_value" + + +def test_analyze_entities_rest_required_fields( + request_type=language_service.AnalyzeEntitiesRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entities._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.LanguageServiceGrpcTransport, + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_entities(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_entities_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) + unset_fields = transport.analyze_entities._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) -def test_language_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_entities_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + 
type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_entities" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_entities" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeEntitiesRequest.pb( + language_service.AnalyzeEntitiesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnalyzeEntitiesResponse.to_json( + language_service.AnalyzeEntitiesResponse() ) + request = language_service.AnalyzeEntitiesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeEntitiesResponse() -def test_language_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.LanguageServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + client.analyze_entities( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "analyze_sentiment", - "analyze_entities", - "analyze_entity_sentiment", - "analyze_syntax", - "classify_text", - "annotate_text", + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_entities_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeEntitiesRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entities(request) -def test_language_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LanguageServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - 
default_scopes=( - "https://www.googleapis.com/auth/cloud-language", - "https://www.googleapis.com/auth/cloud-platform", - ), +def test_analyze_entities_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitiesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitiesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_entities(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/documents:analyzeEntities" % client.transport._host, args[1] + ) + + +def test_analyze_entities_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.analyze_entities( + language_service.AnalyzeEntitiesRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entities_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnalyzeEntitySentimentRequest, + dict, + ], +) +def test_analyze_entity_sentiment_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_entity_sentiment(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnalyzeEntitySentimentResponse) + assert response.language == "language_value" + + +def test_analyze_entity_sentiment_rest_required_fields( + request_type=language_service.AnalyzeEntitySentimentRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_entity_sentiment._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_entity_sentiment(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_entity_sentiment_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.analyze_entity_sentiment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_entity_sentiment_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_entity_sentiment" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_entity_sentiment" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeEntitySentimentRequest.pb( + 
language_service.AnalyzeEntitySentimentRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + language_service.AnalyzeEntitySentimentResponse.to_json( + language_service.AnalyzeEntitySentimentResponse() + ) + ) + + request = language_service.AnalyzeEntitySentimentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeEntitySentimentResponse() + + client.analyze_entity_sentiment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_entity_sentiment_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeEntitySentimentRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_entity_sentiment(request) + + +def test_analyze_entity_sentiment_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeEntitySentimentResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeEntitySentimentResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_entity_sentiment(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/documents:analyzeEntitySentiment" % client.transport._host, + args[1], + ) + + +def test_analyze_entity_sentiment_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.analyze_entity_sentiment( + language_service.AnalyzeEntitySentimentRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_entity_sentiment_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnalyzeSyntaxRequest, + dict, + ], +) +def test_analyze_syntax_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.analyze_syntax(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnalyzeSyntaxResponse) + assert response.language == "language_value" + + +def test_analyze_syntax_rest_required_fields( + request_type=language_service.AnalyzeSyntaxRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).analyze_syntax._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnalyzeSyntaxResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.analyze_syntax(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_analyze_syntax_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.analyze_syntax._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_syntax_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_analyze_syntax" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_analyze_syntax" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnalyzeSyntaxRequest.pb( + language_service.AnalyzeSyntaxRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnalyzeSyntaxResponse.to_json( + language_service.AnalyzeSyntaxResponse() + ) + + request = language_service.AnalyzeSyntaxRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnalyzeSyntaxResponse() + + client.analyze_syntax( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_syntax_rest_bad_request( + transport: str = "rest", request_type=language_service.AnalyzeSyntaxRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_syntax(request) + + +def test_analyze_syntax_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnalyzeSyntaxResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnalyzeSyntaxResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.analyze_syntax(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/documents:analyzeSyntax" % client.transport._host, args[1] + ) + + +def test_analyze_syntax_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.analyze_syntax( + language_service.AnalyzeSyntaxRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_analyze_syntax_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.ClassifyTextRequest, + dict, + ], +) +def test_classify_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.classify_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.ClassifyTextResponse) + + +def test_classify_text_rest_required_fields( + request_type=language_service.ClassifyTextRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).classify_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.ClassifyTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.classify_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_classify_text_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.classify_text._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("document",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_classify_text_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_classify_text" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_classify_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.ClassifyTextRequest.pb( + language_service.ClassifyTextRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.ClassifyTextResponse.to_json( + language_service.ClassifyTextResponse() + ) + + request = language_service.ClassifyTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.ClassifyTextResponse() + + client.classify_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_classify_text_rest_bad_request( + transport: str = "rest", request_type=language_service.ClassifyTextRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.classify_text(request) + + +def test_classify_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.ClassifyTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.ClassifyTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.classify_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/documents:classifyText" % client.transport._host, args[1] + ) + + +def test_classify_text_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.classify_text( + language_service.ClassifyTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + ) + + +def test_classify_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + language_service.AnnotateTextRequest, + dict, + ], +) +def test_annotate_text_rest(request_type): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse( + language="language_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.annotate_text(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, language_service.AnnotateTextResponse) + assert response.language == "language_value" + + +def test_annotate_text_rest_required_fields( + request_type=language_service.AnnotateTextRequest, +): + transport_class = transports.LanguageServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).annotate_text._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = language_service.AnnotateTextResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.annotate_text(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_annotate_text_rest_unset_required_fields(): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.annotate_text._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "document", + "features", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_annotate_text_rest_interceptors(null_interceptor): + transport = transports.LanguageServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.LanguageServiceRestInterceptor(), + ) + client = LanguageServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.LanguageServiceRestInterceptor, "post_annotate_text" + ) as post, mock.patch.object( + transports.LanguageServiceRestInterceptor, "pre_annotate_text" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = language_service.AnnotateTextRequest.pb( + language_service.AnnotateTextRequest() + ) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = language_service.AnnotateTextResponse.to_json( + language_service.AnnotateTextResponse() + ) + + request = language_service.AnnotateTextRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = language_service.AnnotateTextResponse() + + client.annotate_text( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_annotate_text_rest_bad_request( + transport: str = "rest", request_type=language_service.AnnotateTextRequest +): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.annotate_text(request) + + +def test_annotate_text_rest_flattened(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = language_service.AnnotateTextResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = language_service.AnnotateTextResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.annotate_text(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta2/documents:annotateText" % client.transport._host, args[1] + ) + + +def test_annotate_text_rest_flattened_error(transport: str = "rest"): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.annotate_text( + language_service.AnnotateTextRequest(), + document=language_service.Document( + type_=language_service.Document.Type.PLAIN_TEXT + ), + features=language_service.AnnotateTextRequest.Features(extract_syntax=True), + encoding_type=language_service.EncodingType.UTF8, + ) + + +def test_annotate_text_rest_error(): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LanguageServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LanguageServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LanguageServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LanguageServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LanguageServiceGrpcTransport, + transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = LanguageServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = LanguageServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LanguageServiceGrpcTransport, + ) + + +def test_language_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_language_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.LanguageServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "analyze_sentiment", + "analyze_entities", + "analyze_entity_sentiment", + "analyze_syntax", + "classify_text", + "annotate_text", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_language_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.language_v1beta2.services.language_service.transports.LanguageServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = 
transports.LanguageServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-language", + "https://www.googleapis.com/auth/cloud-platform", + ), quota_project_id="octopus", ) @@ -2109,6 +3765,7 @@ def test_language_service_transport_auth_adc(transport_class): [ transports.LanguageServiceGrpcTransport, transports.LanguageServiceGrpcAsyncIOTransport, + transports.LanguageServiceRestTransport, ], ) def test_language_service_transport_auth_gdch_credentials(transport_class): @@ -2209,11 +3866,23 @@ def test_language_service_grpc_transport_client_cert_source_for_mtls(transport_c ) +def test_language_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.LanguageServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_language_service_host_no_port(transport_name): @@ -2224,7 +3893,11 @@ def test_language_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("language.googleapis.com:443") + assert client.transport._host == ( + "language.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com" + ) @pytest.mark.parametrize( @@ -2232,6 +3905,7 @@ def test_language_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_language_service_host_with_port(transport_name): @@ -2242,7 +3916,48 @@ def test_language_service_host_with_port(transport_name): ), 
transport=transport_name, ) - assert client.transport._host == ("language.googleapis.com:8000") + assert client.transport._host == ( + "language.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://language.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_language_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LanguageServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = LanguageServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.analyze_sentiment._session + session2 = client2.transport.analyze_sentiment._session + assert session1 != session2 + session1 = client1.transport.analyze_entities._session + session2 = client2.transport.analyze_entities._session + assert session1 != session2 + session1 = client1.transport.analyze_entity_sentiment._session + session2 = client2.transport.analyze_entity_sentiment._session + assert session1 != session2 + session1 = client1.transport.analyze_syntax._session + session2 = client2.transport.analyze_syntax._session + assert session1 != session2 + session1 = client1.transport.classify_text._session + session2 = client2.transport.classify_text._session + assert session1 != session2 + session1 = client1.transport.annotate_text._session + session2 = client2.transport.annotate_text._session + assert session1 != session2 def test_language_service_grpc_transport_channel(): @@ -2513,6 +4228,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2530,6 +4246,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: