Updated DB_Helper by adding firebase methods.

This commit is contained in:
Batuhan Berk Başoğlu 2020-10-05 16:53:40 -04:00
parent 485cc3bbba
commit c82121d036
1810 changed files with 537281 additions and 1 deletions

View file

@@ -0,0 +1,23 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Logging API wrapper."""
from gcloud.logging.client import Client
from gcloud.logging.connection import Connection
# Re-export the OAuth2 scopes from the connection so callers can use
# ``gcloud.logging.SCOPE`` directly.
SCOPE = Connection.SCOPE

# Sort-order values accepted by ``list_entries``'s ``order_by`` argument.
ASCENDING = 'timestamp asc'
DESCENDING = 'timestamp desc'

View file

@@ -0,0 +1,575 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GAX wrapper for Logging API requests."""
import json
# pylint: disable=import-error
from google.gax import CallOptions
from google.gax import INITIAL_PAGE
from google.gax.errors import GaxError
from google.gax.grpc import exc_to_code
from google.logging.type.log_severity_pb2 import LogSeverity
from google.logging.v2.logging_config_pb2 import LogSink
from google.logging.v2.logging_metrics_pb2 import LogMetric
from google.logging.v2.log_entry_pb2 import LogEntry
from google.protobuf.json_format import Parse
from grpc.beta.interfaces import StatusCode
# pylint: enable=import-error
from gcloud.exceptions import Conflict
from gcloud.exceptions import NotFound
from gcloud._helpers import _datetime_to_pb_timestamp
from gcloud._helpers import _datetime_to_rfc3339
from gcloud._helpers import _pb_timestamp_to_datetime
class _LoggingAPI(object):
    """Helper mapping logging-related APIs.

    :type gax_api:
        :class:`google.logging.v2.logging_service_v2_api.LoggingServiceV2Api`
    :param gax_api: API object used to make GAX requests.
    """

    def __init__(self, gax_api):
        self._gax_api = gax_api

    def list_entries(self, projects, filter_='', order_by='',
                     page_size=0, page_token=None):
        """Return one page of log entry resources.

        :type projects: list of strings
        :param projects: project IDs to include.  If not passed, defaults
                         to the project bound to the API's client.

        :type filter_: str
        :param filter_: a filter expression.  See:
                        https://cloud.google.com/logging/docs/view/advanced_filters

        :type order_by: str
        :param order_by: One of :data:`gcloud.logging.ASCENDING` or
                         :data:`gcloud.logging.DESCENDING`.

        :type page_size: int
        :param page_size: maximum number of entries to return; if omitted,
                          the API chooses a default.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of entries;
                           if omitted, the API returns the first page.

        :rtype: tuple, (list, str)
        :returns: list of mappings, plus a "next page token" string (or
                  ``None`` when no further pages remain; pass the token
                  back as ``page_token`` to fetch the next page).
        """
        call_options = _build_paging_options(page_token)
        page_iter = self._gax_api.list_log_entries(
            projects, filter_, order_by, page_size, call_options)
        page = page_iter.next()
        mappings = [_log_entry_pb_to_mapping(pb) for pb in page]
        return mappings, (page_iter.page_token or None)

    def write_entries(self, entries, logger_name=None, resource=None,
                      labels=None):
        """API call: log one or more entry resources.

        :type entries: sequence of mapping
        :param entries: the log entry resources to log.

        :type logger_name: string
        :param logger_name: name of the default logger for the entries;
                            individual entries may override.

        :type resource: mapping
        :param resource: default resource to associate with the entries;
                         individual entries may override.

        :type labels: mapping
        :param labels: default labels to associate with the entries;
                       individual entries may override.
        """
        pbs = [_log_entry_mapping_to_pb(entry) for entry in entries]
        # False -> partial_success; None -> default call options.
        self._gax_api.write_log_entries(
            pbs, logger_name, resource, labels, False, None)

    def logger_delete(self, project, logger_name):
        """API call: delete all entries in a logger.

        :type project: string
        :param project: ID of the project containing the entries to delete.

        :type logger_name: string
        :param logger_name: name of the logger containing the entries.
        """
        log_path = 'projects/%s/logs/%s' % (project, logger_name)
        self._gax_api.delete_log(log_path, None)
class _SinksAPI(object):
    """Helper mapping sink-related APIs.

    :type gax_api:
        :class:`google.logging.v2.config_service_v2_api.ConfigServiceV2Api`
    :param gax_api: API object used to make GAX requests.
    """

    def __init__(self, gax_api):
        self._gax_api = gax_api

    def list_sinks(self, project, page_size=0, page_token=None):
        """List sinks for the project associated with this client.

        :type project: string
        :param project: ID of the project whose sinks are to be listed.

        :type page_size: int
        :param page_size: maximum number of sinks to return.  If not
                          passed, defaults to a value set by the API.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of sinks.  If
                           not passed, the API will return the first page
                           of sinks.

        :rtype: tuple, (list, str)
        :returns: list of mappings, plus a "next page token" string:
                  if not None, indicates that more sinks can be retrieved
                  with another call (pass that value as ``page_token``).
        """
        options = _build_paging_options(page_token)
        page_iter = self._gax_api.list_sinks(project, page_size, options)
        # Convert protobuf messages into REST-style mappings.
        sinks = [_log_sink_pb_to_mapping(log_sink_pb)
                 for log_sink_pb in page_iter.next()]
        token = page_iter.page_token or None
        return sinks, token

    def sink_create(self, project, sink_name, filter_, destination):
        """API call: create a sink resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create

        :type project: string
        :param project: ID of the project in which to create the sink.

        :type sink_name: string
        :param sink_name: the name of the sink

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries exported by the sink.

        :type destination: string
        :param destination: destination URI for the entries exported by
                            the sink.
        """
        options = None  # default call options
        parent = 'projects/%s' % (project,)
        sink_pb = LogSink(name=sink_name, filter=filter_,
                          destination=destination)
        try:
            self._gax_api.create_sink(parent, sink_pb, options)
        except GaxError as exc:
            # "Already exists" surfaces as FAILED_PRECONDITION; translate
            # it to gcloud's Conflict exception.
            if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
                path = 'projects/%s/sinks/%s' % (project, sink_name)
                raise Conflict(path)
            raise

    def sink_get(self, project, sink_name):
        """API call: retrieve a sink resource.

        :type project: string
        :param project: ID of the project containing the sink.

        :type sink_name: string
        :param sink_name: the name of the sink
        """
        options = None  # default call options
        path = 'projects/%s/sinks/%s' % (project, sink_name)
        try:
            sink_pb = self._gax_api.get_sink(path, options)
        except GaxError as exc:
            # Translate gRPC NOT_FOUND to gcloud's NotFound exception.
            if exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
                raise NotFound(path)
            raise
        return _log_sink_pb_to_mapping(sink_pb)

    def sink_update(self, project, sink_name, filter_, destination):
        """API call: update a sink resource.

        :type project: string
        :param project: ID of the project containing the sink.

        :type sink_name: string
        :param sink_name: the name of the sink

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries exported by the sink.

        :type destination: string
        :param destination: destination URI for the entries exported by
                            the sink.
        """
        options = None  # default call options
        path = 'projects/%s/sinks/%s' % (project, sink_name)
        # Note: for update, the pb's ``name`` is the full resource path.
        sink_pb = LogSink(name=path, filter=filter_, destination=destination)
        try:
            self._gax_api.update_sink(path, sink_pb, options)
        except GaxError as exc:
            if exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
                raise NotFound(path)
            raise
        return _log_sink_pb_to_mapping(sink_pb)

    def sink_delete(self, project, sink_name):
        """API call: delete a sink resource.

        :type project: string
        :param project: ID of the project containing the sink.

        :type sink_name: string
        :param sink_name: the name of the sink
        """
        options = None  # default call options
        path = 'projects/%s/sinks/%s' % (project, sink_name)
        try:
            self._gax_api.delete_sink(path, options)
        except GaxError as exc:
            if exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
                raise NotFound(path)
            raise
class _MetricsAPI(object):
    """Helper mapping metric-related APIs.

    :type gax_api:
        :class:`google.logging.v2.metrics_service_v2_api.MetricsServiceV2Api`
    :param gax_api: API object used to make GAX requests.
    """
    # Docstring fixed: it previously said "sink-related APIs", copied
    # verbatim from ``_SinksAPI``.

    def __init__(self, gax_api):
        self._gax_api = gax_api

    def list_metrics(self, project, page_size=0, page_token=None):
        """List metrics for the project associated with this client.

        :type project: string
        :param project: ID of the project whose metrics are to be listed.

        :type page_size: int
        :param page_size: maximum number of metrics to return.  If not
                          passed, defaults to a value set by the API.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of metrics.
                           If not passed, the API will return the first
                           page of metrics.

        :rtype: tuple, (list, str)
        :returns: list of mappings, plus a "next page token" string:
                  if not None, indicates that more metrics can be
                  retrieved with another call (pass that value as
                  ``page_token``).
        """
        options = _build_paging_options(page_token)
        page_iter = self._gax_api.list_log_metrics(project, page_size, options)
        metrics = [_log_metric_pb_to_mapping(log_metric_pb)
                   for log_metric_pb in page_iter.next()]
        token = page_iter.page_token or None
        return metrics, token

    def metric_create(self, project, metric_name, filter_, description):
        """API call: create a metric resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create

        :type project: string
        :param project: ID of the project in which to create the metric.

        :type metric_name: string
        :param metric_name: the name of the metric

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries tracked by the metric.

        :type description: string
        :param description: description of the metric.
        """
        options = None  # default call options
        parent = 'projects/%s' % (project,)
        metric_pb = LogMetric(name=metric_name, filter=filter_,
                              description=description)
        try:
            self._gax_api.create_log_metric(parent, metric_pb, options)
        except GaxError as exc:
            # "Already exists" surfaces as FAILED_PRECONDITION; translate
            # it to gcloud's Conflict exception.
            if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION:
                path = 'projects/%s/metrics/%s' % (project, metric_name)
                raise Conflict(path)
            raise

    def metric_get(self, project, metric_name):
        """API call: retrieve a metric resource.

        :type project: string
        :param project: ID of the project containing the metric.

        :type metric_name: string
        :param metric_name: the name of the metric
        """
        options = None  # default call options
        path = 'projects/%s/metrics/%s' % (project, metric_name)
        try:
            metric_pb = self._gax_api.get_log_metric(path, options)
        except GaxError as exc:
            # Translate gRPC NOT_FOUND to gcloud's NotFound exception.
            if exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
                raise NotFound(path)
            raise
        return _log_metric_pb_to_mapping(metric_pb)

    def metric_update(self, project, metric_name, filter_, description):
        """API call: update a metric resource.

        :type project: string
        :param project: ID of the project containing the metric.

        :type metric_name: string
        :param metric_name: the name of the metric

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries tracked by the metric.

        :type description: string
        :param description: description of the metric.
        """
        options = None  # default call options
        path = 'projects/%s/metrics/%s' % (project, metric_name)
        # Note: for update, the pb's ``name`` is the full resource path.
        metric_pb = LogMetric(name=path, filter=filter_,
                              description=description)
        try:
            self._gax_api.update_log_metric(path, metric_pb, options)
        except GaxError as exc:
            if exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
                raise NotFound(path)
            raise
        return _log_metric_pb_to_mapping(metric_pb)

    def metric_delete(self, project, metric_name):
        """API call: delete a metric resource.

        :type project: string
        :param project: ID of the project containing the metric.

        :type metric_name: string
        :param metric_name: the name of the metric
        """
        options = None  # default call options
        path = 'projects/%s/metrics/%s' % (project, metric_name)
        try:
            self._gax_api.delete_log_metric(path, options)
        except GaxError as exc:
            if exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
                raise NotFound(path)
            raise
def _build_paging_options(page_token=None):
    """Build GAX call options carrying a page token.

    Helper for the ``list_*`` methods of the API wrappers in this module.
    (The old docstring referenced ``_PublisherAPI.list_topics`` — a
    copy-paste from the pubsub package — with broken ``:meth:`` quoting.)

    :type page_token: str or None
    :param page_token: opaque marker for the desired page; ``None`` means
                       start from the first page (``INITIAL_PAGE``).

    :rtype: :class:`google.gax.CallOptions`
    :returns: call options with ``page_token`` set.
    """
    if page_token is None:
        page_token = INITIAL_PAGE
    options = {'page_token': page_token}
    return CallOptions(**options)
def _mon_resource_pb_to_mapping(resource_pb):
"""Helper for :func:_log_entry_pb_to_mapping"""
mapping = {
'type': resource_pb.type,
}
if resource_pb.labels:
mapping['labels'] = resource_pb.labels
return mapping
def _pb_timestamp_to_rfc3339(timestamp_pb):
    """Render a protobuf timestamp as an RFC 3339 string.

    Helper for :func:`_log_entry_pb_to_mapping`.
    """
    return _datetime_to_rfc3339(_pb_timestamp_to_datetime(timestamp_pb))
def _log_entry_pb_to_mapping(entry_pb):
    """Convert a ``LogEntry`` protobuf into a REST-style mapping.

    Helper for :meth:`list_entries`, et aliae.  Ideally this would use a
    function from :mod:`protobuf.json_format`, but the right one isn't
    public.  See: https://github.com/google/protobuf/issues/1351
    """
    info = {
        'logName': entry_pb.log_name,
        'resource': _mon_resource_pb_to_mapping(entry_pb.resource),
        'severity': entry_pb.severity,
        'insertId': entry_pb.insert_id,
        'timestamp': _pb_timestamp_to_rfc3339(entry_pb.timestamp),
        'labels': entry_pb.labels,
        'textPayload': entry_pb.text_payload,
        'jsonPayload': entry_pb.json_payload,
        'protoPayload': entry_pb.proto_payload,
    }
    req = entry_pb.http_request
    if req:
        info['httpRequest'] = {
            'request_method': req.request_method,
            'request_url': req.request_url,
            'status': req.status,
            'referer': req.referer,
            'user_agent': req.user_agent,
            'cache_hit': req.cache_hit,
            'request_size': req.request_size,
            'response_size': req.response_size,
            'remote_ip': req.remote_ip,
        }
    op = entry_pb.operation
    if op:
        info['operation'] = {
            'producer': op.producer,
            'id': op.id,
            'first': op.first,
            'last': op.last,
        }
    return info
def _http_request_mapping_to_pb(info, request):
"""Helper for _log_entry_mapping_to_pb"""
optional_request_keys = {
'requestMethod': 'request_method',
'requestUrl': 'request_url',
'status': 'status',
'referer': 'referer',
'userAgent': 'user_agent',
'cacheHit': 'cache_hit',
'requestSize': 'request_size',
'responseSize': 'response_size',
'remoteIp': 'remote_ip',
}
for key, pb_name in optional_request_keys.items():
if key in info:
setattr(request, pb_name, info[key])
def _log_operation_mapping_to_pb(info, operation):
"""Helper for _log_entry_mapping_to_pb"""
operation.producer = info['producer']
operation.id = info['id']
if 'first' in info:
operation.first = info['first']
if 'last' in info:
operation.last = info['last']
def _log_entry_mapping_to_pb(mapping):
    """Convert a REST-style entry mapping into a ``LogEntry`` protobuf.

    Helper for :meth:`write_entries`, et aliae.  Ideally this would use a
    function from :mod:`protobuf.json_format`, but the right one isn't
    public.  See: https://github.com/google/protobuf/issues/1351
    """
    # pylint: disable=too-many-branches
    entry_pb = LogEntry()

    scalar_fields = {
        'logName': 'log_name',
        'insertId': 'insert_id',
        'textPayload': 'text_payload',
    }
    for json_key, pb_field in scalar_fields.items():
        if json_key in mapping:
            setattr(entry_pb, pb_field, mapping[json_key])

    if 'resource' in mapping:
        entry_pb.resource.type = mapping['resource']['type']

    if 'severity' in mapping:
        severity = mapping['severity']
        # Accept either the symbolic enum name or its numeric value.
        if isinstance(severity, str):
            severity = LogSeverity.Value(severity)
        entry_pb.severity = severity

    if 'timestamp' in mapping:
        entry_pb.timestamp.CopyFrom(
            _datetime_to_pb_timestamp(mapping['timestamp']))

    if 'labels' in mapping:
        for label, value in mapping['labels'].items():
            entry_pb.labels[label] = value

    if 'jsonPayload' in mapping:
        for field, value in mapping['jsonPayload'].items():
            entry_pb.json_payload[field] = value

    if 'protoPayload' in mapping:
        # Round-trip through JSON text: the only public protobuf entry
        # point for filling an arbitrary message from a mapping.
        Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload)

    if 'httpRequest' in mapping:
        _http_request_mapping_to_pb(
            mapping['httpRequest'], entry_pb.http_request)

    if 'operation' in mapping:
        _log_operation_mapping_to_pb(
            mapping['operation'], entry_pb.operation)

    return entry_pb
# pylint: enable=too-many-branches
def _log_sink_pb_to_mapping(sink_pb):
"""Helper for :meth:`list_sinks`, et aliae
Ideally, would use a function from :mod:`protobuf.json_format`, but
the right one isn't public. See:
https://github.com/google/protobuf/issues/1351
"""
return {
'name': sink_pb.name,
'destination': sink_pb.destination,
'filter': sink_pb.filter,
}
def _log_metric_pb_to_mapping(metric_pb):
"""Helper for :meth:`list_metrics`, et aliae
Ideally, would use a function from :mod:`protobuf.json_format`, but
the right one isn't public. See:
https://github.com/google/protobuf/issues/1351
"""
return {
'name': metric_pb.name,
'description': metric_pb.description,
'filter': metric_pb.filter,
}

View file

@@ -0,0 +1,300 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google Cloud Logging API."""
import os
try:
from google.logging.v2.config_service_v2_api import (
ConfigServiceV2Api as GeneratedSinksAPI)
from google.logging.v2.logging_service_v2_api import (
LoggingServiceV2Api as GeneratedLoggingAPI)
from google.logging.v2.metrics_service_v2_api import (
MetricsServiceV2Api as GeneratedMetricsAPI)
from gcloud.logging._gax import _LoggingAPI as GAXLoggingAPI
from gcloud.logging._gax import _MetricsAPI as GAXMetricsAPI
from gcloud.logging._gax import _SinksAPI as GAXSinksAPI
except ImportError: # pragma: NO COVER
_HAVE_GAX = False
GeneratedLoggingAPI = GAXLoggingAPI = None
GeneratedMetricsAPI = GAXMetricsAPI = None
GeneratedSinksAPI = GAXSinksAPI = None
else:
_HAVE_GAX = True
from gcloud.client import JSONClient
from gcloud.logging.connection import Connection
from gcloud.logging.connection import _LoggingAPI as JSONLoggingAPI
from gcloud.logging.connection import _MetricsAPI as JSONMetricsAPI
from gcloud.logging.connection import _SinksAPI as JSONSinksAPI
from gcloud.logging.entries import ProtobufEntry
from gcloud.logging.entries import StructEntry
from gcloud.logging.entries import TextEntry
from gcloud.logging.logger import Logger
from gcloud.logging.metric import Metric
from gcloud.logging.sink import Sink
_USE_GAX = _HAVE_GAX and (os.environ.get('GCLOUD_ENABLE_GAX') is not None)
class Client(JSONClient):
    """Client to bundle configuration needed for API requests.

    :type project: str
    :param project: the project which the client acts on behalf of.
                    If not passed, falls back to the default inferred
                    from the environment.

    :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
                       :class:`NoneType`
    :param credentials: The OAuth2 Credentials to use for the connection
                        owned by this client.  If not passed (and if no
                        ``http`` object is passed), falls back to the
                        default inferred from the environment.

    :type http: :class:`httplib2.Http` or class that defines ``request()``.
    :param http: An optional HTTP object to make requests.  If not passed,
                 an ``http`` object is created that is bound to the
                 ``credentials`` for the current object.
    """

    _connection_class = Connection
    # Transport helpers are constructed lazily by the properties below.
    _logging_api = _sinks_api = _metrics_api = None

    @property
    def logging_api(self):
        """Helper for logging-related API calls.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
        """
        if self._logging_api is None:
            # Use the GAX (gRPC) transport when the optional GAX packages
            # imported successfully and GCLOUD_ENABLE_GAX is set; fall
            # back to the JSON-over-HTTP transport otherwise.
            if _USE_GAX:
                generated = GeneratedLoggingAPI()
                self._logging_api = GAXLoggingAPI(generated)
            else:
                self._logging_api = JSONLoggingAPI(self.connection)
        return self._logging_api

    @property
    def sinks_api(self):
        """Helper for log sink-related API calls.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
        """
        if self._sinks_api is None:
            # Same GAX-vs-JSON transport selection as ``logging_api``.
            if _USE_GAX:
                generated = GeneratedSinksAPI()
                self._sinks_api = GAXSinksAPI(generated)
            else:
                self._sinks_api = JSONSinksAPI(self.connection)
        return self._sinks_api

    @property
    def metrics_api(self):
        """Helper for log metric-related API calls.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
        """
        if self._metrics_api is None:
            # Same GAX-vs-JSON transport selection as ``logging_api``.
            if _USE_GAX:
                generated = GeneratedMetricsAPI()
                self._metrics_api = GAXMetricsAPI(generated)
            else:
                self._metrics_api = JSONMetricsAPI(self.connection)
        return self._metrics_api

    def logger(self, name):
        """Creates a logger bound to the current client.

        :type name: str
        :param name: the name of the logger to be constructed.

        :rtype: :class:`gcloud.logging.logger.Logger`
        :returns: Logger created with the current client.
        """
        return Logger(name, client=self)

    def _entry_from_resource(self, resource, loggers):
        """Detect correct entry type from resource and instantiate.

        :type resource: dict
        :param resource: one entry resource from API response

        :type loggers: dict or None
        :param loggers: A mapping of logger fullnames -> loggers.  If not
                        passed, the entry will have a newly-created logger.

        :rtype: One of:
                :class:`gcloud.logging.entries.TextEntry`,
                :class:`gcloud.logging.entries.StructEntry`,
                :class:`gcloud.logging.entries.ProtobufEntry`
        :returns: the entry instance, constructed via the resource
        """
        # Dispatch on which payload key the API returned; exactly one of
        # these is present in a valid log entry resource.
        if 'textPayload' in resource:
            return TextEntry.from_api_repr(resource, self, loggers)
        elif 'jsonPayload' in resource:
            return StructEntry.from_api_repr(resource, self, loggers)
        elif 'protoPayload' in resource:
            return ProtobufEntry.from_api_repr(resource, self, loggers)
        raise ValueError('Cannot parse log entry resource')

    def list_entries(self, projects=None, filter_=None, order_by=None,
                     page_size=None, page_token=None):
        """Return a page of log entries.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list

        :type projects: list of strings
        :param projects: project IDs to include.  If not passed,
                         defaults to the project bound to the client.

        :type filter_: str
        :param filter_: a filter expression.  See:
                        https://cloud.google.com/logging/docs/view/advanced_filters

        :type order_by: str
        :param order_by: One of :data:`gcloud.logging.ASCENDING` or
                         :data:`gcloud.logging.DESCENDING`.

        :type page_size: int
        :param page_size: maximum number of entries to return.  If not
                          passed, defaults to a value set by the API.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of entries.
                           If not passed, the API will return the first
                           page of entries.

        :rtype: tuple, (list, str)
        :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a
                  "next page token" string: if not None, indicates that
                  more entries can be retrieved with another call (pass
                  that value as ``page_token``).
        """
        if projects is None:
            projects = [self.project]
        resources, token = self.logging_api.list_entries(
            projects=projects, filter_=filter_, order_by=order_by,
            page_size=page_size, page_token=page_token)
        # Shared across entries so loggers are constructed at most once
        # per logger name within this page.
        loggers = {}
        entries = [self._entry_from_resource(resource, loggers)
                   for resource in resources]
        return entries, token

    def sink(self, name, filter_=None, destination=None):
        """Creates a sink bound to the current client.

        :type name: str
        :param name: the name of the sink to be constructed.

        :type filter_: str
        :param filter_: (optional) the advanced logs filter expression
                        defining the entries exported by the sink.  If not
                        passed, the instance should already exist, to be
                        refreshed via :meth:`Sink.reload`.

        :type destination: str
        :param destination: destination URI for the entries exported by
                            the sink.  If not passed, the instance should
                            already exist, to be refreshed via
                            :meth:`Sink.reload`.

        :rtype: :class:`gcloud.logging.sink.Sink`
        :returns: Sink created with the current client.
        """
        return Sink(name, filter_, destination, client=self)

    def list_sinks(self, page_size=None, page_token=None):
        """List sinks for the project associated with this client.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list

        :type page_size: int
        :param page_size: maximum number of sinks to return.  If not
                          passed, defaults to a value set by the API.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of sinks.
                           If not passed, the API will return the first
                           page of sinks.

        :rtype: tuple, (list, str)
        :returns: list of :class:`gcloud.logging.sink.Sink`, plus a
                  "next page token" string: if not None, indicates that
                  more sinks can be retrieved with another call (pass that
                  value as ``page_token``).
        """
        resources, token = self.sinks_api.list_sinks(
            self.project, page_size, page_token)
        sinks = [Sink.from_api_repr(resource, self)
                 for resource in resources]
        return sinks, token

    def metric(self, name, filter_=None, description=''):
        """Creates a metric bound to the current client.

        :type name: str
        :param name: the name of the metric to be constructed.

        :type filter_: str
        :param filter_: the advanced logs filter expression defining the
                        entries tracked by the metric.  If not passed, the
                        instance should already exist, to be refreshed via
                        :meth:`Metric.reload`.

        :type description: str
        :param description: the description of the metric to be
                            constructed.  If not passed, the instance
                            should already exist, to be refreshed via
                            :meth:`Metric.reload`.

        :rtype: :class:`gcloud.logging.metric.Metric`
        :returns: Metric created with the current client.
        """
        return Metric(name, filter_, client=self, description=description)

    def list_metrics(self, page_size=None, page_token=None):
        """List metrics for the project associated with this client.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list

        :type page_size: int
        :param page_size: maximum number of metrics to return.  If not
                          passed, defaults to a value set by the API.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of metrics.
                           If not passed, the API will return the first
                           page of metrics.

        :rtype: tuple, (list, str)
        :returns: list of :class:`gcloud.logging.metric.Metric`, plus a
                  "next page token" string: if not None, indicates that
                  more metrics can be retrieved with another call (pass
                  that value as ``page_token``).
        """
        resources, token = self.metrics_api.list_metrics(
            self.project, page_size, page_token)
        metrics = [Metric.from_api_repr(resource, self)
                   for resource in resources]
        return metrics, token

View file

@@ -0,0 +1,435 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create / interact with gcloud logging connections."""
from gcloud import connection as base_connection
class Connection(base_connection.JSONConnection):
    """A connection to Google Cloud Logging via the JSON REST API.

    :type credentials: :class:`oauth2client.client.OAuth2Credentials`
    :param credentials: (Optional) The OAuth2 Credentials to use for this
                        connection.

    :type http: :class:`httplib2.Http` or class that defines ``request()``.
    :param http: (Optional) HTTP object to make requests.

    :type api_base_url: string
    :param api_base_url: The base of the API call URL.  Defaults to the
                         value :attr:`Connection.API_BASE_URL`.
    """

    API_BASE_URL = 'https://logging.googleapis.com'
    """The base of the API call URL."""

    API_VERSION = 'v2beta1'
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
    """A template for the URL of a particular API call."""

    SCOPE = ('https://www.googleapis.com/auth/logging.read',
             'https://www.googleapis.com/auth/logging.write',
             'https://www.googleapis.com/auth/logging.admin',
             'https://www.googleapis.com/auth/cloud-platform')
    """The scopes required for authenticating as a Cloud Logging consumer."""
class _LoggingAPI(object):
"""Helper mapping logging-related APIs.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
:type connection: :class:`gcloud.logging.connection.Connection`
:param connection: the connection used to make API requests.
"""
def __init__(self, connection):
self._connection = connection
def list_entries(self, projects, filter_=None, order_by=None,
page_size=None, page_token=None):
"""Return a page of log entry resources.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list
:type projects: list of strings
:param projects: project IDs to include. If not passed,
defaults to the project bound to the client.
:type filter_: str
:param filter_: a filter expression. See:
https://cloud.google.com/logging/docs/view/advanced_filters
:type order_by: str
:param order_by: One of :data:`gcloud.logging.ASCENDING` or
:data:`gcloud.logging.DESCENDING`.
:type page_size: int
:param page_size: maximum number of entries to return, If not passed,
defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of entries. If not
passed, the API will return the first page of
entries.
:rtype: tuple, (list, str)
:returns: list of mappings, plus a "next page token" string:
if not None, indicates that more entries can be retrieved
with another call (pass that value as ``page_token``).
"""
params = {'projectIds': projects}
if filter_ is not None:
params['filter'] = filter_
if order_by is not None:
params['orderBy'] = order_by
if page_size is not None:
params['pageSize'] = page_size
if page_token is not None:
params['pageToken'] = page_token
resp = self._connection.api_request(
method='POST', path='/entries:list', data=params)
return resp.get('entries', ()), resp.get('nextPageToken')
def write_entries(self, entries, logger_name=None, resource=None,
labels=None):
"""API call: log an entry resource via a POST request
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write
:type entries: sequence of mapping
:param entries: the log entry resources to log.
:type logger_name: string
:param logger_name: name of default logger to which to log the entries;
individual entries may override.
:type resource: mapping
:param resource: default resource to associate with entries;
individual entries may override.
:type labels: mapping
:param labels: default labels to associate with entries;
individual entries may override.
"""
data = {'entries': list(entries)}
if logger_name is not None:
data['logName'] = logger_name
if resource is not None:
data['resource'] = resource
if labels is not None:
data['labels'] = labels
self._connection.api_request(method='POST', path='/entries:write',
data=data)
def logger_delete(self, project, logger_name):
    """API call:  delete all entries in a logger via a DELETE request

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete

    :type project: string
    :param project: ID of project containing the log entries to delete

    :type logger_name: string
    :param logger_name: name of logger containing the log entries to delete
    """
    self._connection.api_request(
        method='DELETE',
        path='/projects/%s/logs/%s' % (project, logger_name))
class _SinksAPI(object):
    """Helper mapping sink-related APIs.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks

    :type connection: :class:`gcloud.logging.connection.Connection`
    :param connection: the connection used to make API requests.
    """
    def __init__(self, connection):
        self._connection = connection

    def list_sinks(self, project, page_size=None, page_token=None):
        """List sinks for the project associated with this client.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list

        :type project: string
        :param project: ID of the project whose sinks are to be listed.

        :type page_size: int
        :param page_size: maximum number of sinks to return.  If not passed,
                          defaults to a value set by the API.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of sinks.  If
                           not passed, the API will return the first page of
                           sinks.

        :rtype: tuple, (list, str)
        :returns: list of mappings, plus a "next page token" string:
                  if not None, indicates that more sinks can be retrieved
                  with another call (pass that value as ``page_token``).
        """
        paging = (('pageSize', page_size), ('pageToken', page_token))
        params = {key: value for key, value in paging if value is not None}
        resp = self._connection.api_request(
            method='GET', path='/projects/%s/sinks' % (project,),
            query_params=params)
        return resp.get('sinks', ()), resp.get('nextPageToken')

    def sink_create(self, project, sink_name, filter_, destination):
        """API call:  create a sink resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create

        :type project: string
        :param project: ID of the project in which to create the sink.

        :type sink_name: string
        :param sink_name: the name of the sink

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries exported by the sink.

        :type destination: string
        :param destination: destination URI for the entries exported by
                            the sink.
        """
        self._connection.api_request(
            method='POST',
            path='/projects/%s/sinks' % (project,),
            data={
                'name': sink_name,
                'filter': filter_,
                'destination': destination,
            })

    def sink_get(self, project, sink_name):
        """API call:  retrieve a sink resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get

        :type project: string
        :param project: ID of the project containing the sink.

        :type sink_name: string
        :param sink_name: the name of the sink
        """
        path = '/projects/%s/sinks/%s' % (project, sink_name)
        return self._connection.api_request(method='GET', path=path)

    def sink_update(self, project, sink_name, filter_, destination):
        """API call:  update a sink resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update

        :type project: string
        :param project: ID of the project containing the sink.

        :type sink_name: string
        :param sink_name: the name of the sink

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries exported by the sink.

        :type destination: string
        :param destination: destination URI for the entries exported by
                            the sink.
        """
        path = '/projects/%s/sinks/%s' % (project, sink_name)
        body = {
            'name': sink_name,
            'filter': filter_,
            'destination': destination,
        }
        self._connection.api_request(method='PUT', path=path, data=body)

    def sink_delete(self, project, sink_name):
        """API call:  delete a sink resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete

        :type project: string
        :param project: ID of the project containing the sink.

        :type sink_name: string
        :param sink_name: the name of the sink
        """
        path = '/projects/%s/sinks/%s' % (project, sink_name)
        self._connection.api_request(method='DELETE', path=path)
class _MetricsAPI(object):
    """Helper mapping metric-related APIs.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics

    :type connection: :class:`gcloud.logging.connection.Connection`
    :param connection: the connection used to make API requests.
    """
    def __init__(self, connection):
        self._connection = connection

    def list_metrics(self, project, page_size=None, page_token=None):
        """List metrics for the project associated with this client.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list

        :type project: string
        :param project: ID of the project whose metrics are to be listed.

        :type page_size: int
        :param page_size: maximum number of metrics to return.  If not
                          passed, defaults to a value set by the API.

        :type page_token: str
        :param page_token: opaque marker for the next "page" of metrics.  If
                           not passed, the API will return the first page of
                           metrics.

        :rtype: tuple, (list, str)
        :returns: list of mappings, plus a "next page token" string:
                  if not None, indicates that more metrics can be retrieved
                  with another call (pass that value as ``page_token``).
        """
        params = {}
        if page_size is not None:
            params['pageSize'] = page_size
        if page_token is not None:
            params['pageToken'] = page_token
        path = '/projects/%s/metrics' % (project,)
        resp = self._connection.api_request(
            method='GET', path=path, query_params=params)
        metrics = resp.get('metrics', ())
        return metrics, resp.get('nextPageToken')

    def metric_create(self, project, metric_name, filter_, description=None):
        """API call:  create a metric resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create

        :type project: string
        :param project: ID of the project in which to create the metric.

        :type metric_name: string
        :param metric_name: the name of the metric

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries tracked by the metric.

        :type description: string
        :param description: description of the metric.
        """
        target = '/projects/%s/metrics' % (project,)
        data = {
            'name': metric_name,
            'filter': filter_,
            'description': description,
        }
        self._connection.api_request(method='POST', path=target, data=data)

    def metric_get(self, project, metric_name):
        """API call:  retrieve a metric resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get

        :type project: string
        :param project: ID of the project containing the metric.

        :type metric_name: string
        :param metric_name: the name of the metric

        :rtype: dict
        :returns: the metric resource returned by the API.
        """
        target = '/projects/%s/metrics/%s' % (project, metric_name)
        return self._connection.api_request(method='GET', path=target)

    def metric_update(self, project, metric_name, filter_, description):
        """API call:  update a metric resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update

        :type project: string
        :param project: ID of the project containing the metric.

        :type metric_name: string
        :param metric_name: the name of the metric

        :type filter_: string
        :param filter_: the advanced logs filter expression defining the
                        entries tracked by the metric.

        :type description: string
        :param description: description of the metric.
        """
        target = '/projects/%s/metrics/%s' % (project, metric_name)
        data = {
            'name': metric_name,
            'filter': filter_,
            'description': description,
        }
        self._connection.api_request(method='PUT', path=target, data=data)

    def metric_delete(self, project, metric_name):
        """API call:  delete a metric resource.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete

        :type project: string
        :param project: ID of the project containing the metric.

        :type metric_name: string
        :param metric_name: the name of the metric
        """
        target = '/projects/%s/metrics/%s' % (project, metric_name)
        self._connection.api_request(method='DELETE', path=target)

View file

@ -0,0 +1,157 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Log entries within the Google Cloud Logging API."""
import json
import re
from google.protobuf.json_format import Parse
from gcloud._helpers import _name_from_project_path
from gcloud._helpers import _rfc3339_nanos_to_datetime
_LOGGER_TEMPLATE = re.compile(r"""
projects/ # static prefix
(?P<project>[^/]+) # initial letter, wordchars + hyphen
/logs/ # static midfix
(?P<name>[^/]+) # initial letter, wordchars + allowed punc
""", re.VERBOSE)
def logger_name_from_path(path):
    """Validate a logger URI path and get the logger name.

    :type path: str
    :param path: URI path for a logger API request, of the form
                 ``projects/<project>/logs/<name>``.

    :rtype: str
    :returns: Logger name parsed from ``path``.
    :raises: :class:`ValueError` if the ``path`` is ill-formed.
    """
    # ``None`` for the project argument disables project validation; only
    # the shape of the path is checked here.
    return _name_from_project_path(path, None, _LOGGER_TEMPLATE)
class _BaseEntry(object):
    """Base class for the concrete log entry types.

    Subclasses (``TextEntry``, ``StructEntry``, ``ProtobufEntry``) set
    ``_PAYLOAD_KEY`` to the resource key under which their payload lives.

    :type payload: text or dict
    :param payload: The payload passed as ``textPayload``, ``jsonPayload``,
                    or ``protoPayload``.

    :type logger: :class:`gcloud.logging.logger.Logger`
    :param logger: the logger used to write the entry.

    :type insert_id: text, or :class:`NoneType`
    :param insert_id: (optional) the ID used to identify an entry uniquely.

    :type timestamp: :class:`datetime.datetime`, or :class:`NoneType`
    :param timestamp: (optional) timestamp for the entry

    :type labels: dict or :class:`NoneType`
    :param labels: (optional) mapping of labels for the entry

    :type severity: string or :class:`NoneType`
    :param severity: (optional) severity of event being logged.

    :type http_request: dict or :class:`NoneType`
    :param http_request: (optional) info about HTTP request associated with
                         the entry
    """
    def __init__(self, payload, logger, insert_id=None, timestamp=None,
                 labels=None, severity=None, http_request=None):
        self.payload = payload
        self.logger = logger
        self.insert_id = insert_id
        self.timestamp = timestamp
        self.labels = labels
        self.severity = severity
        self.http_request = http_request

    @classmethod
    def from_api_repr(cls, resource, client, loggers=None):
        """Factory:  construct an entry given its API representation

        :type resource: dict
        :param resource: text entry resource representation returned from
                         the API

        :type client: :class:`gcloud.logging.client.Client`
        :param client: Client which holds credentials and project
                       configuration.

        :type loggers: dict or None
        :param loggers: A mapping of logger fullnames -> loggers.  If not
                        passed, the entry will have a newly-created logger.

        :rtype: :class:`gcloud.logging.entries._BaseEntry`
        :returns: Entry parsed from ``resource``.
        """
        if loggers is None:
            loggers = {}
        logger_fullname = resource['logName']
        # Re-use a cached logger when the caller supplied one for this path.
        logger = loggers.get(logger_fullname)
        if logger is None:
            logger = client.logger(logger_name_from_path(logger_fullname))
            loggers[logger_fullname] = logger
        raw_timestamp = resource.get('timestamp')
        timestamp = (None if raw_timestamp is None
                     else _rfc3339_nanos_to_datetime(raw_timestamp))
        return cls(resource[cls._PAYLOAD_KEY], logger,
                   insert_id=resource.get('insertId'),
                   timestamp=timestamp,
                   labels=resource.get('labels'),
                   severity=resource.get('severity'),
                   http_request=resource.get('httpRequest'))
class TextEntry(_BaseEntry):
    """Entry created with ``textPayload``.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry
    """
    # Resource key under which this entry type stores its payload.
    _PAYLOAD_KEY = 'textPayload'
class StructEntry(_BaseEntry):
    """Entry created with ``jsonPayload``.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry
    """
    # Resource key under which this entry type stores its payload.
    _PAYLOAD_KEY = 'jsonPayload'
class ProtobufEntry(_BaseEntry):
    """Entry created with ``protoPayload``.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry
    """
    # Resource key under which this entry type stores its payload.
    _PAYLOAD_KEY = 'protoPayload'

    def parse_message(self, message):
        """Parse payload into a protobuf message.

        Mutates the passed-in ``message`` in place.

        :type message: Protobuf message
        :param message: the message to be logged
        """
        # Round-trip through JSON text:  the stored payload is a plain dict,
        # which ``google.protobuf.json_format.Parse`` consumes as JSON.
        Parse(json.dumps(self.payload), message)

View file

@ -0,0 +1,443 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define API Loggers."""
import json
from google.protobuf.json_format import MessageToJson
class Logger(object):
    """Loggers represent named targets for log entries.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs

    :type name: string
    :param name: the name of the logger

    :type client: :class:`gcloud.logging.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the logger (which requires a project).

    :type labels: dict or :class:`NoneType`
    :param labels: (optional) mapping of default labels for entries written
                   via this logger.
    """
    def __init__(self, name, client, labels=None):
        self.name = name
        self._client = client
        self.labels = labels

    @property
    def client(self):
        """Client bound to the logger."""
        return self._client

    @property
    def project(self):
        """Project bound to the logger."""
        return self._client.project

    @property
    def full_name(self):
        """Fully-qualified name used in logging APIs.

        Of the form ``projects/<project>/logs/<name>``.
        """
        return 'projects/%s/logs/%s' % (self.project, self.name)

    @property
    def path(self):
        """URI path for use in logging APIs."""
        return '/%s' % (self.full_name,)

    def _require_client(self, client):
        """Check client or verify over-ride.

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current logger.

        :rtype: :class:`gcloud.logging.client.Client`
        :returns: The client passed in or the currently bound client.
        """
        if client is None:
            client = self._client
        return client

    def batch(self, client=None):
        """Return a batch to use as a context manager.

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current logger.

        :rtype: :class:`Batch`
        :returns: A batch to use as a context manager.
        """
        client = self._require_client(client)
        return Batch(self, client)

    def _make_entry_resource(self, text=None, info=None, message=None,
                             labels=None, insert_id=None, severity=None,
                             http_request=None):
        """Return a log entry resource of the appropriate type.

        Helper for :meth:`log_text`, :meth:`log_struct`, and
        :meth:`log_proto`.

        Only one of ``text``, ``info``, or ``message`` should be passed.

        :type text: string or :class:`NoneType`
        :param text: text payload

        :type info: dict or :class:`NoneType`
        :param info: struct payload

        :type message: Protobuf message or :class:`NoneType`
        :param message: protobuf payload

        :type labels: dict or :class:`NoneType`
        :param labels: labels passed in to calling method.

        :type insert_id: string or :class:`NoneType`
        :param insert_id: (optional) unique ID for log entry.

        :type severity: string or :class:`NoneType`
        :param severity: (optional) severity of event being logged.

        :type http_request: dict or :class:`NoneType`
        :param http_request: (optional) info about HTTP request associated
                             with the entry

        :rtype: dict
        :returns: The entry resource mapping to send to the API.
        """
        resource = {
            'logName': self.full_name,
            'resource': {'type': 'global'},
        }
        if text is not None:
            resource['textPayload'] = text
        if info is not None:
            resource['jsonPayload'] = info
        if message is not None:
            # Protobuf messages are sent as their JSON-dict equivalent.
            as_json_str = MessageToJson(message)
            as_json = json.loads(as_json_str)
            resource['protoPayload'] = as_json
        if labels is None:
            # Fall back to the logger-level default labels, if any.
            labels = self.labels
        if labels is not None:
            resource['labels'] = labels
        if insert_id is not None:
            resource['insertId'] = insert_id
        if severity is not None:
            resource['severity'] = severity
        if http_request is not None:
            resource['httpRequest'] = http_request
        return resource

    def log_text(self, text, client=None, labels=None, insert_id=None,
                 severity=None, http_request=None):
        """API call:  log a text message via a POST request

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write

        :type text: text
        :param text: the log message.

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current logger.

        :type labels: dict or :class:`NoneType`
        :param labels: (optional) mapping of labels for the entry.

        :type insert_id: string or :class:`NoneType`
        :param insert_id: (optional) unique ID for log entry.

        :type severity: string or :class:`NoneType`
        :param severity: (optional) severity of event being logged.

        :type http_request: dict or :class:`NoneType`
        :param http_request: (optional) info about HTTP request associated
                             with the entry
        """
        client = self._require_client(client)
        entry_resource = self._make_entry_resource(
            text=text, labels=labels, insert_id=insert_id, severity=severity,
            http_request=http_request)
        client.logging_api.write_entries([entry_resource])

    def log_struct(self, info, client=None, labels=None, insert_id=None,
                   severity=None, http_request=None):
        """API call:  log a structured message via a POST request

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write

        :type info: dict
        :param info: the log entry information

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current logger.

        :type labels: dict or :class:`NoneType`
        :param labels: (optional) mapping of labels for the entry.

        :type insert_id: string or :class:`NoneType`
        :param insert_id: (optional) unique ID for log entry.

        :type severity: string or :class:`NoneType`
        :param severity: (optional) severity of event being logged.

        :type http_request: dict or :class:`NoneType`
        :param http_request: (optional) info about HTTP request associated
                             with the entry
        """
        client = self._require_client(client)
        entry_resource = self._make_entry_resource(
            info=info, labels=labels, insert_id=insert_id, severity=severity,
            http_request=http_request)
        client.logging_api.write_entries([entry_resource])

    def log_proto(self, message, client=None, labels=None, insert_id=None,
                  severity=None, http_request=None):
        """API call:  log a protobuf message via a POST request

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write

        :type message: Protobuf message
        :param message: the message to be logged

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current logger.

        :type labels: dict or :class:`NoneType`
        :param labels: (optional) mapping of labels for the entry.

        :type insert_id: string or :class:`NoneType`
        :param insert_id: (optional) unique ID for log entry.

        :type severity: string or :class:`NoneType`
        :param severity: (optional) severity of event being logged.

        :type http_request: dict or :class:`NoneType`
        :param http_request: (optional) info about HTTP request associated
                             with the entry
        """
        client = self._require_client(client)
        entry_resource = self._make_entry_resource(
            message=message, labels=labels, insert_id=insert_id,
            severity=severity, http_request=http_request)
        client.logging_api.write_entries([entry_resource])

    def delete(self, client=None):
        """API call:  delete all entries in a logger via a DELETE request

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current logger.
        """
        client = self._require_client(client)
        client.logging_api.logger_delete(self.project, self.name)

    def list_entries(self, projects=None, filter_=None, order_by=None,
                     page_size=None, page_token=None):
        """Return a page of log entries.

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list

        :type projects: list of strings
        :param projects: project IDs to include.  If not passed,
                         defaults to the project bound to the client.

        :type filter_: string
        :param filter_: a filter expression.  See:
                        https://cloud.google.com/logging/docs/view/advanced_filters

        :type order_by: string
        :param order_by: One of :data:`gcloud.logging.ASCENDING` or
                         :data:`gcloud.logging.DESCENDING`.

        :type page_size: int
        :param page_size: maximum number of entries to return.  If not
                          passed, defaults to a value set by the API.

        :type page_token: string
        :param page_token: opaque marker for the next "page" of entries.  If
                           not passed, the API will return the first page of
                           entries.

        :rtype: tuple, (list, str)
        :returns: list of :class:`gcloud.logging.entries.TextEntry`, plus a
                  "next page token" string:  if not None, indicates that
                  more entries can be retrieved with another call (pass that
                  value as ``page_token``).
        """
        # Always restrict results to this logger; AND in any caller filter.
        log_filter = 'logName=%s' % (self.full_name,)
        if filter_ is not None:
            filter_ = '%s AND %s' % (filter_, log_filter)
        else:
            filter_ = log_filter
        return self.client.list_entries(
            projects=projects, filter_=filter_, order_by=order_by,
            page_size=page_size, page_token=page_token)
class Batch(object):
    """Context manager:  collect entries to log via a single API call.

    Helper returned by :meth:`Logger.batch`

    :type logger: :class:`gcloud.logging.logger.Logger`
    :param logger: the logger to which entries will be logged.

    :type client: :class:`gcloud.logging.client.Client`
    :param client: The client to use.
    """
    def __init__(self, logger, client):
        self.logger = logger
        self.entries = []
        self.client = client

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Commit only on a clean exit; a raised exception discards nothing
        # but sends nothing either.
        if exc_type is None:
            self.commit()

    def log_text(self, text, labels=None, insert_id=None, severity=None,
                 http_request=None):
        """Add a text entry to be logged during :meth:`commit`.

        :type text: string
        :param text: the text entry

        :type labels: dict or :class:`NoneType`
        :param labels: (optional) mapping of labels for the entry.

        :type insert_id: string or :class:`NoneType`
        :param insert_id: (optional) unique ID for log entry.

        :type severity: string or :class:`NoneType`
        :param severity: (optional) severity of event being logged.

        :type http_request: dict or :class:`NoneType`
        :param http_request: (optional) info about HTTP request associated
                             with the entry.
        """
        record = ('text', text, labels, insert_id, severity, http_request)
        self.entries.append(record)

    def log_struct(self, info, labels=None, insert_id=None, severity=None,
                   http_request=None):
        """Add a struct entry to be logged during :meth:`commit`.

        :type info: dict
        :param info: the struct entry

        :type labels: dict or :class:`NoneType`
        :param labels: (optional) mapping of labels for the entry.

        :type insert_id: string or :class:`NoneType`
        :param insert_id: (optional) unique ID for log entry.

        :type severity: string or :class:`NoneType`
        :param severity: (optional) severity of event being logged.

        :type http_request: dict or :class:`NoneType`
        :param http_request: (optional) info about HTTP request associated
                             with the entry.
        """
        record = ('struct', info, labels, insert_id, severity, http_request)
        self.entries.append(record)

    def log_proto(self, message, labels=None, insert_id=None, severity=None,
                  http_request=None):
        """Add a protobuf entry to be logged during :meth:`commit`.

        :type message: protobuf message
        :param message: the protobuf entry

        :type labels: dict or :class:`NoneType`
        :param labels: (optional) mapping of labels for the entry.

        :type insert_id: string or :class:`NoneType`
        :param insert_id: (optional) unique ID for log entry.

        :type severity: string or :class:`NoneType`
        :param severity: (optional) severity of event being logged.

        :type http_request: dict or :class:`NoneType`
        :param http_request: (optional) info about HTTP request associated
                             with the entry.
        """
        record = ('proto', message, labels, insert_id, severity, http_request)
        self.entries.append(record)

    def commit(self, client=None):
        """Send saved log entries as a single API call.

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current batch.
        """
        if client is None:
            client = self.client

        kwargs = {
            'logger_name': self.logger.path,
            'resource': {'type': 'global'},
        }
        if self.logger.labels is not None:
            kwargs['labels'] = self.logger.labels

        # Map each queued entry type to a payload-builder; protobuf
        # messages round-trip through JSON text to a plain dict.
        payload_makers = {
            'text': lambda value: {'textPayload': value},
            'struct': lambda value: {'jsonPayload': value},
            'proto': lambda value: {
                'protoPayload': json.loads(MessageToJson(value))},
        }

        entries = []
        for entry_type, entry, labels, iid, severity, http_req in self.entries:
            maker = payload_makers.get(entry_type)
            if maker is None:
                raise ValueError('Unknown entry type: %s' % (entry_type,))
            info = maker(entry)
            optional = (('labels', labels), ('insertId', iid),
                        ('severity', severity), ('httpRequest', http_req))
            for key, value in optional:
                if value is not None:
                    info[key] = value
            entries.append(info)

        client.logging_api.write_entries(entries, **kwargs)
        del self.entries[:]

View file

@ -0,0 +1,174 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define Logging API Metrics."""
from gcloud.exceptions import NotFound
class Metric(object):
    """Metrics represent named filters for log entries.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics

    :type name: string
    :param name: the name of the metric

    :type filter_: string
    :param filter_: the advanced logs filter expression defining the entries
                    tracked by the metric.  If not passed, the instance
                    should already exist, to be refreshed via :meth:`reload`.

    :type client: :class:`gcloud.logging.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the metric (which requires a project).

    :type description: string
    :param description: an optional description of the metric.
    """
    def __init__(self, name, filter_=None, client=None, description=''):
        self.name = name
        self._client = client
        self.filter_ = filter_
        self.description = description

    @property
    def client(self):
        """Client bound to the metric."""
        return self._client

    @property
    def project(self):
        """Project bound to the metric."""
        return self._client.project

    @property
    def full_name(self):
        """Fully-qualified name used in metric APIs.

        Of the form ``projects/<project>/metrics/<name>``.
        """
        return 'projects/%s/metrics/%s' % (self.project, self.name)

    @property
    def path(self):
        """URL path for the metric's APIs."""
        return '/%s' % (self.full_name,)

    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory:  construct a metric given its API representation

        :type resource: dict
        :param resource: metric resource representation returned from the API

        :type client: :class:`gcloud.logging.client.Client`
        :param client: Client which holds credentials and project
                       configuration for the metric.

        :rtype: :class:`gcloud.logging.metric.Metric`
        :returns: Metric parsed from ``resource``.
        """
        metric_name = resource['name']
        filter_ = resource['filter']
        description = resource.get('description', '')
        return cls(metric_name, filter_, client=client,
                   description=description)

    def _require_client(self, client):
        """Check client or verify over-ride.

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current metric.

        :rtype: :class:`gcloud.logging.client.Client`
        :returns: The client passed in or the currently bound client.
        """
        if client is None:
            client = self._client
        return client

    def create(self, client=None):
        """API call:  create the metric via a POST request

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current metric.
        """
        client = self._require_client(client)
        client.metrics_api.metric_create(
            self.project, self.name, self.filter_, self.description)

    def exists(self, client=None):
        """API call:  test for the existence of the metric via a GET request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current metric.

        :rtype: bool
        :returns: True if the metric exists on the server.
        """
        client = self._require_client(client)

        try:
            client.metrics_api.metric_get(self.project, self.name)
        except NotFound:
            return False
        else:
            return True

    def reload(self, client=None):
        """API call:  sync local metric configuration via a GET request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current metric.
        """
        client = self._require_client(client)
        data = client.metrics_api.metric_get(self.project, self.name)
        self.description = data.get('description', '')
        self.filter_ = data['filter']

    def update(self, client=None):
        """API call:  update metric configuration via a PUT request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current metric.
        """
        client = self._require_client(client)
        client.metrics_api.metric_update(
            self.project, self.name, self.filter_, self.description)

    def delete(self, client=None):
        """API call:  delete a metric via a DELETE request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current metric.
        """
        client = self._require_client(client)
        client.metrics_api.metric_delete(self.project, self.name)

View file

@ -0,0 +1,178 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define Logging API Sinks."""
from gcloud.exceptions import NotFound
class Sink(object):
    """A filtered export of log entries.

    See:
    https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks

    :type name: string
    :param name: the name of the sink

    :type filter_: string
    :param filter_: the advanced logs filter expression defining the entries
                    exported by the sink.  If not passed, the instance should
                    already exist, to be refreshed via :meth:`reload`.

    :type destination: string
    :param destination: destination URI for the entries exported by the sink.
                        If not passed, the instance should already exist, to
                        be refreshed via :meth:`reload`.

    :type client: :class:`gcloud.logging.client.Client`
    :param client: A client which holds credentials and project configuration
                   for the sink (which requires a project).
    """

    def __init__(self, name, filter_=None, destination=None, client=None):
        self.name = name
        self.filter_ = filter_
        self.destination = destination
        self._client = client

    @property
    def client(self):
        """Client bound to the sink."""
        return self._client

    @property
    def project(self):
        """Project bound to the sink (delegates to the bound client)."""
        return self._client.project

    @property
    def full_name(self):
        """Fully-qualified name used in sink APIs."""
        return 'projects/%s/sinks/%s' % (self.project, self.name)

    @property
    def path(self):
        """URL path for the sink's APIs."""
        return '/%s' % (self.full_name)

    @classmethod
    def from_api_repr(cls, resource, client):
        """Factory: construct a sink from its API resource representation.

        :type resource: dict
        :param resource: sink resource representation returned from the API

        :type client: :class:`gcloud.logging.client.Client`
        :param client: Client which holds credentials and project
                       configuration for the sink.

        :rtype: :class:`gcloud.logging.sink.Sink`
        :returns: Sink parsed from ``resource``.
        """
        return cls(resource['name'],
                   resource['filter'],
                   resource['destination'],
                   client=client)

    def _require_client(self, client):
        """Return ``client`` if given, else the client bound at construction.

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current sink.

        :rtype: :class:`gcloud.logging.client.Client`
        :returns: The client passed in or the currently bound client.
        """
        return self._client if client is None else client

    def create(self, client=None):
        """API call: create the sink via a PUT request

        See:
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        api = self._require_client(client).sinks_api
        api.sink_create(self.project, self.name, self.filter_,
                        self.destination)

    def exists(self, client=None):
        """API call: test for the existence of the sink via a GET request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        api = self._require_client(client).sinks_api
        try:
            api.sink_get(self.project, self.name)
        except NotFound:
            return False
        return True

    def reload(self, client=None):
        """API call: sync local sink configuration via a GET request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        api = self._require_client(client).sinks_api
        resource = api.sink_get(self.project, self.name)
        self.filter_ = resource['filter']
        self.destination = resource['destination']

    def update(self, client=None):
        """API call: update sink configuration via a PUT request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        api = self._require_client(client).sinks_api
        api.sink_update(self.project, self.name, self.filter_,
                        self.destination)

    def delete(self, client=None):
        """API call: delete a sink via a DELETE request

        See
        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete

        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
        :param client: the client to use.  If not passed, falls back to the
                       ``client`` stored on the current sink.
        """
        api = self._require_client(client).sinks_api
        api.sink_delete(self.project, self.name)

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,495 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestClient(unittest2.TestCase):
    """Unit tests for ``gcloud.logging.client.Client``.

    The client's transport-level collaborators (``logging_api``,
    ``sinks_api``, ``metrics_api``) are replaced with the ``_Dummy*``
    doubles defined at module scope, so no network access occurs.
    """

    PROJECT = 'PROJECT'
    LOGGER_NAME = 'LOGGER_NAME'
    SINK_NAME = 'SINK_NAME'
    # NOTE: the original defined FILTER twice with the same value; the
    # duplicate has been removed.
    FILTER = 'logName:syslog AND severity>=ERROR'
    DESTINATION_URI = 'faux.googleapis.com/destination'
    METRIC_NAME = 'metric_name'
    DESCRIPTION = 'DESCRIPTION'

    def _getTargetClass(self):
        from gcloud.logging.client import Client
        return Client

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        self.assertEqual(client.project, self.PROJECT)

    def test_logging_api_wo_gax(self):
        from gcloud.logging.connection import _LoggingAPI
        from gcloud.logging import client as MUT
        from gcloud._testing import _Monkey
        client = self._makeOne(self.PROJECT, credentials=_Credentials())
        conn = client.connection = object()
        # With GAX disabled, the JSON-over-HTTP API wrapper is used.
        with _Monkey(MUT, _USE_GAX=False):
            api = client.logging_api
        self.assertTrue(isinstance(api, _LoggingAPI))
        self.assertTrue(api._connection is conn)
        # API instance is cached
        again = client.logging_api
        self.assertTrue(again is api)

    def test_logging_api_w_gax(self):
        from gcloud.logging import client as MUT
        from gcloud._testing import _Monkey
        wrapped = object()
        _called_with = []

        def _generated_api(*args, **kw):
            _called_with.append((args, kw))
            return wrapped

        class _GaxLoggingAPI(object):

            def __init__(self, _wrapped):
                self._wrapped = _wrapped

        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        with _Monkey(MUT,
                     _USE_GAX=True,
                     GeneratedLoggingAPI=_generated_api,
                     GAXLoggingAPI=_GaxLoggingAPI):
            api = client.logging_api
        self.assertIsInstance(api, _GaxLoggingAPI)
        self.assertTrue(api._wrapped is wrapped)
        # API instance is cached
        again = client.logging_api
        self.assertTrue(again is api)

    def test_sinks_api_wo_gax(self):
        from gcloud.logging.connection import _SinksAPI
        from gcloud.logging import client as MUT
        from gcloud._testing import _Monkey
        client = self._makeOne(self.PROJECT, credentials=_Credentials())
        conn = client.connection = object()
        with _Monkey(MUT, _USE_GAX=False):
            api = client.sinks_api
        self.assertTrue(isinstance(api, _SinksAPI))
        self.assertTrue(api._connection is conn)
        # API instance is cached
        again = client.sinks_api
        self.assertTrue(again is api)

    def test_sinks_api_w_gax(self):
        from gcloud.logging import client as MUT
        from gcloud._testing import _Monkey
        wrapped = object()
        _called_with = []

        def _generated_api(*args, **kw):
            _called_with.append((args, kw))
            return wrapped

        class _GaxSinksAPI(object):

            def __init__(self, _wrapped):
                self._wrapped = _wrapped

        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        with _Monkey(MUT,
                     _USE_GAX=True,
                     GeneratedSinksAPI=_generated_api,
                     GAXSinksAPI=_GaxSinksAPI):
            api = client.sinks_api
        self.assertIsInstance(api, _GaxSinksAPI)
        self.assertTrue(api._wrapped is wrapped)
        # API instance is cached
        again = client.sinks_api
        self.assertTrue(again is api)

    def test_metrics_api_wo_gax(self):
        from gcloud.logging.connection import _MetricsAPI
        from gcloud.logging import client as MUT
        from gcloud._testing import _Monkey
        client = self._makeOne(self.PROJECT, credentials=_Credentials())
        conn = client.connection = object()
        with _Monkey(MUT, _USE_GAX=False):
            api = client.metrics_api
        self.assertTrue(isinstance(api, _MetricsAPI))
        self.assertTrue(api._connection is conn)
        # API instance is cached
        again = client.metrics_api
        self.assertTrue(again is api)

    def test_metrics_api_w_gax(self):
        from gcloud.logging import client as MUT
        from gcloud._testing import _Monkey
        wrapped = object()
        _called_with = []

        def _generated_api(*args, **kw):
            _called_with.append((args, kw))
            return wrapped

        class _GaxMetricsAPI(object):

            def __init__(self, _wrapped):
                self._wrapped = _wrapped

        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        with _Monkey(MUT,
                     _USE_GAX=True,
                     GeneratedMetricsAPI=_generated_api,
                     GAXMetricsAPI=_GaxMetricsAPI):
            api = client.metrics_api
        self.assertIsInstance(api, _GaxMetricsAPI)
        self.assertTrue(api._wrapped is wrapped)
        # API instance is cached
        again = client.metrics_api
        self.assertTrue(again is api)

    def test_logger(self):
        from gcloud.logging.logger import Logger
        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        logger = client.logger(self.LOGGER_NAME)
        self.assertTrue(isinstance(logger, Logger))
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)

    def test__entry_from_resource_unknown_type(self):
        PROJECT = 'PROJECT'
        creds = _Credentials()
        client = self._makeOne(PROJECT, creds)
        loggers = {}
        # A resource with no recognized payload key must be rejected.
        with self.assertRaises(ValueError):
            client._entry_from_resource({'unknownPayload': {}}, loggers)

    def test_list_entries_defaults(self):
        from gcloud.logging.entries import TextEntry
        IID = 'IID'
        TEXT = 'TEXT'
        TOKEN = 'TOKEN'
        ENTRIES = [{
            'textPayload': TEXT,
            'insertId': IID,
            'resource': {
                'type': 'global',
            },
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }]
        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        api = client._logging_api = _DummyLoggingAPI()
        api._list_entries_response = ENTRIES, TOKEN
        entries, token = client.list_entries()
        self.assertEqual(len(entries), 1)
        entry = entries[0]
        self.assertTrue(isinstance(entry, TextEntry))
        self.assertEqual(entry.insert_id, IID)
        self.assertEqual(entry.payload, TEXT)
        logger = entry.logger
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)
        self.assertEqual(token, TOKEN)
        self.assertEqual(
            api._list_entries_called_with,
            ([self.PROJECT], None, None, None, None))

    def test_list_entries_explicit(self):
        from gcloud.logging import DESCENDING
        from gcloud.logging.entries import ProtobufEntry
        from gcloud.logging.entries import StructEntry
        from gcloud.logging.logger import Logger
        PROJECT1 = 'PROJECT1'
        PROJECT2 = 'PROJECT2'
        FILTER = 'logName:LOGNAME'
        IID1 = 'IID1'
        IID2 = 'IID2'
        PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'}
        PROTO_PAYLOAD = PAYLOAD.copy()
        PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example'
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        ENTRIES = [{
            'jsonPayload': PAYLOAD,
            'insertId': IID1,
            'resource': {
                'type': 'global',
            },
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }, {
            'protoPayload': PROTO_PAYLOAD,
            'insertId': IID2,
            'resource': {
                'type': 'global',
            },
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }]
        client = self._makeOne(self.PROJECT, credentials=_Credentials())
        api = client._logging_api = _DummyLoggingAPI()
        api._list_entries_response = ENTRIES, None
        entries, token = client.list_entries(
            projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING,
            page_size=PAGE_SIZE, page_token=TOKEN)
        self.assertEqual(len(entries), 2)
        entry = entries[0]
        self.assertTrue(isinstance(entry, StructEntry))
        self.assertEqual(entry.insert_id, IID1)
        self.assertEqual(entry.payload, PAYLOAD)
        logger = entry.logger
        self.assertTrue(isinstance(logger, Logger))
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)
        entry = entries[1]
        self.assertTrue(isinstance(entry, ProtobufEntry))
        self.assertEqual(entry.insert_id, IID2)
        self.assertEqual(entry.payload, PROTO_PAYLOAD)
        logger = entry.logger
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)
        # Both entries share a single cached Logger instance.
        self.assertTrue(entries[0].logger is entries[1].logger)
        self.assertEqual(token, None)
        self.assertEqual(
            api._list_entries_called_with,
            ([PROJECT1, PROJECT2], FILTER, DESCENDING, PAGE_SIZE, TOKEN))

    def test_sink_defaults(self):
        from gcloud.logging.sink import Sink
        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        sink = client.sink(self.SINK_NAME)
        self.assertTrue(isinstance(sink, Sink))
        self.assertEqual(sink.name, self.SINK_NAME)
        self.assertEqual(sink.filter_, None)
        self.assertEqual(sink.destination, None)
        self.assertTrue(sink.client is client)
        self.assertEqual(sink.project, self.PROJECT)

    def test_sink_explicit(self):
        from gcloud.logging.sink import Sink
        creds = _Credentials()
        client = self._makeOne(project=self.PROJECT, credentials=creds)
        sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI)
        self.assertTrue(isinstance(sink, Sink))
        self.assertEqual(sink.name, self.SINK_NAME)
        self.assertEqual(sink.filter_, self.FILTER)
        self.assertEqual(sink.destination, self.DESTINATION_URI)
        self.assertTrue(sink.client is client)
        self.assertEqual(sink.project, self.PROJECT)

    def test_list_sinks_no_paging(self):
        from gcloud.logging.sink import Sink
        PROJECT = 'PROJECT'
        TOKEN = 'TOKEN'
        SINK_NAME = 'sink_name'
        FILTER = 'logName:syslog AND severity>=ERROR'
        SINKS = [{
            'name': SINK_NAME,
            'filter': FILTER,
            'destination': self.DESTINATION_URI,
        }]
        client = self._makeOne(project=PROJECT, credentials=_Credentials())
        api = client._sinks_api = _DummySinksAPI()
        api._list_sinks_response = SINKS, TOKEN
        sinks, token = client.list_sinks()
        self.assertEqual(len(sinks), 1)
        sink = sinks[0]
        self.assertTrue(isinstance(sink, Sink))
        self.assertEqual(sink.name, SINK_NAME)
        self.assertEqual(sink.filter_, FILTER)
        self.assertEqual(sink.destination, self.DESTINATION_URI)
        self.assertEqual(token, TOKEN)
        self.assertEqual(api._list_sinks_called_with,
                         (PROJECT, None, None))

    def test_list_sinks_with_paging(self):
        from gcloud.logging.sink import Sink
        PROJECT = 'PROJECT'
        SINK_NAME = 'sink_name'
        FILTER = 'logName:syslog AND severity>=ERROR'
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        SINKS = [{
            'name': SINK_NAME,
            'filter': FILTER,
            'destination': self.DESTINATION_URI,
        }]
        client = self._makeOne(project=PROJECT, credentials=_Credentials())
        api = client._sinks_api = _DummySinksAPI()
        api._list_sinks_response = SINKS, None
        sinks, token = client.list_sinks(PAGE_SIZE, TOKEN)
        self.assertEqual(len(sinks), 1)
        sink = sinks[0]
        self.assertTrue(isinstance(sink, Sink))
        self.assertEqual(sink.name, SINK_NAME)
        self.assertEqual(sink.filter_, FILTER)
        self.assertEqual(sink.destination, self.DESTINATION_URI)
        self.assertEqual(token, None)
        self.assertEqual(api._list_sinks_called_with,
                         (PROJECT, PAGE_SIZE, TOKEN))

    def test_metric_defaults(self):
        from gcloud.logging.metric import Metric
        creds = _Credentials()
        client_obj = self._makeOne(project=self.PROJECT, credentials=creds)
        metric = client_obj.metric(self.METRIC_NAME)
        self.assertTrue(isinstance(metric, Metric))
        self.assertEqual(metric.name, self.METRIC_NAME)
        self.assertEqual(metric.filter_, None)
        self.assertEqual(metric.description, '')
        self.assertTrue(metric.client is client_obj)
        self.assertEqual(metric.project, self.PROJECT)

    def test_metric_explicit(self):
        from gcloud.logging.metric import Metric
        creds = _Credentials()
        client_obj = self._makeOne(project=self.PROJECT, credentials=creds)
        metric = client_obj.metric(self.METRIC_NAME, self.FILTER,
                                   description=self.DESCRIPTION)
        self.assertTrue(isinstance(metric, Metric))
        self.assertEqual(metric.name, self.METRIC_NAME)
        self.assertEqual(metric.filter_, self.FILTER)
        self.assertEqual(metric.description, self.DESCRIPTION)
        self.assertTrue(metric.client is client_obj)
        self.assertEqual(metric.project, self.PROJECT)

    def test_list_metrics_no_paging(self):
        from gcloud.logging.metric import Metric
        PROJECT = 'PROJECT'
        TOKEN = 'TOKEN'
        METRICS = [{
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }]
        client = self._makeOne(project=PROJECT, credentials=_Credentials())
        api = client._metrics_api = _DummyMetricsAPI()
        api._list_metrics_response = METRICS, TOKEN
        metrics, token = client.list_metrics()
        self.assertEqual(len(metrics), 1)
        metric = metrics[0]
        self.assertTrue(isinstance(metric, Metric))
        self.assertEqual(metric.name, self.METRIC_NAME)
        self.assertEqual(metric.filter_, self.FILTER)
        self.assertEqual(metric.description, self.DESCRIPTION)
        self.assertEqual(token, TOKEN)
        self.assertEqual(api._list_metrics_called_with,
                         (PROJECT, None, None))

    def test_list_metrics_with_paging(self):
        from gcloud.logging.metric import Metric
        PROJECT = 'PROJECT'
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        METRICS = [{
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }]
        client = self._makeOne(project=PROJECT, credentials=_Credentials())
        api = client._metrics_api = _DummyMetricsAPI()
        api._list_metrics_response = METRICS, None
        # Execute request.
        metrics, token = client.list_metrics(PAGE_SIZE, TOKEN)
        # Test values are correct.
        self.assertEqual(len(metrics), 1)
        metric = metrics[0]
        self.assertTrue(isinstance(metric, Metric))
        self.assertEqual(metric.name, self.METRIC_NAME)
        self.assertEqual(metric.filter_, self.FILTER)
        self.assertEqual(metric.description, self.DESCRIPTION)
        self.assertEqual(token, None)
        self.assertEqual(api._list_metrics_called_with,
                         (PROJECT, PAGE_SIZE, TOKEN))
class _Credentials(object):
_scopes = None
@staticmethod
def create_scoped_required():
return True
def create_scoped(self, scope):
self._scopes = scope
return self
class _DummyLoggingAPI(object):
def list_entries(self, projects, filter_, order_by, page_size, page_token):
self._list_entries_called_with = (
projects, filter_, order_by, page_size, page_token)
return self._list_entries_response
class _DummySinksAPI(object):
def list_sinks(self, project, page_size, page_token):
self._list_sinks_called_with = (project, page_size, page_token)
return self._list_sinks_response
class _DummyMetricsAPI(object):
def list_metrics(self, project, page_size, page_token):
self._list_metrics_called_with = (project, page_size, page_token)
return self._list_metrics_response

View file

@ -0,0 +1,633 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestConnection(unittest2.TestCase):
    """Unit tests for ``gcloud.logging.connection.Connection``."""

    PROJECT = 'project'
    FILTER = 'logName:syslog AND severity>=ERROR'

    def _getTargetClass(self):
        # Imported lazily so a broken import fails the test, not collection.
        from gcloud.logging.connection import Connection
        return Connection

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_default_url(self):
        credentials = _Credentials()
        connection = self._makeOne(credentials)
        target_class = self._getTargetClass()
        self.assertEqual(connection.credentials._scopes, target_class.SCOPE)
class Test_LoggingAPI(unittest2.TestCase):
    """Unit tests for ``gcloud.logging.connection._LoggingAPI``.

    Each test drives the API wrapper against a ``_Connection`` double
    (defined later in this module) and asserts on the exact HTTP method,
    path, and request payload the wrapper produced.
    """

    PROJECT = 'project'
    LIST_ENTRIES_PATH = 'entries:list'
    WRITE_ENTRIES_PATH = 'entries:write'
    LOGGER_NAME = 'LOGGER_NAME'
    FILTER = 'logName:syslog AND severity>=ERROR'

    def _getTargetClass(self):
        from gcloud.logging.connection import _LoggingAPI
        return _LoggingAPI

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        connection = object()
        api = self._makeOne(connection)
        self.assertTrue(api._connection is connection)

    @staticmethod
    def _make_timestamp():
        # Helper: RFC3339 timestamp (with nanoseconds) for "now", matching
        # the wire format the API returns for entry timestamps.
        from datetime import datetime
        from gcloud._helpers import UTC
        from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos
        NOW = datetime.utcnow().replace(tzinfo=UTC)
        return _datetime_to_rfc3339_w_nanos(NOW)

    def test_list_entries_no_paging(self):
        TIMESTAMP = self._make_timestamp()
        IID = 'IID'
        TEXT = 'TEXT'
        # SENT is the exact JSON body we expect the wrapper to POST.
        SENT = {
            'projectIds': [self.PROJECT],
        }
        TOKEN = 'TOKEN'
        RETURNED = {
            'entries': [{
                'textPayload': TEXT,
                'insertId': IID,
                'resource': {
                    'type': 'global',
                },
                'timestamp': TIMESTAMP,
                'logName': 'projects/%s/logs/%s' % (
                    self.PROJECT, self.LOGGER_NAME),
            }],
            'nextPageToken': TOKEN,
        }
        conn = _Connection(RETURNED)
        api = self._makeOne(conn)
        entries, token = api.list_entries([self.PROJECT])
        self.assertEqual(entries, RETURNED['entries'])
        self.assertEqual(token, TOKEN)
        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/%s' % self.LIST_ENTRIES_PATH
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_list_entries_w_paging(self):
        from gcloud.logging import DESCENDING
        PROJECT1 = 'PROJECT1'
        PROJECT2 = 'PROJECT2'
        TIMESTAMP = self._make_timestamp()
        IID1 = 'IID1'
        IID2 = 'IID2'
        PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'}
        PROTO_PAYLOAD = PAYLOAD.copy()
        PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example'
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        # All optional arguments should be forwarded in the POST body.
        SENT = {
            'projectIds': [PROJECT1, PROJECT2],
            'filter': self.FILTER,
            'orderBy': DESCENDING,
            'pageSize': PAGE_SIZE,
            'pageToken': TOKEN,
        }
        RETURNED = {
            'entries': [{
                'jsonPayload': PAYLOAD,
                'insertId': IID1,
                'resource': {
                    'type': 'global',
                },
                'timestamp': TIMESTAMP,
                'logName': 'projects/%s/logs/%s' % (
                    self.PROJECT, self.LOGGER_NAME),
            }, {
                'protoPayload': PROTO_PAYLOAD,
                'insertId': IID2,
                'resource': {
                    'type': 'global',
                },
                'timestamp': TIMESTAMP,
                'logName': 'projects/%s/logs/%s' % (
                    self.PROJECT, self.LOGGER_NAME),
            }],
        }
        conn = _Connection(RETURNED)
        api = self._makeOne(conn)
        entries, token = api.list_entries(
            projects=[PROJECT1, PROJECT2], filter_=self.FILTER,
            order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN)
        self.assertEqual(entries, RETURNED['entries'])
        self.assertEqual(token, None)
        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/%s' % self.LIST_ENTRIES_PATH
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_write_entries_single(self):
        TEXT = 'TEXT'
        ENTRY = {
            'textPayload': TEXT,
            'resource': {
                'type': 'global',
            },
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
        }
        SENT = {
            'entries': [ENTRY],
        }
        conn = _Connection({})
        api = self._makeOne(conn)
        api.write_entries([ENTRY])
        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/%s' % self.WRITE_ENTRIES_PATH
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_write_entries_multiple(self):
        # Shared logName/resource/labels should be hoisted to the top level
        # of the request rather than repeated per entry.
        TEXT = 'TEXT'
        LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        RESOURCE = {
            'type': 'global',
        }
        LABELS = {
            'baz': 'qux',
            'spam': 'eggs',
        }
        ENTRY1 = {
            'textPayload': TEXT,
        }
        ENTRY2 = {
            'jsonPayload': {'foo': 'bar'},
        }
        SENT = {
            'logName': LOG_NAME,
            'resource': RESOURCE,
            'labels': LABELS,
            'entries': [ENTRY1, ENTRY2],
        }
        conn = _Connection({})
        api = self._makeOne(conn)
        api.write_entries([ENTRY1, ENTRY2], LOG_NAME, RESOURCE, LABELS)
        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/%s' % self.WRITE_ENTRIES_PATH
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_logger_delete(self):
        path = '/projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        conn = _Connection({})
        api = self._makeOne(conn)
        api.logger_delete(self.PROJECT, self.LOGGER_NAME)
        self.assertEqual(conn._called_with['method'], 'DELETE')
        self.assertEqual(conn._called_with['path'], path)
class Test_SinksAPI(unittest2.TestCase):
    """Unit tests for ``gcloud.logging.connection._SinksAPI``.

    Uses the module's ``_Connection`` double; a ``_Connection()`` built
    with no queued response presumably raises ``NotFound`` (and, with
    ``_raise_conflict`` set, ``Conflict``) — see its definition later in
    this module.
    """

    PROJECT = 'project'
    FILTER = 'logName:syslog AND severity>=ERROR'
    LIST_SINKS_PATH = 'projects/%s/sinks' % (PROJECT,)
    SINK_NAME = 'sink_name'
    SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME)
    DESTINATION_URI = 'faux.googleapis.com/destination'

    def _getTargetClass(self):
        from gcloud.logging.connection import _SinksAPI
        return _SinksAPI

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        connection = object()
        api = self._makeOne(connection)
        self.assertTrue(api._connection is connection)

    def test_list_sinks_no_paging(self):
        TOKEN = 'TOKEN'
        RETURNED = {
            'sinks': [{
                'name': self.SINK_PATH,
                'filter': self.FILTER,
                'destination': self.DESTINATION_URI,
            }],
            'nextPageToken': TOKEN,
        }
        conn = _Connection(RETURNED)
        api = self._makeOne(conn)
        sinks, token = api.list_sinks(self.PROJECT)
        self.assertEqual(sinks, RETURNED['sinks'])
        self.assertEqual(token, TOKEN)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/%s' % (self.LIST_SINKS_PATH,)
        self.assertEqual(conn._called_with['path'], path)
        # No paging arguments -> empty query string.
        self.assertEqual(conn._called_with['query_params'], {})

    def test_list_sinks_w_paging(self):
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        RETURNED = {
            'sinks': [{
                'name': self.SINK_PATH,
                'filter': self.FILTER,
                'destination': self.DESTINATION_URI,
            }],
        }
        conn = _Connection(RETURNED)
        api = self._makeOne(conn)
        sinks, token = api.list_sinks(
            self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN)
        self.assertEqual(sinks, RETURNED['sinks'])
        self.assertEqual(token, None)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/%s' % (self.LIST_SINKS_PATH,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['query_params'],
                         {'pageSize': PAGE_SIZE, 'pageToken': TOKEN})

    def test_sink_create_conflict(self):
        from gcloud.exceptions import Conflict
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection()
        conn._raise_conflict = True
        api = self._makeOne(conn)
        with self.assertRaises(Conflict):
            api.sink_create(
                self.PROJECT, self.SINK_NAME, self.FILTER,
                self.DESTINATION_URI)
        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/projects/%s/sinks' % (self.PROJECT,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_create_ok(self):
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection({})
        api = self._makeOne(conn)
        api.sink_create(
            self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)
        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/projects/%s/sinks' % (self.PROJECT,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_get_miss(self):
        from gcloud.exceptions import NotFound
        conn = _Connection()
        api = self._makeOne(conn)
        with self.assertRaises(NotFound):
            api.sink_get(self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_sink_get_hit(self):
        RESPONSE = {
            'name': self.SINK_PATH,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection(RESPONSE)
        api = self._makeOne(conn)
        response = api.sink_get(self.PROJECT, self.SINK_NAME)
        self.assertEqual(response, RESPONSE)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_sink_update_miss(self):
        from gcloud.exceptions import NotFound
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection()
        api = self._makeOne(conn)
        with self.assertRaises(NotFound):
            api.sink_update(
                self.PROJECT, self.SINK_NAME, self.FILTER,
                self.DESTINATION_URI)
        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_update_hit(self):
        SENT = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        conn = _Connection({})
        api = self._makeOne(conn)
        api.sink_update(
            self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI)
        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)

    def test_sink_delete_miss(self):
        from gcloud.exceptions import NotFound
        conn = _Connection()
        api = self._makeOne(conn)
        with self.assertRaises(NotFound):
            api.sink_delete(self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['method'], 'DELETE')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)

    def test_sink_delete_hit(self):
        conn = _Connection({})
        api = self._makeOne(conn)
        api.sink_delete(self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['method'], 'DELETE')
        path = '/projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        self.assertEqual(conn._called_with['path'], path)
class Test_MetricsAPI(unittest2.TestCase):
PROJECT = 'project'
FILTER = 'logName:syslog AND severity>=ERROR'
LIST_METRICS_PATH = 'projects/%s/metrics' % (PROJECT,)
METRIC_NAME = 'metric_name'
METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME)
DESCRIPTION = 'DESCRIPTION'
def _getTargetClass(self):
from gcloud.logging.connection import _MetricsAPI
return _MetricsAPI
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_list_metrics_no_paging(self):
TOKEN = 'TOKEN'
RETURNED = {
'metrics': [{
'name': self.METRIC_PATH,
'filter': self.FILTER,
}],
'nextPageToken': TOKEN,
}
conn = _Connection(RETURNED)
api = self._makeOne(conn)
metrics, token = api.list_metrics(self.PROJECT)
self.assertEqual(metrics, RETURNED['metrics'])
self.assertEqual(token, TOKEN)
self.assertEqual(conn._called_with['method'], 'GET')
path = '/%s' % (self.LIST_METRICS_PATH,)
self.assertEqual(conn._called_with['path'], path)
def test_list_metrics_w_paging(self):
TOKEN = 'TOKEN'
PAGE_SIZE = 42
RETURNED = {
'metrics': [{
'name': self.METRIC_PATH,
'filter': self.FILTER,
}],
}
conn = _Connection(RETURNED)
api = self._makeOne(conn)
metrics, token = api.list_metrics(
self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN)
self.assertEqual(metrics, RETURNED['metrics'])
self.assertEqual(token, None)
self.assertEqual(conn._called_with['method'], 'GET')
path = '/%s' % (self.LIST_METRICS_PATH,)
self.assertEqual(conn._called_with['path'], path)
self.assertEqual(conn._called_with['query_params'],
{'pageSize': PAGE_SIZE, 'pageToken': TOKEN})
def test_metric_create_conflict(self):
from gcloud.exceptions import Conflict
SENT = {
'name': self.METRIC_NAME,
'filter': self.FILTER,
'description': self.DESCRIPTION,
}
conn = _Connection()
conn._raise_conflict = True
api = self._makeOne(conn)
with self.assertRaises(Conflict):
api.metric_create(
self.PROJECT, self.METRIC_NAME, self.FILTER,
self.DESCRIPTION)
self.assertEqual(conn._called_with['method'], 'POST')
path = '/projects/%s/metrics' % (self.PROJECT,)
self.assertEqual(conn._called_with['path'], path)
self.assertEqual(conn._called_with['data'], SENT)
    def test_metric_create_ok(self):
        """metric_create POSTs name/filter/description to the metrics path."""
        SENT = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection({})
        api = self._makeOne(conn)
        api.metric_create(
            self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)
        self.assertEqual(conn._called_with['method'], 'POST')
        path = '/projects/%s/metrics' % (self.PROJECT,)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)
    def test_metric_get_miss(self):
        """metric_get propagates NotFound for a missing metric."""
        from gcloud.exceptions import NotFound
        conn = _Connection()
        api = self._makeOne(conn)
        with self.assertRaises(NotFound):
            api.metric_get(self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
    def test_metric_get_hit(self):
        """metric_get returns the backend resource verbatim."""
        RESPONSE = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection(RESPONSE)
        api = self._makeOne(conn)
        response = api.metric_get(self.PROJECT, self.METRIC_NAME)
        self.assertEqual(response, RESPONSE)
        self.assertEqual(conn._called_with['method'], 'GET')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
    def test_metric_update_miss(self):
        """metric_update propagates NotFound but still sends a full PUT body."""
        from gcloud.exceptions import NotFound
        SENT = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection()
        api = self._makeOne(conn)
        with self.assertRaises(NotFound):
            api.metric_update(
                self.PROJECT, self.METRIC_NAME, self.FILTER,
                self.DESCRIPTION)
        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)
    def test_metric_update_hit(self):
        """metric_update PUTs name/filter/description to the metric path."""
        SENT = {
            'name': self.METRIC_NAME,
            'filter': self.FILTER,
            'description': self.DESCRIPTION,
        }
        conn = _Connection({})
        api = self._makeOne(conn)
        api.metric_update(
            self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION)
        self.assertEqual(conn._called_with['method'], 'PUT')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
        self.assertEqual(conn._called_with['data'], SENT)
    def test_metric_delete_miss(self):
        """metric_delete propagates NotFound for a missing metric."""
        from gcloud.exceptions import NotFound
        conn = _Connection()
        api = self._makeOne(conn)
        with self.assertRaises(NotFound):
            api.metric_delete(self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['method'], 'DELETE')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
    def test_metric_delete_hit(self):
        """metric_delete issues a DELETE against the metric path."""
        conn = _Connection({})
        api = self._makeOne(conn)
        api.metric_delete(self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['method'], 'DELETE')
        path = '/projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        self.assertEqual(conn._called_with['path'], path)
class _Credentials(object):
_scopes = None
@staticmethod
def create_scoped_required():
return True
def create_scoped(self, scope):
self._scopes = scope
return self
class _Connection(object):
_called_with = None
_raise_conflict = False
def __init__(self, *responses):
self._responses = responses
def api_request(self, **kw):
from gcloud.exceptions import Conflict
from gcloud.exceptions import NotFound
self._called_with = kw
if self._raise_conflict:
raise Conflict('oops')
try:
response, self._responses = self._responses[0], self._responses[1:]
except IndexError:
raise NotFound('miss')
return response

View file

@ -0,0 +1,235 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class Test_logger_name_from_path(unittest2.TestCase):
    """Tests for the ``logger_name_from_path`` helper."""

    def _callFUT(self, path):
        """Invoke the function under test; imported lazily per convention."""
        from gcloud.logging.entries import logger_name_from_path
        return logger_name_from_path(path)

    def test_w_simple_name(self):
        """A plain logger name is extracted from a full resource path."""
        LOGGER_NAME = 'LOGGER_NAME'
        PROJECT = 'my-project-1234'
        PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME)
        logger_name = self._callFUT(PATH)
        self.assertEqual(logger_name, LOGGER_NAME)

    def test_w_name_w_all_extras(self):
        """Names containing '.', '~', '%%', and '-' survive extraction intact."""
        LOGGER_NAME = 'LOGGER_NAME-part.one~part.two%part-three'
        PROJECT = 'my-project-1234'
        PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME)
        logger_name = self._callFUT(PATH)
        self.assertEqual(logger_name, LOGGER_NAME)
class Test_BaseEntry(unittest2.TestCase):
    """Tests for the ``_BaseEntry`` log-entry base class."""

    PROJECT = 'PROJECT'
    LOGGER_NAME = 'LOGGER_NAME'

    def _getTargetClass(self):
        """Return a concrete subclass: _BaseEntry is abstract w.r.t. payload key."""
        from gcloud.logging.entries import _BaseEntry

        class _Dummy(_BaseEntry):
            _PAYLOAD_KEY = 'dummyPayload'

        return _Dummy

    def _makeOne(self, *args, **kw):
        """Instantiate the dummy entry subclass."""
        return self._getTargetClass()(*args, **kw)

    def test_ctor_defaults(self):
        """Optional metadata fields all default to None."""
        PAYLOAD = 'PAYLOAD'
        logger = _Logger(self.LOGGER_NAME, self.PROJECT)
        entry = self._makeOne(PAYLOAD, logger)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertTrue(entry.logger is logger)
        self.assertTrue(entry.insert_id is None)
        self.assertTrue(entry.timestamp is None)
        self.assertTrue(entry.labels is None)
        self.assertTrue(entry.severity is None)
        self.assertTrue(entry.http_request is None)

    def test_ctor_explicit(self):
        """Explicitly passed metadata is stored unmodified."""
        import datetime
        PAYLOAD = 'PAYLOAD'
        IID = 'IID'
        TIMESTAMP = datetime.datetime.now()
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        logger = _Logger(self.LOGGER_NAME, self.PROJECT)
        entry = self._makeOne(PAYLOAD, logger,
                              insert_id=IID,
                              timestamp=TIMESTAMP,
                              labels=LABELS,
                              severity=SEVERITY,
                              http_request=REQUEST)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertTrue(entry.logger is logger)
        self.assertEqual(entry.insert_id, IID)
        self.assertEqual(entry.timestamp, TIMESTAMP)
        self.assertEqual(entry.labels, LABELS)
        self.assertEqual(entry.severity, SEVERITY)
        self.assertEqual(entry.http_request['requestMethod'], METHOD)
        self.assertEqual(entry.http_request['requestUrl'], URI)
        self.assertEqual(entry.http_request['status'], STATUS)

    def test_from_api_repr_missing_data_no_loggers(self):
        """Absent metadata maps to None; a logger is built via the client."""
        client = _Client(self.PROJECT)
        PAYLOAD = 'PAYLOAD'
        LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        API_REPR = {
            'dummyPayload': PAYLOAD,
            'logName': LOG_NAME,
        }
        klass = self._getTargetClass()
        entry = klass.from_api_repr(API_REPR, client)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertTrue(entry.insert_id is None)
        self.assertTrue(entry.timestamp is None)
        self.assertTrue(entry.severity is None)
        self.assertTrue(entry.http_request is None)
        logger = entry.logger
        self.assertTrue(isinstance(logger, _Logger))
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.name, self.LOGGER_NAME)

    def test_from_api_repr_w_loggers_no_logger_match(self):
        """A cache miss creates a logger and records it in the cache mapping."""
        from datetime import datetime
        from gcloud._helpers import UTC
        klass = self._getTargetClass()
        client = _Client(self.PROJECT)
        PAYLOAD = 'PAYLOAD'
        SEVERITY = 'CRITICAL'
        IID = 'IID'
        NOW = datetime.utcnow().replace(tzinfo=UTC)
        TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
        LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        API_REPR = {
            'dummyPayload': PAYLOAD,
            'logName': LOG_NAME,
            'insertId': IID,
            'timestamp': TIMESTAMP,
            'labels': LABELS,
            'severity': SEVERITY,
            'httpRequest': {
                'requestMethod': METHOD,
                'requestUrl': URI,
                'status': STATUS,
            },
        }
        loggers = {}
        entry = klass.from_api_repr(API_REPR, client, loggers=loggers)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertEqual(entry.insert_id, IID)
        # The RFC3339 timestamp string must round-trip back to NOW.
        self.assertEqual(entry.timestamp, NOW)
        self.assertEqual(entry.labels, LABELS)
        self.assertEqual(entry.severity, SEVERITY)
        self.assertEqual(entry.http_request['requestMethod'], METHOD)
        self.assertEqual(entry.http_request['requestUrl'], URI)
        self.assertEqual(entry.http_request['status'], STATUS)
        logger = entry.logger
        self.assertTrue(isinstance(logger, _Logger))
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertEqual(loggers, {LOG_NAME: logger})

    def test_from_api_repr_w_loggers_w_logger_match(self):
        """A cache hit reuses the existing logger object."""
        from datetime import datetime
        from gcloud._helpers import UTC
        client = _Client(self.PROJECT)
        PAYLOAD = 'PAYLOAD'
        IID = 'IID'
        NOW = datetime.utcnow().replace(tzinfo=UTC)
        TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
        LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        API_REPR = {
            'dummyPayload': PAYLOAD,
            'logName': LOG_NAME,
            'insertId': IID,
            'timestamp': TIMESTAMP,
            'labels': LABELS,
        }
        LOGGER = object()
        loggers = {LOG_NAME: LOGGER}
        klass = self._getTargetClass()
        entry = klass.from_api_repr(API_REPR, client, loggers=loggers)
        self.assertEqual(entry.payload, PAYLOAD)
        self.assertEqual(entry.insert_id, IID)
        self.assertEqual(entry.timestamp, NOW)
        self.assertEqual(entry.labels, LABELS)
        self.assertTrue(entry.logger is LOGGER)
class TestProtobufEntry(unittest2.TestCase):
    """Tests for ``ProtobufEntry``."""

    PROJECT = 'PROJECT'
    LOGGER_NAME = 'LOGGER_NAME'

    def _getTargetClass(self):
        """Return the class under test."""
        from gcloud.logging.entries import ProtobufEntry
        return ProtobufEntry

    def _makeOne(self, *args, **kw):
        """Instantiate the class under test."""
        return self._getTargetClass()(*args, **kw)

    def test_parse_message(self):
        """parse_message merges the JSON payload into the supplied message."""
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        LOGGER = object()
        # Start with foo=False; the payload carries foo=True.
        message = Struct(fields={'foo': Value(bool_value=False)})
        with_true = Struct(fields={'foo': Value(bool_value=True)})
        PAYLOAD = json.loads(MessageToJson(with_true))
        entry = self._makeOne(PAYLOAD, LOGGER)
        entry.parse_message(message)
        # After parsing, the message reflects the payload's value.
        self.assertTrue(message.fields['foo'])
def _datetime_to_rfc3339_w_nanos(value):
    """Format *value* as RFC 3339 with nine fractional digits (nanoseconds)."""
    from gcloud._helpers import _RFC3339_NO_FRACTION
    no_fraction = value.strftime(_RFC3339_NO_FRACTION)
    # microseconds * 1000 -> nanoseconds, zero-padded to nine digits.
    return '%s.%09dZ' % (no_fraction, value.microsecond * 1000)
class _Logger(object):
def __init__(self, name, client):
self.name = name
self.client = client
class _Client(object):
def __init__(self, project):
self.project = project
def logger(self, name):
return _Logger(name, self)

View file

@ -0,0 +1,704 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestLogger(unittest2.TestCase):
    """Tests for ``gcloud.logging.logger.Logger``."""

    PROJECT = 'test-project'
    LOGGER_NAME = 'logger-name'

    def _getTargetClass(self):
        """Return the class under test."""
        from gcloud.logging.logger import Logger
        return Logger

    def _makeOne(self, *args, **kw):
        """Instantiate the class under test."""
        return self._getTargetClass()(*args, **kw)

    def test_ctor_defaults(self):
        """Name, project, full_name, and path derive from client; labels None."""
        conn = object()
        client = _Client(self.PROJECT, conn)
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)
        self.assertEqual(logger.full_name, 'projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.path, '/projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.labels, None)

    def test_ctor_explicit(self):
        """Explicit default labels are stored on the logger."""
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        conn = object()
        client = _Client(self.PROJECT, conn)
        logger = self._makeOne(self.LOGGER_NAME, client=client, labels=LABELS)
        self.assertEqual(logger.name, self.LOGGER_NAME)
        self.assertTrue(logger.client is client)
        self.assertEqual(logger.project, self.PROJECT)
        self.assertEqual(logger.full_name, 'projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.path, '/projects/%s/logs/%s'
                         % (self.PROJECT, self.LOGGER_NAME))
        self.assertEqual(logger.labels, LABELS)

    def test_batch_w_bound_client(self):
        """batch() uses the logger's own client by default."""
        from gcloud.logging.logger import Batch
        conn = object()
        client = _Client(self.PROJECT, conn)
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        batch = logger.batch()
        self.assertTrue(isinstance(batch, Batch))
        self.assertTrue(batch.logger is logger)
        self.assertTrue(batch.client is client)

    def test_batch_w_alternate_client(self):
        """batch(client) binds the batch to the passed-in client."""
        from gcloud.logging.logger import Batch
        conn1 = object()
        conn2 = object()
        client1 = _Client(self.PROJECT, conn1)
        client2 = _Client(self.PROJECT, conn2)
        logger = self._makeOne(self.LOGGER_NAME, client=client1)
        batch = logger.batch(client2)
        self.assertTrue(isinstance(batch, Batch))
        self.assertTrue(batch.logger is logger)
        self.assertTrue(batch.client is client2)

    def test_log_text_w_str_implicit_client(self):
        """log_text builds a textPayload entry with the 'global' resource."""
        TEXT = 'TEXT'
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'textPayload': TEXT,
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        logger.log_text(TEXT)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_text_w_default_labels(self):
        """Default labels set at construction are attached to the entry."""
        TEXT = 'TEXT'
        DEFAULT_LABELS = {'foo': 'spam'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'textPayload': TEXT,
            'resource': {
                'type': 'global',
            },
            'labels': DEFAULT_LABELS,
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client,
                               labels=DEFAULT_LABELS)
        logger.log_text(TEXT)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self):
        """Per-call labels override defaults; extras are forwarded verbatim."""
        TEXT = u'TEXT'
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'textPayload': TEXT,
            'resource': {
                'type': 'global',
            },
            'labels': LABELS,
            'insertId': IID,
            'severity': SEVERITY,
            'httpRequest': REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1,
                               labels=DEFAULT_LABELS)
        logger.log_text(TEXT, client=client2, labels=LABELS,
                        insert_id=IID, severity=SEVERITY, http_request=REQUEST)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_struct_w_implicit_client(self):
        """log_struct builds a jsonPayload entry with the 'global' resource."""
        STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'jsonPayload': STRUCT,
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        logger.log_struct(STRUCT)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_struct_w_default_labels(self):
        """Default labels are attached to struct entries."""
        STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'}
        DEFAULT_LABELS = {'foo': 'spam'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'jsonPayload': STRUCT,
            'resource': {
                'type': 'global',
            },
            'labels': DEFAULT_LABELS,
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client,
                               labels=DEFAULT_LABELS)
        logger.log_struct(STRUCT)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_struct_w_explicit_client_labels_severity_httpreq(self):
        """Per-call options on log_struct are forwarded on the entry."""
        STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'}
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'jsonPayload': STRUCT,
            'resource': {
                'type': 'global',
            },
            'labels': LABELS,
            'insertId': IID,
            'severity': SEVERITY,
            'httpRequest': REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1,
                               labels=DEFAULT_LABELS)
        logger.log_struct(STRUCT, client=client2, labels=LABELS,
                          insert_id=IID, severity=SEVERITY,
                          http_request=REQUEST)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_proto_w_implicit_client(self):
        """log_proto serializes the message into a protoPayload entry."""
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        logger.log_proto(message)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_proto_w_default_labels(self):
        """Default labels are attached to proto entries."""
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
            'labels': DEFAULT_LABELS,
        }]
        client = _Client(self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client,
                               labels=DEFAULT_LABELS)
        logger.log_proto(message)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_log_proto_w_explicit_client_labels_severity_httpreq(self):
        """Per-call options on log_proto are forwarded on the entry."""
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        ENTRIES = [{
            'logName': 'projects/%s/logs/%s' % (
                self.PROJECT, self.LOGGER_NAME),
            'protoPayload': json.loads(MessageToJson(message)),
            'resource': {
                'type': 'global',
            },
            'labels': LABELS,
            'insertId': IID,
            'severity': SEVERITY,
            'httpRequest': REQUEST,
        }]
        client1 = _Client(self.PROJECT)
        client2 = _Client(self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1,
                               labels=DEFAULT_LABELS)
        logger.log_proto(message, client=client2, labels=LABELS,
                         insert_id=IID, severity=SEVERITY,
                         http_request=REQUEST)
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, None, None, None))

    def test_delete_w_bound_client(self):
        """delete() uses the logger's own client's API."""
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        logger.delete()
        self.assertEqual(api._logger_delete_called_with,
                         (self.PROJECT, self.LOGGER_NAME))

    def test_delete_w_alternate_client(self):
        """delete(client=...) uses the passed-in client's API."""
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = self._makeOne(self.LOGGER_NAME, client=client1)
        logger.delete(client=client2)
        self.assertEqual(api._logger_delete_called_with,
                         (self.PROJECT, self.LOGGER_NAME))

    def test_list_entries_defaults(self):
        """list_entries filters on this logger's logName by default."""
        LISTED = {
            'projects': None,
            'filter_': 'logName=projects/%s/logs/%s' %
                       (self.PROJECT, self.LOGGER_NAME),
            'order_by': None,
            'page_size': None,
            'page_token': None,
        }
        TOKEN = 'TOKEN'
        client = _Client(self.PROJECT)
        client._token = TOKEN
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        entries, token = logger.list_entries()
        self.assertEqual(len(entries), 0)
        self.assertEqual(token, TOKEN)
        self.assertEqual(client._listed, LISTED)

    def test_list_entries_explicit(self):
        """Explicit filter is ANDed with the logName restriction."""
        from gcloud.logging import DESCENDING
        PROJECT1 = 'PROJECT1'
        PROJECT2 = 'PROJECT2'
        FILTER = 'resource.type:global'
        TOKEN = 'TOKEN'
        PAGE_SIZE = 42
        LISTED = {
            'projects': ['PROJECT1', 'PROJECT2'],
            'filter_': '%s AND logName=projects/%s/logs/%s' %
                       (FILTER, self.PROJECT, self.LOGGER_NAME),
            'order_by': DESCENDING,
            'page_size': PAGE_SIZE,
            'page_token': TOKEN,
        }
        client = _Client(self.PROJECT)
        logger = self._makeOne(self.LOGGER_NAME, client=client)
        entries, token = logger.list_entries(
            projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING,
            page_size=PAGE_SIZE, page_token=TOKEN)
        self.assertEqual(len(entries), 0)
        self.assertEqual(token, None)
        self.assertEqual(client._listed, LISTED)
class TestBatch(unittest2.TestCase):
    """Tests for ``gcloud.logging.logger.Batch``."""

    PROJECT = 'test-project'

    def _getTargetClass(self):
        """Return the class under test."""
        from gcloud.logging.logger import Batch
        return Batch

    def _makeOne(self, *args, **kwargs):
        """Instantiate the class under test."""
        return self._getTargetClass()(*args, **kwargs)

    def test_ctor_defaults(self):
        """A new batch is bound to its logger/client and starts empty."""
        logger = _Logger()
        client = _Client(project=self.PROJECT)
        batch = self._makeOne(logger, client)
        self.assertTrue(batch.logger is logger)
        self.assertTrue(batch.client is client)
        self.assertEqual(len(batch.entries), 0)

    def test_log_text_defaults(self):
        """log_text queues a ('text', ...) tuple with all extras None."""
        TEXT = 'This is the entry text'
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_text(TEXT)
        self.assertEqual(batch.entries,
                         [('text', TEXT, None, None, None, None)])

    def test_log_text_explicit(self):
        """log_text queues labels/insert_id/severity/http_request in order."""
        TEXT = 'This is the entry text'
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY,
                       http_request=REQUEST)
        self.assertEqual(batch.entries,
                         [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)])

    def test_log_struct_defaults(self):
        """log_struct queues a ('struct', ...) tuple with all extras None."""
        STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'}
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_struct(STRUCT)
        self.assertEqual(batch.entries,
                         [('struct', STRUCT, None, None, None, None)])

    def test_log_struct_explicit(self):
        """log_struct queues all per-entry options."""
        STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_struct(STRUCT, labels=LABELS, insert_id=IID,
                         severity=SEVERITY, http_request=REQUEST)
        self.assertEqual(batch.entries,
                         [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)])

    def test_log_proto_defaults(self):
        """log_proto queues a ('proto', ...) tuple with all extras None."""
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_proto(message)
        self.assertEqual(batch.entries,
                         [('proto', message, None, None, None, None)])

    def test_log_proto_explicit(self):
        """log_proto queues all per-entry options."""
        from google.protobuf.struct_pb2 import Struct, Value
        message = Struct(fields={'foo': Value(bool_value=True)})
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT, connection=object())
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_proto(message, labels=LABELS, insert_id=IID,
                        severity=SEVERITY, http_request=REQUEST)
        self.assertEqual(batch.entries,
                         [('proto', message, LABELS, IID, SEVERITY, REQUEST)])

    def test_commit_w_invalid_entry_type(self):
        """commit rejects entries whose type tag it does not recognize."""
        logger = _Logger()
        client = _Client(project=self.PROJECT, connection=object())
        batch = self._makeOne(logger, client)
        batch.entries.append(('bogus', 'BOGUS', None, None, None, None))
        with self.assertRaises(ValueError):
            batch.commit()

    def test_commit_w_bound_client(self):
        """commit sends all queued entries in one write and empties the batch."""
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        IID1 = 'IID1'
        IID2 = 'IID2'
        IID3 = 'IID3'
        RESOURCE = {
            'type': 'global',
        }
        ENTRIES = [
            {'textPayload': TEXT, 'insertId': IID1},
            {'jsonPayload': STRUCT, 'insertId': IID2},
            {'protoPayload': json.loads(MessageToJson(message)),
             'insertId': IID3},
        ]
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        batch = self._makeOne(logger, client=client)
        batch.log_text(TEXT, insert_id=IID1)
        batch.log_struct(STRUCT, insert_id=IID2)
        batch.log_proto(message, insert_id=IID3)
        batch.commit()
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.path, RESOURCE, None))

    def test_commit_w_alternate_client(self):
        """commit(client=...) writes via that client, with default labels."""
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        from gcloud.logging.logger import Logger
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {
            'foo': 'bar',
            'baz': 'qux',
        }
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.logging_api = _DummyLoggingAPI()
        logger = Logger('logger_name', client1, labels=DEFAULT_LABELS)
        RESOURCE = {'type': 'global'}
        ENTRIES = [
            {'textPayload': TEXT, 'labels': LABELS},
            {'jsonPayload': STRUCT, 'severity': SEVERITY},
            {'protoPayload': json.loads(MessageToJson(message)),
             'httpRequest': REQUEST},
        ]
        batch = self._makeOne(logger, client=client1)
        batch.log_text(TEXT, labels=LABELS)
        batch.log_struct(STRUCT, severity=SEVERITY)
        batch.log_proto(message, http_request=REQUEST)
        batch.commit(client=client2)
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.path, RESOURCE, DEFAULT_LABELS))

    def test_context_mgr_success(self):
        """Leaving the `with` block normally commits all queued entries."""
        import json
        from google.protobuf.json_format import MessageToJson
        from google.protobuf.struct_pb2 import Struct, Value
        from gcloud.logging.logger import Logger
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        message = Struct(fields={'foo': Value(bool_value=True)})
        DEFAULT_LABELS = {'foo': 'spam'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = Logger('logger_name', client, labels=DEFAULT_LABELS)
        RESOURCE = {
            'type': 'global',
        }
        ENTRIES = [
            {'textPayload': TEXT, 'httpRequest': REQUEST},
            {'jsonPayload': STRUCT, 'labels': LABELS},
            {'protoPayload': json.loads(MessageToJson(message)),
             'severity': SEVERITY},
        ]
        batch = self._makeOne(logger, client=client)
        with batch as other:
            other.log_text(TEXT, http_request=REQUEST)
            other.log_struct(STRUCT, labels=LABELS)
            other.log_proto(message, severity=SEVERITY)
        self.assertEqual(list(batch.entries), [])
        self.assertEqual(api._write_entries_called_with,
                         (ENTRIES, logger.path, RESOURCE, DEFAULT_LABELS))

    def test_context_mgr_failure(self):
        """An exception inside the `with` block leaves entries unsent."""
        from google.protobuf.struct_pb2 import Struct, Value
        TEXT = 'This is the entry text'
        STRUCT = {'message': TEXT, 'weather': 'partly cloudy'}
        LABELS = {'foo': 'bar', 'baz': 'qux'}
        IID = 'IID'
        SEVERITY = 'CRITICAL'
        METHOD = 'POST'
        URI = 'https://api.example.com/endpoint'
        STATUS = '500'
        REQUEST = {
            'requestMethod': METHOD,
            'requestUrl': URI,
            'status': STATUS,
        }
        message = Struct(fields={'foo': Value(bool_value=True)})
        client = _Client(project=self.PROJECT)
        api = client.logging_api = _DummyLoggingAPI()
        logger = _Logger()
        UNSENT = [
            ('text', TEXT, None, IID, None, None),
            ('struct', STRUCT, None, None, SEVERITY, None),
            ('proto', message, LABELS, None, None, REQUEST),
        ]
        batch = self._makeOne(logger, client=client)
        try:
            with batch as other:
                other.log_text(TEXT, insert_id=IID)
                other.log_struct(STRUCT, severity=SEVERITY)
                other.log_proto(message, labels=LABELS, http_request=REQUEST)
                raise _Bugout()
        except _Bugout:
            pass
        self.assertEqual(list(batch.entries), UNSENT)
        # No write should have happened on the failure path.
        self.assertEqual(api._write_entries_called_with, None)
class _Logger(object):
labels = None
def __init__(self, name="NAME", project="PROJECT"):
self.path = '/projects/%s/logs/%s' % (project, name)
class _DummyLoggingAPI(object):
_write_entries_called_with = None
def write_entries(self, entries, logger_name=None, resource=None,
labels=None):
self._write_entries_called_with = (
entries, logger_name, resource, labels)
def logger_delete(self, project, logger_name):
self._logger_delete_called_with = (project, logger_name)
class _Client(object):
_listed = _token = None
_entries = ()
def __init__(self, project, connection=None):
self.project = project
self.connection = connection
def list_entries(self, **kw):
self._listed = kw
return self._entries, self._token
class _Bugout(Exception):
pass

View file

@ -0,0 +1,251 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestMetric(unittest2.TestCase):
    """Tests for ``gcloud.logging.metric.Metric``."""

    # Shared fixture constants.
    PROJECT = 'test-project'
    METRIC_NAME = 'metric-name'
    FILTER = 'logName:syslog AND severity>=ERROR'
    DESCRIPTION = 'DESCRIPTION'
    def _getTargetClass(self):
        """Return the class under test."""
        from gcloud.logging.metric import Metric
        return Metric
    def _makeOne(self, *args, **kw):
        """Instantiate the class under test."""
        return self._getTargetClass()(*args, **kw)
    def test_ctor_defaults(self):
        """filter_ defaults to None and description to the empty string."""
        FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
        client = _Client(self.PROJECT)
        metric = self._makeOne(self.METRIC_NAME, client=client)
        self.assertEqual(metric.name, self.METRIC_NAME)
        self.assertEqual(metric.filter_, None)
        self.assertEqual(metric.description, '')
        self.assertTrue(metric.client is client)
        self.assertEqual(metric.project, self.PROJECT)
        self.assertEqual(metric.full_name, FULL)
        self.assertEqual(metric.path, '/%s' % (FULL,))
def test_ctor_explicit(self):
FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
client = _Client(self.PROJECT)
metric = self._makeOne(self.METRIC_NAME, self.FILTER,
client=client, description=self.DESCRIPTION)
self.assertEqual(metric.name, self.METRIC_NAME)
self.assertEqual(metric.filter_, self.FILTER)
self.assertEqual(metric.description, self.DESCRIPTION)
self.assertTrue(metric.client is client)
self.assertEqual(metric.project, self.PROJECT)
self.assertEqual(metric.full_name, FULL)
self.assertEqual(metric.path, '/%s' % (FULL,))
def test_from_api_repr_minimal(self):
client = _Client(project=self.PROJECT)
FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
RESOURCE = {
'name': self.METRIC_NAME,
'filter': self.FILTER,
}
klass = self._getTargetClass()
metric = klass.from_api_repr(RESOURCE, client=client)
self.assertEqual(metric.name, self.METRIC_NAME)
self.assertEqual(metric.filter_, self.FILTER)
self.assertEqual(metric.description, '')
self.assertTrue(metric._client is client)
self.assertEqual(metric.project, self.PROJECT)
self.assertEqual(metric.full_name, FULL)
def test_from_api_repr_w_description(self):
client = _Client(project=self.PROJECT)
FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME)
DESCRIPTION = 'DESCRIPTION'
RESOURCE = {
'name': self.METRIC_NAME,
'filter': self.FILTER,
'description': DESCRIPTION,
}
klass = self._getTargetClass()
metric = klass.from_api_repr(RESOURCE, client=client)
self.assertEqual(metric.name, self.METRIC_NAME)
self.assertEqual(metric.filter_, self.FILTER)
self.assertEqual(metric.description, DESCRIPTION)
self.assertTrue(metric._client is client)
self.assertEqual(metric.project, self.PROJECT)
self.assertEqual(metric.full_name, FULL)
def test_create_w_bound_client(self):
client = _Client(project=self.PROJECT)
api = client.metrics_api = _DummyMetricsAPI()
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client)
metric.create()
self.assertEqual(
api._metric_create_called_with,
(self.PROJECT, self.METRIC_NAME, self.FILTER, ''))
def test_create_w_alternate_client(self):
client1 = _Client(project=self.PROJECT)
client2 = _Client(project=self.PROJECT)
api = client2.metrics_api = _DummyMetricsAPI()
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1,
description=self.DESCRIPTION)
metric.create(client=client2)
self.assertEqual(
api._metric_create_called_with,
(self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION))
def test_exists_miss_w_bound_client(self):
client = _Client(project=self.PROJECT)
api = client.metrics_api = _DummyMetricsAPI()
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client)
self.assertFalse(metric.exists())
self.assertEqual(api._metric_get_called_with,
(self.PROJECT, self.METRIC_NAME))
def test_exists_hit_w_alternate_client(self):
RESOURCE = {
'name': self.METRIC_NAME,
'filter': self.FILTER,
}
client1 = _Client(project=self.PROJECT)
client2 = _Client(project=self.PROJECT)
api = client2.metrics_api = _DummyMetricsAPI()
api._metric_get_response = RESOURCE
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1)
self.assertTrue(metric.exists(client=client2))
self.assertEqual(api._metric_get_called_with,
(self.PROJECT, self.METRIC_NAME))
def test_reload_w_bound_client(self):
NEW_FILTER = 'logName:syslog AND severity>=INFO'
RESOURCE = {
'name': self.METRIC_NAME,
'filter': NEW_FILTER,
}
client = _Client(project=self.PROJECT)
api = client.metrics_api = _DummyMetricsAPI()
api._metric_get_response = RESOURCE
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client,
description=self.DESCRIPTION)
metric.reload()
self.assertEqual(metric.filter_, NEW_FILTER)
self.assertEqual(metric.description, '')
self.assertEqual(api._metric_get_called_with,
(self.PROJECT, self.METRIC_NAME))
def test_reload_w_alternate_client(self):
NEW_FILTER = 'logName:syslog AND severity>=INFO'
RESOURCE = {
'name': self.METRIC_NAME,
'description': self.DESCRIPTION,
'filter': NEW_FILTER,
}
client1 = _Client(project=self.PROJECT)
client2 = _Client(project=self.PROJECT)
api = client2.metrics_api = _DummyMetricsAPI()
api._metric_get_response = RESOURCE
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1)
metric.reload(client=client2)
self.assertEqual(metric.filter_, NEW_FILTER)
self.assertEqual(metric.description, self.DESCRIPTION)
self.assertEqual(api._metric_get_called_with,
(self.PROJECT, self.METRIC_NAME))
def test_update_w_bound_client(self):
client = _Client(project=self.PROJECT)
api = client.metrics_api = _DummyMetricsAPI()
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client)
metric.update()
self.assertEqual(
api._metric_update_called_with,
(self.PROJECT, self.METRIC_NAME, self.FILTER, ''))
def test_update_w_alternate_client(self):
client1 = _Client(project=self.PROJECT)
client2 = _Client(project=self.PROJECT)
api = client2.metrics_api = _DummyMetricsAPI()
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1,
description=self.DESCRIPTION)
metric.update(client=client2)
self.assertEqual(
api._metric_update_called_with,
(self.PROJECT, self.METRIC_NAME, self.FILTER, self.DESCRIPTION))
def test_delete_w_bound_client(self):
client = _Client(project=self.PROJECT)
api = client.metrics_api = _DummyMetricsAPI()
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client)
metric.delete()
self.assertEqual(api._metric_delete_called_with,
(self.PROJECT, self.METRIC_NAME))
def test_delete_w_alternate_client(self):
client1 = _Client(project=self.PROJECT)
client2 = _Client(project=self.PROJECT)
api = client2.metrics_api = _DummyMetricsAPI()
metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1)
metric.delete(client=client2)
self.assertEqual(api._metric_delete_called_with,
(self.PROJECT, self.METRIC_NAME))
class _Client(object):
def __init__(self, project):
self.project = project
class _DummyMetricsAPI(object):
def metric_create(self, project, metric_name, filter_, description):
self._metric_create_called_with = (
project, metric_name, filter_, description)
def metric_get(self, project, metric_name):
from gcloud.exceptions import NotFound
self._metric_get_called_with = (project, metric_name)
try:
return self._metric_get_response
except AttributeError:
raise NotFound('miss')
def metric_update(self, project, metric_name, filter_, description):
self._metric_update_called_with = (
project, metric_name, filter_, description)
def metric_delete(self, project, metric_name):
self._metric_delete_called_with = (project, metric_name)

View file

@ -0,0 +1,262 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestSink(unittest2.TestCase):
    """Unit tests for ``gcloud.logging.sink.Sink``."""

    PROJECT = 'test-project'
    SINK_NAME = 'sink-name'
    FILTER = 'logName:syslog AND severity>=INFO'
    DESTINATION_URI = 'faux.googleapis.com/destination'

    def _getTargetClass(self):
        """Return the class under test (imported lazily so import
        errors surface as test failures, not collection errors)."""
        from gcloud.logging.sink import Sink
        return Sink

    def _makeOne(self, *args, **kw):
        """Construct an instance of the class under test."""
        return self._getTargetClass()(*args, **kw)

    def test_ctor_defaults(self):
        """Omitted filter/destination default to None."""
        FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        client = _Client(self.PROJECT)
        sink = self._makeOne(self.SINK_NAME, client=client)
        self.assertEqual(sink.name, self.SINK_NAME)
        self.assertEqual(sink.filter_, None)
        self.assertEqual(sink.destination, None)
        self.assertTrue(sink.client is client)
        self.assertEqual(sink.project, self.PROJECT)
        self.assertEqual(sink.full_name, FULL)
        self.assertEqual(sink.path, '/%s' % (FULL,))

    def test_ctor_explicit(self):
        """Explicit filter/destination are stored verbatim."""
        FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        client = _Client(self.PROJECT)
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client)
        self.assertEqual(sink.name, self.SINK_NAME)
        self.assertEqual(sink.filter_, self.FILTER)
        self.assertEqual(sink.destination, self.DESTINATION_URI)
        self.assertTrue(sink.client is client)
        self.assertEqual(sink.project, self.PROJECT)
        self.assertEqual(sink.full_name, FULL)
        self.assertEqual(sink.path, '/%s' % (FULL,))

    def test_from_api_repr_minimal(self):
        """A complete sink resource maps onto the instance attributes."""
        client = _Client(project=self.PROJECT)
        FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        RESOURCE = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        klass = self._getTargetClass()
        sink = klass.from_api_repr(RESOURCE, client=client)
        self.assertEqual(sink.name, self.SINK_NAME)
        self.assertEqual(sink.filter_, self.FILTER)
        self.assertEqual(sink.destination, self.DESTINATION_URI)
        self.assertTrue(sink._client is client)
        self.assertEqual(sink.project, self.PROJECT)
        self.assertEqual(sink.full_name, FULL)

    def test_from_api_repr_w_description(self):
        # NOTE(review): sink resources carry no 'description' field, so
        # this test is currently identical to test_from_api_repr_minimal
        # (likely copy-pasted from the metric tests); kept for coverage
        # parity — consider removing or giving it a distinct resource.
        client = _Client(project=self.PROJECT)
        FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
        RESOURCE = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        klass = self._getTargetClass()
        sink = klass.from_api_repr(RESOURCE, client=client)
        self.assertEqual(sink.name, self.SINK_NAME)
        self.assertEqual(sink.filter_, self.FILTER)
        self.assertEqual(sink.destination, self.DESTINATION_URI)
        self.assertTrue(sink._client is client)
        self.assertEqual(sink.project, self.PROJECT)
        self.assertEqual(sink.full_name, FULL)

    def test_create_w_bound_client(self):
        """create() delegates to the bound client's sinks API."""
        client = _Client(project=self.PROJECT)
        api = client.sinks_api = _DummySinksAPI()
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client)
        sink.create()
        self.assertEqual(
            api._sink_create_called_with,
            (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI))

    def test_create_w_alternate_client(self):
        """create(client=...) uses the override, not the bound client."""
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client1)
        api = client2.sinks_api = _DummySinksAPI()
        sink.create(client=client2)
        self.assertEqual(
            api._sink_create_called_with,
            (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI))

    def test_exists_miss_w_bound_client(self):
        """exists() returns False when the API raises NotFound."""
        client = _Client(project=self.PROJECT)
        api = client.sinks_api = _DummySinksAPI()
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client)
        self.assertFalse(sink.exists())
        self.assertEqual(api._sink_get_called_with,
                         (self.PROJECT, self.SINK_NAME))

    def test_exists_hit_w_alternate_client(self):
        """exists(client=...) returns True when the API finds the sink."""
        RESOURCE = {
            'name': self.SINK_NAME,
            'filter': self.FILTER,
            'destination': self.DESTINATION_URI,
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.sinks_api = _DummySinksAPI()
        api._sink_get_response = RESOURCE
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client1)
        self.assertTrue(sink.exists(client=client2))
        self.assertEqual(api._sink_get_called_with,
                         (self.PROJECT, self.SINK_NAME))

    def test_reload_w_bound_client(self):
        """reload() overwrites local filter/destination from the server."""
        # Bugfix: NEW_FILTER previously equaled self.FILTER, so the
        # assertion below could not detect a reload() that failed to
        # update the filter.  Use a genuinely different value.
        NEW_FILTER = 'logName:syslog AND severity>=WARNING'
        NEW_DESTINATION_URI = 'faux.googleapis.com/other'
        RESOURCE = {
            'name': self.SINK_NAME,
            'filter': NEW_FILTER,
            'destination': NEW_DESTINATION_URI,
        }
        client = _Client(project=self.PROJECT)
        api = client.sinks_api = _DummySinksAPI()
        api._sink_get_response = RESOURCE
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client)
        sink.reload()
        self.assertEqual(sink.filter_, NEW_FILTER)
        self.assertEqual(sink.destination, NEW_DESTINATION_URI)
        self.assertEqual(api._sink_get_called_with,
                         (self.PROJECT, self.SINK_NAME))

    def test_reload_w_alternate_client(self):
        """reload(client=...) pulls new state through the override."""
        # Bugfix: as above, NEW_FILTER must differ from self.FILTER for
        # the filter_ assertion to be meaningful.
        NEW_FILTER = 'logName:syslog AND severity>=WARNING'
        NEW_DESTINATION_URI = 'faux.googleapis.com/other'
        RESOURCE = {
            'name': self.SINK_NAME,
            'filter': NEW_FILTER,
            'destination': NEW_DESTINATION_URI,
        }
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.sinks_api = _DummySinksAPI()
        api._sink_get_response = RESOURCE
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client1)
        sink.reload(client=client2)
        self.assertEqual(sink.filter_, NEW_FILTER)
        self.assertEqual(sink.destination, NEW_DESTINATION_URI)
        self.assertEqual(api._sink_get_called_with,
                         (self.PROJECT, self.SINK_NAME))

    def test_update_w_bound_client(self):
        """update() pushes current state through the bound client."""
        client = _Client(project=self.PROJECT)
        api = client.sinks_api = _DummySinksAPI()
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client)
        sink.update()
        self.assertEqual(
            api._sink_update_called_with,
            (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI))

    def test_update_w_alternate_client(self):
        """update(client=...) pushes current state through the override."""
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.sinks_api = _DummySinksAPI()
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client1)
        sink.update(client=client2)
        self.assertEqual(
            api._sink_update_called_with,
            (self.PROJECT, self.SINK_NAME, self.FILTER, self.DESTINATION_URI))

    def test_delete_w_bound_client(self):
        """delete() delegates to the bound client's sinks API."""
        client = _Client(project=self.PROJECT)
        api = client.sinks_api = _DummySinksAPI()
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client)
        sink.delete()
        self.assertEqual(api._sink_delete_called_with,
                         (self.PROJECT, self.SINK_NAME))

    def test_delete_w_alternate_client(self):
        """delete(client=...) delegates to the override client."""
        client1 = _Client(project=self.PROJECT)
        client2 = _Client(project=self.PROJECT)
        api = client2.sinks_api = _DummySinksAPI()
        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
                             client=client1)
        sink.delete(client=client2)
        self.assertEqual(api._sink_delete_called_with,
                         (self.PROJECT, self.SINK_NAME))
class _Client(object):
def __init__(self, project):
self.project = project
class _DummySinksAPI(object):
def sink_create(self, project, sink_name, filter_, destination):
self._sink_create_called_with = (
project, sink_name, filter_, destination)
def sink_get(self, project, sink_name):
from gcloud.exceptions import NotFound
self._sink_get_called_with = (project, sink_name)
try:
return self._sink_get_response
except AttributeError:
raise NotFound('miss')
def sink_update(self, project, sink_name, filter_, destination):
self._sink_update_called_with = (
project, sink_name, filter_, destination)
def sink_delete(self, project, sink_name):
self._sink_delete_called_with = (project, sink_name)