Updated DB_Helper by adding firebase methods.

This commit is contained in:
Batuhan Berk Başoğlu 2020-10-05 16:53:40 -04:00
parent 485cc3bbba
commit c82121d036
1810 changed files with 537281 additions and 1 deletion

View file

@@ -0,0 +1,45 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Monitoring API wrapper."""
from gcloud.monitoring.client import Client
from gcloud.monitoring.connection import Connection
from gcloud.monitoring.label import LabelDescriptor
from gcloud.monitoring.label import LabelValueType
from gcloud.monitoring.metric import Metric
from gcloud.monitoring.metric import MetricDescriptor
from gcloud.monitoring.metric import MetricKind
from gcloud.monitoring.metric import ValueType
from gcloud.monitoring.query import Aligner
from gcloud.monitoring.query import Query
from gcloud.monitoring.query import Reducer
from gcloud.monitoring.resource import Resource
from gcloud.monitoring.resource import ResourceDescriptor
from gcloud.monitoring.timeseries import Point
from gcloud.monitoring.timeseries import TimeSeries
# Names re-exported as the public surface of the ``gcloud.monitoring`` package.
__all__ = (
    'Client',
    'Connection',
    'LabelDescriptor', 'LabelValueType',
    'Metric', 'MetricDescriptor', 'MetricKind', 'ValueType',
    'Aligner', 'Query', 'Reducer',
    'Resource', 'ResourceDescriptor',
    'Point', 'TimeSeries',
    'SCOPE',
)

# Convenience alias for the OAuth2 scopes required by the Monitoring API.
SCOPE = Connection.SCOPE

View file

@@ -0,0 +1,116 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time series as :mod:`pandas` dataframes."""
import itertools
# Well-known resource labels that sort ahead of all other resource labels
# when building a default dataframe column header (see
# _sorted_resource_labels below).
TOP_RESOURCE_LABELS = (
    'project_id',
    'aws_account',
    'location',
    'region',
    'zone',
)
def _build_dataframe(time_series_iterable,
label=None, labels=None): # pragma: NO COVER
"""Build a :mod:`pandas` dataframe out of time series.
:type time_series_iterable:
iterable over :class:`~gcloud.monitoring.timeseries.TimeSeries`
:param time_series_iterable:
An iterable (e.g., a query object) yielding time series.
:type label: string or None
:param label:
The label name to use for the dataframe header. This can be the name
of a resource label or metric label (e.g., ``"instance_name"``), or
the string ``"resource_type"``.
:type labels: list of strings, or None
:param labels:
A list or tuple of label names to use for the dataframe header.
If more than one label name is provided, the resulting dataframe
will have a multi-level column header.
Specifying neither ``label`` or ``labels`` results in a dataframe
with a multi-level column header including the resource type and
all available resource and metric labels.
Specifying both ``label`` and ``labels`` is an error.
:rtype: :class:`pandas.DataFrame`
:returns: A dataframe where each column represents one time series.
"""
import pandas # pylint: disable=import-error
if labels is not None:
if label is not None:
raise ValueError('Cannot specify both "label" and "labels".')
elif not labels:
raise ValueError('"labels" must be non-empty or None.')
columns = []
headers = []
for time_series in time_series_iterable:
pandas_series = pandas.Series(
data=[point.value for point in time_series.points],
index=[point.end_time for point in time_series.points],
)
columns.append(pandas_series)
headers.append(time_series.header())
# Implement a smart default of using all available labels.
if label is None and labels is None:
resource_labels = set(itertools.chain.from_iterable(
header.resource.labels for header in headers))
metric_labels = set(itertools.chain.from_iterable(
header.metric.labels for header in headers))
labels = (['resource_type'] +
_sorted_resource_labels(resource_labels) +
sorted(metric_labels))
# Assemble the columns into a DataFrame.
dataframe = pandas.DataFrame.from_records(columns).T
# Convert the timestamp strings into a DatetimeIndex.
dataframe.index = pandas.to_datetime(dataframe.index)
# Build a multi-level stack of column headers. Some labels may
# be undefined for some time series.
levels = []
for key in labels or [label]:
level = [header.labels.get(key, '') for header in headers]
levels.append(level)
# Build a column Index or MultiIndex. Do not include level names
# in the column header if the user requested a single-level header
# by specifying "label".
dataframe.columns = pandas.MultiIndex.from_arrays(
levels,
names=labels or None)
# Sort the rows just in case (since the API doesn't guarantee the
# ordering), and sort the columns lexicographically.
return dataframe.sort_index(axis=0).sort_index(axis=1)
def _sorted_resource_labels(labels):
    """Sort label names, putting well-known resource labels first.

    Names listed in ``TOP_RESOURCE_LABELS`` come first, in that canonical
    order; all remaining names follow in lexicographic order.
    """
    well_known = [name for name in TOP_RESOURCE_LABELS if name in labels]
    others = sorted(name for name in labels
                    if name not in TOP_RESOURCE_LABELS)
    return well_known + others

View file

@@ -0,0 +1,280 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the `Google Monitoring API (V3)`_.
Example::
>>> from gcloud import monitoring
>>> client = monitoring.Client()
>>> query = client.query(minutes=5)
>>> print(query.as_dataframe()) # Requires pandas.
At present, the client supports querying of time series, metric descriptors,
and monitored resource descriptors.
.. _Google Monitoring API (V3): https://cloud.google.com/monitoring/api/v3/
"""
from gcloud.client import JSONClient
from gcloud.monitoring.connection import Connection
from gcloud.monitoring.metric import MetricDescriptor
from gcloud.monitoring.metric import MetricKind
from gcloud.monitoring.metric import ValueType
from gcloud.monitoring.query import Query
from gcloud.monitoring.resource import ResourceDescriptor
class Client(JSONClient):
    """Client bundling the configuration needed for Monitoring API requests.

    :type project: string
    :param project: The target project. If not passed, falls back to the
        default inferred from the environment.

    :type credentials: :class:`oauth2client.client.OAuth2Credentials` or
        :class:`NoneType`
    :param credentials: The OAuth2 Credentials to use for the connection
        owned by this client. If not passed (and if no ``http`` object is
        passed), falls back to the default inferred from the environment.

    :type http: :class:`httplib2.Http` or class that defines ``request()``
    :param http: An optional HTTP object to make requests. If not passed,
        an ``http`` object is created that is bound to the ``credentials``
        for the current object.
    """

    # Connection class used by JSONClient machinery for API requests.
    _connection_class = Connection

    def query(self,
              metric_type=Query.DEFAULT_METRIC_TYPE,
              end_time=None,
              days=0, hours=0, minutes=0):
        """Construct a query object for retrieving metric data.

        Example::

            >>> query = client.query(minutes=5)
            >>> print(query.as_dataframe())  # Requires pandas.

        :type metric_type: string
        :param metric_type: The metric type name. Defaults to
            :data:`Query.DEFAULT_METRIC_TYPE
            <gcloud.monitoring.query.Query.DEFAULT_METRIC_TYPE>`; note that
            this default is provided only for demonstration purposes and is
            subject to change. See the `supported metrics`_.

        :type end_time: :class:`datetime.datetime` or None
        :param end_time: The end time (inclusive) of the time interval for
            which results should be returned, as a datetime object. Defaults
            to the start of the current minute. The exclusive start time is
            computed by subtracting the combined ``days``, ``hours``, and
            ``minutes`` duration from the end time. Both the end time and
            the duration may be omitted here, in which case
            :meth:`~gcloud.monitoring.query.Query.select_interval` must be
            called before the query is executed.

        :type days: integer
        :param days: The number of days in the time interval.

        :type hours: integer
        :param hours: The number of hours in the time interval.

        :type minutes: integer
        :param minutes: The number of minutes in the time interval.

        :rtype: :class:`~gcloud.monitoring.query.Query`
        :returns: The query object.

        :raises: :exc:`ValueError` if ``end_time`` is specified but
            ``days``, ``hours``, and ``minutes`` are all zero. To specify
            a point in time, use
            :meth:`~gcloud.monitoring.query.Query.select_interval`.

        .. _supported metrics:
            https://cloud.google.com/monitoring/api/metrics
        """
        return Query(self, metric_type,
                     end_time=end_time,
                     days=days, hours=hours, minutes=minutes)

    def metric_descriptor(self, type_,
                          metric_kind=MetricKind.METRIC_KIND_UNSPECIFIED,
                          value_type=ValueType.VALUE_TYPE_UNSPECIFIED,
                          labels=(), unit='', description='',
                          display_name=''):
        """Construct a metric descriptor object.

        Metric descriptors specify the schema for a particular metric type.
        This factory is typically combined with
        :meth:`~gcloud.monitoring.metric.MetricDescriptor.create` to define
        custom metrics::

            >>> descriptor = client.metric_descriptor(
            ...     'custom.googleapis.com/my_metric',
            ...     metric_kind=MetricKind.GAUGE,
            ...     value_type=ValueType.DOUBLE,
            ...     description='This is a simple example of a custom metric.')
            >>> descriptor.create()

        A custom metric parameterized by a metric label::

            >>> label = LabelDescriptor('response_code', LabelValueType.INT64,
            ...                         description='HTTP status code')
            >>> descriptor = client.metric_descriptor(
            ...     'custom.googleapis.com/my_app/response_count',
            ...     metric_kind=MetricKind.CUMULATIVE,
            ...     value_type=ValueType.INT64,
            ...     labels=[label],
            ...     description='Cumulative count of HTTP responses.')
            >>> descriptor.create()

        :type type_: string
        :param type_: The metric type including a DNS name prefix, e.g.
            ``"custom.googleapis.com/my_metric"``.

        :type metric_kind: string
        :param metric_kind: The kind of measurement: one of
            :data:`MetricKind.GAUGE`, :data:`MetricKind.DELTA`, or
            :data:`MetricKind.CUMULATIVE`.
            See :class:`~gcloud.monitoring.metric.MetricKind`.

        :type value_type: string
        :param value_type: The value type of the metric: one of
            :data:`ValueType.BOOL`, :data:`ValueType.INT64`,
            :data:`ValueType.DOUBLE`, :data:`ValueType.STRING`, or
            :data:`ValueType.DISTRIBUTION`. See :class:`ValueType`.

        :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor`
        :param labels: Zero or more label descriptors specifying the labels
            used to identify a specific instance of this metric.

        :type unit: string
        :param unit: An optional unit in which the metric value is reported.

        :type description: string
        :param description: An optional detailed description of the metric.

        :type display_name: string
        :param display_name: An optional concise name for the metric.
        """
        return MetricDescriptor(
            self, type_,
            metric_kind=metric_kind,
            value_type=value_type,
            labels=labels,
            unit=unit,
            description=description,
            display_name=display_name,
        )

    def fetch_metric_descriptor(self, metric_type):
        """Look up a metric descriptor by type.

        Example::

            >>> METRIC = 'compute.googleapis.com/instance/cpu/utilization'
            >>> print(client.fetch_metric_descriptor(METRIC))

        :type metric_type: string
        :param metric_type: The metric type name.

        :rtype: :class:`~gcloud.monitoring.metric.MetricDescriptor`
        :returns: The metric descriptor instance.

        :raises: :class:`gcloud.exceptions.NotFound` if the metric
            descriptor is not found.
        """
        return MetricDescriptor._fetch(self, metric_type)

    def list_metric_descriptors(self, filter_string=None, type_prefix=None):
        """List all metric descriptors for the project.

        Examples::

            >>> for descriptor in client.list_metric_descriptors():
            ...     print(descriptor.type)

            >>> for descriptor in client.list_metric_descriptors(
            ...         type_prefix='custom.'):
            ...     print(descriptor.type)

        :type filter_string: string or None
        :param filter_string: An optional filter expression describing the
            metric descriptors to be returned. See the
            `filter documentation`_.

        :type type_prefix: string or None
        :param type_prefix: An optional prefix constraining the selected
            metric types. This adds
            ``metric.type = starts_with("<prefix>")`` to the filter.

        :rtype: list of :class:`~gcloud.monitoring.metric.MetricDescriptor`
        :returns: A list of metric descriptor instances.

        .. _filter documentation:
            https://cloud.google.com/monitoring/api/v3/filters
        """
        return MetricDescriptor._list(self, filter_string,
                                      type_prefix=type_prefix)

    def fetch_resource_descriptor(self, resource_type):
        """Look up a monitored resource descriptor by type.

        Example::

            >>> print(client.fetch_resource_descriptor('gce_instance'))

        :type resource_type: string
        :param resource_type: The resource type name.

        :rtype: :class:`~gcloud.monitoring.resource.ResourceDescriptor`
        :returns: The resource descriptor instance.

        :raises: :class:`gcloud.exceptions.NotFound` if the resource
            descriptor is not found.
        """
        return ResourceDescriptor._fetch(self, resource_type)

    def list_resource_descriptors(self, filter_string=None):
        """List all monitored resource descriptors for the project.

        Example::

            >>> for descriptor in client.list_resource_descriptors():
            ...     print(descriptor.type)

        :type filter_string: string or None
        :param filter_string: An optional filter expression describing the
            resource descriptors to be returned. See the
            `filter documentation`_.

        :rtype: list of
            :class:`~gcloud.monitoring.resource.ResourceDescriptor`
        :returns: A list of resource descriptor instances.

        .. _filter documentation:
            https://cloud.google.com/monitoring/api/v3/filters
        """
        return ResourceDescriptor._list(self, filter_string)

View file

@@ -0,0 +1,47 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create / interact with gcloud monitoring connections."""
from gcloud import connection as base_connection
class Connection(base_connection.JSONConnection):
    """A connection to Google Monitoring via the JSON REST API.

    :type credentials: :class:`oauth2client.client.OAuth2Credentials`
    :param credentials: (Optional) The OAuth2 Credentials to use for this
                        connection.

    :type http: :class:`httplib2.Http` or class that defines ``request()``
    :param http: (Optional) HTTP object to make requests.

    :type api_base_url: string
    :param api_base_url: The base of the API call URL. Defaults to the value
                         :attr:`Connection.API_BASE_URL`.
    """

    API_BASE_URL = 'https://monitoring.googleapis.com'
    """The base of the API call URL."""

    API_VERSION = 'v3'
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}'
    """A template for the URL of a particular API call."""

    # OAuth2 scopes requested when authenticating against the service.
    SCOPE = ('https://www.googleapis.com/auth/monitoring.read',
             'https://www.googleapis.com/auth/monitoring',
             'https://www.googleapis.com/auth/cloud-platform')
    """The scopes required for authenticating as a Monitoring consumer."""

View file

@@ -0,0 +1,99 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Label Descriptors for the `Google Monitoring API (V3)`_.
.. _Google Monitoring API (V3):
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/LabelDescriptor
"""
class LabelValueType(object):
    """Allowed values for the `type of a label`_.

    .. _type of a label:
        https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
        LabelDescriptor#ValueType
    """

    STRING = 'STRING'
    BOOL = 'BOOL'
    INT64 = 'INT64'


class LabelDescriptor(object):
    """Schema specification and documentation for a single label.

    :type key: string
    :param key: The name of the label.

    :type value_type: string
    :param value_type:
        The type of the label. It must be one of
        :data:`LabelValueType.STRING`, :data:`LabelValueType.BOOL`, or
        :data:`LabelValueType.INT64`. See :class:`LabelValueType`.

    :type description: string
    :param description: A human-readable description for the label.
    """

    def __init__(self, key, value_type=LabelValueType.STRING, description=''):
        self.key = key
        self.value_type = value_type
        self.description = description

    @classmethod
    def _from_dict(cls, info):
        """Construct a label descriptor from the parsed JSON representation.

        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.

        :rtype: :class:`LabelDescriptor`
        :returns: A label descriptor.
        """
        return cls(
            info['key'],
            info.get('valueType', LabelValueType.STRING),
            info.get('description', ''),
        )

    def _to_dict(self):
        """Build a dictionary ready to be serialized to the JSON wire format.

        The ``description`` entry is omitted when empty.

        :rtype: dict
        :returns: A dictionary.
        """
        info = {
            'key': self.key,
            'valueType': self.value_type,
        }
        if self.description:
            info['description'] = self.description
        return info

    def __eq__(self, other):
        # Fix: comparing against a non-LabelDescriptor (e.g. an int) used
        # to raise AttributeError on ``other.__dict__``. Returning
        # NotImplemented lets Python fall back to its default handling.
        if not isinstance(other, LabelDescriptor):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __repr__(self):
        return (
            'LabelDescriptor(key={key!r}, value_type={value_type!r},'
            ' description={description!r})'
        ).format(**self.__dict__)

View file

@@ -0,0 +1,345 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Metric Descriptors for the `Google Monitoring API (V3)`_.
.. _Google Monitoring API (V3):
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.metricDescriptors
"""
import collections
from gcloud.monitoring.label import LabelDescriptor
class MetricKind(object):
    """Choices for the `kind of measurement`_.

    .. _kind of measurement:
        https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
        projects.metricDescriptors#MetricKind
    """

    METRIC_KIND_UNSPECIFIED = 'METRIC_KIND_UNSPECIFIED'
    """.. note:: An unspecified kind is not allowed in metric descriptors."""

    GAUGE = 'GAUGE'
    DELTA = 'DELTA'
    CUMULATIVE = 'CUMULATIVE'


class ValueType(object):
    """Choices for the `metric value type`_.

    .. _metric value type:
        https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
        projects.metricDescriptors#ValueType
    """

    VALUE_TYPE_UNSPECIFIED = 'VALUE_TYPE_UNSPECIFIED'
    """.. note:: An unspecified type is not allowed in metric descriptors."""

    BOOL = 'BOOL'
    INT64 = 'INT64'
    DOUBLE = 'DOUBLE'
    STRING = 'STRING'
    DISTRIBUTION = 'DISTRIBUTION'


class MetricDescriptor(object):
    """Specification of a metric type and its schema.

    Prefer the :meth:`~gcloud.monitoring.client.Client.metric_descriptor`
    factory of :class:`~gcloud.monitoring.client.Client` over calling this
    constructor directly.

    :type client: :class:`gcloud.monitoring.client.Client`
    :param client: A client for operating on the metric descriptor.

    :type type_: string
    :param type_: The metric type including a DNS name prefix, e.g.
        ``"compute.googleapis.com/instance/cpu/utilization"``.

    :type metric_kind: string
    :param metric_kind: The kind of measurement: one of
        :data:`MetricKind.GAUGE`, :data:`MetricKind.DELTA`, or
        :data:`MetricKind.CUMULATIVE`. See :class:`MetricKind`.

    :type value_type: string
    :param value_type: The value type of the metric: one of
        :data:`ValueType.BOOL`, :data:`ValueType.INT64`,
        :data:`ValueType.DOUBLE`, :data:`ValueType.STRING`, or
        :data:`ValueType.DISTRIBUTION`. See :class:`ValueType`.

    :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor`
    :param labels: Zero or more label descriptors specifying the labels
        used to identify a specific instance of this metric.

    :type unit: string
    :param unit: An optional unit in which the metric value is reported.

    :type description: string
    :param description: An optional detailed description of the metric.

    :type display_name: string
    :param display_name: An optional concise name for the metric.

    :type name: string or None
    :param name: The "resource name" of the metric descriptor, e.g.
        ``"projects/<project_id>/metricDescriptors/<type>"``. Always set on
        descriptors retrieved from the service; omit it when constructing an
        instance for the purpose of creating a new metric descriptor.
    """

    def __init__(self, client, type_,
                 metric_kind=MetricKind.METRIC_KIND_UNSPECIFIED,
                 value_type=ValueType.VALUE_TYPE_UNSPECIFIED,
                 labels=(),
                 unit='', description='', display_name='',
                 name=None):
        self.client = client
        self.type = type_
        self.metric_kind = metric_kind
        self.value_type = value_type
        self.labels = labels
        self.unit = unit
        self.description = description
        self.display_name = display_name
        self.name = name

    def create(self):
        """Create a new metric descriptor based on this object.

        Example::

            >>> descriptor = client.metric_descriptor(
            ...     'custom.googleapis.com/my_metric',
            ...     metric_kind=MetricKind.GAUGE,
            ...     value_type=ValueType.DOUBLE,
            ...     description='This is a simple example of a custom metric.')
            >>> descriptor.create()

        The metric kind must not be
        :data:`MetricKind.METRIC_KIND_UNSPECIFIED`, and the value type must
        not be :data:`ValueType.VALUE_TYPE_UNSPECIFIED`.

        The ``name`` attribute is ignored when building the request; all
        attributes are then overwritten from the response (normally only
        ``name`` actually changes).
        """
        path = '/projects/{project}/metricDescriptors/'.format(
            project=self.client.project)
        response = self.client.connection.api_request(
            method='POST', path=path, data=self._to_dict())
        self._init_from_dict(response)

    def delete(self):
        """Delete the metric descriptor identified by this object.

        Example::

            >>> descriptor = client.metric_descriptor(
            ...     'custom.googleapis.com/my_metric')
            >>> descriptor.delete()

        Only the ``client`` and ``type`` attributes are used.
        """
        path = '/projects/{project}/metricDescriptors/{type}'.format(
            project=self.client.project, type=self.type)
        self.client.connection.api_request(method='DELETE', path=path)

    @classmethod
    def _fetch(cls, client, metric_type):
        """Look up a metric descriptor by type.

        :type client: :class:`gcloud.monitoring.client.Client`
        :param client: The client to use.

        :type metric_type: string
        :param metric_type: The metric type name.

        :rtype: :class:`MetricDescriptor`
        :returns: The metric descriptor instance.

        :raises: :class:`gcloud.exceptions.NotFound` if the metric
            descriptor is not found.
        """
        path = '/projects/{project}/metricDescriptors/{type}'.format(
            project=client.project, type=metric_type)
        info = client.connection.api_request(method='GET', path=path)
        return cls._from_dict(client, info)

    @classmethod
    def _list(cls, client, filter_string=None, type_prefix=None):
        """List all metric descriptors for the project.

        :type client: :class:`gcloud.monitoring.client.Client`
        :param client: The client to use.

        :type filter_string: string or None
        :param filter_string: An optional filter expression describing the
            metric descriptors to be returned. See the
            `filter documentation`_.

        :type type_prefix: string or None
        :param type_prefix: An optional prefix constraining the selected
            metric types. This adds
            ``metric.type = starts_with("<prefix>")`` to the filter.

        :rtype: list of :class:`MetricDescriptor`
        :returns: A list of metric descriptor instances.

        .. _filter documentation:
            https://cloud.google.com/monitoring/api/v3/filters
        """
        path = '/projects/{project}/metricDescriptors/'.format(
            project=client.project)

        filter_parts = []
        if filter_string is not None:
            filter_parts.append(filter_string)
        if type_prefix is not None:
            filter_parts.append('metric.type = starts_with("{prefix}")'.format(
                prefix=type_prefix))

        # Follow nextPageToken until the listing is exhausted.
        results = []
        token = None
        while True:
            query_params = {}
            if filter_parts:
                query_params['filter'] = ' AND '.join(filter_parts)
            if token is not None:
                query_params['pageToken'] = token
            response = client.connection.api_request(
                method='GET', path=path, query_params=query_params)
            results.extend(cls._from_dict(client, info)
                           for info in response.get('metricDescriptors', ()))
            token = response.get('nextPageToken')
            if not token:
                return results

    @classmethod
    def _from_dict(cls, client, info):
        """Construct a metric descriptor from the parsed JSON representation.

        :type client: :class:`gcloud.monitoring.client.Client`
        :param client: A client to be included in the returned object.

        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.

        :rtype: :class:`MetricDescriptor`
        :returns: A metric descriptor.
        """
        descriptor = cls(client, None)
        descriptor._init_from_dict(info)
        return descriptor

    def _init_from_dict(self, info):
        """Initialize attributes from the parsed JSON representation.

        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.
        """
        self.name = info['name']
        self.type = info['type']
        self.labels = tuple(LabelDescriptor._from_dict(label)
                            for label in info.get('labels', []))
        self.metric_kind = info['metricKind']
        self.value_type = info['valueType']
        self.unit = info.get('unit', '')
        self.description = info.get('description', '')
        self.display_name = info.get('displayName', '')

    def _to_dict(self):
        """Build a dictionary ready to be serialized to the JSON wire format.

        Optional attributes are included only when non-empty; ``name`` is
        never included.

        :rtype: dict
        :returns: A dictionary.
        """
        info = {
            'type': self.type,
            'metricKind': self.metric_kind,
            'valueType': self.value_type,
        }
        if self.labels:
            info['labels'] = [label._to_dict() for label in self.labels]
        for key, value in (('unit', self.unit),
                           ('description', self.description),
                           ('displayName', self.display_name)):
            if value:
                info[key] = value
        return info

    def __repr__(self):
        template = ('<MetricDescriptor:\n'
                    ' name={name!r},\n'
                    ' type={type!r},\n'
                    ' metric_kind={metric_kind!r}, value_type={value_type!r},\n'
                    ' labels={labels!r},\n'
                    ' display_name={display_name!r}, unit={unit!r},\n'
                    ' description={description!r}>')
        return template.format(**self.__dict__)
class Metric(collections.namedtuple('Metric', 'type labels')):
    """A specific metric identified by specifying values for all labels.

    :type type: string
    :param type: The metric type name.

    :type labels: dict
    :param labels: A mapping from label names to values for all labels
        enumerated in the associated :class:`MetricDescriptor`.
    """

    __slots__ = ()

    @classmethod
    def _from_dict(cls, info):
        """Build a metric object from its parsed JSON representation.

        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.

        :rtype: :class:`Metric`
        :returns: A metric object.
        """
        labels = info.get('labels', {})
        return cls(type=info['type'], labels=labels)

View file

@@ -0,0 +1,673 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time series query for the `Google Monitoring API (V3)`_.
.. _Google Monitoring API (V3):
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.timeSeries/list
"""
import copy
import datetime
import itertools
import six
from gcloud.monitoring._dataframe import _build_dataframe
from gcloud.monitoring.timeseries import TimeSeries
# Current-UTC-time hook; tests substitute a fixed clock here.
_UTCNOW = datetime.datetime.utcnow  # To be replaced by tests.
class Aligner(object):
    """Allowed values for the `supported aligners`_."""

    # No per-series alignment.
    ALIGN_NONE = 'ALIGN_NONE'

    # Alignment over consecutive points.
    ALIGN_DELTA = 'ALIGN_DELTA'
    ALIGN_RATE = 'ALIGN_RATE'
    ALIGN_INTERPOLATE = 'ALIGN_INTERPOLATE'
    ALIGN_NEXT_OLDER = 'ALIGN_NEXT_OLDER'

    # Alignment by aggregating points within each period.
    ALIGN_MIN = 'ALIGN_MIN'
    ALIGN_MAX = 'ALIGN_MAX'
    ALIGN_MEAN = 'ALIGN_MEAN'
    ALIGN_COUNT = 'ALIGN_COUNT'
    ALIGN_SUM = 'ALIGN_SUM'
    ALIGN_STDDEV = 'ALIGN_STDDEV'
    ALIGN_COUNT_TRUE = 'ALIGN_COUNT_TRUE'
    ALIGN_FRACTION_TRUE = 'ALIGN_FRACTION_TRUE'
class Reducer(object):
    """Allowed values for the `supported reducers`_."""

    # No cross-series reduction.
    REDUCE_NONE = 'REDUCE_NONE'

    # Statistical reductions across time series.
    REDUCE_MEAN = 'REDUCE_MEAN'
    REDUCE_MIN = 'REDUCE_MIN'
    REDUCE_MAX = 'REDUCE_MAX'
    REDUCE_SUM = 'REDUCE_SUM'
    REDUCE_STDDEV = 'REDUCE_STDDEV'

    # Counting reductions.
    REDUCE_COUNT = 'REDUCE_COUNT'
    REDUCE_COUNT_TRUE = 'REDUCE_COUNT_TRUE'
    REDUCE_FRACTION_TRUE = 'REDUCE_FRACTION_TRUE'

    # Percentile reductions.
    REDUCE_PERCENTILE_99 = 'REDUCE_PERCENTILE_99'
    REDUCE_PERCENTILE_95 = 'REDUCE_PERCENTILE_95'
    REDUCE_PERCENTILE_50 = 'REDUCE_PERCENTILE_50'
    REDUCE_PERCENTILE_05 = 'REDUCE_PERCENTILE_05'
class Query(object):
    """Query object for retrieving metric data.
    The preferred way to construct a query object is using the
    :meth:`~gcloud.monitoring.client.Client.query` method
    of the :class:`~gcloud.monitoring.client.Client` class.
    :type client: :class:`gcloud.monitoring.client.Client`
    :param client: The client to use.
    :type metric_type: string
    :param metric_type: The metric type name. The default value is
        :data:`Query.DEFAULT_METRIC_TYPE
        <gcloud.monitoring.query.Query.DEFAULT_METRIC_TYPE>`,
        but please note that this default value is provided only for
        demonstration purposes and is subject to change. See the
        `supported metrics`_.
    :type end_time: :class:`datetime.datetime` or None
    :param end_time: The end time (inclusive) of the time interval
        for which results should be returned, as a datetime object.
        The default is the start of the current minute.
        The start time (exclusive) is determined by combining the
        values of ``days``, ``hours``, and ``minutes``, and
        subtracting the resulting duration from the end time.
        It is also allowed to omit the end time and duration here,
        in which case
        :meth:`~gcloud.monitoring.query.Query.select_interval`
        must be called before the query is executed.
    :type days: integer
    :param days: The number of days in the time interval.
    :type hours: integer
    :param hours: The number of hours in the time interval.
    :type minutes: integer
    :param minutes: The number of minutes in the time interval.
    :raises: :exc:`ValueError` if ``end_time`` is specified but
        ``days``, ``hours``, and ``minutes`` are all zero.
        If you really want to specify a point in time, use
        :meth:`~gcloud.monitoring.query.Query.select_interval`.
    .. _supported metrics: https://cloud.google.com/monitoring/api/metrics
    """
    # Demonstration-only default metric type (see the class docstring).
    DEFAULT_METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization'
    def __init__(self, client,
                 metric_type=DEFAULT_METRIC_TYPE,
                 end_time=None, days=0, hours=0, minutes=0):
        start_time = None
        if days or hours or minutes:
            if end_time is None:
                # Default the end time to the start of the current minute.
                end_time = _UTCNOW().replace(second=0, microsecond=0)
            start_time = end_time - datetime.timedelta(days=days,
                                                       hours=hours,
                                                       minutes=minutes)
        elif end_time is not None:
            # An end time with no duration would be a degenerate interval.
            raise ValueError('Non-zero duration required for time interval.')
        self._client = client
        self._end_time = end_time
        self._start_time = start_time
        # All filtering criteria are accumulated in a single _Filter helper.
        self._filter = _Filter(metric_type)
        # Aggregation settings; None/() means "no aggregation requested".
        self._per_series_aligner = None
        self._alignment_period_seconds = None
        self._cross_series_reducer = None
        self._group_by_fields = ()
    def __iter__(self):
        # Iterating the query object executes it with default iter() options.
        return self.iter()
    @property
    def metric_type(self):
        """The metric type name."""
        return self._filter.metric_type
    @property
    def filter(self):
        """The filter string.
        This is constructed from the metric type, the resource type, and
        selectors for the group ID, monitored projects, resource labels,
        and metric labels.
        """
        return str(self._filter)
    def select_interval(self, end_time, start_time=None):
        """Copy the query and set the query time interval.
        Example::
            import datetime
            now = datetime.datetime.utcnow()
            query = query.select_interval(
                end_time=now,
                start_time=now - datetime.timedelta(minutes=5))
        As a convenience, you can alternatively specify the end time and
        an interval duration when you create the query initially.
        :type end_time: :class:`datetime.datetime`
        :param end_time: The end time (inclusive) of the time interval
            for which results should be returned, as a datetime object.
        :type start_time: :class:`datetime.datetime` or None
        :param start_time: The start time (exclusive) of the time interval
            for which results should be returned, as a datetime object.
            If not specified, the interval is a point in time.
        :rtype: :class:`Query`
        :returns: The new query object.
        """
        # Like all select_*() methods, this returns a modified copy; the
        # original query object is never mutated.
        new_query = self.copy()
        new_query._end_time = end_time
        new_query._start_time = start_time
        return new_query
    def select_group(self, group_id):
        """Copy the query and add filtering by group.
        Example::
            query = query.select_group('1234567')
        :type group_id: string
        :param group_id: The ID of a group to filter by.
        :rtype: :class:`Query`
        :returns: The new query object.
        """
        new_query = self.copy()
        new_query._filter.group_id = group_id
        return new_query
    def select_projects(self, *args):
        """Copy the query and add filtering by monitored projects.
        This is only useful if the target project represents a Stackdriver
        account containing the specified monitored projects.
        Examples::
            query = query.select_projects('project-1')
            query = query.select_projects('project-1', 'project-2')
        :type args: tuple
        :param args: Project IDs limiting the resources to be included
            in the query.
        :rtype: :class:`Query`
        :returns: The new query object.
        """
        new_query = self.copy()
        new_query._filter.projects = args
        return new_query
    def select_resources(self, *args, **kwargs):
        """Copy the query and add filtering by resource labels.
        Examples::
            query = query.select_resources(zone='us-central1-a')
            query = query.select_resources(zone_prefix='europe-')
            query = query.select_resources(resource_type='gce_instance')
        A keyword argument ``<label>=<value>`` ordinarily generates a filter
        expression of the form::
            resource.label.<label> = "<value>"
        However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
        you can specify a partial match.
        ``<label>_prefix=<value>`` generates::
            resource.label.<label> = starts_with("<value>")
        ``<label>_suffix=<value>`` generates::
            resource.label.<label> = ends_with("<value>")
        As a special case, ``"resource_type"`` is treated as a special
        pseudo-label corresponding to the filter object ``resource.type``.
        For example, ``resource_type=<value>`` generates::
            resource.type = "<value>"
        See the `defined resource types`_.
        .. note::
            The label ``"instance_name"`` is a metric label,
            not a resource label. You would filter on it using
            ``select_metrics(instance_name=...)``.
        :type args: tuple
        :param args: Raw filter expression strings to include in the
            conjunction. If just one is provided and no keyword arguments
            are provided, it can be a disjunction.
        :type kwargs: dict
        :param kwargs: Label filters to include in the conjunction as
            described above.
        :rtype: :class:`Query`
        :returns: The new query object.
        .. _defined resource types:
            https://cloud.google.com/monitoring/api/v3/monitored-resources
        """
        new_query = self.copy()
        new_query._filter.select_resources(*args, **kwargs)
        return new_query
    def select_metrics(self, *args, **kwargs):
        """Copy the query and add filtering by metric labels.
        Examples::
            query = query.select_metrics(instance_name='myinstance')
            query = query.select_metrics(instance_name_prefix='mycluster-')
        A keyword argument ``<label>=<value>`` ordinarily generates a filter
        expression of the form::
            metric.label.<label> = "<value>"
        However, by adding ``"_prefix"`` or ``"_suffix"`` to the keyword,
        you can specify a partial match.
        ``<label>_prefix=<value>`` generates::
            metric.label.<label> = starts_with("<value>")
        ``<label>_suffix=<value>`` generates::
            metric.label.<label> = ends_with("<value>")
        :type args: tuple
        :param args: Raw filter expression strings to include in the
            conjunction. If just one is provided and no keyword arguments
            are provided, it can be a disjunction.
        :type kwargs: dict
        :param kwargs: Label filters to include in the conjunction as
            described above.
        :rtype: :class:`Query`
        :returns: The new query object.
        """
        new_query = self.copy()
        new_query._filter.select_metrics(*args, **kwargs)
        return new_query
    def align(self, per_series_aligner, seconds=0, minutes=0, hours=0):
        """Copy the query and add temporal alignment.
        If ``per_series_aligner`` is not :data:`Aligner.ALIGN_NONE`, each time
        series will contain data points only on the period boundaries.
        Example::
            query = query.align(Aligner.ALIGN_MEAN, minutes=5)
        It is also possible to specify the aligner as a literal string::
            query = query.align('ALIGN_MEAN', minutes=5)
        :type per_series_aligner: string
        :param per_series_aligner: The approach to be used to align
            individual time series. For example: :data:`Aligner.ALIGN_MEAN`.
            See :class:`Aligner` and the descriptions of the `supported
            aligners`_.
        :type seconds: integer
        :param seconds: The number of seconds in the alignment period.
        :type minutes: integer
        :param minutes: The number of minutes in the alignment period.
        :type hours: integer
        :param hours: The number of hours in the alignment period.
        :rtype: :class:`Query`
        :returns: The new query object.
        .. _supported aligners:
            https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
            projects.timeSeries/list#Aligner
        """
        new_query = self.copy()
        new_query._per_series_aligner = per_series_aligner
        # The API takes the alignment period as a total number of seconds.
        new_query._alignment_period_seconds = seconds + 60 * (minutes +
                                                              60 * hours)
        return new_query
    def reduce(self, cross_series_reducer, *group_by_fields):
        """Copy the query and add cross-series reduction.
        Cross-series reduction combines time series by aggregating their
        data points.
        For example, you could request an aggregated time series for each
        combination of project and zone as follows::
            query = query.reduce(Reducer.REDUCE_MEAN,
                                 'resource.project_id', 'resource.zone')
        :type cross_series_reducer: string
        :param cross_series_reducer:
            The approach to be used to combine time series. For example:
            :data:`Reducer.REDUCE_MEAN`. See :class:`Reducer` and the
            descriptions of the `supported reducers`_.
        :type group_by_fields: strings
        :param group_by_fields:
            Fields to be preserved by the reduction. For example, specifying
            just ``"resource.zone"`` will result in one time series per zone.
            The default is to aggregate all of the time series into just one.
        :rtype: :class:`Query`
        :returns: The new query object.
        .. _supported reducers:
            https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
            projects.timeSeries/list#Reducer
        """
        new_query = self.copy()
        new_query._cross_series_reducer = cross_series_reducer
        new_query._group_by_fields = group_by_fields
        return new_query
    def iter(self, headers_only=False, page_size=None):
        """Yield all time series objects selected by the query.
        Note that the :class:`Query` object itself is an iterable, such that
        the following are equivalent::
            for timeseries in query:
                ...
            for timeseries in query.iter():
                ...
        :type headers_only: boolean
        :param headers_only:
            Whether to omit the point data from the time series objects.
        :type page_size: integer or None
        :param page_size:
            An optional positive number specifying the maximum number of
            points to return per page. This can be used to control how far
            the iterator reads ahead.
        :rtype: iterator over :class:`~gcloud.monitoring.timeseries.TimeSeries`
        :returns: Time series objects, containing points ordered from oldest
            to newest.
        :raises: :exc:`ValueError` if the query time interval has not been
            specified.
        """
        # The following use of groupby() relies on equality comparison
        # of time series as (named) tuples.
        # Fragments of the same series arrive contiguously from
        # _iter_fragments(), which is exactly what groupby() requires.
        for timeseries, fragments in itertools.groupby(
                self._iter_fragments(headers_only, page_size),
                lambda fragment: fragment.header()):
            points = list(itertools.chain.from_iterable(
                fragment.points for fragment in fragments))
            points.reverse()  # Order from oldest to newest.
            yield timeseries.header(points=points)
    def _iter_fragments(self, headers_only=False, page_size=None):
        """Yield all time series fragments selected by the query.
        There may be multiple fragments per time series. These will be
        contiguous.
        The parameters and return value are as for :meth:`Query.iter`.
        """
        if self._end_time is None:
            raise ValueError('Query time interval not specified.')
        path = '/projects/{project}/timeSeries/'.format(
            project=self._client.project)
        page_token = None
        while True:
            # Page through the API results, re-sending the full set of
            # query parameters along with the continuation token.
            params = list(self._build_query_params(
                headers_only=headers_only,
                page_size=page_size,
                page_token=page_token,
            ))
            response = self._client.connection.api_request(
                method='GET',
                path=path,
                query_params=params,
            )
            for info in response.get('timeSeries', ()):
                yield TimeSeries._from_dict(info)
            page_token = response.get('nextPageToken')
            if not page_token:
                break
    def _build_query_params(self, headers_only=False,
                            page_size=None, page_token=None):
        """Yield key-value pairs for the URL query string.
        We use a series of key-value pairs instead of a ``dict`` to allow for
        repeated fields.
        :type headers_only: boolean
        :param headers_only:
            Whether to omit the point data from the
            :class:`~gcloud.monitoring.timeseries.TimeSeries` objects.
        :type page_size: integer or None
        :param page_size: A limit on the number of points to return per page.
        :type page_token: string or None
        :param page_token: A token to continue the retrieval.
        :rtype: iterator over tuples
        :returns:
            Key-value pairs suitable for passing to ``urlencode``.
        """
        yield 'filter', self.filter
        yield 'interval.endTime', _format_timestamp(self._end_time)
        if self._start_time is not None:
            yield 'interval.startTime', _format_timestamp(self._start_time)
        # Aggregation parameters are only emitted when explicitly requested.
        if self._per_series_aligner is not None:
            yield 'aggregation.perSeriesAligner', self._per_series_aligner
        if self._alignment_period_seconds is not None:
            alignment_period = '{period}s'.format(
                period=self._alignment_period_seconds)
            yield 'aggregation.alignmentPeriod', alignment_period
        if self._cross_series_reducer is not None:
            yield ('aggregation.crossSeriesReducer',
                   self._cross_series_reducer)
        for field in self._group_by_fields:
            yield 'aggregation.groupByFields', field
        if headers_only:
            yield 'view', 'HEADERS'
        if page_size is not None:
            yield 'pageSize', page_size
        if page_token is not None:
            yield 'pageToken', page_token
    def as_dataframe(self, label=None, labels=None):
        """Return all the selected time series as a :mod:`pandas` dataframe.
        .. note::
            Use of this method requires that you have :mod:`pandas` installed.
        Examples::
            # Generate a dataframe with a multi-level column header including
            # the resource type and all available resource and metric labels.
            # This can be useful for seeing what labels are available.
            dataframe = query.as_dataframe()
            # Generate a dataframe using a particular label for the column
            # names.
            dataframe = query.as_dataframe(label='instance_name')
            # Generate a dataframe with a multi-level column header.
            dataframe = query.as_dataframe(labels=['zone', 'instance_name'])
            # Generate a dataframe with a multi-level column header, assuming
            # the metric is issued by more than one type of resource.
            dataframe = query.as_dataframe(
                labels=['resource_type', 'instance_id'])
        :type label: string or None
        :param label: The label name to use for the dataframe header.
            This can be the name of a resource label or metric label
            (e.g., ``"instance_name"``), or the string ``"resource_type"``.
        :type labels: list of strings, or None
        :param labels: A list or tuple of label names to use for the dataframe
            header. If more than one label name is provided, the resulting
            dataframe will have a multi-level column header. Providing values
            for both ``label`` and ``labels`` is an error.
        :rtype: :class:`pandas.DataFrame`
        :returns: A dataframe where each column represents one time series.
        """
        return _build_dataframe(self, label, labels)  # pragma: NO COVER
    def copy(self):
        """Copy the query object.
        :rtype: :class:`Query`
        :returns: The new query object.
        """
        # Using copy.deepcopy() would be appropriate, except that we want
        # to copy self._client only as a reference.
        new_query = copy.copy(self)
        new_query._filter = copy.copy(self._filter)
        return new_query
class _Filter(object):
    """Accumulates filter criteria and renders them as one filter string."""
    def __init__(self, metric_type):
        # Each attribute holds one optional clause of the final conjunction.
        self.metric_type = metric_type
        self.group_id = None
        self.projects = ()
        self.resource_label_filter = None
        self.metric_label_filter = None
    def select_resources(self, *args, **kwargs):
        """Record resource-label criteria.
        See :meth:`Query.select_resources`.
        """
        self.resource_label_filter = _build_label_filter(
            'resource', *args, **kwargs)
    def select_metrics(self, *args, **kwargs):
        """Record metric-label criteria.
        See :meth:`Query.select_metrics`.
        """
        self.metric_label_filter = _build_label_filter(
            'metric', *args, **kwargs)
    def __str__(self):
        clauses = ['metric.type = "{type}"'.format(type=self.metric_type)]
        if self.group_id is not None:
            clauses.append('group.id = "{id}"'.format(id=self.group_id))
        if self.projects:
            project_terms = ('project = "{project}"'.format(project=project)
                            for project in self.projects)
            clauses.append(' OR '.join(project_terms))
        for label_clause in (self.resource_label_filter,
                             self.metric_label_filter):
            if label_clause:
                clauses.append(label_clause)
        # Parentheses are never actually required, because OR binds more
        # tightly than AND in the Monitoring API's filter syntax.
        return ' AND '.join(clauses)
def _build_label_filter(category, *args, **kwargs):
    """Construct a filter string to filter on metric or resource labels.
    :type category: string
    :param category: The label category: ``"metric"`` or ``"resource"``.
    :type args: tuple
    :param args: Raw filter expression strings included unchanged.
    :type kwargs: dict
    :param kwargs: Label filters. ``<label>=<value>`` tests equality, and a
        ``"_prefix"``/``"_suffix"`` ending on the keyword selects
        ``starts_with``/``ends_with`` matching instead. Entries whose value
        is ``None`` are skipped.
    :rtype: string
    :returns: The filter terms, sorted and joined with ``" AND "``.
    """
    terms = list(args)
    # dict.items() replaces the former six.iteritems() call; it behaves
    # identically on all supported Python versions and drops the
    # unnecessary third-party dependency for this function.
    for key, value in kwargs.items():
        if value is None:
            continue
        suffix = None
        if key.endswith('_prefix') or key.endswith('_suffix'):
            key, suffix = key.rsplit('_', 1)
        if category == 'resource' and key == 'resource_type':
            # "resource_type" is a pseudo-label mapping to "resource.type".
            key = 'resource.type'
        else:
            key = '.'.join((category, 'label', key))
        if suffix == 'prefix':
            term = '{key} = starts_with("{value}")'
        elif suffix == 'suffix':
            term = '{key} = ends_with("{value}")'
        else:
            term = '{key} = "{value}"'
        terms.append(term.format(key=key, value=value))
    # Sorting makes the output deterministic regardless of dict order.
    return ' AND '.join(sorted(terms))
def _format_timestamp(timestamp):
    """Serialize a datetime into the RFC 3339 "Z" form used by the API.
    :type timestamp: :class:`datetime.datetime`
    :param timestamp: A naive or timezone-aware datetime object.
    :rtype: string
    :returns: The formatted timestamp. For example:
        ``"2016-02-17T19:18:01.763000Z"``
    """
    naive = timestamp.replace(tzinfo=None)
    if timestamp.tzinfo is not None:
        # Normalize aware datetimes to naive UTC so that isoformat()
        # emits no numeric offset; the trailing "Z" marks UTC.
        naive = naive - timestamp.utcoffset()
    return '{}Z'.format(naive.isoformat())

View file

@ -0,0 +1,183 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Monitored Resource Descriptors for the `Google Monitoring API (V3)`_.
.. _Google Monitoring API (V3):
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.monitoredResourceDescriptors
"""
import collections
from gcloud.monitoring.label import LabelDescriptor
class ResourceDescriptor(object):
    """Schema description for one monitored resource type.
    :type name: string
    :param name:
        The "resource name" of the monitored resource descriptor:
        ``"projects/<project_id>/monitoredResourceDescriptors/<type>"``
    :type type_: string
    :param type_:
        The monitored resource type. For example: ``"gce_instance"``
    :type display_name: string
    :param display_name:
        A concise name that might be displayed in user interfaces.
    :type description: string
    :param description:
        A detailed description that might be used in documentation.
    :type labels: list of :class:`~gcloud.monitoring.label.LabelDescriptor`
    :param labels:
        A sequence of label descriptors specifying the labels used
        to identify a specific instance of this monitored resource.
    """
    def __init__(self, name, type_, display_name, description, labels):
        self.name = name
        self.type = type_
        self.display_name = display_name
        self.description = description
        self.labels = labels
    @classmethod
    def _fetch(cls, client, resource_type):
        """Retrieve a single monitored resource descriptor by type name.
        :type client: :class:`gcloud.monitoring.client.Client`
        :param client: The client to use.
        :type resource_type: string
        :param resource_type: The resource type name.
        :rtype: :class:`ResourceDescriptor`
        :returns: The resource descriptor instance.
        :raises: :class:`gcloud.exceptions.NotFound` if the resource
            descriptor is not found.
        """
        path = ('/projects/{project}/monitoredResourceDescriptors/{type}'
                .format(project=client.project,
                        type=resource_type))
        return cls._from_dict(
            client.connection.api_request(method='GET', path=path))
    @classmethod
    def _list(cls, client, filter_string=None):
        """Fetch every monitored resource descriptor visible to the project.
        :type client: :class:`gcloud.monitoring.client.Client`
        :param client: The client to use.
        :type filter_string: string or None
        :param filter_string:
            An optional filter expression describing the resource descriptors
            to be returned. See the `filter documentation`_.
        :rtype: list of :class:`ResourceDescriptor`
        :returns: A list of resource descriptor instances.
        .. _filter documentation:
            https://cloud.google.com/monitoring/api/v3/filters
        """
        path = '/projects/{project}/monitoredResourceDescriptors/'.format(
            project=client.project)
        descriptors = []
        token = None
        while True:
            # Rebuild the query parameters for each page request.
            params = {}
            if filter_string is not None:
                params['filter'] = filter_string
            if token is not None:
                params['pageToken'] = token
            response = client.connection.api_request(
                method='GET', path=path, query_params=params)
            descriptors.extend(
                cls._from_dict(info)
                for info in response.get('resourceDescriptors', ()))
            token = response.get('nextPageToken')
            if not token:
                break
        return descriptors
    @classmethod
    def _from_dict(cls, info):
        """Build a resource descriptor from its JSON wire-format ``dict``.
        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.
        :rtype: :class:`ResourceDescriptor`
        :returns: A resource descriptor.
        """
        label_descriptors = tuple(LabelDescriptor._from_dict(label)
                                  for label in info.get('labels', ()))
        return cls(
            name=info['name'],
            type_=info['type'],
            display_name=info.get('displayName', ''),
            description=info.get('description', ''),
            labels=label_descriptors,
        )
    def __repr__(self):
        return (
            '<ResourceDescriptor:\n'
            ' name={name!r},\n'
            ' type={type!r},\n'
            ' labels={labels!r},\n'
            ' display_name={display_name!r},\n'
            ' description={description!r}>'
        ).format(**self.__dict__)
class Resource(collections.namedtuple('Resource', 'type labels')):
    """A monitored resource identified by its type and a full set of labels.
    :type type: string
    :param type: The resource type name.
    :type labels: dict
    :param labels: A mapping from label names to values for all labels
        enumerated in the associated :class:`ResourceDescriptor`.
    """
    # No per-instance __dict__; all state lives in the namedtuple fields.
    __slots__ = ()
    @classmethod
    def _from_dict(cls, info):
        """Build a resource object from its JSON wire-format ``dict``.
        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.
        :rtype: :class:`Resource`
        :returns: A resource object.
        """
        # "labels" may be absent on the wire; default to an empty mapping.
        return cls(type=info['type'], labels=info.get('labels', {}))

View file

@ -0,0 +1,226 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pandas is an optional dependency: the dataframe tests below are skipped
# when it is not installed (see the skipUnless decorator further down).
try:
    import pandas
except ImportError:
    HAVE_PANDAS = False
else:
    HAVE_PANDAS = True  # pragma: NO COVER
import unittest2
# Shared fixture data for the dataframe tests in this module.
PROJECT = 'my-project'
INSTANCE_NAMES = ['instance-1', 'instance-2']
INSTANCE_ZONES = ['us-east1-a', 'us-east1-b']
INSTANCE_IDS = ['1234567890123456789', '9876543210987654321']
METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization'
# One metric-label dict and one resource-label dict per fixture instance.
METRIC_LABELS = list({'instance_name': name} for name in INSTANCE_NAMES)
RESOURCE_TYPE = 'gce_instance'
RESOURCE_LABELS = list({
    'project_id': PROJECT,
    'zone': zone,
    'instance_id': instance_id,
} for zone, instance_id in zip(INSTANCE_ZONES, INSTANCE_IDS))
METRIC_KIND = 'GAUGE'
VALUE_TYPE = 'DOUBLE'
TIMESTAMPS = [
    '2016-04-06T22:05:00.042Z',
    '2016-04-06T22:05:01.042Z',
    '2016-04-06T22:05:02.042Z',
]
# Expected dataframe shape: (rows = timestamps, columns = instances).
DIMENSIONS = len(TIMESTAMPS), len(INSTANCE_NAMES)
# One constant value per instance, repeated on every timestamp row.
VALUES = list(0.1 * i for i in range(DIMENSIONS[1]))
ARRAY = [VALUES] * DIMENSIONS[0]
def parse_timestamps():  # pragma: NO COVER
    """Return the TIMESTAMPS fixtures as naive datetime objects."""
    import datetime
    from gcloud._helpers import _RFC3339_MICROS
    parsed = []
    for stamp in TIMESTAMPS:
        parsed.append(datetime.datetime.strptime(stamp, _RFC3339_MICROS))
    return parsed
def generate_query_results():  # pragma: NO COVER
    """Yield one fabricated TimeSeries per fixture instance."""
    from gcloud.monitoring.metric import Metric
    from gcloud.monitoring.resource import Resource
    from gcloud.monitoring.timeseries import Point
    from gcloud.monitoring.timeseries import TimeSeries
    def make_point(timestamp, value):
        # Gauge points use the same timestamp for both interval endpoints.
        return Point(
            start_time=timestamp,
            end_time=timestamp,
            value=value,
        )
    fixture_rows = zip(METRIC_LABELS, RESOURCE_LABELS, VALUES)
    for metric_labels, resource_labels, value in fixture_rows:
        yield TimeSeries(
            metric=Metric(type=METRIC_TYPE, labels=metric_labels),
            resource=Resource(type=RESOURCE_TYPE, labels=resource_labels),
            metric_kind=METRIC_KIND,
            value_type=VALUE_TYPE,
            points=[make_point(stamp, value) for stamp in TIMESTAMPS],
        )
@unittest2.skipUnless(HAVE_PANDAS, 'No pandas')
class Test__build_dataframe(unittest2.TestCase):  # pragma: NO COVER
    # Exercises gcloud.monitoring._dataframe._build_dataframe against the
    # synthetic query results produced by generate_query_results() above.
    def _callFUT(self, *args, **kwargs):
        # Import inside the helper so the module is only loaded when
        # pandas is actually available.
        from gcloud.monitoring._dataframe import _build_dataframe
        return _build_dataframe(*args, **kwargs)
    def test_both_label_and_labels_illegal(self):
        # "label" and "labels" are mutually exclusive arguments.
        with self.assertRaises(ValueError):
            self._callFUT([], label='instance_name', labels=['zone'])
    def test_empty_labels_illegal(self):
        # An empty "labels" list is rejected rather than treated as default.
        with self.assertRaises(ValueError):
            self._callFUT([], labels=[])
    def test_simple_label(self):
        iterable = generate_query_results()
        dataframe = self._callFUT(iterable, label='instance_name')
        self.assertEqual(dataframe.shape, DIMENSIONS)
        self.assertEqual(dataframe.values.tolist(), ARRAY)
        self.assertEqual(list(dataframe.columns), INSTANCE_NAMES)
        self.assertIsNone(dataframe.columns.name)
        self.assertEqual(list(dataframe.index), parse_timestamps())
        self.assertIsNone(dataframe.index.name)
    def test_multiple_labels(self):
        NAMES = ['resource_type', 'instance_id']
        iterable = generate_query_results()
        dataframe = self._callFUT(iterable, labels=NAMES)
        self.assertEqual(dataframe.shape, DIMENSIONS)
        self.assertEqual(dataframe.values.tolist(), ARRAY)
        # Multi-level header: one (resource_type, instance_id) tuple
        # per column.
        expected_headers = [(RESOURCE_TYPE, instance_id)
                            for instance_id in INSTANCE_IDS]
        self.assertEqual(list(dataframe.columns), expected_headers)
        self.assertEqual(dataframe.columns.names, NAMES)
        self.assertIsNone(dataframe.columns.name)
        self.assertEqual(list(dataframe.index), parse_timestamps())
        self.assertIsNone(dataframe.index.name)
    def test_multiple_labels_with_just_one(self):
        NAME = 'instance_id'
        NAMES = [NAME]
        iterable = generate_query_results()
        dataframe = self._callFUT(iterable, labels=NAMES)
        self.assertEqual(dataframe.shape, DIMENSIONS)
        self.assertEqual(dataframe.values.tolist(), ARRAY)
        self.assertEqual(list(dataframe.columns), INSTANCE_IDS)
        self.assertEqual(dataframe.columns.names, NAMES)
        # A single-entry label list also sets the scalar column name.
        self.assertEqual(dataframe.columns.name, NAME)
        self.assertEqual(list(dataframe.index), parse_timestamps())
        self.assertIsNone(dataframe.index.name)
    def test_smart_labels(self):
        NAMES = ['resource_type', 'project_id',
                 'zone', 'instance_id',
                 'instance_name']
        iterable = generate_query_results()
        dataframe = self._callFUT(iterable)
        self.assertEqual(dataframe.shape, DIMENSIONS)
        self.assertEqual(dataframe.values.tolist(), ARRAY)
        expected_headers = [
            (RESOURCE_TYPE, PROJECT, zone, instance_id, instance_name)
            for zone, instance_id, instance_name
            in zip(INSTANCE_ZONES, INSTANCE_IDS, INSTANCE_NAMES)]
        self.assertEqual(list(dataframe.columns), expected_headers)
        self.assertEqual(dataframe.columns.names, NAMES)
        self.assertIsNone(dataframe.columns.name)
        self.assertEqual(list(dataframe.index), parse_timestamps())
        self.assertIsNone(dataframe.index.name)
    def test_empty_table_simple_label(self):
        dataframe = self._callFUT([], label='instance_name')
        self.assertEqual(dataframe.shape, (0, 0))
        self.assertIsNone(dataframe.columns.name)
        self.assertIsNone(dataframe.index.name)
        # Even an empty result keeps a time-typed index.
        self.assertIsInstance(dataframe.index, pandas.DatetimeIndex)
    def test_empty_table_multiple_labels(self):
        NAMES = ['resource_type', 'instance_id']
        dataframe = self._callFUT([], labels=NAMES)
        self.assertEqual(dataframe.shape, (0, 0))
        self.assertEqual(dataframe.columns.names, NAMES)
        self.assertIsNone(dataframe.columns.name)
        self.assertIsNone(dataframe.index.name)
        self.assertIsInstance(dataframe.index, pandas.DatetimeIndex)
    def test_empty_table_multiple_labels_with_just_one(self):
        NAME = 'instance_id'
        NAMES = [NAME]
        dataframe = self._callFUT([], labels=NAMES)
        self.assertEqual(dataframe.shape, (0, 0))
        self.assertEqual(dataframe.columns.names, NAMES)
        self.assertEqual(dataframe.columns.name, NAME)
        self.assertIsNone(dataframe.index.name)
        self.assertIsInstance(dataframe.index, pandas.DatetimeIndex)
    def test_empty_table_smart_labels(self):
        NAME = 'resource_type'
        NAMES = [NAME]
        dataframe = self._callFUT([])
        self.assertEqual(dataframe.shape, (0, 0))
        self.assertEqual(dataframe.columns.names, NAMES)
        self.assertEqual(dataframe.columns.name, NAME)
        self.assertIsNone(dataframe.index.name)
        self.assertIsInstance(dataframe.index, pandas.DatetimeIndex)
class Test__sorted_resource_labels(unittest2.TestCase):
    """Tests for the label-ordering helper in the dataframe module."""
    def _callFUT(self, labels):
        from gcloud.monitoring._dataframe import _sorted_resource_labels
        return _sorted_resource_labels(labels)
    def test_empty(self):
        # An empty input sorts to an empty list.
        self.assertEqual(self._callFUT([]), [])
    def test_sorted(self):
        from gcloud.monitoring._dataframe import TOP_RESOURCE_LABELS
        already_ordered = TOP_RESOURCE_LABELS + ('other-1', 'other-2')
        # A correctly ordered sequence passes through unchanged.
        self.assertSequenceEqual(
            self._callFUT(already_ordered), already_ordered)
    def test_reversed(self):
        from gcloud.monitoring._dataframe import TOP_RESOURCE_LABELS
        expected = TOP_RESOURCE_LABELS + ('other-1', 'other-2')
        shuffled = list(reversed(expected))
        # The well-known labels are restored to the front, in order.
        self.assertSequenceEqual(self._callFUT(shuffled), expected)

View file

@ -0,0 +1,358 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
PROJECT = 'my-project'  # Project ID shared by the test cases in this module.
class TestClient(unittest2.TestCase):
def _getTargetClass(self):
from gcloud.monitoring.client import Client
return Client
def _makeOne(self, *args, **kwargs):
return self._getTargetClass()(*args, **kwargs)
def test_query(self):
import datetime
from gcloud.exceptions import NotFound
START_TIME = datetime.datetime(2016, 4, 6, 22, 5, 0)
END_TIME = datetime.datetime(2016, 4, 6, 22, 10, 0)
MINUTES = 5
METRIC_TYPE = 'compute.googleapis.com/instance/cpu/utilization'
METRIC_LABELS = {'instance_name': 'instance-1'}
METRIC_LABELS2 = {'instance_name': 'instance-2'}
RESOURCE_TYPE = 'gce_instance'
RESOURCE_LABELS = {
'project_id': 'my-project',
'zone': 'us-east1-a',
'instance_id': '1234567890123456789',
}
RESOURCE_LABELS2 = {
'project_id': 'my-project',
'zone': 'us-east1-b',
'instance_id': '9876543210987654321',
}
METRIC_KIND = 'GAUGE'
VALUE_TYPE = 'DOUBLE'
TS1 = '2016-04-06T22:05:00.042Z'
TS2 = '2016-04-06T22:05:01.042Z'
TS3 = '2016-04-06T22:05:02.042Z'
VAL1 = 0.1
VAL2 = 0.2
def P(timestamp, value):
return {
'interval': {'startTime': timestamp, 'endTime': timestamp},
'value': {'doubleValue': value},
}
SERIES1 = {
'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
'metricKind': METRIC_KIND,
'valueType': VALUE_TYPE,
'points': [P(TS3, VAL1), P(TS2, VAL1), P(TS1, VAL1)],
}
SERIES2 = {
'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS2},
'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS2},
'metricKind': METRIC_KIND,
'valueType': VALUE_TYPE,
'points': [P(TS3, VAL2), P(TS2, VAL2), P(TS1, VAL2)],
}
RESPONSE = {'timeSeries': [SERIES1, SERIES2]}
client = self._makeOne(project=PROJECT, credentials=_Credentials())
connection = client.connection = _Connection(RESPONSE)
# A simple query. In practice, it can be very convenient to let the
# end time default to the start of the current minute.
query = client.query(METRIC_TYPE, end_time=END_TIME, minutes=MINUTES)
response = list(query)
self.assertEqual(len(response), 2)
series1, series2 = response
self.assertEqual(series1.metric.type, METRIC_TYPE)
self.assertEqual(series2.metric.type, METRIC_TYPE)
self.assertEqual(series1.metric.labels, METRIC_LABELS)
self.assertEqual(series2.metric.labels, METRIC_LABELS2)
self.assertEqual(series1.resource.type, RESOURCE_TYPE)
self.assertEqual(series2.resource.type, RESOURCE_TYPE)
self.assertEqual(series1.resource.labels, RESOURCE_LABELS)
self.assertEqual(series2.resource.labels, RESOURCE_LABELS2)
self.assertEqual(series1.metric_kind, METRIC_KIND)
self.assertEqual(series2.metric_kind, METRIC_KIND)
self.assertEqual(series1.value_type, VALUE_TYPE)
self.assertEqual(series2.value_type, VALUE_TYPE)
self.assertEqual([p.value for p in series1.points], [VAL1, VAL1, VAL1])
self.assertEqual([p.value for p in series2.points], [VAL2, VAL2, VAL2])
self.assertEqual([p.end_time for p in series1.points], [TS1, TS2, TS3])
self.assertEqual([p.end_time for p in series2.points], [TS1, TS2, TS3])
expected_request = {
'method': 'GET',
'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
'query_params': [
('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
('interval.endTime', END_TIME.isoformat() + 'Z'),
('interval.startTime', START_TIME.isoformat() + 'Z'),
],
}
request, = connection._requested
self.assertEqual(request, expected_request)
with self.assertRaises(NotFound):
list(query)
def test_metric_descriptor_factory(self):
TYPE = 'custom.googleapis.com/my_metric'
METRIC_KIND = 'GAUGE'
VALUE_TYPE = 'DOUBLE'
DESCRIPTION = 'This is my metric.'
client = self._makeOne(project=PROJECT, credentials=_Credentials())
client.connection = _Connection() # For safety's sake.
descriptor = client.metric_descriptor(TYPE,
metric_kind=METRIC_KIND,
value_type=VALUE_TYPE,
description=DESCRIPTION)
self.assertIs(descriptor.client, client)
self.assertIsNone(descriptor.name)
self.assertEqual(descriptor.type, TYPE)
self.assertEqual(descriptor.labels, ())
self.assertEqual(descriptor.metric_kind, METRIC_KIND)
self.assertEqual(descriptor.value_type, VALUE_TYPE)
self.assertEqual(descriptor.unit, '')
self.assertEqual(descriptor.description, DESCRIPTION)
self.assertEqual(descriptor.display_name, '')
def test_fetch_metric_descriptor(self):
TYPE = 'custom.googleapis.com/my_metric'
NAME = 'projects/{project}/metricDescriptors/{type}'.format(
project=PROJECT, type=TYPE)
DESCRIPTION = 'This is my metric.'
METRIC_DESCRIPTOR = {
'name': NAME,
'type': TYPE,
'metricKind': 'GAUGE',
'valueType': 'DOUBLE',
'description': DESCRIPTION,
}
# This test is identical to TestMetricDescriptor.test_fetch()
# except for the following three lines.
client = self._makeOne(project=PROJECT, credentials=_Credentials())
connection = client.connection = _Connection(METRIC_DESCRIPTOR)
descriptor = client.fetch_metric_descriptor(TYPE)
self.assertIs(descriptor.client, client)
self.assertEqual(descriptor.name, NAME)
self.assertEqual(descriptor.type, TYPE)
self.assertEqual(descriptor.description, DESCRIPTION)
request, = connection._requested
expected_request = {'method': 'GET', 'path': '/' + NAME}
self.assertEqual(request, expected_request)
def test_list_metric_descriptors(self):
PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
TYPE1 = 'custom.googleapis.com/my_metric_1'
DESCRIPTION1 = 'This is my first metric.'
NAME1 = PATH + TYPE1
METRIC_DESCRIPTOR1 = {
'name': NAME1,
'type': TYPE1,
'metricKind': 'GAUGE',
'valueType': 'DOUBLE',
'description': DESCRIPTION1,
}
TYPE2 = 'custom.googleapis.com/my_metric_2'
DESCRIPTION2 = 'This is my second metric.'
NAME2 = PATH + TYPE2
METRIC_DESCRIPTOR2 = {
'name': NAME2,
'type': TYPE2,
'metricKind': 'GAUGE',
'valueType': 'DOUBLE',
'description': DESCRIPTION2,
}
RESPONSE = {
'metricDescriptors': [METRIC_DESCRIPTOR1, METRIC_DESCRIPTOR2],
}
# This test is identical to TestMetricDescriptor.test_list()
# except for the following three lines.
client = self._makeOne(project=PROJECT, credentials=_Credentials())
connection = client.connection = _Connection(RESPONSE)
descriptors = client.list_metric_descriptors()
self.assertEqual(len(descriptors), 2)
descriptor1, descriptor2 = descriptors
self.assertIs(descriptor1.client, client)
self.assertEqual(descriptor1.name, NAME1)
self.assertEqual(descriptor1.type, TYPE1)
self.assertEqual(descriptor1.description, DESCRIPTION1)
self.assertIs(descriptor2.client, client)
self.assertEqual(descriptor2.name, NAME2)
self.assertEqual(descriptor2.type, TYPE2)
self.assertEqual(descriptor2.description, DESCRIPTION2)
request, = connection._requested
expected_request = {'method': 'GET', 'path': '/' + PATH,
'query_params': {}}
self.assertEqual(request, expected_request)
def test_fetch_resource_descriptor(self):
TYPE = 'gce_instance'
NAME = 'projects/{project}/monitoredResourceDescriptors/{type}'.format(
project=PROJECT, type=TYPE)
DISPLAY_NAME = 'GCE Instance'
DESCRIPTION = 'A VM instance hosted in Google Compute Engine.'
LABEL1 = {'key': 'project_id', 'valueType': 'STRING',
'description': 'The ID of the GCP project...'}
LABEL2 = {'key': 'instance_id', 'valueType': 'STRING',
'description': 'The VM instance identifier...'}
LABEL3 = {'key': 'zone', 'valueType': 'STRING',
'description': 'The GCE zone...'}
RESOURCE_DESCRIPTOR = {
'name': NAME,
'type': TYPE,
'displayName': DISPLAY_NAME,
'description': DESCRIPTION,
'labels': [LABEL1, LABEL2, LABEL3],
}
# This test is identical to TestResourceDescriptor.test_fetch()
# except for the following three lines.
client = self._makeOne(project=PROJECT, credentials=_Credentials())
connection = client.connection = _Connection(RESOURCE_DESCRIPTOR)
descriptor = client.fetch_resource_descriptor(TYPE)
self.assertEqual(descriptor.name, NAME)
self.assertEqual(descriptor.type, TYPE)
self.assertEqual(descriptor.display_name, DISPLAY_NAME)
self.assertEqual(descriptor.description, DESCRIPTION)
self.assertEqual(len(descriptor.labels), 3)
label1, label2, label3 = descriptor.labels
self.assertEqual(label1.key, LABEL1['key'])
self.assertEqual(label2.key, LABEL2['key'])
self.assertEqual(label3.key, LABEL3['key'])
request, = connection._requested
expected_request = {'method': 'GET', 'path': '/' + NAME}
self.assertEqual(request, expected_request)
def test_list_resource_descriptors(self):
PATH = 'projects/{project}/monitoredResourceDescriptors/'.format(
project=PROJECT)
TYPE1 = 'custom.googleapis.com/resource-1'
DESCRIPTION1 = 'This is the first resource.'
NAME1 = PATH + TYPE1
RESOURCE_DESCRIPTOR1 = {
'name': NAME1,
'type': TYPE1,
'description': DESCRIPTION1,
}
TYPE2 = 'custom.googleapis.com/resource-2'
DESCRIPTION2 = 'This is the second resource.'
NAME2 = PATH + TYPE2
RESOURCE_DESCRIPTOR2 = {
'name': NAME2,
'type': TYPE2,
'description': DESCRIPTION2,
}
RESPONSE = {
'resourceDescriptors':
[RESOURCE_DESCRIPTOR1, RESOURCE_DESCRIPTOR2],
}
# This test is identical to TestResourceDescriptor.test_list()
# except for the following three lines.
client = self._makeOne(project=PROJECT, credentials=_Credentials())
connection = client.connection = _Connection(RESPONSE)
descriptors = client.list_resource_descriptors()
self.assertEqual(len(descriptors), 2)
descriptor1, descriptor2 = descriptors
self.assertEqual(descriptor1.name, NAME1)
self.assertEqual(descriptor1.type, TYPE1)
self.assertEqual(descriptor1.description, DESCRIPTION1)
self.assertEqual(descriptor2.name, NAME2)
self.assertEqual(descriptor2.type, TYPE2)
self.assertEqual(descriptor2.description, DESCRIPTION2)
request, = connection._requested
expected_request = {'method': 'GET', 'path': '/' + PATH,
'query_params': {}}
self.assertEqual(request, expected_request)
class _Credentials(object):
_scopes = None
@staticmethod
def create_scoped_required():
return True
def create_scoped(self, scope):
self._scopes = scope
return self
class _Connection(object):
def __init__(self, *responses):
self._responses = list(responses)
self._requested = []
def api_request(self, **kwargs):
from gcloud.exceptions import NotFound
self._requested.append(kwargs)
try:
return self._responses.pop(0)
except IndexError:
raise NotFound('miss')

View file

@ -0,0 +1,44 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestConnection(unittest2.TestCase):
    """Tests for gcloud.monitoring.connection.Connection."""

    def _getTargetClass(self):
        # Deferred import: resolve the class under test at call time.
        from gcloud.monitoring.connection import Connection
        return Connection

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        """The constructor scopes the credentials with Connection.SCOPE."""
        klass = self._getTargetClass()
        credentials = _Credentials()
        connection = self._makeOne(credentials)
        self.assertEqual(connection.credentials._scopes, klass.SCOPE)
class _Credentials(object):
_scopes = None
@staticmethod
def create_scoped_required():
return True
def create_scoped(self, scope):
self._scopes = scope
return self

View file

@ -0,0 +1,112 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestLabelValueType(unittest2.TestCase):
    """Tests for the LabelValueType enumeration-like class."""

    def _getTargetClass(self):
        from gcloud.monitoring.label import LabelValueType
        return LabelValueType

    def test_one(self):
        """At least one known value type is exposed."""
        self.assertTrue(hasattr(self._getTargetClass(), 'STRING'))

    def test_names(self):
        """Every public attribute is a string equal to its own name."""
        klass = self._getTargetClass()
        public = [name for name in klass.__dict__ if not name.startswith('_')]
        for name in public:
            self.assertEqual(getattr(klass, name), name)
class TestLabelDescriptor(unittest2.TestCase):
    """Tests for gcloud.monitoring.label.LabelDescriptor."""

    def _getTargetClass(self):
        from gcloud.monitoring.label import LabelDescriptor
        return LabelDescriptor

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        """All constructor arguments land on the matching attributes."""
        key = 'response_code'
        value_type = 'INT64'
        description = 'HTTP status code for the request.'
        descriptor = self._makeOne(key=key, value_type=value_type,
                                   description=description)
        self.assertEqual(descriptor.key, key)
        self.assertEqual(descriptor.value_type, value_type)
        self.assertEqual(descriptor.description, description)

    def test_constructor_defaults(self):
        """Omitted arguments default to STRING type and empty description."""
        key = 'response_code'
        descriptor = self._makeOne(key=key)
        self.assertEqual(descriptor.key, key)
        self.assertEqual(descriptor.value_type, 'STRING')
        self.assertEqual(descriptor.description, '')

    def test_from_dict(self):
        """_from_dict() maps the JSON camelCase keys onto attributes."""
        key = 'response_code'
        value_type = 'INT64'
        description = 'HTTP status code for the request.'
        info = {
            'key': key,
            'valueType': value_type,
            'description': description,
        }
        descriptor = self._getTargetClass()._from_dict(info)
        self.assertEqual(descriptor.key, key)
        self.assertEqual(descriptor.value_type, value_type)
        self.assertEqual(descriptor.description, description)

    def test_from_dict_defaults(self):
        """_from_dict() applies the same defaults as the constructor."""
        key = 'response_code'
        descriptor = self._getTargetClass()._from_dict({'key': key})
        self.assertEqual(descriptor.key, key)
        self.assertEqual(descriptor.value_type, 'STRING')
        self.assertEqual(descriptor.description, '')

    def test_to_dict(self):
        """_to_dict() emits key, valueType, and description."""
        key = 'response_code'
        value_type = 'INT64'
        description = 'HTTP status code for the request.'
        descriptor = self._makeOne(key=key, value_type=value_type,
                                   description=description)
        expected = {
            'key': key,
            'valueType': value_type,
            'description': description,
        }
        self.assertEqual(descriptor._to_dict(), expected)

    def test_to_dict_defaults(self):
        """An empty description is omitted from the serialized form."""
        key = 'response_code'
        descriptor = self._makeOne(key=key)
        self.assertEqual(descriptor._to_dict(),
                         {'key': key, 'valueType': 'STRING'})

    def test_equality(self):
        """Descriptors with identical fields compare equal, not unequal."""
        kwargs = {
            'key': 'response_code',
            'value_type': 'INT64',
            'description': 'HTTP status code for the request.',
        }
        self.assertTrue(self._makeOne(**kwargs) == self._makeOne(**kwargs))
        self.assertFalse(self._makeOne(**kwargs) != self._makeOne(**kwargs))

View file

@ -0,0 +1,553 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestMetricKind(unittest2.TestCase):
    """Tests for the MetricKind enumeration-like class."""

    def _getTargetClass(self):
        from gcloud.monitoring.metric import MetricKind
        return MetricKind

    def test_one(self):
        """At least one known metric kind is exposed."""
        self.assertTrue(hasattr(self._getTargetClass(), 'GAUGE'))

    def test_names(self):
        """Every public attribute is a string equal to its own name."""
        klass = self._getTargetClass()
        public = [name for name in klass.__dict__ if not name.startswith('_')]
        for name in public:
            self.assertEqual(getattr(klass, name), name)
class TestValueType(unittest2.TestCase):
    """Tests for the ValueType enumeration-like class."""

    def _getTargetClass(self):
        from gcloud.monitoring.metric import ValueType
        return ValueType

    def test_one(self):
        """At least one known value type is exposed."""
        self.assertTrue(hasattr(self._getTargetClass(), 'DISTRIBUTION'))

    def test_names(self):
        """Every public attribute is a string equal to its own name."""
        klass = self._getTargetClass()
        public = [name for name in klass.__dict__ if not name.startswith('_')]
        for name in public:
            self.assertEqual(getattr(klass, name), name)
class TestMetricDescriptor(unittest2.TestCase):
    """Tests for gcloud.monitoring.metric.MetricDescriptor."""
    def _getTargetClass(self):
        # Deferred import: resolve the class under test at call time.
        from gcloud.monitoring.metric import MetricDescriptor
        return MetricDescriptor
    def _makeOne(self, *args, **kwargs):
        # Construct an instance of the class under test.
        return self._getTargetClass()(*args, **kwargs)
    def test_constructor(self):
        """All constructor arguments land on the matching attributes."""
        from gcloud.monitoring.label import LabelDescriptor
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        LABELS = [
            LabelDescriptor(key='loading', value_type='BOOL',
                            description='Loaded a new instance?'),
            LabelDescriptor(key='response_code', value_type='INT64',
                            description='HTTP status code for the request.'),
        ]
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        UNIT = '{responses}/s'
        DESCRIPTION = 'Delta HTTP response count.'
        DISPLAY_NAME = 'Response count'
        client = object()
        descriptor = self._makeOne(
            client=client,
            name=NAME,
            type_=TYPE,
            labels=LABELS,
            metric_kind=METRIC_KIND,
            value_type=VALUE_TYPE,
            unit=UNIT,
            description=DESCRIPTION,
            display_name=DISPLAY_NAME,
        )
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, LABELS)
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, UNIT)
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(descriptor.display_name, DISPLAY_NAME)
    def test_constructor_defaults(self):
        """Omitted arguments default to empty/unspecified values."""
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        client = object()
        descriptor = self._makeOne(client=client, type_=TYPE)
        self.assertIs(descriptor.client, client)
        self.assertIsNone(descriptor.name)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, ())
        self.assertEqual(descriptor.metric_kind, 'METRIC_KIND_UNSPECIFIED')
        self.assertEqual(descriptor.value_type, 'VALUE_TYPE_UNSPECIFIED')
        self.assertEqual(descriptor.unit, '')
        self.assertEqual(descriptor.description, '')
        self.assertEqual(descriptor.display_name, '')
    def test_from_dict(self):
        """_from_dict() maps the JSON camelCase keys onto attributes."""
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        LABEL1 = {'key': 'loading', 'valueType': 'BOOL',
                  'description': 'Loaded a new instance?'}
        LABEL2 = {'key': 'response_code', 'valueType': 'INT64',
                  'description': 'HTTP status code for the request.'}
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        UNIT = '{responses}/s'
        DESCRIPTION = 'Delta HTTP response count.'
        DISPLAY_NAME = 'Response count'
        info = {
            'name': NAME,
            'type': TYPE,
            'labels': [LABEL1, LABEL2],
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'unit': UNIT,
            'description': DESCRIPTION,
            'displayName': DISPLAY_NAME,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(len(descriptor.labels), 2)
        label1, label2 = descriptor.labels
        self.assertEqual(label1.key, LABEL1['key'])
        self.assertEqual(label2.key, LABEL2['key'])
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, UNIT)
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(descriptor.display_name, DISPLAY_NAME)
    def test_from_dict_defaults(self):
        """_from_dict() applies defaults for keys absent from the mapping."""
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        METRIC_KIND = 'CUMULATIVE'
        VALUE_TYPE = 'DOUBLE'
        info = {
            'name': NAME,
            'type': TYPE,
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, ())
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, '')
        self.assertEqual(descriptor.description, '')
        self.assertEqual(descriptor.display_name, '')
    def test_to_dict(self):
        """_to_dict() round-trips _from_dict() except for 'name'."""
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        LABEL1 = {'key': 'loading', 'valueType': 'BOOL',
                  'description': 'Loaded a new instance?'}
        LABEL2 = {'key': 'response_code', 'valueType': 'INT64',
                  'description': 'HTTP status code for the request.'}
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        UNIT = '{responses}/s'
        DESCRIPTION = 'Delta HTTP response count.'
        DISPLAY_NAME = 'Response count'
        info = {
            'name': NAME,
            'type': TYPE,
            'labels': [LABEL1, LABEL2],
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'unit': UNIT,
            'description': DESCRIPTION,
            'displayName': DISPLAY_NAME,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        # The server-assigned 'name' is not expected in the serialized form.
        del info['name']
        self.assertEqual(descriptor._to_dict(), info)
    def test_to_dict_defaults(self):
        """Serialization of a minimal descriptor omits empty fields."""
        TYPE = 'appengine.googleapis.com/http/server/response_count'
        NAME = 'projects/my-project/metricDescriptors/' + TYPE
        METRIC_KIND = 'DELTA'
        VALUE_TYPE = 'INT64'
        info = {
            'name': NAME,
            'type': TYPE,
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        client = object()
        descriptor = self._getTargetClass()._from_dict(client, info)
        del info['name']
        self.assertEqual(descriptor._to_dict(), info)
    def test_create(self):
        """create() POSTs the descriptor and adopts the returned name."""
        PROJECT = 'my-project'
        TYPE = 'custom.googleapis.com/my_metric'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        NAME = PATH + TYPE
        METRIC_KIND = 'GAUGE'
        VALUE_TYPE = 'DOUBLE'
        DESCRIPTION = 'This is my metric.'
        REQUEST = {
            'type': TYPE,
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'description': DESCRIPTION,
        }
        RESPONSE = dict(REQUEST, name=NAME)
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptor = self._makeOne(
            client=client,
            type_=TYPE,
            metric_kind=METRIC_KIND,
            value_type=VALUE_TYPE,
            description=DESCRIPTION,
        )
        descriptor.create()
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.labels, ())
        self.assertEqual(descriptor.metric_kind, METRIC_KIND)
        self.assertEqual(descriptor.value_type, VALUE_TYPE)
        self.assertEqual(descriptor.unit, '')
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(descriptor.display_name, '')
        request, = connection._requested
        expected_request = {'method': 'POST', 'path': '/' + PATH,
                            'data': REQUEST}
        self.assertEqual(request, expected_request)
    def test_delete(self):
        """delete() issues a DELETE on the descriptor's resource name."""
        PROJECT = 'my-project'
        TYPE = 'custom.googleapis.com/my_metric'
        NAME = 'projects/{project}/metricDescriptors/{type}'.format(
            project=PROJECT, type=TYPE)
        connection = _Connection({})
        client = _Client(project=PROJECT, connection=connection)
        descriptor = self._makeOne(
            client=client,
            type_=TYPE,
            metric_kind='NOTUSED',
            value_type='NOTUSED',
        )
        descriptor.delete()
        request, = connection._requested
        expected_request = {'method': 'DELETE', 'path': '/' + NAME}
        self.assertEqual(request, expected_request)
    def test_fetch(self):
        """_fetch() GETs a single descriptor by type."""
        PROJECT = 'my-project'
        TYPE = 'custom.googleapis.com/my_metric'
        NAME = 'projects/{project}/metricDescriptors/{type}'.format(
            project=PROJECT, type=TYPE)
        DESCRIPTION = 'This is my metric.'
        METRIC_DESCRIPTOR = {
            'name': NAME,
            'type': TYPE,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION,
        }
        connection = _Connection(METRIC_DESCRIPTOR)
        client = _Client(project=PROJECT, connection=connection)
        descriptor = self._getTargetClass()._fetch(client, TYPE)
        self.assertIs(descriptor.client, client)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.description, DESCRIPTION)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + NAME}
        self.assertEqual(request, expected_request)
    def test_list(self):
        """_list() returns every descriptor from a single-page response."""
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        TYPE1 = 'custom.googleapis.com/my_metric_1'
        DESCRIPTION1 = 'This is my first metric.'
        NAME1 = PATH + TYPE1
        METRIC_DESCRIPTOR1 = {
            'name': NAME1,
            'type': TYPE1,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION1,
        }
        TYPE2 = 'custom.googleapis.com/my_metric_2'
        DESCRIPTION2 = 'This is my second metric.'
        NAME2 = PATH + TYPE2
        METRIC_DESCRIPTOR2 = {
            'name': NAME2,
            'type': TYPE2,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION2,
        }
        RESPONSE = {
            'metricDescriptors': [METRIC_DESCRIPTOR1, METRIC_DESCRIPTOR2],
        }
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client)
        self.assertEqual(len(descriptors), 2)
        descriptor1, descriptor2 = descriptors
        self.assertIs(descriptor1.client, client)
        self.assertEqual(descriptor1.name, NAME1)
        self.assertEqual(descriptor1.type, TYPE1)
        self.assertEqual(descriptor1.description, DESCRIPTION1)
        self.assertIs(descriptor2.client, client)
        self.assertEqual(descriptor2.name, NAME2)
        self.assertEqual(descriptor2.type, TYPE2)
        self.assertEqual(descriptor2.description, DESCRIPTION2)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {}}
        self.assertEqual(request, expected_request)
    def test_list_paged(self):
        """_list() follows nextPageToken to fetch all pages."""
        from gcloud.exceptions import NotFound
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        TYPE1 = 'custom.googleapis.com/my_metric_1'
        DESCRIPTION1 = 'This is my first metric.'
        NAME1 = PATH + TYPE1
        METRIC_DESCRIPTOR1 = {
            'name': NAME1,
            'type': TYPE1,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION1,
        }
        TYPE2 = 'custom.googleapis.com/my_metric_2'
        DESCRIPTION2 = 'This is my second metric.'
        NAME2 = PATH + TYPE2
        METRIC_DESCRIPTOR2 = {
            'name': NAME2,
            'type': TYPE2,
            'metricKind': 'GAUGE',
            'valueType': 'DOUBLE',
            'description': DESCRIPTION2,
        }
        TOKEN = 'second-page-please'
        RESPONSE1 = {
            'metricDescriptors': [METRIC_DESCRIPTOR1],
            'nextPageToken': TOKEN,
        }
        RESPONSE2 = {
            'metricDescriptors': [METRIC_DESCRIPTOR2],
        }
        connection = _Connection(RESPONSE1, RESPONSE2)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client)
        self.assertEqual(len(descriptors), 2)
        descriptor1, descriptor2 = descriptors
        self.assertEqual(descriptor1.name, NAME1)
        self.assertEqual(descriptor1.type, TYPE1)
        self.assertEqual(descriptor1.description, DESCRIPTION1)
        self.assertEqual(descriptor2.name, NAME2)
        self.assertEqual(descriptor2.type, TYPE2)
        self.assertEqual(descriptor2.description, DESCRIPTION2)
        request1, request2 = connection._requested
        expected_request1 = {'method': 'GET', 'path': '/' + PATH,
                             'query_params': {}}
        expected_request2 = {'method': 'GET', 'path': '/' + PATH,
                             'query_params': {'pageToken': TOKEN}}
        self.assertEqual(request1, expected_request1)
        self.assertEqual(request2, expected_request2)
        # The canned responses are used up now, so a further listing
        # surfaces the fake connection's NotFound.
        with self.assertRaises(NotFound):
            self._getTargetClass()._list(client)
    def test_list_filtered(self):
        """_list() passes an explicit filter string straight through."""
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        # Request only custom metrics.
        FILTER = 'metric.type = starts_with("custom.googleapis.com/")'
        # But let's say there are no custom metrics.
        RESPONSE = {'metricDescriptors': []}
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client, FILTER)
        self.assertEqual(len(descriptors), 0)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {'filter': FILTER}}
        self.assertEqual(request, expected_request)
    def test_list_filtered_by_type_prefix(self):
        """_list() converts type_prefix= into a starts_with() filter."""
        PROJECT = 'my-project'
        PATH = 'projects/{project}/metricDescriptors/'.format(project=PROJECT)
        # Request only custom metrics.
        PREFIX = 'custom.googleapis.com/'
        FILTER = 'metric.type = starts_with("{prefix}")'.format(prefix=PREFIX)
        # But let's say there are no custom metrics.
        RESPONSE = {'metricDescriptors': []}
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client, type_prefix=PREFIX)
        self.assertEqual(len(descriptors), 0)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {'filter': FILTER}}
        self.assertEqual(request, expected_request)
class TestMetric(unittest2.TestCase):
    """Tests for gcloud.monitoring.metric.Metric."""

    def _getTargetClass(self):
        from gcloud.monitoring.metric import Metric
        return Metric

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        """Both constructor arguments land on the matching attributes."""
        metric_type = 'appengine.googleapis.com/http/server/response_count'
        labels = {
            'response_code': 200,
            'loading': False,
        }
        metric = self._makeOne(type=metric_type, labels=labels)
        self.assertEqual(metric.type, metric_type)
        self.assertEqual(metric.labels, labels)

    def test_from_dict(self):
        """_from_dict() reads 'type' and 'labels' from the mapping."""
        metric_type = 'appengine.googleapis.com/http/server/response_count'
        labels = {
            'response_code': 200,
            'loading': False,
        }
        info = {'type': metric_type, 'labels': labels}
        metric = self._getTargetClass()._from_dict(info)
        self.assertEqual(metric.type, metric_type)
        self.assertEqual(metric.labels, labels)

    def test_from_dict_defaults(self):
        """A missing 'labels' entry defaults to an empty mapping."""
        metric_type = 'appengine.googleapis.com/http/server/response_count'
        metric = self._getTargetClass()._from_dict({'type': metric_type})
        self.assertEqual(metric.type, metric_type)
        self.assertEqual(metric.labels, {})
class _Connection(object):
def __init__(self, *responses):
self._responses = list(responses)
self._requested = []
def api_request(self, **kwargs):
from gcloud.exceptions import NotFound
self._requested.append(kwargs)
try:
return self._responses.pop(0)
except IndexError:
raise NotFound('miss')
class _Client(object):
def __init__(self, project, connection):
self.project = project
self.connection = connection

View file

@ -0,0 +1,638 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
PROJECT = 'my-project'
# Shared fixtures for the query tests: one metric type with two label sets,
# distinguishing two time series of the same metric.
METRIC_TYPE = 'compute.googleapis.com/instance/uptime'
METRIC_LABELS = {'instance_name': 'instance-1'}
METRIC_LABELS2 = {'instance_name': 'instance-2'}
# Monitored-resource fixtures: two GCE instances in different zones.
RESOURCE_TYPE = 'gce_instance'
RESOURCE_LABELS = {
    'project_id': 'my-project',
    'zone': 'us-east1-a',
    'instance_id': '1234567890123456789',
}
RESOURCE_LABELS2 = {
    'project_id': 'my-project',
    'zone': 'us-east1-b',
    'instance_id': '9876543210987654321',
}
METRIC_KIND = 'DELTA'
VALUE_TYPE = 'DOUBLE'
# Three consecutive point timestamps, one second apart.
TS0 = '2016-04-06T22:05:00.042Z'
TS1 = '2016-04-06T22:05:01.042Z'
TS2 = '2016-04-06T22:05:02.042Z'
class TestAligner(unittest2.TestCase):
    """Tests for the Aligner enumeration-like class."""

    def _getTargetClass(self):
        from gcloud.monitoring.query import Aligner
        return Aligner

    def test_one(self):
        """At least one known aligner value is exposed."""
        self.assertTrue(hasattr(self._getTargetClass(), 'ALIGN_RATE'))

    def test_names(self):
        """Every public attribute is a string equal to its own name."""
        klass = self._getTargetClass()
        public = [name for name in klass.__dict__ if not name.startswith('_')]
        for name in public:
            self.assertEqual(getattr(klass, name), name)
class TestReducer(unittest2.TestCase):
    """Tests for the Reducer enumeration-like class."""

    def _getTargetClass(self):
        from gcloud.monitoring.query import Reducer
        return Reducer

    def test_one(self):
        """At least one known reducer value is exposed."""
        self.assertTrue(hasattr(self._getTargetClass(),
                                'REDUCE_PERCENTILE_99'))

    def test_names(self):
        """Every public attribute is a string equal to its own name."""
        klass = self._getTargetClass()
        public = [name for name in klass.__dict__ if not name.startswith('_')]
        for name in public:
            self.assertEqual(getattr(klass, name), name)
class TestQuery(unittest2.TestCase):
    def _getTargetClass(self):
        # Deferred import: resolve the class under test at call time.
        from gcloud.monitoring.query import Query
        return Query
    def _makeOne(self, *args, **kwargs):
        # Construct an instance of the class under test.
        return self._getTargetClass()(*args, **kwargs)
def test_constructor_minimal(self):
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client)
self.assertEqual(query._client, client)
self.assertEqual(query._filter.metric_type,
self._getTargetClass().DEFAULT_METRIC_TYPE)
self.assertIsNone(query._start_time)
self.assertIsNone(query._end_time)
self.assertIsNone(query._per_series_aligner)
self.assertIsNone(query._alignment_period_seconds)
self.assertIsNone(query._cross_series_reducer)
self.assertEqual(query._group_by_fields, ())
def test_constructor_maximal(self):
import datetime
T1 = datetime.datetime(2016, 4, 7, 2, 30, 30)
DAYS, HOURS, MINUTES = 1, 2, 3
T0 = T1 - datetime.timedelta(days=DAYS, hours=HOURS, minutes=MINUTES)
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE,
end_time=T1,
days=DAYS, hours=HOURS, minutes=MINUTES)
self.assertEqual(query._client, client)
self.assertEqual(query._filter.metric_type, METRIC_TYPE)
self.assertEqual(query._start_time, T0)
self.assertEqual(query._end_time, T1)
self.assertIsNone(query._per_series_aligner)
self.assertIsNone(query._alignment_period_seconds)
self.assertIsNone(query._cross_series_reducer)
self.assertEqual(query._group_by_fields, ())
def test_constructor_default_end_time(self):
import datetime
from gcloud._testing import _Monkey
from gcloud.monitoring import query as MUT
MINUTES = 5
NOW, T0, T1 = [
datetime.datetime(2016, 4, 7, 2, 30, 30),
datetime.datetime(2016, 4, 7, 2, 25, 0),
datetime.datetime(2016, 4, 7, 2, 30, 0),
]
client = _Client(project=PROJECT, connection=_Connection())
with _Monkey(MUT, _UTCNOW=lambda: NOW):
query = self._makeOne(client, METRIC_TYPE, minutes=MINUTES)
self.assertEqual(query._start_time, T0)
self.assertEqual(query._end_time, T1)
def test_constructor_nonzero_duration_illegal(self):
import datetime
T1 = datetime.datetime(2016, 4, 7, 2, 30, 30)
client = _Client(project=PROJECT, connection=_Connection())
with self.assertRaises(ValueError):
self._makeOne(client, METRIC_TYPE, end_time=T1)
def test_execution_without_interval_illegal(self):
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
with self.assertRaises(ValueError):
list(query)
def test_metric_type(self):
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
self.assertEqual(query.metric_type, METRIC_TYPE)
def test_filter(self):
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
expected = 'metric.type = "{type}"'.format(type=METRIC_TYPE)
self.assertEqual(query.filter, expected)
def test_filter_by_group(self):
GROUP = '1234567'
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
query = query.select_group(GROUP)
expected = (
'metric.type = "{type}"'
' AND group.id = "{group}"'
).format(type=METRIC_TYPE, group=GROUP)
self.assertEqual(query.filter, expected)
def test_filter_by_projects(self):
PROJECT1, PROJECT2 = 'project-1', 'project-2'
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
query = query.select_projects(PROJECT1, PROJECT2)
expected = (
'metric.type = "{type}"'
' AND project = "{project1}" OR project = "{project2}"'
).format(type=METRIC_TYPE, project1=PROJECT1, project2=PROJECT2)
self.assertEqual(query.filter, expected)
def test_filter_by_resources(self):
ZONE_PREFIX = 'europe-'
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
query = query.select_resources(zone_prefix=ZONE_PREFIX)
expected = (
'metric.type = "{type}"'
' AND resource.label.zone = starts_with("{prefix}")'
).format(type=METRIC_TYPE, prefix=ZONE_PREFIX)
self.assertEqual(query.filter, expected)
def test_filter_by_metrics(self):
INSTANCE = 'my-instance'
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
query = query.select_metrics(instance_name=INSTANCE)
expected = (
'metric.type = "{type}"'
' AND metric.label.instance_name = "{instance}"'
).format(type=METRIC_TYPE, instance=INSTANCE)
self.assertEqual(query.filter, expected)
def test_request_parameters_minimal(self):
import datetime
T1 = datetime.datetime(2016, 4, 7, 2, 30, 0)
client = _Client(project=PROJECT, connection=_Connection())
query = self._makeOne(client, METRIC_TYPE)
query = query.select_interval(end_time=T1)
actual = list(query._build_query_params())
expected = [
('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
('interval.endTime', T1.isoformat() + 'Z'),
]
self.assertEqual(actual, expected)
    def test_request_parameters_maximal(self):
        """Every optional knob shows up in the query params, in fixed order."""
        import datetime
        T0 = datetime.datetime(2016, 4, 7, 2, 0, 0)
        T1 = datetime.datetime(2016, 4, 7, 2, 30, 0)
        ALIGNER = 'ALIGN_DELTA'
        # 1 minute + 30 seconds is rendered as a single '90s' period string.
        MINUTES, SECONDS, PERIOD = 1, 30, '90s'
        REDUCER = 'REDUCE_MEAN'
        FIELD1, FIELD2 = 'resource.zone', 'metric.instance_name'
        PAGE_SIZE = 100
        PAGE_TOKEN = 'second-page-please'
        client = _Client(project=PROJECT, connection=_Connection())
        query = self._makeOne(client, METRIC_TYPE)
        query = query.select_interval(start_time=T0, end_time=T1)
        query = query.align(ALIGNER, minutes=MINUTES, seconds=SECONDS)
        query = query.reduce(REDUCER, FIELD1, FIELD2)
        actual = list(query._build_query_params(headers_only=True,
                                                page_size=PAGE_SIZE,
                                                page_token=PAGE_TOKEN))
        # The pair order below pins _build_query_params' emission order;
        # note groupByFields is repeated once per field.
        expected = [
            ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
            ('interval.endTime', T1.isoformat() + 'Z'),
            ('interval.startTime', T0.isoformat() + 'Z'),
            ('aggregation.perSeriesAligner', ALIGNER),
            ('aggregation.alignmentPeriod', PERIOD),
            ('aggregation.crossSeriesReducer', REDUCER),
            ('aggregation.groupByFields', FIELD1),
            ('aggregation.groupByFields', FIELD2),
            ('view', 'HEADERS'),
            ('pageSize', PAGE_SIZE),
            ('pageToken', PAGE_TOKEN),
        ]
        self.assertEqual(actual, expected)
    def test_iteration(self):
        """Iterating the query issues one GET and parses the time series."""
        import datetime
        T0 = datetime.datetime(2016, 4, 6, 22, 5, 0)
        T1 = datetime.datetime(2016, 4, 6, 22, 10, 0)
        INTERVAL1 = {'startTime': TS0, 'endTime': TS1}
        INTERVAL2 = {'startTime': TS1, 'endTime': TS2}
        VALUE1 = 60 # seconds
        VALUE2 = 60.001 # seconds
        # Wire-format points are listed newest-first (INTERVAL2 before
        # INTERVAL1); the assertions below expect oldest-first, so the
        # query is expected to reverse them.
        SERIES1 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [
                {'interval': INTERVAL2, 'value': {'doubleValue': VALUE1}},
                {'interval': INTERVAL1, 'value': {'doubleValue': VALUE1}},
            ],
        }
        SERIES2 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS2},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS2},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [
                {'interval': INTERVAL2, 'value': {'doubleValue': VALUE2}},
                {'interval': INTERVAL1, 'value': {'doubleValue': VALUE2}},
            ],
        }
        RESPONSE = {'timeSeries': [SERIES1, SERIES2]}
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        query = self._makeOne(client, METRIC_TYPE)
        query = query.select_interval(start_time=T0, end_time=T1)
        response = list(query)
        self.assertEqual(len(response), 2)
        series1, series2 = response
        self.assertEqual(series1.metric.labels, METRIC_LABELS)
        self.assertEqual(series2.metric.labels, METRIC_LABELS2)
        self.assertEqual(series1.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series2.resource.labels, RESOURCE_LABELS2)
        self.assertEqual([p.value for p in series1.points], [VALUE1, VALUE1])
        self.assertEqual([p.value for p in series2.points], [VALUE2, VALUE2])
        # Oldest-first: end times come back as TS1 then TS2.
        self.assertEqual([p.end_time for p in series1.points], [TS1, TS2])
        self.assertEqual([p.end_time for p in series2.points], [TS1, TS2])
        expected_request = {
            'method': 'GET',
            'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
            'query_params': [
                ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
                ('interval.endTime', T1.isoformat() + 'Z'),
                ('interval.startTime', T0.isoformat() + 'Z'),
            ],
        }
        request, = connection._requested
        self.assertEqual(request, expected_request)
    def test_iteration_paged(self):
        """A nextPageToken triggers a second request; partial series from
        both pages are merged into one TimeSeries per metric/resource."""
        import copy
        import datetime
        from gcloud.exceptions import NotFound
        T0 = datetime.datetime(2016, 4, 6, 22, 5, 0)
        T1 = datetime.datetime(2016, 4, 6, 22, 10, 0)
        INTERVAL1 = {'startTime': TS0, 'endTime': TS1}
        INTERVAL2 = {'startTime': TS1, 'endTime': TS2}
        VALUE1 = 60 # seconds
        VALUE2 = 60.001 # seconds
        SERIES1 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [
                {'interval': INTERVAL2, 'value': {'doubleValue': VALUE1}},
                {'interval': INTERVAL1, 'value': {'doubleValue': VALUE1}},
            ],
        }
        # The second series is split across the two pages: its newer point
        # arrives on page one, its older point on page two.
        SERIES2_PART1 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS2},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS2},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [
                {'interval': INTERVAL2, 'value': {'doubleValue': VALUE2}},
            ],
        }
        SERIES2_PART2 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS2},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS2},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [
                {'interval': INTERVAL1, 'value': {'doubleValue': VALUE2}},
            ],
        }
        TOKEN = 'second-page-please'
        RESPONSE1 = {'timeSeries': [SERIES1, SERIES2_PART1],
                     'nextPageToken': TOKEN}
        RESPONSE2 = {'timeSeries': [SERIES2_PART2]}
        connection = _Connection(RESPONSE1, RESPONSE2)
        client = _Client(project=PROJECT, connection=connection)
        query = self._makeOne(client, METRIC_TYPE)
        query = query.select_interval(start_time=T0, end_time=T1)
        response = list(query)
        self.assertEqual(len(response), 2)
        series1, series2 = response
        self.assertEqual(series1.metric.labels, METRIC_LABELS)
        self.assertEqual(series2.metric.labels, METRIC_LABELS2)
        self.assertEqual(series1.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series2.resource.labels, RESOURCE_LABELS2)
        self.assertEqual([p.value for p in series1.points], [VALUE1, VALUE1])
        self.assertEqual([p.value for p in series2.points], [VALUE2, VALUE2])
        self.assertEqual([p.end_time for p in series1.points], [TS1, TS2])
        self.assertEqual([p.end_time for p in series2.points], [TS1, TS2])
        expected_request1 = {
            'method': 'GET',
            'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
            'query_params': [
                ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
                ('interval.endTime', T1.isoformat() + 'Z'),
                ('interval.startTime', T0.isoformat() + 'Z'),
            ],
        }
        # The follow-up request repeats the same params plus the page token.
        expected_request2 = copy.deepcopy(expected_request1)
        expected_request2['query_params'].append(('pageToken', TOKEN))
        request1, request2 = connection._requested
        self.assertEqual(request1, expected_request1)
        self.assertEqual(request2, expected_request2)
        # Both canned responses were consumed above, so re-running the
        # query surfaces the stub connection's NotFound.
        with self.assertRaises(NotFound):
            list(query)
    def test_iteration_empty(self):
        """An empty API response yields an empty result list."""
        import datetime
        T0 = datetime.datetime(2016, 4, 6, 22, 5, 0)
        T1 = datetime.datetime(2016, 4, 6, 22, 10, 0)
        connection = _Connection({})
        client = _Client(project=PROJECT, connection=connection)
        query = self._makeOne(client, METRIC_TYPE)
        query = query.select_interval(start_time=T0, end_time=T1)
        response = list(query)
        self.assertEqual(len(response), 0)
        expected_request = {
            'method': 'GET',
            'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
            'query_params': [
                ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
                ('interval.endTime', T1.isoformat() + 'Z'),
                ('interval.startTime', T0.isoformat() + 'Z'),
            ],
        }
        request, = connection._requested
        self.assertEqual(request, expected_request)
    def test_iteration_headers_only(self):
        """``iter(headers_only=True)`` adds view=HEADERS and yields series
        with empty point lists."""
        import datetime
        T0 = datetime.datetime(2016, 4, 6, 22, 5, 0)
        T1 = datetime.datetime(2016, 4, 6, 22, 10, 0)
        # Header responses carry no 'points' key at all.
        SERIES1 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        SERIES2 = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS2},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS2},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        RESPONSE = {'timeSeries': [SERIES1, SERIES2]}
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        query = self._makeOne(client, METRIC_TYPE)
        query = query.select_interval(start_time=T0, end_time=T1)
        response = list(query.iter(headers_only=True))
        self.assertEqual(len(response), 2)
        series1, series2 = response
        self.assertEqual(series1.metric.labels, METRIC_LABELS)
        self.assertEqual(series2.metric.labels, METRIC_LABELS2)
        self.assertEqual(series1.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series2.resource.labels, RESOURCE_LABELS2)
        self.assertEqual(series1.points, [])
        self.assertEqual(series2.points, [])
        expected_request = {
            'method': 'GET',
            'path': '/projects/{project}/timeSeries/'.format(project=PROJECT),
            'query_params': [
                ('filter', 'metric.type = "{type}"'.format(type=METRIC_TYPE)),
                ('interval.endTime', T1.isoformat() + 'Z'),
                ('interval.startTime', T0.isoformat() + 'Z'),
                ('view', 'HEADERS'),
            ],
        }
        request, = connection._requested
        self.assertEqual(request, expected_request)
class Test_Filter(unittest2.TestCase):
    """Tests for the internal ``gcloud.monitoring.query._Filter`` helper."""

    def _getTargetClass(self):
        # Late import so a broken gcloud package fails the test, not
        # module collection.
        from gcloud.monitoring.query import _Filter
        return _Filter

    def _makeOne(self, metric_type):
        return self._getTargetClass()(metric_type)

    def test_minimal(self):
        """A bare filter renders only the metric.type clause."""
        obj = self._makeOne(METRIC_TYPE)
        expected = 'metric.type = "{type}"'.format(type=METRIC_TYPE)
        self.assertEqual(str(obj), expected)

    def test_maximal(self):
        """All clause kinds combine; the expected string pins the
        rendering order: type, group, projects, resources, metrics."""
        obj = self._makeOne(METRIC_TYPE)
        obj.group_id = '1234567'
        obj.projects = 'project-1', 'project-2'
        obj.select_resources(resource_type='some-resource',
                             resource_label='foo')
        obj.select_metrics(metric_label_prefix='bar-')
        expected = (
            'metric.type = "{type}"'
            ' AND group.id = "1234567"'
            ' AND project = "project-1" OR project = "project-2"'
            ' AND resource.label.resource_label = "foo"'
            ' AND resource.type = "some-resource"'
            ' AND metric.label.metric_label = starts_with("bar-")'
        ).format(type=METRIC_TYPE)
        self.assertEqual(str(obj), expected)
class Test__build_label_filter(unittest2.TestCase):
    """Tests for ``gcloud.monitoring.query._build_label_filter``."""

    def _callFUT(self, *args, **kwargs):
        from gcloud.monitoring.query import _build_label_filter
        return _build_label_filter(*args, **kwargs)

    def test_no_labels(self):
        """No label kwargs produce an empty filter string."""
        self.assertEqual(self._callFUT('resource'), '')

    def test_label_is_none(self):
        """A label explicitly set to None is skipped entirely."""
        self.assertEqual(self._callFUT('resource', foo=None), '')

    def test_metric_labels(self):
        """_prefix/_suffix kwargs map to starts_with/ends_with clauses.

        NOTE(review): the kwargs here are already in alphabetical order,
        matching the expected output order — presumably the function sorts
        keys; confirm before relying on it with unsorted inputs.
        """
        actual = self._callFUT(
            'metric',
            alpha_prefix='a-',
            beta_gamma_suffix='-b',
            delta_epsilon='xyz',
        )
        expected = (
            'metric.label.alpha = starts_with("a-")'
            ' AND metric.label.beta_gamma = ends_with("-b")'
            ' AND metric.label.delta_epsilon = "xyz"'
        )
        self.assertEqual(actual, expected)

    def test_resource_labels(self):
        """Same clause forms, but prefixed with 'resource.label.'."""
        actual = self._callFUT(
            'resource',
            alpha_prefix='a-',
            beta_gamma_suffix='-b',
            delta_epsilon='xyz',
        )
        expected = (
            'resource.label.alpha = starts_with("a-")'
            ' AND resource.label.beta_gamma = ends_with("-b")'
            ' AND resource.label.delta_epsilon = "xyz"'
        )
        self.assertEqual(actual, expected)

    def test_raw_label_filters(self):
        """Positional raw filter strings are joined verbatim with AND."""
        actual = self._callFUT(
            'resource',
            'resource.label.alpha = starts_with("a-")',
            'resource.label.beta_gamma = ends_with("-b")',
            'resource.label.delta_epsilon = "xyz"',
        )
        expected = (
            'resource.label.alpha = starts_with("a-")'
            ' AND resource.label.beta_gamma = ends_with("-b")'
            ' AND resource.label.delta_epsilon = "xyz"'
        )
        self.assertEqual(actual, expected)

    def test_resource_type(self):
        """'resource_type' is special-cased to a 'resource.type' clause."""
        actual = self._callFUT('resource', resource_type='foo')
        expected = 'resource.type = "foo"'
        self.assertEqual(actual, expected)

    def test_resource_type_prefix(self):
        """'resource_type_prefix' becomes a starts_with clause."""
        actual = self._callFUT('resource', resource_type_prefix='foo-')
        expected = 'resource.type = starts_with("foo-")'
        self.assertEqual(actual, expected)

    def test_resource_type_suffix(self):
        """'resource_type_suffix' becomes an ends_with clause."""
        actual = self._callFUT('resource', resource_type_suffix='-foo')
        expected = 'resource.type = ends_with("-foo")'
        self.assertEqual(actual, expected)
class Test__format_timestamp(unittest2.TestCase):
    """Tests for ``gcloud.monitoring.query._format_timestamp``."""

    def _callFUT(self, timestamp):
        from gcloud.monitoring.query import _format_timestamp
        return _format_timestamp(timestamp)

    def test_naive(self):
        """A naive datetime is rendered with a trailing 'Z'."""
        import datetime
        stamp = datetime.datetime(2016, 4, 5, 13, 30, 0)
        self.assertEqual(self._callFUT(stamp), '2016-04-05T13:30:00Z')

    def test_with_timezone(self):
        """An explicitly-UTC datetime renders the same as a naive one."""
        import datetime
        from gcloud._helpers import UTC
        stamp = datetime.datetime(2016, 4, 5, 13, 30, 0, tzinfo=UTC)
        self.assertEqual(self._callFUT(stamp), '2016-04-05T13:30:00Z')
class _Connection(object):
    """Stub connection: records each request, replays canned responses.

    Raises ``NotFound`` once the supplied responses are exhausted.
    """

    def __init__(self, *responses):
        self._responses = list(responses)
        self._requested = []

    def api_request(self, **kwargs):
        from gcloud.exceptions import NotFound
        self._requested.append(kwargs)
        if not self._responses:
            raise NotFound('miss')
        return self._responses.pop(0)
class _Client(object):
    """Minimal stand-in for the monitoring client: holds only the
    project ID and connection that the code under test reads."""

    def __init__(self, project, connection):
        self.project = project
        self.connection = connection

View file

@ -0,0 +1,339 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class TestResourceDescriptor(unittest2.TestCase):
    """Tests for ``gcloud.monitoring.resource.ResourceDescriptor``."""

    def _getTargetClass(self):
        # Late import so a broken gcloud package fails the test, not
        # module collection.
        from gcloud.monitoring.resource import ResourceDescriptor
        return ResourceDescriptor

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        """Constructor arguments are stored verbatim as attributes."""
        from gcloud.monitoring.label import LabelDescriptor
        TYPE = 'gce_instance'
        NAME = 'projects/my-project/monitoredResourceDescriptors/' + TYPE
        DISPLAY_NAME = 'GCE Instance'
        DESCRIPTION = 'A VM instance hosted in Google Compute Engine.'
        LABELS = [
            LabelDescriptor(key='project_id', value_type='STRING',
                            description='The ID of the GCP project...'),
            LabelDescriptor(key='instance_id', value_type='STRING',
                            description='The VM instance identifier...'),
            LabelDescriptor(key='zone', value_type='STRING',
                            description='The GCE zone...'),
        ]
        descriptor = self._makeOne(
            name=NAME,
            type_=TYPE,
            display_name=DISPLAY_NAME,
            description=DESCRIPTION,
            labels=LABELS,
        )
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.display_name, DISPLAY_NAME)
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(descriptor.labels, LABELS)

    def test_from_dict(self):
        """_from_dict parses the JSON wire format, including labels."""
        TYPE = 'gce_instance'
        NAME = 'projects/my-project/monitoredResourceDescriptors/' + TYPE
        DISPLAY_NAME = 'GCE Instance'
        DESCRIPTION = 'A VM instance hosted in Google Compute Engine.'
        LABEL1 = {'key': 'project_id', 'valueType': 'STRING',
                  'description': 'The ID of the GCP project...'}
        LABEL2 = {'key': 'instance_id', 'valueType': 'STRING',
                  'description': 'The VM instance identifier...'}
        LABEL3 = {'key': 'zone', 'valueType': 'STRING',
                  'description': 'The GCE zone...'}
        info = {
            'name': NAME,
            'type': TYPE,
            'displayName': DISPLAY_NAME,
            'description': DESCRIPTION,
            'labels': [LABEL1, LABEL2, LABEL3],
        }
        descriptor = self._getTargetClass()._from_dict(info)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.display_name, DISPLAY_NAME)
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(len(descriptor.labels), 3)
        label1, label2, label3 = descriptor.labels
        self.assertEqual(label1.key, LABEL1['key'])
        self.assertEqual(label2.key, LABEL2['key'])
        self.assertEqual(label3.key, LABEL3['key'])

    def test_from_dict_defaults(self):
        """Missing optional keys default to empty string/tuple."""
        TYPE = 'gce_instance'
        NAME = 'projects/my-project/monitoredResourceDescriptors/' + TYPE
        info = {
            'name': NAME,
            'type': TYPE,
        }
        descriptor = self._getTargetClass()._from_dict(info)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.display_name, '')
        self.assertEqual(descriptor.description, '')
        self.assertEqual(descriptor.labels, ())

    def test_fetch(self):
        """_fetch issues a GET for the descriptor's resource name."""
        PROJECT = 'my-project'
        TYPE = 'gce_instance'
        NAME = 'projects/{project}/monitoredResourceDescriptors/{type}'.format(
            project=PROJECT, type=TYPE)
        DISPLAY_NAME = 'GCE Instance'
        DESCRIPTION = 'A VM instance hosted in Google Compute Engine.'
        LABEL1 = {'key': 'project_id', 'valueType': 'STRING',
                  'description': 'The ID of the GCP project...'}
        LABEL2 = {'key': 'instance_id', 'valueType': 'STRING',
                  'description': 'The VM instance identifier...'}
        LABEL3 = {'key': 'zone', 'valueType': 'STRING',
                  'description': 'The GCE zone...'}
        RESOURCE_DESCRIPTOR = {
            'name': NAME,
            'type': TYPE,
            'displayName': DISPLAY_NAME,
            'description': DESCRIPTION,
            'labels': [LABEL1, LABEL2, LABEL3],
        }
        connection = _Connection(RESOURCE_DESCRIPTOR)
        client = _Client(project=PROJECT, connection=connection)
        descriptor = self._getTargetClass()._fetch(client, TYPE)
        self.assertEqual(descriptor.name, NAME)
        self.assertEqual(descriptor.type, TYPE)
        self.assertEqual(descriptor.display_name, DISPLAY_NAME)
        self.assertEqual(descriptor.description, DESCRIPTION)
        self.assertEqual(len(descriptor.labels), 3)
        label1, label2, label3 = descriptor.labels
        self.assertEqual(label1.key, LABEL1['key'])
        self.assertEqual(label2.key, LABEL2['key'])
        self.assertEqual(label3.key, LABEL3['key'])
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + NAME}
        self.assertEqual(request, expected_request)

    def test_list(self):
        """_list fetches all descriptors with a single unfiltered GET."""
        PROJECT = 'my-project'
        PATH = 'projects/{project}/monitoredResourceDescriptors/'.format(
            project=PROJECT)
        TYPE1 = 'custom.googleapis.com/resource-1'
        DESCRIPTION1 = 'This is the first resource.'
        NAME1 = PATH + TYPE1
        RESOURCE_DESCRIPTOR1 = {
            'name': NAME1,
            'type': TYPE1,
            'description': DESCRIPTION1,
        }
        TYPE2 = 'custom.googleapis.com/resource-2'
        DESCRIPTION2 = 'This is the second resource.'
        NAME2 = PATH + TYPE2
        RESOURCE_DESCRIPTOR2 = {
            'name': NAME2,
            'type': TYPE2,
            'description': DESCRIPTION2,
        }
        RESPONSE = {
            'resourceDescriptors':
                [RESOURCE_DESCRIPTOR1, RESOURCE_DESCRIPTOR2],
        }
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client)
        self.assertEqual(len(descriptors), 2)
        descriptor1, descriptor2 = descriptors
        self.assertEqual(descriptor1.name, NAME1)
        self.assertEqual(descriptor1.type, TYPE1)
        self.assertEqual(descriptor1.description, DESCRIPTION1)
        self.assertEqual(descriptor2.name, NAME2)
        self.assertEqual(descriptor2.type, TYPE2)
        self.assertEqual(descriptor2.description, DESCRIPTION2)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {}}
        self.assertEqual(request, expected_request)

    def test_list_paged(self):
        """_list follows nextPageToken across multiple responses."""
        from gcloud.exceptions import NotFound
        PROJECT = 'my-project'
        PATH = 'projects/{project}/monitoredResourceDescriptors/'.format(
            project=PROJECT)
        TYPE1 = 'custom.googleapis.com/resource-1'
        DESCRIPTION1 = 'This is the first resource.'
        NAME1 = PATH + TYPE1
        RESOURCE_DESCRIPTOR1 = {
            'name': NAME1,
            'type': TYPE1,
            'description': DESCRIPTION1,
        }
        TYPE2 = 'custom.googleapis.com/resource-2'
        DESCRIPTION2 = 'This is the second resource.'
        NAME2 = PATH + TYPE2
        RESOURCE_DESCRIPTOR2 = {
            'name': NAME2,
            'type': TYPE2,
            'description': DESCRIPTION2,
        }
        TOKEN = 'second-page-please'
        RESPONSE1 = {
            'resourceDescriptors': [RESOURCE_DESCRIPTOR1],
            'nextPageToken': TOKEN,
        }
        RESPONSE2 = {
            'resourceDescriptors': [RESOURCE_DESCRIPTOR2],
        }
        connection = _Connection(RESPONSE1, RESPONSE2)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client)
        self.assertEqual(len(descriptors), 2)
        descriptor1, descriptor2 = descriptors
        self.assertEqual(descriptor1.name, NAME1)
        self.assertEqual(descriptor1.type, TYPE1)
        self.assertEqual(descriptor1.description, DESCRIPTION1)
        self.assertEqual(descriptor2.name, NAME2)
        self.assertEqual(descriptor2.type, TYPE2)
        self.assertEqual(descriptor2.description, DESCRIPTION2)
        request1, request2 = connection._requested
        expected_request1 = {'method': 'GET', 'path': '/' + PATH,
                             'query_params': {}}
        expected_request2 = {'method': 'GET', 'path': '/' + PATH,
                             'query_params': {'pageToken': TOKEN}}
        self.assertEqual(request1, expected_request1)
        self.assertEqual(request2, expected_request2)
        # Responses are exhausted, so listing again hits the stub's
        # NotFound.
        with self.assertRaises(NotFound):
            self._getTargetClass()._list(client)

    def test_list_filtered(self):
        """A filter string is passed through as the 'filter' query param."""
        PROJECT = 'my-project'
        PATH = 'projects/{project}/monitoredResourceDescriptors/'.format(
            project=PROJECT)
        # Request only resources with type names that start with "foobar_".
        FILTER = 'resource.type = starts_with("foobar_")'
        # But there are none.
        RESPONSE = {'resourceDescriptors': []}
        connection = _Connection(RESPONSE)
        client = _Client(project=PROJECT, connection=connection)
        descriptors = self._getTargetClass()._list(client, FILTER)
        self.assertEqual(len(descriptors), 0)
        request, = connection._requested
        expected_request = {'method': 'GET', 'path': '/' + PATH,
                            'query_params': {'filter': FILTER}}
        self.assertEqual(request, expected_request)
class TestResource(unittest2.TestCase):
    """Tests for ``gcloud.monitoring.resource.Resource``."""

    def _getTargetClass(self):
        from gcloud.monitoring.resource import Resource
        return Resource

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        """Constructor arguments are stored verbatim as attributes."""
        TYPE = 'gce_instance'
        LABELS = {
            'project_id': 'my-project',
            'instance_id': '1234567890123456789',
            'zone': 'us-central1-a',
        }
        resource = self._makeOne(type=TYPE, labels=LABELS)
        self.assertEqual(resource.type, TYPE)
        self.assertEqual(resource.labels, LABELS)

    def test_from_dict(self):
        """_from_dict parses the JSON wire format."""
        TYPE = 'gce_instance'
        LABELS = {
            'project_id': 'my-project',
            'instance_id': '1234567890123456789',
            'zone': 'us-central1-a',
        }
        info = {
            'type': TYPE,
            'labels': LABELS,
        }
        resource = self._getTargetClass()._from_dict(info)
        self.assertEqual(resource.type, TYPE)
        self.assertEqual(resource.labels, LABELS)

    def test_from_dict_defaults(self):
        """A missing 'labels' key defaults to an empty dict."""
        TYPE = 'gce_instance'
        info = {'type': TYPE}
        resource = self._getTargetClass()._from_dict(info)
        self.assertEqual(resource.type, TYPE)
        self.assertEqual(resource.labels, {})
class _Connection(object):
    """Stub connection: records each request, replays canned responses.

    Raises ``NotFound`` once the supplied responses are exhausted.
    """

    def __init__(self, *responses):
        self._responses = list(responses)
        self._requested = []

    def api_request(self, **kwargs):
        from gcloud.exceptions import NotFound
        self._requested.append(kwargs)
        if not self._responses:
            raise NotFound('miss')
        return self._responses.pop(0)
class _Client(object):
    """Minimal stand-in for the monitoring client: holds only the
    project ID and connection that the code under test reads."""

    def __init__(self, project, connection):
        self.project = project
        self.connection = connection

View file

@ -0,0 +1,198 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
# Shared fixture data: a GCE instance-uptime metric and its monitored
# resource, used by the TimeSeries/Point tests below.
METRIC_TYPE = 'compute.googleapis.com/instance/uptime'
METRIC_LABELS = {'instance_name': 'instance-1'}
RESOURCE_TYPE = 'gce_instance'
RESOURCE_LABELS = {
    'project_id': 'my-project',
    'zone': 'us-east1-a',
    'instance_id': '1234567890123456789',
}
METRIC_KIND = 'DELTA'
VALUE_TYPE = 'DOUBLE'
# Three consecutive RFC3339 timestamps, one second apart.
TS0 = '2016-04-06T22:05:00.042Z'
TS1 = '2016-04-06T22:05:01.042Z'
TS2 = '2016-04-06T22:05:02.042Z'
class TestTimeSeries(unittest2.TestCase):
    """Tests for ``gcloud.monitoring.timeseries.TimeSeries``."""

    def _getTargetClass(self):
        from gcloud.monitoring.timeseries import TimeSeries
        return TimeSeries

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        """Constructor arguments are stored verbatim as attributes."""
        from gcloud.monitoring.metric import Metric
        from gcloud.monitoring.resource import Resource
        from gcloud.monitoring.timeseries import Point
        VALUE = 60  # seconds
        METRIC = Metric(type=METRIC_TYPE, labels=METRIC_LABELS)
        RESOURCE = Resource(type=RESOURCE_TYPE, labels=RESOURCE_LABELS)
        POINTS = [
            Point(start_time=TS0, end_time=TS1, value=VALUE),
            Point(start_time=TS1, end_time=TS2, value=VALUE),
        ]
        series = self._makeOne(metric=METRIC,
                               resource=RESOURCE,
                               metric_kind=METRIC_KIND,
                               value_type=VALUE_TYPE,
                               points=POINTS)
        self.assertEqual(series.metric, METRIC)
        self.assertEqual(series.resource, RESOURCE)
        self.assertEqual(series.metric_kind, METRIC_KIND)
        self.assertEqual(series.value_type, VALUE_TYPE)
        self.assertEqual(series.points, POINTS)

    def test_from_dict(self):
        """_from_dict parses metric, resource, and each point."""
        VALUE = 60  # seconds
        info = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
            'points': [
                {
                    'interval': {'startTime': TS0, 'endTime': TS1},
                    'value': {'doubleValue': VALUE},
                },
                {
                    'interval': {'startTime': TS1, 'endTime': TS2},
                    'value': {'doubleValue': VALUE},
                },
            ],
        }
        series = self._getTargetClass()._from_dict(info)
        self.assertEqual(series.metric.type, METRIC_TYPE)
        self.assertEqual(series.metric.labels, METRIC_LABELS)
        self.assertEqual(series.resource.type, RESOURCE_TYPE)
        self.assertEqual(series.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series.metric_kind, METRIC_KIND)
        self.assertEqual(series.value_type, VALUE_TYPE)
        self.assertEqual(len(series.points), 2)
        point1, point2 = series.points
        self.assertEqual(point1.start_time, TS0)
        self.assertEqual(point1.end_time, TS1)
        self.assertEqual(point1.value, VALUE)
        self.assertEqual(point2.start_time, TS1)
        self.assertEqual(point2.end_time, TS2)
        self.assertEqual(point2.value, VALUE)

    def test_from_dict_no_points(self):
        """A missing 'points' key yields an empty points list."""
        info = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        series = self._getTargetClass()._from_dict(info)
        self.assertEqual(series.metric.type, METRIC_TYPE)
        self.assertEqual(series.metric.labels, METRIC_LABELS)
        self.assertEqual(series.resource.type, RESOURCE_TYPE)
        self.assertEqual(series.resource.labels, RESOURCE_LABELS)
        self.assertEqual(series.metric_kind, METRIC_KIND)
        self.assertEqual(series.value_type, VALUE_TYPE)
        self.assertEqual(series.points, [])

    def test_labels(self):
        """The labels property merges resource/metric labels and caches
        the merged dict in ``_labels`` on first access."""
        info = {
            'metric': {'type': METRIC_TYPE, 'labels': METRIC_LABELS},
            'resource': {'type': RESOURCE_TYPE, 'labels': RESOURCE_LABELS},
            'metricKind': METRIC_KIND,
            'valueType': VALUE_TYPE,
        }
        series = self._getTargetClass()._from_dict(info)
        labels = {'resource_type': RESOURCE_TYPE}
        labels.update(RESOURCE_LABELS)
        labels.update(METRIC_LABELS)
        # Lazily computed: unset before first access, cached afterwards.
        self.assertIsNone(series._labels)
        self.assertEqual(series.labels, labels)
        self.assertIsNotNone(series._labels)
        self.assertEqual(series.labels, labels)
class TestPoint(unittest2.TestCase):
    """Tests for ``gcloud.monitoring.timeseries.Point``."""

    def _getTargetClass(self):
        from gcloud.monitoring.timeseries import Point
        return Point

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        """Constructor arguments are stored verbatim as attributes."""
        VALUE = 3.14
        point = self._makeOne(start_time=TS0, end_time=TS1, value=VALUE)
        self.assertEqual(point.start_time, TS0)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)

    def test_from_dict(self):
        """_from_dict parses interval timestamps and a double value."""
        VALUE = 3.14
        info = {
            'interval': {'startTime': TS0, 'endTime': TS1},
            'value': {'doubleValue': VALUE},
        }
        point = self._getTargetClass()._from_dict(info)
        self.assertEqual(point.start_time, TS0)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)

    def test_from_dict_defaults(self):
        """A missing startTime parses as None."""
        VALUE = 3.14
        info = {
            'interval': {'endTime': TS1},
            'value': {'doubleValue': VALUE},
        }
        point = self._getTargetClass()._from_dict(info)
        self.assertIsNone(point.start_time)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)

    def test_from_dict_int64(self):
        """int64 values arrive as strings and are converted to int."""
        VALUE = 2 ** 63 - 1
        info = {
            'interval': {'endTime': TS1},
            'value': {'int64Value': str(VALUE)},
        }
        point = self._getTargetClass()._from_dict(info)
        self.assertIsNone(point.start_time)
        self.assertEqual(point.end_time, TS1)
        self.assertEqual(point.value, VALUE)

View file

@ -0,0 +1,152 @@
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time series for the `Google Monitoring API (V3)`_.
Features intentionally omitted from this first version of the client library:
* Writing time series.
* Natural representation of distribution values.
.. _Google Monitoring API (V3):
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/TimeSeries
"""
import collections
from gcloud.monitoring.metric import Metric
from gcloud.monitoring.resource import Resource
class TimeSeries(collections.namedtuple(
        'TimeSeries', 'metric resource metric_kind value_type points')):
    """A single time series of metric values.

    :type metric: :class:`~gcloud.monitoring.metric.Metric`
    :param metric: A metric object.

    :type resource: :class:`~gcloud.monitoring.resource.Resource`
    :param resource: A resource object.

    :type metric_kind: string
    :param metric_kind:
        The kind of measurement: :data:`MetricKind.GAUGE`,
        :data:`MetricKind.DELTA`, or :data:`MetricKind.CUMULATIVE`.
        See :class:`~gcloud.monitoring.metric.MetricKind`.

    :type value_type: string
    :param value_type:
        The value type of the metric: :data:`ValueType.BOOL`,
        :data:`ValueType.INT64`, :data:`ValueType.DOUBLE`,
        :data:`ValueType.STRING`, or :data:`ValueType.DISTRIBUTION`.
        See :class:`~gcloud.monitoring.metric.ValueType`.

    :type points: list of :class:`Point`
    :param points: A list of point objects.
    """

    # Cache for the merged label dictionary; filled in lazily by the
    # ``labels`` property. (The subclass has a __dict__, so instance
    # attribute assignment is possible despite the namedtuple base.)
    _labels = None

    @property
    def labels(self):
        """A single dictionary with values for all the labels.

        This combines ``resource.labels`` and ``metric.labels`` and also
        adds ``"resource_type"``.
        """
        if self._labels is None:
            # Merge in the same precedence order as the wire data:
            # metric labels override resource labels on key collision.
            merged = {'resource_type': self.resource.type}
            merged.update(self.resource.labels)
            merged.update(self.metric.labels)
            self._labels = merged
        return self._labels

    def header(self, points=None):
        """Copy everything but the point data.

        :type points: list of :class:`Point`, or None
        :param points: An optional point list.

        :rtype: :class:`TimeSeries`
        :returns: The new time series object.
        """
        return self._replace(points=list(points) if points else [])

    @classmethod
    def _from_dict(cls, info):
        """Construct a time series from the parsed JSON representation.

        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.

        :rtype: :class:`TimeSeries`
        :returns: A time series object.
        """
        return cls(
            metric=Metric._from_dict(info['metric']),
            resource=Resource._from_dict(info['resource']),
            metric_kind=info['metricKind'],
            value_type=info['valueType'],
            points=[Point._from_dict(p) for p in info.get('points', ())],
        )

    def __repr__(self):
        """Return a representation string with the points elided."""
        template = (
            '<TimeSeries with {num} points:\n'
            ' metric={metric!r},\n'
            ' resource={resource!r},\n'
            ' metric_kind={kind!r}, value_type={type!r}>'
        )
        return template.format(num=len(self.points),
                               metric=self.metric,
                               resource=self.resource,
                               kind=self.metric_kind,
                               type=self.value_type)
class Point(collections.namedtuple('Point', 'end_time start_time value')):
    """A single point in a time series.

    :type end_time: string
    :param end_time: The end time in RFC3339 UTC "Zulu" format.

    :type start_time: string or None
    :param start_time: An optional start time in RFC3339 UTC "Zulu" format.

    :type value: object
    :param value: The metric value. This can be a scalar or a distribution.
    """

    __slots__ = ()

    @classmethod
    def _from_dict(cls, info):
        """Construct a Point from the parsed JSON representation.

        :type info: dict
        :param info:
            A ``dict`` parsed from the JSON wire-format representation.

        :rtype: :class:`Point`
        :returns: A point object.
        """
        interval = info['interval']
        # The typed value dict always holds exactly one entry; unpack it.
        ((value_type, value),) = info['value'].items()
        if value_type == 'int64Value':
            # int64 values travel as strings on the wire.
            value = int(value)
        return cls(interval['endTime'], interval.get('startTime'), value)