gcpdiag.queries.monitoring
```python
def period_aligned_now(period_seconds: int) -> str:
  """Return a MQL date string for the current timestamp aligned to the given period.

  This will return "now - now%period" in a MQL-parseable date string and is useful
  to get stable results. See also: (internal)
  """

  now = time.time()
  now -= now % period_seconds
  return time.strftime('%Y/%m/%d-%H:%M:%S+00:00', time.gmtime(now))
```
Return a MQL date string for the current timestamp aligned to the given period.
This will return "now - now%period" as an MQL-parseable date string, which is useful for getting stable results. See also: (internal)
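A short usage sketch (not part of the module): repeated calls within the same period return the identical, aligned timestamp, which keeps query windows stable across runs.

```python
from gcpdiag.queries import monitoring

# Aligned to a 5-minute boundary, e.g. '2024/05/17-10:35:00+00:00'
# (illustrative value). Repeated calls within the same 5-minute period
# return the same string, so query windows stay stable across runs.
print(monitoring.period_aligned_now(300))
```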
```python
class TimeSeriesCollection(collections.abc.Mapping):
  """A mapping that stores Cloud Monitoring time series data.

  Each time series is identified by a set of labels stored as
  frozenset where the elements are strings 'label:value'. E.g.:

  frozenset({'resource.cluster_name:regional',
             'resource.container_name:dnsmasq'})

  The frozenset is used as key to store the time series data. The data
  is a dictionary with the following fields:

  - 'start_time': timestamp string (ISO format) for the earliest point
  - 'end_time': timestamp string (ISO format) for the latest point
  - 'values': point values in bi-dimensional array-like structure:
    [[val1_t0, val2_t0], [val1_t1, val2_t1], ...]. The first dimension of
    the array is time, and the second is the value columns (usually there will
    be only one). The points are sorted chronologically (most recent point is
    the latest in the list).
  """

  _data: dict

  def __init__(self):
    # In order to ease the retrieval and matching, we store
    # label:value pairs as strings in a frozenset object.
    self._data = {}

  def __str__(self):
    return str(self._data)

  def __repr__(self):
    return repr(self._data)

  def add_api_response(self, resource_data):
    """Add results to an existing TimeSeriesCollection object.

    The monitoring API returns paginated results, so we need to be able to add
    results to an existing TimeSeriesCollection object.
    """

    if 'timeSeriesData' not in resource_data:
      return

    for ts in resource_data['timeSeriesData']:
      # No data?
      if not ts['pointData'] or 'values' not in ts['pointData'][0] or not ts[
          'pointData'][0]['values']:
        continue

      # Use frozenset of label:value pairs as key to store the data
      labels_dict = {}
      if 'labelValues' in ts:
        for i, value in enumerate(ts['labelValues']):
          label_name = resource_data['timeSeriesDescriptor'][
              'labelDescriptors'][i]['key']
          if 'stringValue' in value:
            labels_dict[label_name] = value['stringValue']
        labels_frozenset = frozenset(f'{k}:{v}' for k, v in labels_dict.items())
      else:
        labels_frozenset = frozenset()

      ts_point_data = ts['pointData']
      self._data[labels_frozenset] = {
          'labels': labels_dict,
          'start_time': ts_point_data[-1]['timeInterval']['startTime'],
          'end_time': ts_point_data[0]['timeInterval']['endTime'],
          'values': [
              _gcp_typed_values_to_python_list(ts_point_data[i]['values'])
              for i in reversed(range(len(ts_point_data)))
          ]
      }

  def __getitem__(self, labels):
    """Returns the time series identified by labels (frozenset)."""
    return self._data[labels]

  def __iter__(self):
    return iter(self._data)

  def __len__(self):
    return len(self._data)

  def keys(self):
    return self._data.keys()

  def items(self):
    return self._data.items()

  def values(self):
    return self._data.values()
```
A mapping that stores Cloud Monitoring time series data.
Each time series is identified by a set of labels stored as frozenset where the elements are strings 'label:value'. E.g.:
frozenset({'resource.cluster_name:regional',
'resource.container_name:dnsmasq'})
The frozenset is used as key to store the time series data. The data is a dictionary with the following fields:
- 'start_time': timestamp string (ISO format) for the earliest point
- 'end_time': timestamp string (ISO format) for the latest point
- 'values': point values in bi-dimensional array-like structure: [[val1_t0, val2_t0], [val1_t1, val2_t1], ...]. The first dimension of the array is time, and the second is the value columns (usually there will be only one). The points are sorted chronologically (most recent point is the latest in the list).
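For illustration, this is how a populated collection is typically read; the project ID and MQL text below are hypothetical, and `query()` is documented further down.

```python
from gcpdiag.queries import monitoring

# Hypothetical project and MQL query; see query() below.
query_str = ("fetch k8s_container "
             "| metric 'kubernetes.io/container/cpu/core_usage_time' "
             "| within 1h")
time_series = monitoring.query('my-project-id', query_str)

for labels, ts in time_series.items():
  # Keys are frozensets of 'label:value' strings.
  if 'resource.container_name:dnsmasq' in labels:
    print(ts['start_time'], ts['end_time'])
    # Points are ordered oldest-to-newest; column 0 is usually the only one.
    print(ts['values'][-1][0])
```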
```python
  def add_api_response(self, resource_data):
    """Add results to an existing TimeSeriesCollection object.

    The monitoring API returns paginated results, so we need to be able to add
    results to an existing TimeSeriesCollection object.
    """

    if 'timeSeriesData' not in resource_data:
      return

    for ts in resource_data['timeSeriesData']:
      # No data?
      if not ts['pointData'] or 'values' not in ts['pointData'][0] or not ts[
          'pointData'][0]['values']:
        continue

      # Use frozenset of label:value pairs as key to store the data
      labels_dict = {}
      if 'labelValues' in ts:
        for i, value in enumerate(ts['labelValues']):
          label_name = resource_data['timeSeriesDescriptor'][
              'labelDescriptors'][i]['key']
          if 'stringValue' in value:
            labels_dict[label_name] = value['stringValue']
        labels_frozenset = frozenset(f'{k}:{v}' for k, v in labels_dict.items())
      else:
        labels_frozenset = frozenset()

      ts_point_data = ts['pointData']
      self._data[labels_frozenset] = {
          'labels': labels_dict,
          'start_time': ts_point_data[-1]['timeInterval']['startTime'],
          'end_time': ts_point_data[0]['timeInterval']['endTime'],
          'values': [
              _gcp_typed_values_to_python_list(ts_point_data[i]['values'])
              for i in reversed(range(len(ts_point_data)))
          ]
      }
```
Add results to an existing TimeSeriesCollection object.
The monitoring API returns paginated results, so we need to be able to add results to an existing TimeSeriesCollection object.
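A minimal sketch of how a single result page is consumed. The response dictionary below is hand-written in the shape the method expects (all values are made up), and the decoded point values assume that the private `_gcp_typed_values_to_python_list` helper unwraps typed values such as `doubleValue`.

```python
collection = TimeSeriesCollection()

# Hand-written, minimal response page (illustrative values only).
fake_page = {
    'timeSeriesDescriptor': {
        'labelDescriptors': [{'key': 'resource.cluster_name'}],
    },
    'timeSeriesData': [{
        'labelValues': [{'stringValue': 'regional'}],
        # The API returns the most recent point first.
        'pointData': [
            {'timeInterval': {'startTime': '2024-01-01T00:05:00Z',
                              'endTime': '2024-01-01T00:05:00Z'},
             'values': [{'doubleValue': 0.42}]},
            {'timeInterval': {'startTime': '2024-01-01T00:00:00Z',
                              'endTime': '2024-01-01T00:00:00Z'},
             'values': [{'doubleValue': 0.40}]},
        ],
    }],
}

collection.add_api_response(fake_page)  # called once per result page
key = frozenset({'resource.cluster_name:regional'})
# Values come back oldest-to-newest, e.g. [[0.40], [0.42]], assuming the
# helper unwraps the typed values.
print(collection[key]['values'])
```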
```python
def query(project_id: str, query_str: str) -> TimeSeriesCollection:
  """Do a monitoring query in the specified project.

  Note that the project can be either the project where the monitored resources
  are, or a workspace host project, in which case you will get results for all
  associated monitored projects.
  """

  time_series = TimeSeriesCollection()

  mon_api = apis.get_api('monitoring', 'v3', project_id)
  try:
    request = mon_api.projects().timeSeries().query(name='projects/' +
                                                    project_id,
                                                    body={'query': query_str})

    logging.info('executing monitoring query (project: %s)', project_id)
    logging.debug('query: %s', query_str)
    pages = 0
    start_time = datetime.datetime.now()
    while request:
      pages += 1
      response = request.execute(num_retries=config.API_RETRIES)
      time_series.add_api_response(response)
      request = mon_api.projects().timeSeries().query_next(
          previous_request=request, previous_response=response)
      if request:
        logging.info('still executing monitoring query (project: %s)',
                     project_id)
    end_time = datetime.datetime.now()
    logging.debug('query run time: %s, pages: %d', end_time - start_time, pages)
  except googleapiclient.errors.HttpError as err:
    gcp_err = utils.GcpApiError(err)
    # Ignore 502 because we get that when the monitoring query times out.
    if gcp_err.status in [502]:
      logging.warning('error executing monitoring query: %s',
                      str(gcp_err.message))
    else:
      raise utils.GcpApiError(err) from err
  return time_series
```
Do a monitoring query in the specified project.
Note that the project can be either the project where the monitored resources are, or a workspace host project, in which case you will get results for all associated monitored projects.
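An illustrative end-to-end call that combines `period_aligned_now()` with `query()`; the project ID, metric, and exact MQL text are hypothetical.

```python
from gcpdiag.queries import monitoring

# Hypothetical lint-style usage: a one-hour query window ending on a
# 5-minute boundary, so repeated runs return stable results.
query_str = f"""
fetch gce_instance
| metric 'compute.googleapis.com/instance/cpu/utilization'
| within 1h, d'{monitoring.period_aligned_now(300)}'
"""

time_series = monitoring.query('my-project-id', query_str)
for labels, ts in time_series.items():
  print(sorted(labels), ts['end_time'], ts['values'][-1])
```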