Coverage for curator/helpers/testers.py: 100%
153 statements
coverage.py v7.2.7, created at 2023-07-20 21:00 -0600
1"""Utility functions that get things"""
2import logging
3from voluptuous import Schema
4from elasticsearch8 import Elasticsearch
5from elasticsearch8.exceptions import NotFoundError
6from es_client.helpers.utils import prune_nones
7from curator.helpers.getters import get_repository, get_write_index
8from curator.exceptions import (
9 ConfigurationError, MissingArgument, RepositoryException, SearchableSnapshotException)
10from curator.defaults.settings import index_filtertypes, snapshot_actions, snapshot_filtertypes
11from curator.validators import SchemaCheck, actions, options
12from curator.validators.filter_functions import validfilters
13from curator.helpers.utils import report_failure
15def has_lifecycle_name(idx_settings):
16 """
17 :param idx_settings: The settings for an index being tested
18 :type idx_settings: dict
20 :returns: ``True`` if a lifecycle name exists in settings, else ``False``
21 :rtype: bool
22 """
23 if 'lifecycle' in idx_settings:
24 if 'name' in idx_settings['lifecycle']:
25 return True
26 return False
28def is_idx_partial(idx_settings):
29 """
30 :param idx_settings: The settings for an index being tested
31 :type idx_settings: dict
33 :returns: ``True`` if ``store.snapshot.partial`` is ``True`` in settings, else ``False``
34 :rtype: bool
35 """
36 if 'store' in idx_settings:
37 if 'snapshot' in idx_settings['store']:
38 if 'partial' in idx_settings['store']['snapshot']:
39 if idx_settings['store']['snapshot']['partial']:
40 return True
41 # store.snapshot.partial exists but is False -- Not a frozen tier mount
42 return False
43 # store.snapshot exists, but partial isn't there -- Possibly a cold tier mount
44 return False
45 raise SearchableSnapshotException('Index not a mounted searchable snapshot')
46 raise SearchableSnapshotException('Index not a mounted searchable snapshot')
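A minimal usage sketch for the two settings checks above, assuming a reachable local cluster and a hypothetical index name 'restored-index-000001'; note that both helpers expect the per-index ``settings.index`` subdictionary, not the full API response:

    from elasticsearch8 import Elasticsearch
    from curator.exceptions import SearchableSnapshotException
    from curator.helpers.testers import has_lifecycle_name, is_idx_partial

    client = Elasticsearch(hosts=['http://localhost:9200'])  # assumed local cluster
    idx = 'restored-index-000001'                            # hypothetical index name
    idx_settings = client.indices.get_settings(index=idx)[idx]['settings']['index']

    if has_lifecycle_name(idx_settings):
        print(f"{idx} is managed by ILM policy {idx_settings['lifecycle']['name']}")
    try:
        if is_idx_partial(idx_settings):
            print(f'{idx} is a partially mounted (frozen tier) searchable snapshot')
    except SearchableSnapshotException:
        print(f'{idx} is not a mounted searchable snapshot')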
48def ilm_policy_check(client, alias):
49 """Test if alias is associated with an ILM policy
51 Calls :py:meth:`~.elasticsearch.client.IndicesClient.get_settings`
53 :param client: A client connection object
54 :param alias: The alias name
56 :type client: :py:class:`~.elasticsearch.Elasticsearch`
57 :type alias: str
58 :rtype: bool
59 """
60 logger = logging.getLogger(__name__)
61 # alias = action_obj.options['name']
62 write_index = get_write_index(client, alias)
63 try:
64 idx_settings = client.indices.get_settings(index=write_index)
65 if 'name' in idx_settings[write_index]['settings']['index']['lifecycle']:
66 # logger.info('Alias %s is associated with ILM policy.', alias)
67 # logger.info('Skipping action %s because allow_ilm_indices is false.', idx)
68 return True
69 except KeyError:
70 logger.debug('No ILM policies associated with %s', alias)
71 return False
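A sketch of how this check might gate an action, assuming the same client setup as above and a hypothetical alias 'logs-write':

    from elasticsearch8 import Elasticsearch
    from curator.helpers.testers import ilm_policy_check

    client = Elasticsearch(hosts=['http://localhost:9200'])  # assumed local cluster
    alias = 'logs-write'                                     # hypothetical alias name
    if ilm_policy_check(client, alias):
        print(f'Write index behind {alias} is ILM-managed; skip unless allow_ilm_indices is true')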
73def repository_exists(client, repository=None):
74 """
75 Calls :py:meth:`~.elasticsearch.client.SnapshotClient.get_repository`
77 :param client: A client connection object
78 :param repository: The Elasticsearch snapshot repository to use
80 :type client: :py:class:`~.elasticsearch.Elasticsearch`
81 :type repository: str
83 :returns: ``True`` if ``repository`` exists, else ``False``
84 :rtype: bool
85 """
86 logger = logging.getLogger(__name__)
87 if not repository:
88 raise MissingArgument('No value for "repository" provided')
89 try:
90 test_result = get_repository(client, repository)
91 if repository in test_result:
92 logger.debug("Repository %s exists.", repository)
93 response = True
94 else:
95 logger.debug("Repository %s not found...", repository)
96 response = False
97 # pylint: disable=broad-except
98 except Exception as err:
99 logger.debug('Unable to find repository "%s": Error: %s', repository, err)
100 response = False
101 return response
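For example, a pre-flight check before any snapshot work, with 'my_backups' standing in for a real repository name:

    from elasticsearch8 import Elasticsearch
    from curator.helpers.testers import repository_exists

    client = Elasticsearch(hosts=['http://localhost:9200'])     # assumed local cluster
    if not repository_exists(client, repository='my_backups'):  # hypothetical repository
        raise SystemExit('Repository "my_backups" is not registered on this cluster')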
103def rollable_alias(client, alias):
104 """
105 Calls :py:meth:`~.elasticsearch.client.IndicesClient.get_alias`
107 :param client: A client connection object
108 :param alias: An Elasticsearch alias
110 :type client: :py:class:`~.elasticsearch.Elasticsearch`
111 :type alias: str
114 :returns: ``True`` if ``alias`` points to an index that can be used by the ``_rollover``
115 API, else ``False``.
116 :rtype: bool
117 """
118 logger = logging.getLogger(__name__)
119 try:
120 response = client.indices.get_alias(name=alias)
121 except NotFoundError:
122 logger.error('Alias "%s" not found.', alias)
123 return False
124 # Response should be like:
125 # {'there_should_be_only_one': {'aliases': {'value of "alias" here': {}}}}
126 # where 'there_should_be_only_one' is a single index name that ends in a number, and 'value of
127 # "alias" here' reflects the value of the passed parameter. The ``is_write_index`` setting can,
128 # however, allow more than one index to be associated with a rollover alias.
129 for idx in response:
130 if 'is_write_index' in response[idx]['aliases'][alias]:
131 if response[idx]['aliases'][alias]['is_write_index']:
132 return True
133 # implied ``else``: If not ``is_write_index``, it has to fit the following criteria:
134 if len(response) > 1:
135 logger.error('"alias" must only reference one index, but points to %s', response)
136 return False
137 index = list(response.keys())[0]
138 rollable = False
139 # In order for `rollable` to be True, the last 2 characters of the index
140 # name must both be digits, or a hyphen followed by a digit.
141 # NOTE: This is not a guarantee that the rest of the index name is
142 # necessarily correctly formatted.
143 if index[-2:][1].isdigit():
144 if index[-2:][0].isdigit():
145 rollable = True
146 elif index[-2:][0] == '-':
147 rollable = True
148 return rollable
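The trailing-characters rule means an alias pointing at an index like 'logs-000001' is rollable, while one pointing at 'logs-current' is not. A short sketch, assuming a hypothetical alias 'logs-write':

    from elasticsearch8 import Elasticsearch
    from curator.helpers.testers import rollable_alias

    client = Elasticsearch(hosts=['http://localhost:9200'])  # assumed local cluster
    if rollable_alias(client, 'logs-write'):                 # hypothetical alias name
        client.indices.rollover(alias='logs-write', dry_run=True)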
150def snapshot_running(client):
151 """
152 Calls :py:meth:`~.elasticsearch.client.SnapshotClient.status`
154 Return ``True`` if a snapshot is in progress, and ``False`` if not
156 :param client: A client connection object
158 :type client: :py:class:`~.elasticsearch.Elasticsearch`
160 :rtype: bool
161 """
162 try:
163 status = client.snapshot.status()['snapshots']
164 # pylint: disable=broad-except
165 except Exception as exc:
166 report_failure(exc)
167 # We will only accept a positively identified False. Anything else is
168 # suspect. That's why this statement, rather than just ``return status``
169 # pylint: disable=simplifiable-if-expression
170 return False if not status else True
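Actions that create or delete snapshots typically wait for any in-progress snapshot to finish first; a minimal polling sketch, assuming the same client setup as in the earlier examples:

    import time
    from elasticsearch8 import Elasticsearch
    from curator.helpers.testers import snapshot_running

    client = Elasticsearch(hosts=['http://localhost:9200'])  # assumed local cluster
    while snapshot_running(client):                          # poll until no snapshot is in progress
        time.sleep(10)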
172def validate_actions(data):
173 """
174 Validate the ``actions`` configuration dictionary, as imported from actions.yml, for example.
176 :param data: The configuration dictionary
178 :type data: dict
180 :returns: The validated and sanitized configuration dictionary.
181 :rtype: dict
182 """
183 # data is the ENTIRE schema...
184 clean_config = {}
185 # Let's break it down into smaller chunks...
186 # First, let's make sure it has "actions" as a key, with a subdictionary
187 root = SchemaCheck(data, actions.root(), 'Actions File', 'root').result()
188 # We've passed the first step. Now let's iterate over the actions...
189 for action_id in root['actions']:
190 # Now, let's ensure that the basic action structure is correct, with
191 # the proper possibilities for 'action'
192 action_dict = root['actions'][action_id]
193 loc = f'Action ID "{action_id}"'
194 valid_structure = SchemaCheck(
195 action_dict, actions.structure(action_dict, loc), 'structure', loc
196 ).result()
197 # With the basic structure validated, now we extract the action name
198 current_action = valid_structure['action']
199 # And let's update the location with the action.
200 loc = f'Action ID "{action_id}", action "{current_action}"'
201 clean_options = SchemaCheck(
202 prune_nones(valid_structure['options']),
203 options.get_schema(current_action),
204 'options',
205 loc
206 ).result()
207 clean_config[action_id] = {
208 'action' : current_action,
209 'description' : valid_structure['description'],
210 'options' : clean_options,
211 }
212 if current_action == 'alias':
213 add_remove = {}
214 for k in ['add', 'remove']:
215 if k in valid_structure:
216 current_filters = SchemaCheck(
217 valid_structure[k]['filters'],
218 Schema(validfilters(current_action, location=loc)),
219 f'"{k}" filters',
220 f'{loc}, "filters"'
221 ).result()
222 add_remove.update(
223 {
224 k: {
225 'filters' : SchemaCheck(
226 current_filters,
227 Schema(validfilters(current_action, location=loc)),
228 'filters',
229 f'{loc}, "{k}", "filters"'
230 ).result()
231 }
232 }
233 )
234 # Add/Remove here
235 clean_config[action_id].update(add_remove)
236 elif current_action in ['cluster_routing', 'create_index', 'rollover']:
237 # cluster_routing, create_index, and rollover do not accept filters
238 pass
239 else: # Filters key only appears in non-alias actions
240 valid_filters = SchemaCheck(
241 valid_structure['filters'],
242 Schema(validfilters(current_action, location=loc)),
243 'filters',
244 f'{loc}, "filters"'
245 ).result()
246 clean_filters = validate_filters(current_action, valid_filters)
247 clean_config[action_id].update({'filters' : clean_filters})
248 # This is a special case for remote reindex
249 if current_action == 'reindex':
250 # Check only if populated with something.
251 if 'remote_filters' in valid_structure['options']:
252 valid_filters = SchemaCheck(
253 valid_structure['options']['remote_filters'],
254 Schema(validfilters(current_action, location=loc)),
255 'filters',
256 f'{loc}, "filters"'
257 ).result()
258 clean_remote_filters = validate_filters(current_action, valid_filters)
259 clean_config[action_id]['options'].update({'remote_filters': clean_remote_filters})
261 # if we've gotten this far without any Exceptions raised, it's valid!
262 return {'actions': clean_config}
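A sketch of validating a small actions dictionary equivalent to a parsed actions.yml; the action ID, description, and 'logs-' prefix are illustrative only:

    from curator.helpers.testers import validate_actions

    # Equivalent to a parsed actions.yml file
    raw = {
        'actions': {
            1: {
                'action': 'delete_indices',
                'description': 'Delete indices matching a hypothetical logs- prefix',
                'options': {'ignore_empty_list': True},
                'filters': [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'logs-'}],
            }
        }
    }
    clean = validate_actions(raw)          # raises on schema errors
    print(clean['actions'][1]['action'])   # delete_indices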
264def validate_filters(action, myfilters):
265 """
266 Validate that ``myfilters`` are appropriate for the action type, e.g. no
267 index filters applied to a snapshot list.
269 :param action: An action name
270 :param myfilters: A list of filters to test.
272 :type action: str
273 :type myfilters: list
275 :returns: Validated list of filters
276 :rtype: list
277 """
278 # Define which set of filtertypes to use for testing
279 if action in snapshot_actions():
280 filtertypes = snapshot_filtertypes()
281 else:
282 filtertypes = index_filtertypes()
283 for fil in myfilters:
284 if fil['filtertype'] not in filtertypes:
285 raise ConfigurationError(
286 f"\"{fil['filtertype']}\" filtertype is not compatible with action \"{action}\""
287 )
288 # If we get to this point, we're still valid. Return the original list
289 return myfilters
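For instance, an index-only filtertype such as ``kibana`` is rejected when paired with a snapshot action:

    from curator.exceptions import ConfigurationError
    from curator.helpers.testers import validate_filters

    try:
        validate_filters('delete_snapshots', [{'filtertype': 'kibana'}])
    except ConfigurationError as err:
        print(err)  # reports the incompatible filtertype/action pairing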
291def verify_client_object(test):
292 """
293 :param test: The variable or object to test
295 :type test: :py:class:`~.elasticsearch.Elasticsearch`
297 :returns: ``None`` if ``test`` is a proper :py:class:`~.elasticsearch.Elasticsearch` client
298 object, else raise a :py:exc:`TypeError` exception.
299 :rtype: None
300 """
301 logger = logging.getLogger(__name__)
302 # Ignore mock type for testing
303 if str(type(test)) == "<class 'mock.Mock'>" or \
304 str(type(test)) == "<class 'mock.mock.Mock'>":
305 pass
306 elif not isinstance(test, Elasticsearch):
307 msg = f'Not a valid client object. Type: {type(test)} was passed'
308 logger.error(msg)
309 raise TypeError(msg)
311def verify_index_list(test):
312 """
313 :param test: The variable or object to test
315 :type test: :py:class:`~.curator.IndexList`
317 :returns: ``None`` if ``test`` is a proper :py:class:`~.curator.indexlist.IndexList` object,
318 else raise a :py:exc:`TypeError` exception.
319 :rtype: None
320 """
321 # It breaks if this import isn't local to this function:
322 # ImportError: cannot import name 'IndexList' from partially initialized module
323 # 'curator.indexlist' (most likely due to a circular import)
324 # pylint: disable=import-outside-toplevel
325 from curator.indexlist import IndexList
326 logger = logging.getLogger(__name__)
327 if not isinstance(test, IndexList):
328 msg = f'Not a valid IndexList object. Type: {type(test)} was passed'
329 logger.error(msg)
330 raise TypeError(msg)
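The two type guards above are typically called at the top of an action's constructor; a combined sketch, assuming a reachable local cluster:

    from elasticsearch8 import Elasticsearch
    from curator.indexlist import IndexList
    from curator.helpers.testers import verify_client_object, verify_index_list

    client = Elasticsearch(hosts=['http://localhost:9200'])  # assumed local cluster
    verify_client_object(client)    # silent on success, raises TypeError otherwise
    ilo = IndexList(client)         # builds the working list of indices from the cluster
    verify_index_list(ilo)          # same guard pattern for IndexList arguments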
332def verify_repository(client, repository=None):
333 """
334 Calls :py:meth:`~.elasticsearch.client.SnapshotClient.verify_repository`. If it fails, raise a
335 :py:exc:`~.curator.exceptions.RepositoryException`.
337 :param client: A client connection object
338 :type client: :py:class:`~.elasticsearch.Elasticsearch`
339 :param repository: A repository name
342 :type repository: str
344 :rtype: None
345 """
346 logger = logging.getLogger(__name__)
347 try:
348 nodes = client.snapshot.verify_repository(name=repository)['nodes']
349 logger.debug('All nodes can write to the repository')
350 logger.debug('Nodes with verified repository access: %s', nodes)
351 except Exception as err:
352 try:
353 if err.status_code == 404:
354 msg = (
355 f'--- Repository "{repository}" not found. Error: '
356 f'{err.meta.status}, {err.error}'
357 )
358 else:
359 msg = (
360 f'--- Got a {err.meta.status} response from Elasticsearch. '
361 f'Error message: {err.error}'
362 )
363 except AttributeError:
364 msg = f'--- Error message: {err}'
365 report = f'Failed to verify all nodes have repository access: {msg}'
366 raise RepositoryException(report) from err
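A sketch of surfacing the failure, with 'my_backups' again a hypothetical repository name:

    from elasticsearch8 import Elasticsearch
    from curator.exceptions import RepositoryException
    from curator.helpers.testers import verify_repository

    client = Elasticsearch(hosts=['http://localhost:9200'])  # assumed local cluster
    try:
        verify_repository(client, repository='my_backups')   # hypothetical repository name
    except RepositoryException as err:
        print(f'Repository verification failed: {err}')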
368def verify_snapshot_list(test):
369 """
370 :param test: The variable or object to test
372 :type test: :py:class:`~.curator.SnapshotList`
374 :returns: ``None`` if ``test`` is a proper :py:class:`~.curator.snapshotlist.SnapshotList`
375 object, else raise a :py:exc:`TypeError` exception.
376 :rtype: None
377 """
378 # It breaks if this import isn't local to this function:
379 # ImportError: cannot import name 'SnapshotList' from partially initialized module
380 # 'curator.snapshotlist' (most likely due to a circular import)
381 # pylint: disable=import-outside-toplevel
382 from curator.snapshotlist import SnapshotList
383 logger = logging.getLogger(__name__)
384 if not isinstance(test, SnapshotList):
385 msg = f'Not a valid SnapshotList object. Type: {type(test)} was passed'
386 logger.error(msg)
387 raise TypeError(msg)
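And the matching guard for snapshot-based actions, again with an assumed local cluster and the hypothetical 'my_backups' repository:

    from elasticsearch8 import Elasticsearch
    from curator.snapshotlist import SnapshotList
    from curator.helpers.testers import verify_snapshot_list

    client = Elasticsearch(hosts=['http://localhost:9200'])  # assumed local cluster
    slo = SnapshotList(client, repository='my_backups')      # hypothetical repository name
    verify_snapshot_list(slo)    # silent on success, raises TypeError otherwise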