Coverage for /Users/buh/.pyenv/versions/3.12.9/envs/es-testbed/lib/python3.12/site-packages/es_testbed/_base.py: 100%
172 statements
« prev ^ index » next coverage.py v7.6.12, created at 2025-03-31 13:12 -0600
« prev ^ index » next coverage.py v7.6.12, created at 2025-03-31 13:12 -0600
1"""Base TestBed Class"""
3import typing as t
4import logging
5from importlib import import_module
6from datetime import datetime, timezone
7from shutil import rmtree
8import tiered_debug as debug
9from es_testbed.exceptions import ResultNotExpected
10from es_testbed.defaults import NAMEMAPPER
11from es_testbed.helpers.es_api import delete, get
12from es_testbed.helpers.utils import prettystr, process_preset
13from es_testbed._plan import PlanBuilder
14from es_testbed.mgrs import (
15 ComponentMgr,
16 DataStreamMgr,
17 IlmMgr,
18 IndexMgr,
19 SnapshotMgr,
20 TemplateMgr,
21)
23if t.TYPE_CHECKING:
24 from elasticsearch8 import Elasticsearch
26logger = logging.getLogger('es_testbed.TestBed')
28# pylint: disable=R0902,R0913,R0917
30# Preset Import
31# This imports the preset directory which must include the following files:
32# - A plan YAML file.
33# - A buildlist YAML file.
34# - A functions.py file (the actual python code), which must contain a
35# function named doc_generator(). This function must accept all kwargs from
36# the buildlist's options
37# - A definitions.py file, which is a Python variable file that helps find
38# the path to the module, etc., as well as import the plan, the buildlist,
39# the mappings and settings, etc. This must at least include a get_plan()
40# function that returns a dictionary of a plan.
41# - A mappings.json file (contains the index mappings your docs need)
42# - A settings.json file (contains the index settings)
43#
44# Any other files can be included to help your doc_generator function, e.g.
45# Faker definitions and classes, etc. Once the preset module is imported,
46# relative imports should work.
class TestBed:
    """TestBed Class

    Builds a test plan from an imported preset module, stands up the
    supporting Elasticsearch entities (ILM policies, templates, snapshots,
    indices or data_streams) via entity managers, and tears them down again.
    """

    # Without this, pytest would collect this as a test class because of the name
    __test__ = False
    def __init__(
        self,
        client: t.Optional['Elasticsearch'] = None,
        builtin: t.Optional[str] = None,
        path: t.Optional[str] = None,
        ref: t.Optional[str] = None,
        url: t.Optional[str] = None,
        scenario: t.Optional[str] = None,
    ):
        """Initialize the TestBed from a preset.

        :param client: An Elasticsearch client connection object
        :param builtin: Builtin preset selector (passed to process_preset)
        :param path: Preset location (passed to process_preset)
        :param ref: Preset reference (passed to process_preset; presumably a
            git ref — confirm against process_preset)
        :param url: Preset URL (passed to process_preset)
        :param scenario: Optional scenario name handed to the preset's
            get_plan() and recorded in the plan settings

        :raises ValueError: If process_preset could not resolve a preset
        :raises ImportError: If the preset's definitions module cannot be
            imported
        """
        debug.lv2('Initializing TestBed object...')
        #: The plan settings
        self.settings = None
        # Resolve the preset into an importable module path, plus a tmpdir
        # when process_preset had to stage files somewhere temporary
        modpath, tmpdir = process_preset(builtin, path, ref, url)
        if modpath is None:
            msg = 'Must define a preset'
            logger.critical(msg)
            raise ValueError(msg)
        try:
            debug.lv4('TRY: Attempting to import preset module')
            debug.lv5(f'preset module: {modpath}')
            # Per the module comments above, every preset must ship a
            # definitions.py exposing get_plan()
            preset = import_module(f'{modpath}.definitions')
            self.settings = preset.get_plan(scenario)
        except ImportError as err:
            logger.critical('Preset settings incomplete or incorrect')
            raise err
        debug.lv5(f'Preset module imported: {modpath}')
        # Record where the preset came from so later stages can find it
        self.settings['modpath'] = modpath
        if scenario:
            debug.lv5(f'Using scenario: {scenario}')
            self.settings['scenario'] = scenario
        if tmpdir:
            # teardown() removes this directory (see self.plan.tmpdir there)
            debug.lv5(f'Using tmpdir: {tmpdir}')
            self.settings['tmpdir'] = tmpdir
        #: The Elasticsearch client object
        self.client = client
        #: The test plan (built from self.settings in setup())
        self.plan = None
        # Set up for tracking
        #: The ILM entity manager
        self.ilmmgr = None
        #: The Component Template entity manager
        self.componentmgr = None
        #: The (index) Template entity manager
        self.templatemgr = None
        #: The Snapshot entity manager
        self.snapshotmgr = None
        #: The Index entity manager
        self.indexmgr = None
        #: The data_stream entity manager
        self.data_streammgr = None
110 def _erase(self, kind: str, lst: t.Sequence[str]) -> None:
111 debug.lv2('Starting method...')
112 overall_success = True
113 if not lst:
114 debug.lv3(f'{kind}: nothing to delete.')
115 return True
116 if kind == 'ilm': # ILM policies can't be batch deleted
117 ilm = [self._while(kind, x) for x in lst]
118 overall_success = False not in ilm # No False values == True
119 else:
120 overall_success = self._while(kind, ','.join(lst))
121 debug.lv3('Exiting method, returning value')
122 debug.lv5(f'Value = {overall_success}')
123 return overall_success
125 def _fodder_generator(
126 self,
127 ) -> t.Generator[str, t.Sequence[str], None]:
128 """Method to delete everything matching our pattern(s)"""
129 debug.lv2('Starting method...')
130 items = ['index', 'data_stream', 'snapshot', 'template', 'component', 'ilm']
131 for i in items:
132 if i == 'snapshot' and self.plan.repository is None:
133 debug.lv4('No repository, no snapshots.')
134 continue
135 pattern = f'*{self.plan.prefix}-{NAMEMAPPER[i]}-{self.plan.uniq}*'
136 entities = get(self.client, i, pattern, repository=self.plan.repository)
137 yield (i, entities)
138 debug.lv3('Exiting method')
140 def _while(self, kind: str, item: str) -> bool:
141 debug.lv2('Starting method...')
142 count = 1
143 success = False
144 exc = None
145 while count < 4 and not success:
146 try:
147 debug.lv4(f'TRY: Deleting {kind} "{item}"')
148 success = delete(
149 self.client, kind, item, repository=self.plan.repository
150 )
151 break
152 except ResultNotExpected as err:
153 debug.lv1(f'Tried deleting "{item}" {count} time(s)')
154 exc = err
155 count += 1
156 if not success:
157 logger.warning(
158 f'Failed to delete "{item}" after {count - 1} tries. '
159 f'Final error: {exc}'
160 )
161 return success
    def get_ilm_polling(self) -> None:
        """
        Get current ILM polling settings and store them in self.plan.polling_interval

        Reads the persistent cluster settings; a missing
        indices.lifecycle.poll_interval key means the cluster default is in
        effect, stored as None. A leftover '1s' value (from a previous run
        that did not tear down) is also stored as None so teardown() resets it.

        :raises Exception: Re-raises whatever cluster.get_settings() raised
        """
        debug.lv2('Starting method...')
        debug.lv3('Storing current ILM polling settings, if any...')
        try:
            debug.lv4('TRY: Getting cluster settings')
            res = dict(self.client.cluster.get_settings())
            debug.lv5(f'Cluster settings: {prettystr(res)}')
        except Exception as err:
            # Broad catch is deliberate: any failure here is fatal, and we
            # log context before re-raising
            logger.critical('Unable to get persistent cluster settings')
            logger.critical('This could be permissions, or something larger.')
            logger.critical(f'Exception: {prettystr(err)}')
            logger.critical('Exiting.')
            raise err
        try:
            debug.lv4('TRY: Getting ILM Polling Interval from settings')
            retval = res['persistent']['indices']['lifecycle']['poll_interval']
        except KeyError:
            debug.lv3('No setting for indices.lifecycle.poll_interval. Must be default')
            retval = None  # Must be an actual value to go into a DotMap
        if retval == '1s':
            # setup() sets polling to 1s; finding it already set means a prior
            # run's teardown never restored it
            msg = (
                'ILM polling already set at 1s. A previous run most likely did not '
                'tear down properly. Resetting to null after this run'
            )
            logger.warning(msg)
            retval = None  # Must be an actual value to go into a DotMap
        self.plan.ilm_polling_interval = retval
        debug.lv3(f'Stored ILM Polling Interval: {retval}')
        debug.lv3('Exiting method')
196 def ilm_polling(self, interval: t.Union[str, None] = None) -> t.Dict:
197 """Return persistent cluster settings to speed up ILM polling during testing"""
198 debug.lv2('Starting method...')
199 retval = {'indices.lifecycle.poll_interval': interval}
200 debug.lv3('Exiting method, returning value')
201 debug.lv5(f'Value = {retval}')
202 return retval
    def set_debug_tier(self, tier: int) -> None:
        """
        Set the debug tier globally for this module

        :param tier: The verbosity level passed to tiered_debug's set_level()
        """
        # Delegates to the module-level tiered_debug import, so this changes
        # debug verbosity for everything using that module, not just this class
        debug.set_level(tier)
210 def setup(self) -> None:
211 """Setup the instance"""
212 debug.lv2('Starting method...')
213 start = datetime.now(timezone.utc)
214 # If we build self.plan here, then we can modify settings before setup()
215 self.plan = PlanBuilder(settings=self.settings).plan
216 self.get_ilm_polling()
217 debug.lv5(f'Setting: {self.ilm_polling(interval="1s")}')
218 self.client.cluster.put_settings(persistent=self.ilm_polling(interval='1s'))
219 self.setup_entitymgrs()
220 end = datetime.now(timezone.utc)
221 debug.lv1(f'Testbed setup elapsed time: {(end - start).total_seconds()}')
222 debug.lv3('Exiting method')
224 def setup_entitymgrs(self) -> None:
225 """
226 Setup each EntityMgr child class
227 """
228 debug.lv2('Starting method...')
229 kw = {'client': self.client, 'plan': self.plan}
231 self.ilmmgr = IlmMgr(**kw)
232 self.ilmmgr.setup()
233 self.componentmgr = ComponentMgr(**kw)
234 self.componentmgr.setup()
235 self.templatemgr = TemplateMgr(**kw)
236 self.templatemgr.setup()
237 self.snapshotmgr = SnapshotMgr(**kw)
238 self.snapshotmgr.setup()
239 if self.plan.type == 'indices':
240 self.indexmgr = IndexMgr(**kw, snapmgr=self.snapshotmgr)
241 self.indexmgr.setup()
242 if self.plan.type == 'data_stream':
243 self.data_streammgr = DataStreamMgr(**kw, snapmgr=self.snapshotmgr)
244 self.data_streammgr.setup()
245 debug.lv3('Exiting method')
247 def teardown(self) -> None:
248 """Tear down anything we created"""
249 debug.lv2('Starting method...')
250 start = datetime.now(timezone.utc)
251 successful = True
252 if self.plan.tmpdir:
253 debug.lv3(f'Removing tmpdir: {self.plan.tmpdir}')
254 rmtree(self.plan.tmpdir) # Remove the tmpdir stored here
255 for kind, list_of_kind in self._fodder_generator():
256 if not self._erase(kind, list_of_kind):
257 successful = False
258 persist = self.ilm_polling(interval=self.plan.ilm_polling_interval)
259 debug.lv3(
260 f'Restoring ILM polling to previous value: '
261 f'{self.plan.ilm_polling_interval}'
262 )
263 self.client.cluster.put_settings(persistent=persist)
264 end = datetime.now(timezone.utc)
265 debug.lv1(f'Testbed teardown elapsed time: {(end - start).total_seconds()}')
266 if successful:
267 logger.info('Cleanup successful')
268 else:
269 logger.error('Cleanup was unsuccessful/incomplete')
270 self.plan.cleanup = successful
271 debug.lv3('Exiting method')