Coverage for /opt/homebrew/lib/python3.11/site-packages/_pytest/cacheprovider.py: 51%
313 statements
coverage.py v7.2.3, created at 2023-05-04 13:14 +0700
1"""Implementation of the cache provider."""
2# This plugin was not named "cache" to avoid conflicts with the external
3# pytest-cache version.
4import json
5import os
6from pathlib import Path
7from typing import Dict
8from typing import Generator
9from typing import Iterable
10from typing import List
11from typing import Optional
12from typing import Set
13from typing import Union
15import attr
17from .pathlib import resolve_from_str
18from .pathlib import rm_rf
19from .reports import CollectReport
20from _pytest import nodes
21from _pytest._io import TerminalWriter
22from _pytest.compat import final
23from _pytest.config import Config
24from _pytest.config import ExitCode
25from _pytest.config import hookimpl
26from _pytest.config.argparsing import Parser
27from _pytest.deprecated import check_ispytest
28from _pytest.fixtures import fixture
29from _pytest.fixtures import FixtureRequest
30from _pytest.main import Session
31from _pytest.python import Module
32from _pytest.python import Package
33from _pytest.reports import TestReport

README_CONTENT = """\
# pytest cache directory #

This directory contains data from the pytest's cache plugin,
which provides the `--lf` and `--ff` options, as well as the `cache` fixture.

**Do not** commit this to version control.

See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information.
"""

CACHEDIR_TAG_CONTENT = b"""\
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by pytest.
# For information about cache directory tags, see:
#   https://bford.info/cachedir/spec.html
"""

@final
@attr.s(init=False, auto_attribs=True)
class Cache:
    _cachedir: Path = attr.ib(repr=False)
    _config: Config = attr.ib(repr=False)

    # Sub-directory under cache-dir for directories created by `mkdir()`.
    _CACHE_PREFIX_DIRS = "d"

    # Sub-directory under cache-dir for values created by `set()`.
    _CACHE_PREFIX_VALUES = "v"

    def __init__(
        self, cachedir: Path, config: Config, *, _ispytest: bool = False
    ) -> None:
        check_ispytest(_ispytest)
        self._cachedir = cachedir
        self._config = config

    @classmethod
    def for_config(cls, config: Config, *, _ispytest: bool = False) -> "Cache":
        """Create the Cache instance for a Config.

        :meta private:
        """
        check_ispytest(_ispytest)
        cachedir = cls.cache_dir_from_config(config, _ispytest=True)
        if config.getoption("cacheclear") and cachedir.is_dir():
            cls.clear_cache(cachedir, _ispytest=True)
        return cls(cachedir, config, _ispytest=True)

    @classmethod
    def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None:
        """Clear the sub-directories used to hold cached directories and values.

        :meta private:
        """
        check_ispytest(_ispytest)
        for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES):
            d = cachedir / prefix
            if d.is_dir():
                rm_rf(d)

    @staticmethod
    def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path:
        """Get the path to the cache directory for a Config.

        :meta private:
        """
        check_ispytest(_ispytest)
        return resolve_from_str(config.getini("cache_dir"), config.rootpath)

    def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None:
        """Issue a cache warning.

        :meta private:
        """
        check_ispytest(_ispytest)
        import warnings
        from _pytest.warning_types import PytestCacheWarning

        warnings.warn(
            PytestCacheWarning(fmt.format(**args) if args else fmt),
            self._config.hook,
            stacklevel=3,
        )

    def mkdir(self, name: str) -> Path:
        """Return a directory path object with the given name.

        If the directory does not yet exist, it will be created. You can use
        it to manage files to e.g. store/retrieve database dumps across test
        sessions.

        .. versionadded:: 7.0

        :param name:
            Must be a string not containing a ``/`` separator.
            Make sure the name contains your plugin or application
            identifiers to prevent clashes with other cache users.
        """
        path = Path(name)
        if len(path.parts) > 1:
            raise ValueError("name is not allowed to contain path separators")
        res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path)
        res.mkdir(exist_ok=True, parents=True)
        return res

    def _getvaluepath(self, key: str) -> Path:
        return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key))

    def get(self, key: str, default):
        """Return the cached value for the given key.

        If no value was yet cached or the value cannot be read, the specified
        default is returned.

        :param key:
            Must be a ``/`` separated value. Usually the first
            name is the name of your plugin or your application.
        :param default:
            The value to return in case of a cache-miss or invalid cache value.
        """
        path = self._getvaluepath(key)
        try:
            with path.open("r", encoding="UTF-8") as f:
                return json.load(f)
        except (ValueError, OSError):
            return default

    def set(self, key: str, value: object) -> None:
        """Save value for the given key.

        :param key:
            Must be a ``/`` separated value. Usually the first
            name is the name of your plugin or your application.
        :param value:
            Must be of any combination of basic python types,
            including nested types like lists of dictionaries.
        """
        path = self._getvaluepath(key)
        try:
            if path.parent.is_dir():
                cache_dir_exists_already = True
            else:
                cache_dir_exists_already = self._cachedir.exists()
                path.parent.mkdir(exist_ok=True, parents=True)
        except OSError:
            self.warn("could not create cache path {path}", path=path, _ispytest=True)
            return
        if not cache_dir_exists_already:
            self._ensure_supporting_files()
        data = json.dumps(value, ensure_ascii=False, indent=2)
        try:
            f = path.open("w", encoding="UTF-8")
        except OSError:
            self.warn("cache could not write path {path}", path=path, _ispytest=True)
        else:
            with f:
                f.write(data)

    def _ensure_supporting_files(self) -> None:
        """Create supporting files in the cache dir that are not really part of the cache."""
        readme_path = self._cachedir / "README.md"
        readme_path.write_text(README_CONTENT, encoding="UTF-8")

        gitignore_path = self._cachedir.joinpath(".gitignore")
        msg = "# Created by pytest automatically.\n*\n"
        gitignore_path.write_text(msg, encoding="UTF-8")

        cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG")
        cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT)
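
# Illustrative sketch (not part of pytest itself): how plugin code might use the
# Cache API above from a hook that receives the Config. The key
# "example_plugin/run_count" and the directory name "example_plugin" are
# hypothetical names chosen for this example.
def _example_cache_usage(config: Config) -> Path:
    assert config.cache is not None
    # get()/set() round-trip a JSON-serializable value under a "/"-separated key;
    # get() returns the default on a cache miss or unreadable value.
    runs = config.cache.get("example_plugin/run_count", 0)
    config.cache.set("example_plugin/run_count", runs + 1)
    # mkdir() returns a dedicated directory (under the "d" prefix) for arbitrary files.
    return config.cache.mkdir("example_plugin")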

class LFPluginCollWrapper:
    def __init__(self, lfplugin: "LFPlugin") -> None:
        self.lfplugin = lfplugin
        self._collected_at_least_one_failure = False

    @hookimpl(hookwrapper=True)
    def pytest_make_collect_report(self, collector: nodes.Collector):
        if isinstance(collector, Session):
            out = yield
            res: CollectReport = out.get_result()

            # Sort any lf-paths to the beginning.
            lf_paths = self.lfplugin._last_failed_paths

            res.result = sorted(
                res.result,
                # use stable sort to prioritize last failed
                key=lambda x: x.path in lf_paths,
                reverse=True,
            )
            return

        elif isinstance(collector, Module):
            if collector.path in self.lfplugin._last_failed_paths:
                out = yield
                res = out.get_result()
                result = res.result
                lastfailed = self.lfplugin.lastfailed

                # Only filter with known failures.
                if not self._collected_at_least_one_failure:
                    if not any(x.nodeid in lastfailed for x in result):
                        return
                    self.lfplugin.config.pluginmanager.register(
                        LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip"
                    )
                    self._collected_at_least_one_failure = True

                session = collector.session
                result[:] = [
                    x
                    for x in result
                    if x.nodeid in lastfailed
                    # Include any passed arguments (not trivial to filter).
                    or session.isinitpath(x.path)
                    # Keep all sub-collectors.
                    or isinstance(x, nodes.Collector)
                ]
                return
        yield

class LFPluginCollSkipfiles:
    def __init__(self, lfplugin: "LFPlugin") -> None:
        self.lfplugin = lfplugin

    @hookimpl
    def pytest_make_collect_report(
        self, collector: nodes.Collector
    ) -> Optional[CollectReport]:
        # Packages are Modules, but _last_failed_paths only contains
        # test-bearing paths and doesn't try to include the paths of their
        # packages, so don't filter them.
        if isinstance(collector, Module) and not isinstance(collector, Package):
            if collector.path not in self.lfplugin._last_failed_paths:
                self.lfplugin._skipped_files += 1

                return CollectReport(
                    collector.nodeid, "passed", longrepr=None, result=[]
                )
        return None

class LFPlugin:
    """Plugin which implements the --lf (run last-failing) option."""

    def __init__(self, config: Config) -> None:
        self.config = config
        active_keys = "lf", "failedfirst"
        self.active = any(config.getoption(key) for key in active_keys)
        assert config.cache
        self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {})
        self._previously_failed_count: Optional[int] = None
        self._report_status: Optional[str] = None
        self._skipped_files = 0  # count skipped files during collection due to --lf

        if config.getoption("lf"):
            self._last_failed_paths = self.get_last_failed_paths()
            config.pluginmanager.register(
                LFPluginCollWrapper(self), "lfplugin-collwrapper"
            )

    def get_last_failed_paths(self) -> Set[Path]:
        """Return a set with all Paths of the previously failed nodeids."""
        rootpath = self.config.rootpath
        result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
        return {x for x in result if x.exists()}

    def pytest_report_collectionfinish(self) -> Optional[str]:
        if self.active and self.config.getoption("verbose") >= 0:
            return "run-last-failure: %s" % self._report_status
        return None

    def pytest_runtest_logreport(self, report: TestReport) -> None:
        if (report.when == "call" and report.passed) or report.skipped:
            self.lastfailed.pop(report.nodeid, None)
        elif report.failed:
            self.lastfailed[report.nodeid] = True

    def pytest_collectreport(self, report: CollectReport) -> None:
        passed = report.outcome in ("passed", "skipped")
        if passed:
            if report.nodeid in self.lastfailed:
                self.lastfailed.pop(report.nodeid)
                self.lastfailed.update((item.nodeid, True) for item in report.result)
        else:
            self.lastfailed[report.nodeid] = True

    @hookimpl(hookwrapper=True, tryfirst=True)
    def pytest_collection_modifyitems(
        self, config: Config, items: List[nodes.Item]
    ) -> Generator[None, None, None]:
        yield

        if not self.active:
            return

        if self.lastfailed:
            previously_failed = []
            previously_passed = []
            for item in items:
                if item.nodeid in self.lastfailed:
                    previously_failed.append(item)
                else:
                    previously_passed.append(item)
            self._previously_failed_count = len(previously_failed)

            if not previously_failed:
                # Running a subset of all tests with recorded failures
                # only outside of it.
                self._report_status = "%d known failures not in selected tests" % (
                    len(self.lastfailed),
                )
            else:
                if self.config.getoption("lf"):
                    items[:] = previously_failed
                    config.hook.pytest_deselected(items=previously_passed)
                else:  # --failedfirst
                    items[:] = previously_failed + previously_passed

                noun = "failure" if self._previously_failed_count == 1 else "failures"
                suffix = " first" if self.config.getoption("failedfirst") else ""
                self._report_status = "rerun previous {count} {noun}{suffix}".format(
                    count=self._previously_failed_count, suffix=suffix, noun=noun
                )

                if self._skipped_files > 0:
                    files_noun = "file" if self._skipped_files == 1 else "files"
                    self._report_status += " (skipped {files} {files_noun})".format(
                        files=self._skipped_files, files_noun=files_noun
                    )
        else:
            self._report_status = "no previously failed tests, "
            if self.config.getoption("last_failed_no_failures") == "none":
                self._report_status += "deselecting all items."
                config.hook.pytest_deselected(items=items[:])
                items[:] = []
            else:
                self._report_status += "not deselecting items."

    def pytest_sessionfinish(self, session: Session) -> None:
        config = self.config
        if config.getoption("cacheshow") or hasattr(config, "workerinput"):
            return

        assert config.cache is not None
        saved_lastfailed = config.cache.get("cache/lastfailed", {})
        if saved_lastfailed != self.lastfailed:
            config.cache.set("cache/lastfailed", self.lastfailed)
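
# Illustrative sketch (not part of pytest itself): the shape of the state
# LFPlugin persists. "cache/lastfailed" maps failed nodeids to True, e.g.
# {"test_mod.py::test_broken": True}; the nodeid shown is hypothetical.
def _example_read_lastfailed(config: Config) -> Dict[str, bool]:
    assert config.cache is not None
    return config.cache.get("cache/lastfailed", {})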

class NFPlugin:
    """Plugin which implements the --nf (run new-first) option."""

    def __init__(self, config: Config) -> None:
        self.config = config
        self.active = config.option.newfirst
        assert config.cache is not None
        self.cached_nodeids = set(config.cache.get("cache/nodeids", []))

    @hookimpl(hookwrapper=True, tryfirst=True)
    def pytest_collection_modifyitems(
        self, items: List[nodes.Item]
    ) -> Generator[None, None, None]:
        yield

        if self.active:
            new_items: Dict[str, nodes.Item] = {}
            other_items: Dict[str, nodes.Item] = {}
            for item in items:
                if item.nodeid not in self.cached_nodeids:
                    new_items[item.nodeid] = item
                else:
                    other_items[item.nodeid] = item

            items[:] = self._get_increasing_order(
                new_items.values()
            ) + self._get_increasing_order(other_items.values())
            self.cached_nodeids.update(new_items)
        else:
            self.cached_nodeids.update(item.nodeid for item in items)

    def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]:
        return sorted(items, key=lambda item: item.path.stat().st_mtime, reverse=True)  # type: ignore[no-any-return]

    def pytest_sessionfinish(self) -> None:
        config = self.config
        if config.getoption("cacheshow") or hasattr(config, "workerinput"):
            return

        if config.getoption("collectonly"):
            return

        assert config.cache is not None
        config.cache.set("cache/nodeids", sorted(self.cached_nodeids))
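
# Illustrative sketch (not part of pytest itself): NFPlugin persists every nodeid
# it has seen as a sorted list under "cache/nodeids"; a later run can compare the
# collected nodeids against it to see which tests are new.
def _example_new_nodeids(config: Config, collected: List[str]) -> Set[str]:
    assert config.cache is not None
    seen = set(config.cache.get("cache/nodeids", []))
    return {nodeid for nodeid in collected if nodeid not in seen}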

def pytest_addoption(parser: Parser) -> None:
    group = parser.getgroup("general")
    group.addoption(
        "--lf",
        "--last-failed",
        action="store_true",
        dest="lf",
        help="Rerun only the tests that failed "
        "at the last run (or all if none failed)",
    )
    group.addoption(
        "--ff",
        "--failed-first",
        action="store_true",
        dest="failedfirst",
        help="Run all tests, but run the last failures first. "
        "This may re-order tests and thus lead to "
        "repeated fixture setup/teardown.",
    )
    group.addoption(
        "--nf",
        "--new-first",
        action="store_true",
        dest="newfirst",
        help="Run tests from new files first, then the rest of the tests "
        "sorted by file mtime",
    )
    group.addoption(
        "--cache-show",
        action="append",
        nargs="?",
        dest="cacheshow",
        help=(
            "Show cache contents, don't perform collection or tests. "
            "Optional argument: glob (default: '*')."
        ),
    )
    group.addoption(
        "--cache-clear",
        action="store_true",
        dest="cacheclear",
        help="Remove all cache contents at start of test run",
    )
    cache_dir_default = ".pytest_cache"
    if "TOX_ENV_DIR" in os.environ:
        cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default)
    parser.addini("cache_dir", default=cache_dir_default, help="Cache directory path")
    group.addoption(
        "--lfnf",
        "--last-failed-no-failures",
        action="store",
        dest="last_failed_no_failures",
        choices=("all", "none"),
        default="all",
        help="Which tests to run with no previously (known) failures",
    )
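
# Illustrative command lines for the options registered above (a sketch of
# typical usage, not an exhaustive reference):
#
#   pytest --lf                    # rerun only the tests that failed last time
#   pytest --ff                    # run everything, but previous failures first
#   pytest --nf                    # run tests from new files first
#   pytest --lf --lfnf=none        # with --lf, run nothing if no failures are known
#   pytest --cache-show "cache/*"  # show cached values matching a glob
#   pytest --cache-clear           # drop the cache before the run starts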

def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
    if config.option.cacheshow and not config.option.help:
        from _pytest.main import wrap_session

        return wrap_session(config, cacheshow)
    return None


@hookimpl(tryfirst=True)
def pytest_configure(config: Config) -> None:
    config.cache = Cache.for_config(config, _ispytest=True)
    config.pluginmanager.register(LFPlugin(config), "lfplugin")
    config.pluginmanager.register(NFPlugin(config), "nfplugin")

@fixture
def cache(request: FixtureRequest) -> Cache:
    """Return a cache object that can persist state between testing sessions.

    cache.get(key, default)
    cache.set(key, value)

    Keys must be ``/`` separated strings, where the first part is usually the
    name of your plugin or application to avoid clashes with other cache users.

    Values can be any object handled by the json stdlib module.
    """
    assert request.config.cache is not None
    return request.config.cache
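
# Illustrative sketch (not part of pytest itself): how a test might use the
# ``cache`` fixture above to memoize an expensive value across sessions. The
# key "example/expensive" and the stored value are made up for illustration.
def _example_fixture_usage(cache: Cache) -> object:
    value = cache.get("example/expensive", None)
    if value is None:
        # Any value the json module can serialize may be stored.
        value = {"computed": True}
        cache.set("example/expensive", value)
    return value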

def pytest_report_header(config: Config) -> Optional[str]:
    """Display cachedir with --cache-show and if non-default."""
    if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache":
        assert config.cache is not None
        cachedir = config.cache._cachedir
        # TODO: evaluate generating upward relative paths
        # starting with .., ../.. if sensible

        try:
            displaypath = cachedir.relative_to(config.rootpath)
        except ValueError:
            displaypath = cachedir
        return f"cachedir: {displaypath}"
    return None

def cacheshow(config: Config, session: Session) -> int:
    from pprint import pformat

    assert config.cache is not None

    tw = TerminalWriter()
    tw.line("cachedir: " + str(config.cache._cachedir))
    if not config.cache._cachedir.is_dir():
        tw.line("cache is empty")
        return 0

    glob = config.option.cacheshow[0]
    if glob is None:
        glob = "*"

    dummy = object()
    basedir = config.cache._cachedir
    vdir = basedir / Cache._CACHE_PREFIX_VALUES
    tw.sep("-", "cache values for %r" % glob)
    for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()):
        key = str(valpath.relative_to(vdir))
        val = config.cache.get(key, dummy)
        if val is dummy:
            tw.line("%s contains unreadable content, will be ignored" % key)
        else:
            tw.line("%s contains:" % key)
            for line in pformat(val).splitlines():
                tw.line("  " + line)

    ddir = basedir / Cache._CACHE_PREFIX_DIRS
    if ddir.is_dir():
        contents = sorted(ddir.rglob(glob))
        tw.sep("-", "cache directories for %r" % glob)
        for p in contents:
            # if p.is_dir():
            #     print("%s/" % p.relative_to(basedir))
            if p.is_file():
                key = str(p.relative_to(basedir))
                tw.line(f"{key} is a file of length {p.stat().st_size:d}")
    return 0