general: move most core tests inside my.core.tests package
- distributes tests alongside the package, which might be convenient for package users
- removes some weird indirection (e.g. dummy test files importing tests from modules)
- makes the command line for tests cleaner (e.g. no need to remember to manually add files to tox.ini)
- tests are automatically covered by mypy (making mypy runs cleaner and ultimately improving coverage)

The (vague) convention is:
- tests/somemodule.py -- testing my.core.somemodule, contains tests directly related to it
- tests/test_something.py -- testing a specific feature, e.g. test_get_files.py tests the get_files method only
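For illustration (a sketch, not part of the commit message): with the tests shipped inside the package, the core suite can presumably be run straight from an installed copy, roughly mirroring the tox.ini change further down:

    python -m pytest --import-mode=importlib --pyargs my.core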
This commit is contained in: parent 04d976f937, commit 9594caa1cd
18 changed files with 77 additions and 102 deletions
@@ -3,13 +3,13 @@ Various helpers for compression
"""
from __future__ import annotations

from functools import total_ordering
from datetime import datetime
from functools import total_ordering
import io
import pathlib
from pathlib import Path
import sys
from typing import Union, IO, Sequence, Any, Iterator
import io

PathIsh = Union[Path, str]
my/core/tests/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
# hmm, sadly pytest --import-mode importlib --pyargs my.core.tests doesn't work properly without __init__.py
# although it works if you run either my.core or my.core.tests.sqlite (for example) directly
# so if it gets in the way could get rid of this later?
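(Illustration only, not part of the diff: the two behaviours described in the comments above would presumably correspond to invocations along these lines; the exact commands are an assumption.)

    python -m pytest --import-mode=importlib --pyargs my.core.tests.sqlite   # reportedly works even without __init__.py
    python -m pytest --import-mode=importlib --pyargs my.core.tests          # needs this __init__.py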
@@ -1,11 +1,10 @@
import warnings

from datetime import datetime
import json
from pathlib import Path
from datetime import datetime
from typing import NamedTuple, Iterator
import warnings

from my.core.denylist import DenyList
from ..denylist import DenyList


class IP(NamedTuple):
@@ -30,7 +29,6 @@ def data() -> Iterator[IP]:
def test_denylist(tmp_path: Path) -> None:
    tf = (tmp_path / "denylist.json").absolute()
    with warnings.catch_warnings(record=True):

        # create empty denylist (though file does not have to exist for denylist to work)
        tf.write_text("[]")
@@ -1,10 +1,9 @@
from datetime import datetime
import lzma
from pathlib import Path
import lzma
import sys
import zipfile

from my.core.kompress import kopen, kexists, CPath
from ..kompress import kopen, kexists, CPath, ZipPath

import pytest
@@ -14,27 +13,31 @@ structure_data: Path = Path(__file__).parent / "structure_data"

def test_kopen(tmp_path: Path) -> None:
    "Plaintext handled transparently"
    # fmt: off
    assert kopen(tmp_path / 'file' ).read() == 'just plaintext'
    assert kopen(tmp_path / 'file.xz').read() == 'compressed text'
    # fmt: on

    "For zips behaviour is a bit different (not sure about all this, tbh...)"
    assert kopen(tmp_path / 'file.zip', 'path/in/archive').read() == 'data in zip'


# TODO here?
def test_kexists(tmp_path: Path) -> None:
    # TODO also test top level?
    # fmt: off
    assert kexists(str(tmp_path / 'file.zip'), 'path/in/archive')
    assert not kexists(str(tmp_path / 'file.zip'), 'path/notin/archive')
    # fmt: on

    # TODO not sure about this?
    assert not kexists(tmp_path / 'nosuchzip.zip', 'path/in/archive')


def test_cpath(tmp_path: Path) -> None:
    # fmt: off
    CPath(str(tmp_path / 'file' )).read_text() == 'just plaintext'
    CPath( tmp_path / 'file.xz').read_text() == 'compressed text'
    # TODO not sure about zip files??
    # fmt: on


@pytest.fixture(autouse=True)
@@ -51,12 +54,7 @@ def prepare(tmp_path: Path):
    pass


@pytest.mark.skipif(
    sys.version_info[:2] < (3, 8),
    reason=f"ZipFile.Path is only available since 3.8",
)
def test_zippath() -> None:
    from my.core.kompress import ZipPath
    target = structure_data / 'gdpr_export.zip'
    assert target.exists(), target # precondition
@@ -87,6 +85,7 @@ def test_zippath() -> None:
    rpaths = [p.relative_to(zp) for p in matched]
    gdpr_export = Path('gdpr_export')
    # fmt: off
    assert rpaths == [
        gdpr_export,
        gdpr_export / 'comments',
@@ -96,7 +95,7 @@ def test_zippath() -> None:
        gdpr_export / 'messages',
        gdpr_export / 'messages' / 'index.csv',
    ], rpaths
    # fmt: on

    # TODO hmm this doesn't work atm, whereas Path does
    # not sure if it should be defensive or something...
@@ -107,10 +106,12 @@ def test_zippath() -> None:
    assert (ZipPath(target) / 'gdpr_export' / 'comments').exists()

    jsons = [p.relative_to(zp / 'gdpr_export') for p in zp.rglob('*.json')]
    # fmt: off
    assert jsons == [
        Path('comments','comments.json'),
        Path('profile','settings.json'),
        Path('comments', 'comments.json'),
        Path('profile' , 'settings.json'),
    ]
    # fmt: on

    # NOTE: hmm interesting, seems that ZipPath is happy with forward slash regardless OS?
    assert list(zp.rglob('mes*')) == [ZipPath(target, 'gdpr_export/messages')]
@@ -1,10 +1,10 @@
from concurrent.futures import ProcessPoolExecutor
from pathlib import Path
import shutil
import sqlite3
from tempfile import TemporaryDirectory

from my.core.sqlite import sqlite_connect_immutable, sqlite_copy_and_open
from ..sqlite import sqlite_connect_immutable, sqlite_copy_and_open


def test_sqlite_read_with_wal(tmp_path: Path) -> None:
@@ -27,13 +27,14 @@ def test_sqlite_read_with_wal(tmp_path: Path) -> None:
    assert len(wals) == 1

    ## now run the tests in separate process to ensure there is no potential for reusing sqlite connections or something
    from concurrent.futures import ProcessPoolExecutor as Pool
    with Pool(1) as pool:
    with ProcessPoolExecutor(1) as pool:
        # merely using it for ctx manager..
        # fmt: off
        pool.submit(_test_do_copy , db).result()
        pool.submit(_test_do_immutable , db).result()
        pool.submit(_test_do_copy_and_open, db).result()
        pool.submit(_test_open_asis , db).result()
        # fmt: on


def _test_do_copy(db: Path) -> None:
@@ -1,8 +1,8 @@
import pytest

from pathlib import Path

from my.core.structure import match_structure
from ..structure import match_structure

import pytest


structure_data: Path = Path(__file__).parent / "structure_data"
@@ -16,10 +16,7 @@ def test_gdpr_structure_exists() -> None:


def test_gdpr_unzip() -> None:
    with match_structure(
        structure_data / "gdpr_export.zip", expected=gdpr_expected
    ) as results:
    with match_structure(structure_data / "gdpr_export.zip", expected=gdpr_expected) as results:
        assert len(results) == 1
        extracted = results[0]
        index_file = extracted / "messages" / "index.csv"
@@ -31,15 +28,11 @@ def test_gdpr_unzip() -> None:

def test_match_partial() -> None:
    # a partial match should match both the 'broken' and 'gdpr_export' directories
    with match_structure(
        structure_data / "gdpr_subdirs", expected=gdpr_expected, partial=True
    ) as results:
    with match_structure(structure_data / "gdpr_subdirs", expected=gdpr_expected, partial=True) as results:
        assert len(results) == 2


def test_not_directory() -> None:
    with pytest.raises(NotADirectoryError, match=r"Expected either a zipfile or a directory"):
        with match_structure(
            structure_data / "messages/index.csv", expected=gdpr_expected
        ):
        with match_structure(structure_data / "messages/index.csv", expected=gdpr_expected):
            pass
@@ -1,5 +1,7 @@
import os
from subprocess import check_call
import sys


def test_lists_modules() -> None:
    # hack PYTHONUTF8 for windows
@@ -11,4 +13,4 @@ def test_lists_modules() -> None:
        **os.environ,
        'PYTHONUTF8': '1',
    }
    check_call(['hpi', 'modules'], env=env)
    check_call([sys.executable, '-m', 'my.core', 'modules'], env=env)
@@ -1,30 +1,33 @@
import os
from pathlib import Path
import shutil
import tempfile
from typing import TYPE_CHECKING

from my.core.compat import windows
from my.core.common import get_files
from ..compat import windows
from ..common import get_files

import pytest


# hack to replace all /tmp with 'real' tmp dir
# not ideal, but makes tests more concise
# hack to replace all /tmp with 'real' tmp dir
# not ideal, but makes tests more concise
def _get_files(x, *args, **kwargs):
    import my.core.common as C
    from ..common import get_files as get_files_orig

    def repl(x):
        if isinstance(x, str):
            return x.replace('/tmp', TMP)
        elif isinstance(x, Path):
            assert x.parts[:2] == (os.sep, 'tmp') # meh
            assert x.parts[:2] == (os.sep, 'tmp')  # meh
            return Path(TMP) / Path(*x.parts[2:])
        else:
            # iterable?
            return [repl(i) for i in x]

    x = repl(x)
    res = C.get_files(x, *args, **kwargs)
    return tuple(Path(str(i).replace(TMP, '/tmp')) for i in res) # hack back for asserts..
    res = get_files_orig(x, *args, **kwargs)
    return tuple(Path(str(i).replace(TMP, '/tmp')) for i in res)  # hack back for asserts..


if not TYPE_CHECKING:
@@ -40,7 +43,6 @@ def test_single_file() -> None:
    with pytest.raises(Exception):
        get_files('/tmp/hpi_test/file.ext')

    create('/tmp/hpi_test/file.ext')

    '''
@@ -48,16 +50,11 @@ def test_single_file() -> None:
    1. Return type is a tuple, it's friendlier for hashing/caching
    2. It always return pathlib.Path instead of plain strings
    '''
    assert get_files('/tmp/hpi_test/file.ext') == (
        Path('/tmp/hpi_test/file.ext'),
    )
    assert get_files('/tmp/hpi_test/file.ext') == (Path('/tmp/hpi_test/file.ext'),)

    "if the path starts with ~, we expand it"
    if not windows: # windows dowsn't have bashrc.. ugh
        assert get_files('~/.bashrc') == (
            Path('~').expanduser() / '.bashrc',
        )
    if not windows:  # windows doesn't have bashrc.. ugh
        assert get_files('~/.bashrc') == (Path('~').expanduser() / '.bashrc',)


def test_multiple_files() -> None:
@@ -74,6 +71,7 @@ def test_multiple_files() -> None:
    create('/tmp/hpi_test/dir3/')
    create('/tmp/hpi_test/dir3/ttt')

    # fmt: off
    assert get_files([
        Path('/tmp/hpi_test/dir3'), # it takes in Path as well as str
        '/tmp/hpi_test/dir1',
@@ -83,6 +81,7 @@ def test_multiple_files() -> None:
        Path('/tmp/hpi_test/dir1/zzz'),
        Path('/tmp/hpi_test/dir3/ttt'),
    )
    # fmt: on


def test_explicit_glob() -> None:
@@ -130,27 +129,26 @@ def test_no_files() -> None:
    '''
    Test for empty matches. They work, but should result in warning
    '''
    assert get_files('') == ()
    assert get_files('') == ()

    # todo test these for warnings?
    assert get_files([]) == ()
    assert get_files([]) == ()
    assert get_files('bad*glob') == ()


# TODO not sure if should uniquify if the filenames end up same?
# TODO not sure about the symlinks? and hidden files?

import tempfile
TMP = tempfile.gettempdir()
test_path = Path(TMP) / 'hpi_test'


def setup():
    teardown()
    test_path.mkdir()


def teardown():
    import shutil
    if test_path.is_dir():
        shutil.rmtree(test_path)
@@ -1,15 +1,12 @@
from pathlib import Path
import tempfile

from my.core.cfg import tmp_config

import pytest
from ..cfg import tmp_config


def _init_default_config() -> None:
    import my.config

    class default_config:
        count = 5

    my.config.simple = default_config # type: ignore[assignment,misc]
@@ -19,7 +16,6 @@ def test_tmp_config() -> None:
    ## later would be nice to be a bit more careful about them
    _init_default_config()
    from my.simple import items
    ##

    assert len(list(items())) == 5
@@ -1,25 +0,0 @@
'''
NOTE: Sigh. it's nice to be able to define the tests next to the source code (so it serves as documentation).
However, if you run 'pytest --pyargs my.core', it detects 'core' package name (because there is no my/__init__.py)
(see https://docs.pytest.org/en/latest/goodpractices.html#tests-as-part-of-application-code)

This results in relative imports failing (e.g. from ..core import...).

By using this helper file, pytest can detect the package name properly. A bit meh, but perhaps later,
we can run against the tests in my.core directly.

'''

from my.core.cfg import *
from my.core.common import *
from my.core.core_config import *
from my.core.error import *
from my.core.util import *
from my.core.discovery_pure import *
from my.core.freezer import *
from my.core.stats import *
from my.core.query import *
from my.core.query_range import *
from my.core.serialize import test_serialize_fallback
from my.core.sqlite import *
from my.core.__main__ import *
@@ -1 +0,0 @@
from my.core.pandas import *
tox.ini (29 lines changed)
@@ -19,12 +19,26 @@ passenv =
[testenv:tests-core]
commands =
    pip install -e .[testing]

    # seems that denylist tests rely on it? ideally we should get rid of this in tests-core
    pip install orjson

    {envpython} -m pytest \
        tests/core.py \
        tests/sqlite.py \
        tests/get_files.py \
        tests/test_tmp_config.py \
        {posargs}
        # importlib is the new suggested import-mode
        # without it test package names end up as core.tests.* instead of my.core.tests.*
        --import-mode=importlib \
        --pyargs my.core \
        # ignore orgmode because it imports orgparse
        # tbh not sure if it even belongs to core, maybe move somewhere else..
        # same with pandas?
        --ignore my/core/orgmode.py \
        # causes error during test collection on 3.8
        # dataset is deprecated anyway so whatever
        --ignore my/core/dataset.py \
        # this test uses orjson which is an optional dependency
        # it would be covered by tests-all
        -k 'not test_nt_serialize' \
        {posargs}


# todo maybe also have core tests and misc tests? since ideally want them without dependencies
@@ -94,11 +108,6 @@ commands =
        {posargs}
    cat .coverage.mypy-core/index.txt

    # todo hmm might be better to move modules test in a separate subpackage?
    {envpython} -m mypy --install-types --non-interactive \
        tests \
        --exclude 'tests/(bluemaestro|emfit|takeout|pdfs|jawbone).py'


# specific modules that are known to be mypy compliant (to avoid false negatives)
# todo maybe split into separate jobs? need to add comment how to run
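(Not part of this commit, just illustrating the "tests automatically covered by mypy" point from the commit message: with the tests living under my/core/tests, a package-level type check should presumably pick them up without listing individual files. A hypothetical invocation:)

    python -m mypy -p my.core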