
Commit 0db2aa6

merge master into 217-fix-none-bug-in-search-file

2 parents 3bb0670 + d05c0cd

File tree: 3 files changed, +96 −105 lines

  clients/python/requirements/e2e-test.txt
  clients/python/test/e2e/conftest.py
  clients/python/test/e2e/test_files_api.py

clients/python/requirements/e2e-test.txt
Lines changed: 1 addition & 0 deletions

@@ -1,6 +1,7 @@
 -r ../../../requirements.txt

 black
+faker
 ipykernel
 ipython
 jinja2
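
Note: faker is added as an e2e test dependency because the reworked test_search_files in clients/python/test/e2e/test_files_api.py takes a faker fixture and uses faker.sha256() to build a checksum that should match nothing on the server. A minimal standalone sketch of that idea, using the Faker class directly rather than the pytest fixture the test receives:

from faker import Faker

fake = Faker()
# 64-character random hex digest, effectively guaranteed not to match any uploaded file.
bogus_checksum: str = fake.sha256()
print(bogus_checksum)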

clients/python/test/e2e/conftest.py
Lines changed: 39 additions & 21 deletions

@@ -17,14 +17,18 @@
 from numpy import random
 from packaging.version import Version
 from pydantic import ByteSize
-from typing import Callable
+from typing import NamedTuple, Final

 try:
     from osparc._settings import ConfigurationEnvVars
 except ImportError:
     pass


+_KB: ByteSize = ByteSize(1024)  # in bytes
+_MB: ByteSize = ByteSize(_KB * 1024)  # in bytes
+_GB: ByteSize = ByteSize(_MB * 1024)  # in bytes
+
 # Dictionary to store start times of tests
 _test_start_times = {}

@@ -90,7 +94,7 @@ def pytest_configure(config):
     config.pluginmanager.register(pytest_runtest_makereport, "osparc_makereport_plugin")


-@pytest.fixture
+@pytest.fixture(scope="session")
 def api_client() -> Iterable[osparc.ApiClient]:
     if Version(osparc.__version__) >= Version("8.0.0"):
         with osparc.ApiClient() as api_client:
@@ -107,6 +111,11 @@ def api_client() -> Iterable[osparc.ApiClient]:
         yield api_client


+@pytest.fixture(scope="session")
+def files_api(api_client: osparc.ApiClient) -> osparc.FilesApi:
+    return osparc.FilesApi(api_client=api_client)
+
+
 @pytest.fixture
 def async_client() -> Iterable[AsyncClient]:
     if Version(osparc.__version__) >= Version("8.0.0"):
@@ -128,25 +137,34 @@ def async_client() -> Iterable[AsyncClient]:
     )  # type: ignore


-@pytest.fixture
-def create_tmp_file(
-    tmp_path: Path, caplog: pytest.LogCaptureFixture
-) -> Callable[[ByteSize], Path]:
-    def _generate_file(file_size: ByteSize):
-        caplog.set_level(logging.INFO)
-        tmp_file = tmp_path / "large_test_file.txt"
-        ss: random.SeedSequence = random.SeedSequence()
-        logging.info("Entropy used to generate random file: %s", f"{ss.entropy}")
-        rng: random.Generator = random.default_rng(ss)
-        tmp_file.write_bytes(rng.bytes(1000))
-        with open(tmp_file, "wb") as f:
-            f.truncate(file_size)
-        assert (
-            tmp_file.stat().st_size == file_size
-        ), f"Could not create file of size: {file_size}"
-        return tmp_file
-
-    return _generate_file
+class ServerFile(NamedTuple):
+    server_file: osparc.File
+    local_file: Path
+
+
+@pytest.fixture(scope="session")
+def large_server_file(
+    files_api: osparc.FilesApi, tmp_path_factory
+) -> Iterable[ServerFile]:
+    _file_size: Final[ByteSize] = ByteSize(1 * _GB)
+    tmp_file = (
+        tmp_path_factory.mktemp(basename=large_server_file.__name__)
+        / "large_test_file.txt"
+    )
+    ss: random.SeedSequence = random.SeedSequence()
+    logging.info("Entropy used to generate random file: %s", f"{ss.entropy}")
+    rng: random.Generator = random.default_rng(ss)
+    tmp_file.write_bytes(rng.bytes(1000))
+    with open(tmp_file, "wb") as f:
+        f.truncate(_file_size)
+    assert (
+        tmp_file.stat().st_size == _file_size
+    ), f"Could not create file of size: {_file_size}"
+    uploaded_file: osparc.File = files_api.upload_file(tmp_file)
+
+    yield ServerFile(local_file=tmp_file, server_file=uploaded_file)
+
+    files_api.delete_file(uploaded_file.id)


 @pytest.fixture
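
Note: the new session-scoped large_server_file fixture replaces the per-test create_tmp_file factory. The 1 GB file is generated and uploaded once per session, every test receives the same ServerFile (local path plus server-side osparc.File), and the server copy is deleted in the fixture's teardown. A minimal sketch of a test consuming it (hypothetical test name; the pattern mirrors test_files_api.py below):

from pathlib import Path

import osparc
from conftest import ServerFile


def test_download_large_server_file(
    tmp_path: Path, large_server_file: ServerFile, files_api: osparc.FilesApi
) -> None:
    # The fixture has already uploaded the file; download it and check where it landed.
    downloaded_file = files_api.download_file(
        large_server_file.server_file.id, destination_folder=tmp_path
    )
    assert Path(downloaded_file).parent == tmp_path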

clients/python/test/e2e/test_files_api.py
Lines changed: 56 additions & 84 deletions

@@ -14,10 +14,8 @@
 from pydantic import ByteSize
 from _utils import skip_if_osparc_version
 from packaging.version import Version
-
-_KB: ByteSize = ByteSize(1024)  # in bytes
-_MB: ByteSize = ByteSize(_KB * 1024)  # in bytes
-_GB: ByteSize = ByteSize(_MB * 1024)  # in bytes
+from conftest import ServerFile, _KB
+from faker import Faker


 def _hash_file(file: Path) -> str:
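
Note: only the signature of _hash_file appears in this hunk; its body is untouched by the commit. For orientation, a typical chunked SHA-256 helper with this signature (an assumption for illustration, not the repository's actual implementation) would look like:

import hashlib
from pathlib import Path


def _hash_file(file: Path) -> str:
    # Hypothetical sketch: stream the file in 1 MiB chunks so hashing the
    # 1 GB test file stays cheap on RAM.
    digest = hashlib.sha256()
    with open(file, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest()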
@@ -34,106 +32,80 @@ def _hash_file(file: Path) -> str:

 @skip_if_osparc_version(at_least=Version("0.8.0"), at_most=Version("0.8.3.post0.dev11"))
 def test_upload_file(
-    create_tmp_file: Callable[[ByteSize], Path], api_client: osparc.ApiClient
+    tmp_path: Path, large_server_file: ServerFile, files_api: osparc.FilesApi
 ) -> None:
     """Test that we can upload a file via the multipart upload and download it again."""
-    tmp_file = create_tmp_file(ByteSize(1 * _GB))
-    tmp_path: Path = tmp_file.parent
-    files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)
-    try:
-        uploaded_file1: osparc.File = files_api.upload_file(tmp_file)
-        uploaded_file2: osparc.File = files_api.upload_file(tmp_file)
-        assert (
-            uploaded_file1.id == uploaded_file2.id
-        ), "could not detect that file was already on server"
-        downloaded_file = files_api.download_file(
-            uploaded_file1.id, destination_folder=tmp_path
-        )
-        assert Path(downloaded_file).parent == tmp_path
-        assert _hash_file(Path(downloaded_file)) == _hash_file(tmp_file)
-    finally:
-        files_api.delete_file(uploaded_file1.id)
+    uploaded_file: osparc.File = files_api.upload_file(large_server_file.local_file)
+    assert (
+        large_server_file.server_file.id == uploaded_file.id
+    ), "could not detect that file was already on server"
+    downloaded_file = files_api.download_file(
+        uploaded_file.id, destination_folder=tmp_path
+    )
+    assert Path(downloaded_file).parent == tmp_path
+    assert _hash_file(Path(downloaded_file)) == _hash_file(large_server_file.local_file)


 @skip_if_osparc_version(at_least=Version("0.8.3.post0.dev12"))
 def test_upload_download_file_ram_usage(
-    create_tmp_file: Callable[[ByteSize], Path], api_client: osparc.ApiClient
+    tmp_path: Path, large_server_file: ServerFile, files_api: osparc.FilesApi
 ) -> None:
     """Check RAM usage of upload/download fcns"""
     _allowed_ram_usage_in_mb: Final[int] = 300  # 300MB
-    tmp_file = create_tmp_file(ByteSize(1 * _GB))
     assert (
-        tmp_file.stat().st_size > _allowed_ram_usage_in_mb * 1024 * 1024
-    ), "For this test to make sense, file size must be larger than allowed ram usage."
+        large_server_file.local_file.stat().st_size
+        > _allowed_ram_usage_in_mb * 1024 * 1024
+    ), f"For this test to make sense, {large_server_file.local_file.stat().st_size=} must be larger than {_allowed_ram_usage_in_mb=}."

     def max_diff(data: List[int]) -> int:
         return max(data) - min(data)

-    tmp_path: Path = tmp_file.parent
-    files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)
-    try:
-        upload_ram_usage_in_mb, uploaded_file1 = memory_usage(
-            (files_api.upload_file, (tmp_file,)),  # type: ignore
-            retval=True,
-        )
-        uploaded_file2: osparc.File = files_api.upload_file(tmp_file)
-        assert (
-            uploaded_file1.id == uploaded_file2.id
-        ), "could not detect that file was already on server"
-        assert (
-            max_diff(upload_ram_usage_in_mb) < _allowed_ram_usage_in_mb
-        ), f"Used more than {_allowed_ram_usage_in_mb=} to upload file of size {tmp_file.stat().st_size=}"
-        download_ram_usage_in_mb, downloaded_file = memory_usage(
-            (
-                files_api.download_file,
-                (uploaded_file1.id,),
-                {"destination_folder": tmp_path},
-            ),  # type: ignore
-            retval=True,
-        )
-        assert (
-            max_diff(download_ram_usage_in_mb) < _allowed_ram_usage_in_mb
-        ), f"Used more than {_allowed_ram_usage_in_mb=} to download file of size {Path(downloaded_file).stat().st_size=}"
-        assert _hash_file(Path(downloaded_file)) == _hash_file(tmp_file)
-    finally:
-        files_api.delete_file(uploaded_file1.id)
+    upload_ram_usage_in_mb, uploaded_file = memory_usage(
+        (files_api.upload_file, (large_server_file.local_file,)),  # type: ignore
+        retval=True,
+    )
+    assert (
+        large_server_file.server_file.id == uploaded_file.id
+    ), "could not detect that file was already on server"
+    assert (
+        max_diff(upload_ram_usage_in_mb) < _allowed_ram_usage_in_mb
+    ), f"Used more than {_allowed_ram_usage_in_mb=} to upload file of size {large_server_file.local_file.stat().st_size=}"
+    download_ram_usage_in_mb, downloaded_file = memory_usage(
+        (
+            files_api.download_file,
+            (uploaded_file.id,),
+            {"destination_folder": tmp_path},
+        ),  # type: ignore
+        retval=True,
+    )
+    assert Path(downloaded_file).parent == tmp_path
+    assert (
+        max_diff(download_ram_usage_in_mb) < _allowed_ram_usage_in_mb
+    ), f"Used more than {_allowed_ram_usage_in_mb=} to download file of size {Path(downloaded_file).stat().st_size=}"
+    assert _hash_file(Path(downloaded_file)) == _hash_file(large_server_file.local_file)


 @skip_if_osparc_version(at_least=Version("0.8.3.post0.dev20"))
 @pytest.mark.parametrize("use_checksum", [True, False])
 @pytest.mark.parametrize("use_id", [True, False])
 def test_search_files(
-    create_tmp_file: Callable[[ByteSize], Path],
-    api_client: osparc.ApiClient,
+    large_server_file: Callable[[ByteSize], Path],
+    files_api: osparc.FilesApi,
     use_checksum: bool,
     use_id: bool,
+    faker: Faker,
 ) -> None:
-    tmp_file = create_tmp_file(ByteSize(1 * _GB))
-    checksum: str = _hash_file(tmp_file)
-    results: osparc.PaginationGenerator
-    files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)
-    try:
-        results = files_api._search_files(sha256_checksum=checksum)
-        assert len(results) == 0, "Found file which shouldn't be there"
-
-        uploaded_file: osparc.File = files_api.upload_file(tmp_file)
-        assert checksum == uploaded_file.checksum
-
-        results = files_api._search_files(
-            file_id=uploaded_file.id if use_id else None,
-            sha256_checksum=uploaded_file.checksum if use_checksum else None,
-        )
-        assert len(results) == 1, "Could not find file after it had been uploaded"
-
-        files_api.delete_file(uploaded_file.id)
-        results = files_api._search_files(
-            file_id=uploaded_file.id if use_id else None,
-            sha256_checksum=uploaded_file.checksum if use_checksum else None,
-        )
-        assert len(results) == 0, "Could find file on server after it had been deleted"
-
-    except Exception:
-        # clean up in case of failure
-        results = files_api._search_files(sha256_checksum=checksum)
-        for file in results:
-            files_api.delete_file(file.id)
+    results: osparc.PaginationGenerator = files_api._search_files(
+        sha256_checksum=f"{faker.sha256()}"
+    )
+    assert len(results) == 0, "Found file which shouldn't be there"
+
+    results = files_api._search_files(
+        file_id=large_server_file.server_file.id if use_id else None,
+        sha256_checksum=large_server_file.server_file.checksum
+        if use_checksum
+        else None,
+    )
+    assert len(results) == 1, "Could not find file after it had been uploaded"
+    for file in results:
+        assert file.checksum == large_server_file.server_file.checksum
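
Note: the trimmed test_search_files leans on two behaviors of the osparc.PaginationGenerator returned by FilesApi._search_files, both visible in the diff above: len() gives the number of matches and iterating yields osparc.File items. A small helper built only on those behaviors (hypothetical, for illustration):

import osparc


def count_files_with_checksum(files_api: osparc.FilesApi, checksum: str) -> int:
    # _search_files returns an osparc.PaginationGenerator: sized and iterable.
    results: osparc.PaginationGenerator = files_api._search_files(
        sha256_checksum=checksum
    )
    n_matches = len(results)
    for found in results:
        assert found.checksum == checksum
    return n_matches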
