from pydantic import ByteSize
from _utils import skip_if_osparc_version
from packaging.version import Version
-
-_KB: ByteSize = ByteSize(1024)  # in bytes
-_MB: ByteSize = ByteSize(_KB * 1024)  # in bytes
-_GB: ByteSize = ByteSize(_MB * 1024)  # in bytes
+from conftest import ServerFile, _KB
+from faker import Faker


def _hash_file(file: Path) -> str:
@@ -34,106 +32,80 @@ def _hash_file(file: Path) -> str:

@skip_if_osparc_version(at_least=Version("0.8.0"), at_most=Version("0.8.3.post0.dev11"))
def test_upload_file(
-    create_tmp_file: Callable[[ByteSize], Path], api_client: osparc.ApiClient
+    tmp_path: Path, large_server_file: ServerFile, files_api: osparc.FilesApi
) -> None:
    """Test that we can upload a file via the multipart upload and download it again."""
-    tmp_file = create_tmp_file(ByteSize(1 * _GB))
-    tmp_path: Path = tmp_file.parent
-    files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)
-    try:
-        uploaded_file1: osparc.File = files_api.upload_file(tmp_file)
-        uploaded_file2: osparc.File = files_api.upload_file(tmp_file)
-        assert (
-            uploaded_file1.id == uploaded_file2.id
-        ), "could not detect that file was already on server"
-        downloaded_file = files_api.download_file(
-            uploaded_file1.id, destination_folder=tmp_path
-        )
-        assert Path(downloaded_file).parent == tmp_path
-        assert _hash_file(Path(downloaded_file)) == _hash_file(tmp_file)
-    finally:
-        files_api.delete_file(uploaded_file1.id)
+    uploaded_file: osparc.File = files_api.upload_file(large_server_file.local_file)
+    assert (
+        large_server_file.server_file.id == uploaded_file.id
+    ), "could not detect that file was already on server"
+    downloaded_file = files_api.download_file(
+        uploaded_file.id, destination_folder=tmp_path
+    )
+    assert Path(downloaded_file).parent == tmp_path
+    assert _hash_file(Path(downloaded_file)) == _hash_file(large_server_file.local_file)


@skip_if_osparc_version(at_least=Version("0.8.3.post0.dev12"))
def test_upload_download_file_ram_usage(
-    create_tmp_file: Callable[[ByteSize], Path], api_client: osparc.ApiClient
+    tmp_path: Path, large_server_file: ServerFile, files_api: osparc.FilesApi
) -> None:
    """Check RAM usage of upload/download fcns"""
    _allowed_ram_usage_in_mb: Final[int] = 300  # 300MB
-    tmp_file = create_tmp_file(ByteSize(1 * _GB))
    assert (
-        tmp_file.stat().st_size > _allowed_ram_usage_in_mb * 1024 * 1024
-    ), "For this test to make sense, file size must be larger than allowed ram usage."
+        large_server_file.local_file.stat().st_size
+        > _allowed_ram_usage_in_mb * 1024 * 1024
+    ), f"For this test to make sense, {large_server_file.local_file.stat().st_size=} must be larger than {_allowed_ram_usage_in_mb=}."

    def max_diff(data: List[int]) -> int:
        return max(data) - min(data)

-    tmp_path: Path = tmp_file.parent
-    files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)
-    try:
-        upload_ram_usage_in_mb, uploaded_file1 = memory_usage(
-            (files_api.upload_file, (tmp_file,)),  # type: ignore
-            retval=True,
-        )
-        uploaded_file2: osparc.File = files_api.upload_file(tmp_file)
-        assert (
-            uploaded_file1.id == uploaded_file2.id
-        ), "could not detect that file was already on server"
-        assert (
-            max_diff(upload_ram_usage_in_mb) < _allowed_ram_usage_in_mb
-        ), f"Used more than {_allowed_ram_usage_in_mb=} to upload file of size {tmp_file.stat().st_size=}"
-        download_ram_usage_in_mb, downloaded_file = memory_usage(
-            (
-                files_api.download_file,
-                (uploaded_file1.id,),
-                {"destination_folder": tmp_path},
-            ),  # type: ignore
-            retval=True,
-        )
-        assert (
-            max_diff(download_ram_usage_in_mb) < _allowed_ram_usage_in_mb
-        ), f"Used more than {_allowed_ram_usage_in_mb=} to download file of size {Path(downloaded_file).stat().st_size=}"
-        assert _hash_file(Path(downloaded_file)) == _hash_file(tmp_file)
-    finally:
-        files_api.delete_file(uploaded_file1.id)
+    upload_ram_usage_in_mb, uploaded_file = memory_usage(
+        (files_api.upload_file, (large_server_file.local_file,)),  # type: ignore
+        retval=True,
+    )
+    assert (
+        large_server_file.server_file.id == uploaded_file.id
+    ), "could not detect that file was already on server"
+    assert (
+        max_diff(upload_ram_usage_in_mb) < _allowed_ram_usage_in_mb
+    ), f"Used more than {_allowed_ram_usage_in_mb=} to upload file of size {large_server_file.local_file.stat().st_size=}"
+    download_ram_usage_in_mb, downloaded_file = memory_usage(
+        (
+            files_api.download_file,
+            (uploaded_file.id,),
+            {"destination_folder": tmp_path},
+        ),  # type: ignore
+        retval=True,
+    )
+    assert Path(downloaded_file).parent == tmp_path
+    assert (
+        max_diff(download_ram_usage_in_mb) < _allowed_ram_usage_in_mb
+    ), f"Used more than {_allowed_ram_usage_in_mb=} to download file of size {Path(downloaded_file).stat().st_size=}"
+    assert _hash_file(Path(downloaded_file)) == _hash_file(large_server_file.local_file)


@skip_if_osparc_version(at_least=Version("0.8.3.post0.dev20"))
@pytest.mark.parametrize("use_checksum", [True, False])
@pytest.mark.parametrize("use_id", [True, False])
def test_search_files(
-    create_tmp_file: Callable[[ByteSize], Path],
-    api_client: osparc.ApiClient,
+    large_server_file: ServerFile,
+    files_api: osparc.FilesApi,
    use_checksum: bool,
    use_id: bool,
+    faker: Faker,
) -> None:
-    tmp_file = create_tmp_file(ByteSize(1 * _GB))
-    checksum: str = _hash_file(tmp_file)
-    results: osparc.PaginationGenerator
-    files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)
-    try:
-        results = files_api._search_files(sha256_checksum=checksum)
-        assert len(results) == 0, "Found file which shouldn't be there"
-
-        uploaded_file: osparc.File = files_api.upload_file(tmp_file)
-        assert checksum == uploaded_file.checksum
-
-        results = files_api._search_files(
-            file_id=uploaded_file.id if use_id else None,
-            sha256_checksum=uploaded_file.checksum if use_checksum else None,
-        )
-        assert len(results) == 1, "Could not find file after it had been uploaded"
-
-        files_api.delete_file(uploaded_file.id)
-        results = files_api._search_files(
-            file_id=uploaded_file.id if use_id else None,
-            sha256_checksum=uploaded_file.checksum if use_checksum else None,
-        )
-        assert len(results) == 0, "Could find file on server after it had been deleted"
-
-    except Exception:
-        # clean up in case of failure
-        results = files_api._search_files(sha256_checksum=checksum)
-        for file in results:
-            files_api.delete_file(file.id)
+    results: osparc.PaginationGenerator = files_api._search_files(
+        sha256_checksum=f"{faker.sha256()}"
+    )
+    assert len(results) == 0, "Found file which shouldn't be there"
+
+    results = files_api._search_files(
+        file_id=large_server_file.server_file.id if use_id else None,
+        sha256_checksum=large_server_file.server_file.checksum
+        if use_checksum
+        else None,
+    )
+    assert len(results) == 1, "Could not find file after it had been uploaded"
+    for file in results:
+        assert file.checksum == large_server_file.server_file.checksum
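
The rewritten tests rely on a ServerFile helper and a large_server_file fixture imported from conftest.py, which this diff does not show. Below is a minimal sketch of what that fixture might look like: the names ServerFile, large_server_file, _KB and the attributes local_file/server_file are taken from the diff above, but the dataclass layout, the fixture scope, the chunked file generation, and the reuse of a files_api fixture are assumptions, not the repository's actual code.

# Hypothetical sketch of conftest.py -- assumptions only, the real fixture may differ.
import os
from dataclasses import dataclass
from pathlib import Path
from typing import Iterator

import osparc
import pytest
from pydantic import ByteSize

_KB: ByteSize = ByteSize(1024)  # in bytes
_MB: ByteSize = ByteSize(_KB * 1024)  # in bytes
_GB: ByteSize = ByteSize(_MB * 1024)  # in bytes


@dataclass(frozen=True)
class ServerFile:
    local_file: Path          # the file on disk which the tests hash and re-upload
    server_file: osparc.File  # the same file as registered on the server


@pytest.fixture(scope="session")
def large_server_file(
    tmp_path_factory: pytest.TempPathFactory, files_api: osparc.FilesApi
) -> Iterator[ServerFile]:
    """Create a ~1GB file once, upload it, and delete it from the server afterwards."""
    local_file = tmp_path_factory.mktemp("e2e_data") / "large_file.bin"
    with local_file.open("wb") as f:
        for _ in range(_GB // _MB):  # write 1GB in 1MB chunks to keep RAM usage flat
            f.write(os.urandom(_MB))
    server_file = files_api.upload_file(local_file)
    yield ServerFile(local_file=local_file, server_file=server_file)
    files_api.delete_file(server_file.id)

Keeping one pre-uploaded file per session is what lets the tests assert that a second upload of the same content is detected as already present on the server, instead of each test creating, uploading, and deleting its own 1GB file.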