
Commit d35d1de

Add support for transferring 3D geometries and closed surface meshes in volume annotations (#7)

* Add support for downloading and uploading 3D geometries
* Remove support for Closed Surface Mesh geometries and add a warning for skipped copying
* Add a cancellation flag for deletion of source items when entities cannot be moved
1 parent: c64d870 · commit: d35d1de

File tree: 1 file changed (+66, -13 lines)

src/main.py

Lines changed: 66 additions & 13 deletions
@@ -11,10 +11,12 @@
 from tqdm import tqdm
 from supervisely.api.labeling_job_api import LabelingJobInfo
 from supervisely.annotation.tag_meta import TagApplicableTo, TagTargetType
-
-
+from supervisely.geometry.closed_surface_mesh import ClosedSurfaceMesh
+import tempfile
+from supervisely.volume import stl_converter
+from supervisely.project.volume_project import _create_volume_header
 import api_utils as api_utils
-
+from uuid import UUID
 
 load_dotenv("local.env")
 load_dotenv(os.path.expanduser("~/supervisely.env"))
@@ -27,6 +29,7 @@
 executor = ThreadPoolExecutor(max_workers=5)
 merged_meta = None
 TASK_ID = None
+cancel_deletion = False  # flag to cancel deletion of the source items
 
 if sly.is_development():
     api.app.workflow.enable()
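Note: `cancel_deletion` is module-level state shared across the copy and move routines in the hunks below. Copy code sets it when an entity cannot be transferred, and each move routine checks it before deleting source items, then resets it. A minimal standalone sketch of the pattern, where `copy_entities` and `move_entities` are hypothetical stand-ins for the real routines in src/main.py:

```python
cancel_deletion = False  # set by copy routines, checked and reset by move routines


def copy_entities(entities):
    """Hypothetical copy routine: transfers what it can, flags what it can't."""
    global cancel_deletion
    copied = []
    for entity in entities:
        if not entity.get("movable", True):
            cancel_deletion = True  # an entity cannot be transferred
            continue
        copied.append(entity)
    return copied


def move_entities(entities):
    """Hypothetical move routine: copy first, delete sources only if safe."""
    global cancel_deletion
    copied = copy_entities(entities)
    if cancel_deletion:
        print("Source entities will not be removed: some could not be moved")
    else:
        print(f"Removing {len(entities)} source entities")
    cancel_deletion = False  # reset so the next move starts clean
    return copied


# Example: one non-movable entity cancels deletion for the whole move.
move_entities([{"id": 1}, {"id": 2, "movable": False}])
```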
@@ -615,20 +618,51 @@ def _copy_volumes(
 def _copy_anns(
     src: List[sly.api.volume_api.VolumeInfo], dst: List[sly.api.volume_api.VolumeInfo]
 ):
+    global cancel_deletion
     ann_jsons = run_in_executor(
         api.volume.annotation.download_bulk, src_dataset_id, [info.id for info in src]
     )
     tasks = []
+    mask3d_tmp_dir = tempfile.mkdtemp()
+    mask_ids = []
+    mask_paths = []
+    key_id_map = sly.KeyIdMap()
+    set_csm_warning = False
     for ann_json, dst_info in zip(ann_jsons, dst):
-        key_id_map = sly.KeyIdMap()
         ann = sly.VolumeAnnotation.from_json(ann_json, project_meta, key_id_map)
+        sf_idx_to_remove = []
+        for idx, sf in enumerate(ann.spatial_figures):
+            figure_id = key_id_map.get_figure_id(sf.key())
+            if sf.geometry.name() == sly.Mask3D.name():
+                mask_ids.append(figure_id)
+                mask_paths.append(os.path.join(mask3d_tmp_dir, sf.key().hex))
+            if sf.geometry.name() == ClosedSurfaceMesh.name():
+                sf_idx_to_remove.append(idx)
+                set_csm_warning = True
+                cancel_deletion = True
+        sf_idx_to_remove.reverse()
+        for idx in sf_idx_to_remove:
+            ann.spatial_figures.pop(idx)
+        run_in_executor(
+            api.volume.figure.download_sf_geometries, mask_ids, mask_paths)
         tasks.append(
             executor.submit(
                 api.volume.annotation.append, dst_info.id, ann, key_id_map, volume_info=dst_info
             )
         )
+
     for task in as_completed(tasks):
         task.result()
+    progress_masks = tqdm(total=len(mask_paths), desc="Uploading Mask 3D geometries")
+    for file in mask_paths:
+        with open(file, "rb") as f:
+            key = UUID(os.path.basename(f.name))
+            api.volume.figure.upload_sf_geometries([key], {key: f.read()}, key_id_map)
+        progress_masks.update(1)
+    progress_masks.close()
+    if set_csm_warning:
+        logger.warning("Closed Surface Meshes are no longer supported. Skipped copying.")
+        set_csm_warning = False
     return src, dst
 
 def _maybe_copy_anns_and_replace(src, dst):
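Note: in `_copy_anns` above, each Mask 3D geometry is downloaded into a temp directory with the file named after the figure key's hex, so the `UUID` can be recovered from the filename at upload time; Closed Surface Mesh figures are instead dropped, popping indices in reverse order so earlier indices stay valid after each pop. A minimal sketch of the filename-as-key round-trip, with `b"fake geometry bytes"` standing in for real downloaded data:

```python
import os
import tempfile
from uuid import UUID, uuid4

# Sketch of the round-trip used by _copy_anns: save each geometry to a file
# named after its figure key's hex, then recover the UUID from the filename.
tmp_dir = tempfile.mkdtemp()
keys = [uuid4() for _ in range(3)]

# "Download" step: one file per figure, named by the key's hex (no extension).
paths = []
for key in keys:
    path = os.path.join(tmp_dir, key.hex)
    with open(path, "wb") as f:
        f.write(b"fake geometry bytes")  # placeholder for real geometry data
    paths.append(path)

# "Upload" step: rebuild each UUID from its filename and pair it with the bytes.
for path in paths:
    key = UUID(os.path.basename(path))
    with open(path, "rb") as f:
        payload = {key: f.read()}
    assert key in keys and key in payload  # the key survived the round-trip
```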
@@ -1596,6 +1630,7 @@ def move_project(
     progress_cb=None,
     existing_projects=None,
 ) -> List[CreatedDataset]:
+    global cancel_deletion
     if dst_project_id is None and src_project_info.workspace_id == dst_workspace_id:
         logger.warning(
             "Moving project to the same workspace. Skipping",
@@ -1626,8 +1661,13 @@ def move_project(
             "No datasets created. Skipping deletion", extra={"project_id": src_project_info.id}
         )
         return []
-    logger.info("Removing source project", extra={"project_id": src_project_info.id})
-    run_in_executor(api.project.remove, src_project_info.id)
+
+    if cancel_deletion:
+        logger.info("The source project will not be removed because some of its entities cannot be moved.", extra={"project_id": src_project_info.id})
+    else:
+        logger.info("Removing source project", extra={"project_id": src_project_info.id})
+        run_in_executor(api.project.remove, src_project_info.id)
+    cancel_deletion = False
     return created_datasets
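Note: the same check-then-reset guard appears again in `move_datasets_tree` and `move_items_to_dataset` in the hunks below; the item-level variant additionally compares source and created counts. Condensed into one hypothetical helper (`delete_sources` stands in for `api.project.remove`, `api.dataset.remove_batch`, or `delete_items`; it is not a real function from this file), the decision looks roughly like this:

```python
cancel_deletion = False  # module-level flag, as in the hunks above


def finish_move(created, sources, delete_sources):
    """Hypothetical condensation of the deletion guard in the move routines."""
    global cancel_deletion
    if cancel_deletion or len(created) < len(sources):
        # Something could not be moved: keep the source items intact.
        print("Skipping deletion of source items")
    else:
        delete_sources(sources)
    cancel_deletion = False  # always reset so the next operation starts clean
    return created
```

Resetting the flag on every exit path matters: without it, one failed move would make every later move skip deletion as well.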
@@ -1672,6 +1712,8 @@ def move_datasets_tree(
     options: Dict,
     progress_cb=None,
 ):
+    global cancel_deletion
+
     creted_datasets = copy_dataset_tree(
         datasets_tree,
         project_type,
@@ -1699,11 +1741,16 @@ def move_datasets_tree(
     if len(datasets_to_remove) == 0:
         logger.info("No datasets to remove", extra={"dataset_id": dst_dataset_id})
         return creted_datasets
-    logger.info(
-        "Removing source datasets",
-        extra={"dataset_ids": [ds.id for ds in datasets_to_remove]},
-    )
-    run_in_executor(api.dataset.remove_batch, [ds.id for ds in datasets_to_remove])
+
+    if cancel_deletion:
+        logger.info("The source datasets will not be removed because some of their entities cannot be moved.", extra={"dataset_id": dst_dataset_id})
+    else:
+        logger.info(
+            "Removing source datasets",
+            extra={"dataset_ids": [ds.id for ds in datasets_to_remove]},
+        )
+        run_in_executor(api.dataset.remove_batch, [ds.id for ds in datasets_to_remove])
+    cancel_deletion = False
     return creted_datasets
@@ -1762,6 +1809,8 @@ def move_items_to_dataset(
     options: Dict,
     progress_cb=None,
 ):
+    global cancel_deletion
+
     item_ids = [item[JSONKEYS.ID] for item in items]
     item_infos = get_item_infos(src_dataset_id, item_ids, project_type)
     created_item_infos = clone_items(
@@ -1772,8 +1821,12 @@ def move_items_to_dataset(
         options=options,
         progress_cb=progress_cb,
         src_infos=item_infos,
-    )
-    delete_items(item_infos)
+    )
+    if cancel_deletion or len(created_item_infos) < len(item_infos):
+        logger.info("Some items were not moved. Skipping deletion of source items", extra={"dataset_id": dst_dataset_id})
+    else:
+        delete_items(item_infos)
+    cancel_deletion = False
     return created_item_infos