@@ -11,10 +11,12 @@
 from tqdm import tqdm
 from supervisely.api.labeling_job_api import LabelingJobInfo
 from supervisely.annotation.tag_meta import TagApplicableTo, TagTargetType
-
-
+from supervisely.geometry.closed_surface_mesh import ClosedSurfaceMesh
+import tempfile
+from supervisely.volume import stl_converter
+from supervisely.project.volume_project import _create_volume_header
 import api_utils as api_utils
-
+from uuid import UUID
 
 load_dotenv("local.env")
 load_dotenv(os.path.expanduser("~/supervisely.env"))
@@ -27,6 +29,7 @@
 executor = ThreadPoolExecutor(max_workers=5)
 merged_meta = None
 TASK_ID = None
+cancel_deletion = False  # flag to cancel deletion of the source items
 
 if sly.is_development():
     api.app.workflow.enable()
@@ -615,20 +618,51 @@ def _copy_volumes(
 def _copy_anns(
     src: List[sly.api.volume_api.VolumeInfo], dst: List[sly.api.volume_api.VolumeInfo]
 ):
+    global cancel_deletion
     ann_jsons = run_in_executor(
         api.volume.annotation.download_bulk, src_dataset_id, [info.id for info in src]
     )
     tasks = []
+    mask3d_tmp_dir = tempfile.mkdtemp()
+    mask_ids = []
+    mask_paths = []
+    key_id_map = sly.KeyIdMap()
+    set_csm_warning = False
     for ann_json, dst_info in zip(ann_jsons, dst):
-        key_id_map = sly.KeyIdMap()
         ann = sly.VolumeAnnotation.from_json(ann_json, project_meta, key_id_map)
+        sf_idx_to_remove = []
+        for idx, sf in enumerate(ann.spatial_figures):
+            figure_id = key_id_map.get_figure_id(sf.key())
+            if sf.geometry.name() == sly.Mask3D.name():
+                mask_ids.append(figure_id)
+                mask_paths.append(os.path.join(mask3d_tmp_dir, sf.key().hex))
+            if sf.geometry.name() == ClosedSurfaceMesh.name():
+                sf_idx_to_remove.append(idx)
+                set_csm_warning = True
+                cancel_deletion = True
+        sf_idx_to_remove.reverse()
+        for idx in sf_idx_to_remove:
+            ann.spatial_figures.pop(idx)
+        run_in_executor(
+            api.volume.figure.download_sf_geometries, mask_ids, mask_paths)
         tasks.append(
             executor.submit(
                 api.volume.annotation.append, dst_info.id, ann, key_id_map, volume_info=dst_info
             )
         )
+
     for task in as_completed(tasks):
         task.result()
+    progress_masks = tqdm(total=len(mask_paths), desc="Uploading Mask 3D geometries")
+    for file in mask_paths:
+        with open(file, "rb") as f:
+            key = UUID(os.path.basename(f.name))
+            api.volume.figure.upload_sf_geometries([key], {key: f.read()}, key_id_map)
+        progress_masks.update(1)
+    progress_masks.close()
+    if set_csm_warning:
+        logger.warning("Closed Surface Meshes are no longer supported. Skipped copying.")
+        set_csm_warning = False
     return src, dst
 
 def _maybe_copy_anns_and_replace(src, dst):
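Taken together, the Mask 3D handling in `_copy_anns` is a download/re-upload round trip keyed by each spatial figure's UUID: every geometry is saved to a file named after the figure key's hex, so the key can be parsed back from the filename at upload time. Below is a condensed, illustrative sketch of just that round trip, assuming `api`, `ann`, and `key_id_map` are in scope as in the diff; it is a sketch, not a drop-in replacement for the code above.

```python
import os
import tempfile
from uuid import UUID

import supervisely as sly

# Sketch only: `api`, `ann` (a sly.VolumeAnnotation), and `key_id_map`
# are assumed to exist as in _copy_anns above.
tmp_dir = tempfile.mkdtemp()
mask_ids, mask_paths = [], []
for sf in ann.spatial_figures:
    if sf.geometry.name() == sly.Mask3D.name():
        # Name the file after the figure key's hex so the UUID can be
        # recovered from the basename when uploading.
        mask_ids.append(key_id_map.get_figure_id(sf.key()))
        mask_paths.append(os.path.join(tmp_dir, sf.key().hex))

# Fetch the binary geometries from the source figures.
api.volume.figure.download_sf_geometries(mask_ids, mask_paths)

# Re-upload against the destination figures, parsing the key back
# from the filename chosen on download.
for path in mask_paths:
    with open(path, "rb") as f:
        key = UUID(os.path.basename(path))
        api.volume.figure.upload_sf_geometries([key], {key: f.read()}, key_id_map)
```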
@@ -1596,6 +1630,7 @@ def move_project(
     progress_cb=None,
     existing_projects=None,
 ) -> List[CreatedDataset]:
+    global cancel_deletion
     if dst_project_id is None and src_project_info.workspace_id == dst_workspace_id:
         logger.warning(
             "Moving project to the same workspace. Skipping",
@@ -1626,8 +1661,13 @@ def move_project(
             "No datasets created. Skipping deletion", extra={"project_id": src_project_info.id}
         )
         return []
-    logger.info("Removing source project", extra={"project_id": src_project_info.id})
-    run_in_executor(api.project.remove, src_project_info.id)
+
+    if cancel_deletion:
+        logger.info("The source project will not be removed because some of its entities cannot be moved.", extra={"project_id": src_project_info.id})
+    else:
+        logger.info("Removing source project", extra={"project_id": src_project_info.id})
+        run_in_executor(api.project.remove, src_project_info.id)
+    cancel_deletion = False
     return created_datasets
 
 
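The `cancel_deletion` flag follows the same shape in all three move paths (`move_project`, `move_datasets_tree`, `move_items_to_dataset`): copy first, delete the source only if nothing was silently dropped, and always reset the flag afterwards so one cancelled move cannot poison the next. A minimal, runnable sketch of that pattern; `move`, `_copy`, and `_remove_source` are placeholder names for illustration, not functions from the PR.

```python
import logging

logger = logging.getLogger(__name__)

cancel_deletion = False  # module-level flag, flipped during the copy phase


def _copy(src_id: int) -> None:
    """Placeholder copy step; sets the flag when an entity is skipped."""
    global cancel_deletion
    cancel_deletion = True  # pretend an unsupported geometry was dropped


def _remove_source(src_id: int) -> None:
    """Placeholder for the destructive deletion step."""


def move(src_id: int) -> None:
    global cancel_deletion
    _copy(src_id)  # may set cancel_deletion = True
    if cancel_deletion:
        logger.info("Keeping the source: some entities could not be moved.")
    else:
        _remove_source(src_id)
    cancel_deletion = False  # always reset so the next operation starts clean
```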
@@ -1672,6 +1712,8 @@ def move_datasets_tree(
     options: Dict,
     progress_cb=None,
 ):
+    global cancel_deletion
+
     creted_datasets = copy_dataset_tree(
         datasets_tree,
         project_type,
@@ -1699,11 +1741,16 @@ def move_datasets_tree(
     if len(datasets_to_remove) == 0:
         logger.info("No datasets to remove", extra={"dataset_id": dst_dataset_id})
         return creted_datasets
-    logger.info(
-        "Removing source datasets",
-        extra={"dataset_ids": [ds.id for ds in datasets_to_remove]},
-    )
-    run_in_executor(api.dataset.remove_batch, [ds.id for ds in datasets_to_remove])
+
+    if cancel_deletion:
+        logger.info("The source datasets will not be removed because some of their entities cannot be moved.", extra={"dataset_id": dst_dataset_id})
+    else:
+        logger.info(
+            "Removing source datasets",
+            extra={"dataset_ids": [ds.id for ds in datasets_to_remove]},
+        )
+        run_in_executor(api.dataset.remove_batch, [ds.id for ds in datasets_to_remove])
+    cancel_deletion = False
     return creted_datasets
 
 
@@ -1762,6 +1809,8 @@ def move_items_to_dataset(
     options: Dict,
     progress_cb=None,
 ):
+    global cancel_deletion
+
     item_ids = [item[JSONKEYS.ID] for item in items]
     item_infos = get_item_infos(src_dataset_id, item_ids, project_type)
     created_item_infos = clone_items(
@@ -1772,8 +1821,12 @@ def move_items_to_dataset(
         options=options,
         progress_cb=progress_cb,
         src_infos=item_infos,
-    )
-    delete_items(item_infos)
+    )
+    if cancel_deletion or len(created_item_infos) < len(item_infos):
+        logger.info("Some items were not moved. Skipping deletion of source items", extra={"dataset_id": dst_dataset_id})
+    else:
+        delete_items(item_infos)
+    cancel_deletion = False
     return created_item_infos
 
 
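At the item level the guard is stricter than in the project and dataset paths: deletion is skipped not only when `cancel_deletion` was set, but also whenever the copy itself was partial, i.e. `clone_items` returned fewer items than were read from the source. A condensed sketch of that check, with names as in the hunk above; `logger`, `delete_items`, and the two info lists are assumed to be in scope.

```python
# Condensed from move_items_to_dataset above: two independent reasons
# to keep the source items intact.
copy_was_partial = len(created_item_infos) < len(item_infos)
if cancel_deletion or copy_was_partial:
    # A lossy or partial move must never destroy the originals.
    logger.info("Some items were not moved. Skipping deletion of source items")
else:
    delete_items(item_infos)
cancel_deletion = False  # reset regardless of the outcome
```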