Skip to content

Commit 9a09a89

Browse files
align 11x with master (#284)
* [Fixes #273] connect resource with execution request * Update README.md * store spatial file always true for cloning * fix build * [Fixes #12763] 3D tiles geometricError mandatory field should be on t… (#279) * [Fixes #12763] 3D tiles geometricError mandatory field should be on tileset level ref to GeoNode/geonode#12763 * fix tests * Update tests.py * Update test_end2end.py * fix test * [Fixes #12789] Improve 3dtiles filename handling (#281) * Fix migrations for asset (#283) * Fix migrations for create handlerinfo via asset * Fix migrations for create handlerinfo via asset --------- Co-authored-by: Giovanni Allegri <giovanni.allegri@gmail.com>
1 parent 79c8cc7 commit 9a09a89

File tree

3 files changed

+80
-20
lines changed

3 files changed

+80
-20
lines changed

importer/handlers/tiles3d/handler.py

Lines changed: 25 additions & 19 deletions
Original file line number · Diff line number · Diff line change
@@ -56,13 +56,15 @@ def can_handle(_data) -> bool:
5656
This endpoint will return True or False if with the info provided
5757
the handler is able to handle the file or not
5858
"""
59-
base = _data.get("base_file")
60-
if not base:
59+
try:
60+
base = _data.get("base_file")
61+
if not base:
62+
return False
63+
ext = base.split(".")[-1] if isinstance(base, str) else base.name.split(".")[-1]
64+
if ext in ["json"] and Tiles3DFileHandler.is_3dtiles_json(base):
65+
return True
66+
except Exception:
6167
return False
62-
ext = base.split(".")[-1] if isinstance(base, str) else base.name.split(".")[-1]
63-
input_filename = os.path.basename(base if isinstance(base, str) else base.name)
64-
if ext in ["json"] and "tileset.json" in input_filename:
65-
return True
6668
return False
6769

6870
@staticmethod
@@ -90,25 +92,29 @@ def is_valid(files, user):
9092
)
9193

9294
try:
93-
with open(_file, "r") as _readed_file:
94-
_file = json.loads(_readed_file.read())
95-
# required key described in the specification of 3dtiles
96-
# https://docs.ogc.org/cs/22-025r4/22-025r4.html#toc92
97-
is_valid = all(
98-
key in _file.keys() for key in ("asset", "geometricError", "root")
99-
)
100-
101-
if not is_valid:
102-
raise Invalid3DTilesException(
103-
"The provided 3DTiles is not valid, some of the mandatory keys are missing. Mandatory keys are: 'asset', 'geometricError', 'root'"
104-
)
95+
_file = Tiles3DFileHandler.is_3dtiles_json(_file)
10596

10697
Tiles3DFileHandler.validate_3dtile_payload(payload=_file)
10798

10899
except Exception as e:
109100
raise Invalid3DTilesException(e)
110101

111102
return True
103+
104+
@staticmethod
105+
def is_3dtiles_json(_file):
106+
with open(_file, "r") as _readed_file:
107+
_file = json.loads(_readed_file.read())
108+
# required key described in the specification of 3dtiles
109+
# https://docs.ogc.org/cs/22-025r4/22-025r4.html#toc92
110+
is_valid = all(key in _file.keys() for key in ("asset", "geometricError", "root"))
111+
112+
if not is_valid:
113+
raise Invalid3DTilesException(
114+
"The provided 3DTiles is not valid, some of the mandatory keys are missing. Mandatory keys are: 'asset', 'geometricError', 'root'"
115+
)
116+
117+
return _file
112118

113119
@staticmethod
114120
def validate_3dtile_payload(payload):
@@ -212,7 +218,7 @@ def create_geonode_resource(
212218
asset=None,
213219
):
214220
# we want just the tileset.json as location of the asset
215-
asset.location = [path for path in asset.location if "tileset.json" in path]
221+
asset.location = [path for path in asset.location if path.endswith(".json")]
216222
asset.save()
217223

218224
resource = super().create_geonode_resource(

importer/migrations/0006_dataset_migration.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -11,7 +11,7 @@ def dataset_migration(apps, _):
1111
pk__in=NewResources.objects.values_list("resource_id", flat=True)
1212
).exclude(subtype__in=["remote", None]):
1313
# generating orchestrator expected data file
14-
if not old_resource.files:
14+
if not hasattr(old_resource, "files"):
1515
if old_resource.is_vector():
1616
converted_files = [{"base_file": "placeholder.shp"}]
1717
else:
Lines changed: 54 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,54 @@
1+
# Generated by Django 3.2.15 on 2022-10-04 13:03
2+
3+
import logging
4+
from django.db import migrations
5+
from importer.orchestrator import orchestrator
6+
from geonode.layers.models import Dataset
7+
from geonode.assets.utils import get_default_asset
8+
from geonode.utils import get_allowed_extensions
9+
10+
logger = logging.getLogger("django")
11+
12+
def dataset_migration(apps, _):
13+
NewResources = apps.get_model("importer", "ResourceHandlerInfo")
14+
for old_resource in Dataset.objects.exclude(
15+
pk__in=NewResources.objects.values_list("resource_id", flat=True)
16+
).exclude(subtype__in=["remote", None]):
17+
# generating orchestrator expected data file
18+
if old_resource.resourcehandlerinfo_set.first() is None:
19+
if get_default_asset(old_resource):
20+
available_choices = get_allowed_extensions()
21+
not_main_files = ["xml", "sld", "zip", "kmz"]
22+
base_file_choices = set(x for x in available_choices if x not in not_main_files)
23+
output_files = dict()
24+
for _file in get_default_asset(old_resource).location:
25+
if _file.split(".")[-1] in base_file_choices:
26+
output_files.update({"base_file": _file})
27+
break
28+
else:
29+
if old_resource.is_vector():
30+
output_files = {"base_file": "placeholder.shp"}
31+
else:
32+
output_files = {"base_file": "placeholder.tiff"}
33+
34+
handler = orchestrator.get_handler(output_files)
35+
if handler is None:
36+
logger.error(f"Handler not found for resource: {old_resource}")
37+
continue
38+
handler.create_resourcehandlerinfo(
39+
handler_module_path=str(handler),
40+
resource=old_resource,
41+
execution_id=None
42+
)
43+
else:
44+
logger.debug(f"resourcehandler info already exists for the resource")
45+
46+
47+
class Migration(migrations.Migration):
48+
dependencies = [
49+
("importer", "0006_dataset_migration"),
50+
]
51+
52+
operations = [
53+
migrations.RunPython(dataset_migration),
54+
]

0 commit comments

Comments (0)