Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions docker-compose-test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ services:

# Geoserver backend
geoserver:
image: geonode/geoserver:2.24.3-latest
image: geonode/geoserver:2.24.4-v2
container_name: geoserver4importer
healthcheck:
test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://geoserver:8080/geoserver/ows"
Expand All @@ -53,7 +53,7 @@ services:
condition: service_healthy

data-dir-conf:
image: geonode/geoserver_data:2.24.3-latest
image: geonode/geoserver_data:2.24.4-v2
container_name: gsconf4importer
entrypoint: sleep infinity
volumes:
Expand Down
5 changes: 3 additions & 2 deletions importer/celery_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
IMPORTER_RESOURCE_CREATION_RATE_LIMIT,
)
from importer.utils import call_rollback_function, error_handler, find_key_recursively
from importer.handlers.base import BaseHandler

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -610,8 +611,8 @@ def copy_dynamic_model(
raise Exception("The resource requested does not exists")

resource = resource.first()

new_dataset_alternate = create_alternate(resource.title, exec_id).lower()
sanitized_title = BaseHandler().fixup_name(resource.title)
new_dataset_alternate = create_alternate(sanitized_title, exec_id).lower()

if os.getenv("IMPORTER_ENABLE_DYN_MODELS", False):
dynamic_schema = ModelSchema.objects.filter(name=alternate.split(":")[1])
Expand Down
60 changes: 45 additions & 15 deletions importer/handlers/common/raster.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@
from osgeo import gdal
from importer.celery_app import importer_app
from geonode.storage.manager import storage_manager
from geonode.assets.handlers import asset_handler_registry
from geonode.assets.models import Asset
from geonode.assets.utils import create_link

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -190,6 +193,11 @@ def extract_resource_to_publish(
self, files, action, layer_name, alternate, **kwargs
):
if action == exa.COPY.value:
nl = kwargs.get("new_file_location") or kwargs.get("kwargs", {}).get("new_file_location", {})
raster_path = None
if nl:
files_list = nl.get("files") or []
raster_path = files_list[0] if files_list else None
return [
{
"name": alternate,
Expand All @@ -199,9 +207,7 @@ def extract_resource_to_publish(
)
.first()
.srid,
"raster_path": kwargs["kwargs"]
.get("new_file_location")
.get("files")[0],
"raster_path": raster_path
}
]

Expand Down Expand Up @@ -466,6 +472,19 @@ def overwrite_resourcehandlerinfo(
return self.create_resourcehandlerinfo(
handler_module_path, resource, execution_id, **kwargs
)
def _prepare_assets_for_copy(self, resource, kwargs):
    """
    Collect the assets needed to clone *resource*.

    Returns a 2-tuple ``(cloned_asset, assets_to_link)``:
    the already-cloned "Original" asset (resolved from the payload, or
    looked up by id when only ``asset_id`` was provided) and the queryset
    of the resource's other assets that must be linked to the copy.
    """
    # The payload may arrive either at the top level or nested under "kwargs".
    location = (
        kwargs.get("new_file_location")
        or kwargs.get("kwargs", {}).get("new_file_location", {})
    )

    cloned_asset = location.get("asset")
    asset_pk = location.get("asset_id")
    if not cloned_asset and asset_pk:
        # Only the id travelled through celery; re-fetch the Asset row.
        cloned_asset = Asset.objects.filter(pk=asset_pk).first()

    # Every asset attached to the source resource except the "Original"
    # one (which was cloned separately) should be linked to the new copy.
    assets_to_link = Asset.objects.filter(link__resource=resource).exclude(
        title="Original"
    )
    return cloned_asset, assets_to_link

def copy_geonode_resource(
self,
Expand All @@ -476,16 +495,18 @@ def copy_geonode_resource(
new_alternate: str,
**kwargs,
):
resource = self.create_geonode_resource(
cloned_asset, assets_to_link = self._prepare_assets_for_copy(resource, kwargs)

new_resource = self.create_geonode_resource(
layer_name=data_to_update.get("title"),
alternate=new_alternate,
execution_id=str(_exec.exec_id),
asset=kwargs.get("kwargs", {})
.get("new_file_location", {})
.get("asset", []),
asset=cloned_asset,
)
resource.refresh_from_db()
return resource
[create_link(new_resource, asset) for asset in assets_to_link]

new_resource.refresh_from_db()
return new_resource

def _get_execution_request_object(self, execution_id: str):
    """Return the ExecutionRequest with the given exec_id, or None if missing."""
    matches = ExecutionRequest.objects.filter(exec_id=execution_id)
    return matches.first()
Expand Down Expand Up @@ -538,16 +559,25 @@ def copy_raster_file(

original_dataset = original_dataset.first()

if not original_dataset.files:
# Ensure the dataset has at least one Asset associated
filters = {"link__resource": original_dataset, "title": "Original"}
if not Asset.objects.filter(**filters).exists():
raise InvalidGeoTiffException(
"The original file of the dataset is not available, Is not possible to copy the dataset"
"The dataset does not have any original asset associated; cannot copy the dataset"
)
original_asset = Asset.objects.filter(**filters).last()

new_file_location = orchestrator.load_handler(
handler_module_path
).copy_original_file(original_dataset)
# The original asset is cloned here, as it is required for the creation of the new cloned resource. Other associated assets will be linked to the new resource later in the process.
cloned_asset = asset_handler_registry.get_handler(original_asset).clone(original_asset)
new_file_location = {
"files": cloned_asset.location if getattr(cloned_asset, "location", None) else [],
"asset_id": cloned_asset.id,
}
if not new_file_location["files"]:
raise InvalidGeoTiffException("Could not determine the location of the copied file")

new_dataset_alternate = create_alternate(original_dataset.title, exec_id)
sanitized_title = BaseHandler().fixup_name(original_dataset.title)
new_dataset_alternate = create_alternate(sanitized_title, exec_id)

additional_kwargs = {
"original_dataset_alternate": original_dataset.alternate,
Expand Down
Loading