From 2f0f190807bf24a033b69d45740ef6d7ae3fd5d7 Mon Sep 17 00:00:00 2001 From: Henning Bredel Date: Thu, 29 Feb 2024 13:53:00 +0100 Subject: [PATCH 1/8] Updates image tags for major and minor version --- .github/workflows/52n-release.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/52n-release.yml b/.github/workflows/52n-release.yml index 983fec64aae..6a72b28abc6 100644 --- a/.github/workflows/52n-release.yml +++ b/.github/workflows/52n-release.yml @@ -39,6 +39,10 @@ jobs: name: Extract metadata (tags, labels) for Docker id: meta uses: docker/metadata-action@v4 + env: + MAJOR_VERSION: ${{ steps.semver_parser.outputs.major }} + MAJOR_MINOR_VERSION: ${{ steps.semver_parser.outputs.major }}-${{ steps.semver_parser.outputs.minor }} + MAJOR_MINOR_PATCH_VERSION: ${{ steps.semver_parser.outputs.fullversion }} with: images: ${{ env.IMAGE }} labels: | @@ -49,7 +53,9 @@ jobs: "org.opencontainers.image.licenses=${{ env.LICENSE }}" tags: | latest - ${{ steps.semver_parser.outputs.fullversion }} + ${{ env.MAJOR_VERSION }} + ${{ env.MAJOR_MINOR_VERSION }} + ${{ env.MAJOR_MINOR_PATCH_VERSION }} - name: Login to Docker Hub uses: docker/login-action@v2 From 5f0b294201506fdddfaf07d5ed323c52dd12977f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 5 Mar 2024 18:30:39 +0100 Subject: [PATCH 2/8] Upgrade Remote Docker version for CircleCI (#12020) (#12022) (cherry picked from commit cd1ad83e3161c70c912da67d231e7be06dbaf523) Co-authored-by: Giovanni Allegri --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 782732b6ff3..a5524f5ab34 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -21,7 +21,7 @@ jobs: - checkout - setup_remote_docker: docker_layer_caching: false - version: 20.10.12 + version: docker24 - run: name: Build the stack From 4b8a1696642265ebe5a88cb951f4c3561d67b006 Mon Sep 17 00:00:00 2001 From: Emanuele Tajariol Date: Wed, 6 Mar 2024 11:34:44 +0100 Subject: [PATCH 3/8] [Fixes #11977] B/R should only deal with data (#12006) --- geonode/br/management/commands/backup.py | 125 +---------------- geonode/br/management/commands/restore.py | 161 +++------------------- 2 files changed, 20 insertions(+), 266 deletions(-) diff --git a/geonode/br/management/commands/backup.py b/geonode/br/management/commands/backup.py index 9741d084ccf..1b23dde6915 100644 --- a/geonode/br/management/commands/backup.py +++ b/geonode/br/management/commands/backup.py @@ -156,6 +156,9 @@ def execute_backup(self, **options): # Dump Fixtures logger.info("*** Dumping GeoNode fixtures...") + fixtures_target = os.path.join(target_folder, "fixtures") + os.makedirs(fixtures_target, exist_ok=True) + for app_name, dump_name in zip(config.app_names, config.dump_names): # prevent dumping BackupRestore application if app_name == "br": @@ -163,7 +166,7 @@ def execute_backup(self, **options): logger.info(f" - Dumping '{app_name}' into '{dump_name}.json'") # Point stdout at a file for dumping data to. 
- with open(os.path.join(target_folder, f"{dump_name}.json"), "w") as output: + with open(os.path.join(fixtures_target, f"{dump_name}.json"), "w") as output: call_command("dumpdata", app_name, format="json", indent=2, stdout=output) # Store Media Root @@ -181,126 +184,6 @@ def execute_backup(self, **options): ) logger.info(f"Saved media files from '{media_root}'") - # Store Static Root - logger.info("*** Dumping GeoNode static folder...") - - static_root = settings.STATIC_ROOT - static_folder = os.path.join(target_folder, utils.STATIC_ROOT) - if not os.path.exists(static_folder): - os.makedirs(static_folder, exist_ok=True) - - copy_tree( - static_root, - static_folder, - ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]), - ) - logger.info(f"Saved static root from '{static_root}'.") - - # Store Static Folders - logger.info("*** Dumping GeoNode static files...") - - static_folders = settings.STATICFILES_DIRS - static_files_folders = os.path.join(target_folder, utils.STATICFILES_DIRS) - if not os.path.exists(static_files_folders): - os.makedirs(static_files_folders, exist_ok=True) - - for static_files_folder in static_folders: - # skip dumping of static files of apps not located under PROJECT_ROOT path - # (check to prevent saving files from site-packages in project-template based GeoNode projects) - if getattr(settings, "PROJECT_ROOT", None) and not static_files_folder.startswith( - settings.PROJECT_ROOT - ): - logger.info( - f"Skipping static directory: {static_files_folder}. " - f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}." - ) - continue - - static_folder = os.path.join( - static_files_folders, os.path.basename(os.path.normpath(static_files_folder)) - ) - if not os.path.exists(static_folder): - os.makedirs(static_folder, exist_ok=True) - - copy_tree( - static_files_folder, - static_folder, - ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]), - ) - logger.info(f"Saved static files from '{static_files_folder}'.") - - # Store Template Folders - logger.info("*** Dumping GeoNode template folders...") - - template_folders = [] - try: - template_folders = settings.TEMPLATE_DIRS - except Exception: - try: - template_folders = settings.TEMPLATES[0]["DIRS"] - except Exception: - pass - template_files_folders = os.path.join(target_folder, utils.TEMPLATE_DIRS) - if not os.path.exists(template_files_folders): - os.makedirs(template_files_folders, exist_ok=True) - - for template_files_folder in template_folders: - # skip dumping of template files of apps not located under PROJECT_ROOT path - # (check to prevent saving files from site-packages in project-template based GeoNode projects) - if getattr(settings, "PROJECT_ROOT", None) and not template_files_folder.startswith( - settings.PROJECT_ROOT - ): - logger.info( - f"Skipping template directory: {template_files_folder}. " - f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}." 
- ) - continue - - template_folder = os.path.join( - template_files_folders, os.path.basename(os.path.normpath(template_files_folder)) - ) - if not os.path.exists(template_folder): - os.makedirs(template_folder, exist_ok=True) - - copy_tree( - template_files_folder, - template_folder, - ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]), - ) - logger.info(f"Saved template files from '{template_files_folder}'.") - - # Store Locale Folders - logger.info("*** Dumping GeoNode locale folders...") - locale_folders = settings.LOCALE_PATHS - locale_files_folders = os.path.join(target_folder, utils.LOCALE_PATHS) - if not os.path.exists(locale_files_folders): - os.makedirs(locale_files_folders, exist_ok=True) - - for locale_files_folder in locale_folders: - # skip dumping of locale files of apps not located under PROJECT_ROOT path - # (check to prevent saving files from site-packages in project-template based GeoNode projects) - if getattr(settings, "PROJECT_ROOT", None) and not locale_files_folder.startswith( - settings.PROJECT_ROOT - ): - logger.info( - f"Skipping locale directory: {locale_files_folder}. " - f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}." - ) - continue - - locale_folder = os.path.join( - locale_files_folders, os.path.basename(os.path.normpath(locale_files_folder)) - ) - if not os.path.exists(locale_folder): - os.makedirs(locale_folder, exist_ok=True) - - copy_tree( - locale_files_folder, - locale_folder, - ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]), - ) - logger.info(f"Saved Locale Files from '{locale_files_folder}'.") - # Create Final ZIP Archive logger.info("*** Creating final ZIP archive...") diff --git a/geonode/br/management/commands/restore.py b/geonode/br/management/commands/restore.py index 7544c2215fc..95ee228d955 100755 --- a/geonode/br/management/commands/restore.py +++ b/geonode/br/management/commands/restore.py @@ -59,7 +59,11 @@ def add_arguments(self, parser): # Named (optional) arguments utils.option(parser) - utils.geoserver_option_list(parser) + parser.add_argument( + "--geoserver-data-dir", + dest="gs_data_dir", + default=None, + help="Geoserver data directory") parser.add_argument( "-i", @@ -107,7 +111,7 @@ def add_arguments(self, parser): "--recovery-file", dest="recovery_file", default=None, - help="Backup archive containing GeoNode data to restore.", + help="Archive that shall be used to restore the original content of GeoNode should the restore fail.", ) parser.add_argument( @@ -165,7 +169,7 @@ def handle(self, **options): skip_read_only = options.get("skip_read_only") config = Configuration.load() - # activate read only mode and store it's original config value + # activate read only mode and store its original config value if not skip_read_only: original_read_only_value = config.read_only config.read_only = True @@ -261,21 +265,6 @@ def execute_restore(self, **options): # Write Checks media_root = settings.MEDIA_ROOT media_folder = os.path.join(target_folder, utils.MEDIA_ROOT) - static_root = settings.STATIC_ROOT - static_folder = os.path.join(target_folder, utils.STATIC_ROOT) - static_folders = settings.STATICFILES_DIRS - static_files_folders = os.path.join(target_folder, utils.STATICFILES_DIRS) - template_folders = [] - try: - template_folders = settings.TEMPLATE_DIRS - except Exception: - try: - template_folders = settings.TEMPLATES[0]["DIRS"] - except Exception: - pass - template_files_folders = os.path.join(target_folder, utils.TEMPLATE_DIRS) - 
locale_folders = settings.LOCALE_PATHS - locale_files_folders = os.path.join(target_folder, utils.LOCALE_PATHS) try: logger.info("*** Performing some checks...") @@ -283,20 +272,6 @@ def execute_restore(self, **options): chmod_tree(restore_folder) logger.info(f"[Sanity Check] Full Write Access to media root: '{media_root}' ...") chmod_tree(media_root) - logger.info(f"[Sanity Check] Full Write Access to static root: '{static_root}' ...") - chmod_tree(static_root) - for folder in static_folders: - if getattr(settings, "PROJECT_ROOT", None) and folder.startswith(settings.PROJECT_ROOT): - logger.info(f"[Sanity Check] Full Write Access to static file folder: '{folder}' ...") - chmod_tree(folder) - for folder in template_folders: - if getattr(settings, "PROJECT_ROOT", None) and folder.startswith(settings.PROJECT_ROOT): - logger.info(f"[Sanity Check] Full Write Access to template folder: '{folder}' ...") - chmod_tree(folder) - for folder in locale_folders: - if getattr(settings, "PROJECT_ROOT", None) and folder.startswith(settings.PROJECT_ROOT): - logger.info(f"[Sanity Check] Full Write Access to locale files folder: '{folder}' ...") - chmod_tree(folder) except Exception as e: if notify: restore_notification.apply_async( @@ -361,14 +336,6 @@ def execute_restore(self, **options): logger.info("*** Align the database schema") # call_command('makemigrations', interactive=False) call_command("migrate", interactive=False) - - # db_name = settings.DATABASES['default']['NAME'] - # db_user = settings.DATABASES['default']['USER'] - # db_port = settings.DATABASES['default']['PORT'] - # db_host = settings.DATABASES['default']['HOST'] - # db_passwd = settings.DATABASES['default']['PASSWORD'] - # - # utils.patch_db(db_name, db_user, db_port, db_host, db_passwd, settings.MONITORING_ENABLED) except Exception as e: logger.warning(f"Error while aligning the db: {e}", exc_info=e) @@ -397,8 +364,15 @@ def execute_restore(self, **options): err_cnt = 0 logger.info("*** Restoring GeoNode fixtures...") + + fixtures_folder = os.path.join(target_folder, "fixtures") + if not os.path.exists(fixtures_folder): + # fixtures folder was introduced on 2024-02; make the restore command lenient about + # dumps created without such a folder (this behaviour may be removed in a short while) + fixtures_folder = target_folder + for app_name, dump_name in zip(config.app_names, config.dump_names): - fixture_file = os.path.join(target_folder, f"{dump_name}.json") + fixture_file = os.path.join(fixtures_folder, f"{dump_name}.json") logger.info(f" - restoring '{fixture_file}'") try: @@ -430,109 +404,6 @@ def execute_restore(self, **options): chmod_tree(media_root) logger.info(f"Media files restored into '{media_root}'.") - # Restore Static Root - logger.info("*** Restore static root...") - if config.gs_data_dt_filter[0] is None: - shutil.rmtree(static_root, ignore_errors=True) - - if not os.path.exists(static_root): - os.makedirs(static_root, exist_ok=True) - - copy_tree(static_folder, static_root) - chmod_tree(static_root) - logger.info(f"Static root restored into '{static_root}'.") - - # Restore Static Folders - logger.info("*** Restore static folders...") - - for folder in static_folders: - logger.info(f"* Restoring {folder}...") - - # skip restoration of static files of apps not located under PROJECT_ROOT path - # (check to prevent overriding files from site-packages - # in project-template based GeoNode projects) - if getattr(settings, "PROJECT_ROOT", None) and not folder.startswith(settings.PROJECT_ROOT): - logger.info( - f"Skipping 
static directory: {folder}. " - f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}." - ) - continue - - if config.gs_data_dt_filter[0] is None: - logger.info(f"Cleaning {folder}...") - shutil.rmtree(folder, ignore_errors=True) - - logger.info(f"Restoring {folder}...") - if not os.path.exists(folder): - os.makedirs(folder, exist_ok=True) - - copy_tree( - os.path.join(static_files_folders, os.path.basename(os.path.normpath(folder))), folder - ) - chmod_tree(folder) - logger.info(f"Static files restored into '{folder}'.") - - # Restore Template Folders - logger.info("*** Restore template folders...") - for folder in template_folders: - logger.info(f"* Restoring {folder}...") - - # skip restoration of template files of apps not located under PROJECT_ROOT path - # (check to prevent overriding files from site-packages - # in project-template based GeoNode projects) - if getattr(settings, "PROJECT_ROOT", None) and not folder.startswith(settings.PROJECT_ROOT): - logger.info( - f"Skipping template directory: {folder}. " - f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}." - ) - continue - - if config.gs_data_dt_filter[0] is None: - logger.info(f"Cleaning {folder}...") - shutil.rmtree(folder, ignore_errors=True) - - logger.info(f"Restoring {folder}...") - if not os.path.exists(folder): - os.makedirs(folder, exist_ok=True) - - copy_tree( - os.path.join(template_files_folders, os.path.basename(os.path.normpath(folder))), folder - ) - chmod_tree(folder) - logger.info(f"Template files restored into '{folder}'.") - - # Restore Locale Folders - logger.info("*** Restore locale folders...") - for folder in locale_folders: - logger.info(f"* Restoring {folder}...") - - # skip restoration of locale files of apps not located under PROJECT_ROOT path - # (check to prevent overriding files from site-packages - # in project-template based GeoNode projects) - if getattr(settings, "PROJECT_ROOT", None) and not folder.startswith(settings.PROJECT_ROOT): - logger.info( - f"Skipping locale directory: {folder}. " - f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}." 
- ) - continue - - if config.gs_data_dt_filter[0] is None: - logger.info(f"Cleaning {folder}...") - shutil.rmtree(folder, ignore_errors=True) - - logger.info(f"Restoring {folder}...") - if not os.path.exists(folder): - os.makedirs(folder, exist_ok=True) - - copy_tree( - os.path.join(locale_files_folders, os.path.basename(os.path.normpath(folder))), folder - ) - chmod_tree(folder) - logger.info(f"Locale Files Restored into '{folder}'.") - - logger.info("*** Calling collectstatic...") - call_command("collectstatic", interactive=False) - # store backup info restored_backup = RestoredBackup( name=backup_file.rsplit("/", 1)[-1], @@ -771,7 +642,7 @@ def bstr(x): gs_bk_exec_status = gs_backup["restore"]["execution"]["status"] gs_bk_exec_progress = gs_backup["restore"]["execution"]["progress"] - logger.info(f"Async backup status: {gs_bk_exec_status} - {gs_bk_exec_progress}") + logger.info(f"Async restore status: {gs_bk_exec_status} - {gs_bk_exec_progress}") time.sleep(3) else: raise ValueError(error_backup.format(url, r.status_code, r.text)) From 4bd93cc6b667189fe17971b208f24d351e5d648d Mon Sep 17 00:00:00 2001 From: Giovanni Allegri Date: Thu, 14 Mar 2024 19:18:54 +0100 Subject: [PATCH 4/8] [Backport 4.2.x] Upgrade to GeoServer 2.24.2 (#12048) * [Fixes #12039] Upgrade to GeoServer 2.24.2 (#12040) * [Fixes #12039] Upgrade to GeoServer 2.24.2 * Update to Geoserver 2.24.2-latest image --------- Co-authored-by: Giovanni Allegri * fix conflict * fixed image version * fixed typo --------- Co-authored-by: Emanuele Tajariol --- docker-compose-test.yml | 4 ++-- docker-compose.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker-compose-test.yml b/docker-compose-test.yml index a0da45aafa0..2d649b8df67 100644 --- a/docker-compose-test.yml +++ b/docker-compose-test.yml @@ -92,7 +92,7 @@ services: # Geoserver backend geoserver: - image: geonode/geoserver:2.23.3-v2 + image: geonode/geoserver:2.24.2-latest container_name: geoserver4${COMPOSE_PROJECT_NAME} healthcheck: test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://geoserver:8080/geoserver/ows" @@ -118,7 +118,7 @@ services: condition: service_healthy data-dir-conf: - image: geonode/geoserver_data:2.23.3-v1 + image: geonode/geoserver_data:2.24.2-latest container_name: gsconf4${COMPOSE_PROJECT_NAME} entrypoint: sleep infinity volumes: diff --git a/docker-compose.yml b/docker-compose.yml index d073b1cbbcf..d0529c178f4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -91,7 +91,7 @@ services: # Geoserver backend geoserver: - image: geonode/geoserver:2.23.3-v2 + image: geonode/geoserver:2.24.2-latest container_name: geoserver4${COMPOSE_PROJECT_NAME} healthcheck: test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://geoserver:8080/geoserver/ows" @@ -117,7 +117,7 @@ services: condition: service_healthy data-dir-conf: - image: geonode/geoserver_data:2.23.3-v1 + image: geonode/geoserver_data:2.24.2-latest container_name: gsconf4${COMPOSE_PROJECT_NAME} entrypoint: sleep infinity volumes: From 0bc9040417ad97b5dcbd844ca36ec161a6b44be9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 16:39:45 +0100 Subject: [PATCH 5/8] [Fixes #12063] Cannot clone map/geoapp with linked resources (#12064) (#12065) (cherry picked from commit f6a2884582e75ab56f8dbc64e1620fe9ae28e1de) Co-authored-by: mattiagiupponi <51856725+mattiagiupponi@users.noreply.github.com> --- 
geonode/resource/manager.py | 4 ++-- geonode/resource/tests.py | 22 +++++++++++++++++++++- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/geonode/resource/manager.py b/geonode/resource/manager.py index 6b2cead54ca..ad64452d6dc 100644 --- a/geonode/resource/manager.py +++ b/geonode/resource/manager.py @@ -513,11 +513,11 @@ def copy( defaults.pop("name") _resource.save() for lr in LinkedResource.get_linked_resources(source=instance.pk, is_internal=False): - LinkedResource.object.get_or_create( + LinkedResource.objects.get_or_create( source_id=_resource.pk, target_id=lr.target.pk, internal=False ) for lr in LinkedResource.get_linked_resources(target=instance.pk, is_internal=False): - LinkedResource.object.get_or_create( + LinkedResource.objects.get_or_create( source_id=lr.source.pk, target_id=_resource.pk, internal=False ) diff --git a/geonode/resource/tests.py b/geonode/resource/tests.py index be7f5428c82..9ef2467a6fc 100644 --- a/geonode/resource/tests.py +++ b/geonode/resource/tests.py @@ -29,7 +29,7 @@ from geonode.base.populate_test_data import create_models from geonode.tests.base import GeoNodeBaseTestSupport from geonode.resource.manager import ResourceManager -from geonode.base.models import ResourceBase +from geonode.base.models import LinkedResource, ResourceBase from geonode.layers.models import Dataset from geonode.services.models import Service from geonode.documents.models import Document @@ -173,6 +173,26 @@ def _copy_assert_resource(res, title): self.assertTrue(isinstance(res, Map)) _copy_assert_resource(res, "A Test Map 2") + def test_resource_copy_with_linked_resources(self): + def _copy_assert_resource(res, title): + dataset_copy = None + try: + dataset_copy = self.rm.copy(res, defaults=dict(title=title)) + self.assertIsNotNone(dataset_copy) + self.assertEqual(dataset_copy.title, title) + finally: + if dataset_copy: + dataset_copy.delete() + self.assertIsNotNone(res) + res.delete() + + # copy with maps + res = create_single_map("A Test Map") + target = ResourceBase.objects.first() + LinkedResource.objects.get_or_create(source_id=res.id, target_id=target.id) + self.assertTrue(isinstance(res, Map)) + _copy_assert_resource(res, "A Test Map 2") + @patch.object(ResourceManager, "_validate_resource") def test_append(self, mock_validator): mock_validator.return_value = True From d46e227303cb5b6e35580ab9d80515554bbe088c Mon Sep 17 00:00:00 2001 From: Emanuele Tajariol Date: Wed, 20 Mar 2024 20:16:30 +0100 Subject: [PATCH 6/8] [Fixes #12068][Backport 4.2.x] linked_resources API is slow (#12071) * [Fixes #12068][Backport 4.2.x] linked_resources API is slow * Fix black check --- geonode/base/api/tests.py | 124 ++++++++++++++++++++++++----------- geonode/base/api/views.py | 89 +++++++++++++++---------- geonode/geoserver/signals.py | 6 +- 3 files changed, 146 insertions(+), 73 deletions(-) diff --git a/geonode/base/api/tests.py b/geonode/base/api/tests.py index 4833a955484..bbff21b3267 100644 --- a/geonode/base/api/tests.py +++ b/geonode/base/api/tests.py @@ -1951,10 +1951,10 @@ def test_set_resource_thumbnail(self): self.assertEqual(response.json(), "The url must be of an image with format (png, jpeg or jpg)") # using Base64 data as an ASCII byte string - data[ - "file" - ] = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAABHNCSVQICAgI\ + data["file"] = ( + "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAABHNCSVQICAgI\ fAhkiAAAABl0RVh0U29mdHdhcmUAZ25vbWUtc2NyZWVuc2hvdO8Dvz4AAAANSURBVAiZYzAxMfkPAALYAZzx61+bAAAAAElFTkSuQmCC" + 
) with patch("geonode.base.models.is_monochromatic_image") as _mck: _mck.return_value = False response = self.client.put(url, data=data, format="json") @@ -2623,15 +2623,6 @@ def test_linked_resource_for_document(self): # validation self.assertEqual(response.status_code, 200) payload = response.json() - self.assert_linkedres_size(payload, "resources", 2) - self.assert_linkedres_contains( - payload, - "resources", - ( - {"pk": self.map.id, "title": ">>> " + self.map.title}, - {"pk": self.dataset.id, "title": ">>> " + self.dataset.title}, - ), - ) self.assert_linkedres_size(payload, "linked_to", 2) self.assert_linkedres_contains( payload, @@ -2644,7 +2635,11 @@ def test_linked_resource_for_document(self): d.delete() def assert_linkedres_size(self, payload, element: str, expected_size: int): - self.assertEqual(expected_size, len(payload[element]), f"Mismatching payload size of {element}") + self.assertEqual( + expected_size, + len(payload[element]), + f"Mismatching payload size of '{element}': exp:{expected_size} found:{payload[element]}", + ) def assert_linkedres_contains(self, payload, element: str, expected_elements: Iterable): res_list = payload[element] @@ -2684,15 +2679,6 @@ def test_linked_resource_for_maps_mixed(self): self.assertEqual(response.status_code, 200) payload = response.json() - self.assert_linkedres_size(payload, "resources", 2) - self.assert_linkedres_contains( - payload, - "resources", - ( - {"pk": self.doc.id, "title": "<<< " + self.doc.title}, - {"pk": self.dataset.id, "title": ">>> " + self.dataset.title}, - ), - ) self.assert_linkedres_size(payload, "linked_to", 1) self.assert_linkedres_contains( payload, "linked_to", ({"pk": self.dataset.id, "title": self.dataset.title},) @@ -2705,6 +2691,7 @@ def test_linked_resource_for_maps_mixed(self): d.delete() def test_linked_resources_for_maps(self): + _m = None try: # data preparation _m = MapLayer.objects.create( @@ -2723,10 +2710,6 @@ def test_linked_resources_for_maps(self): self.assertEqual(response.status_code, 200) payload = response.json() - self.assert_linkedres_size(payload, "resources", 1) - self.assert_linkedres_contains( - payload, "resources", ({"pk": self.dataset.id, "title": ">>> " + self.dataset.title},) - ) self.assert_linkedres_size(payload, "linked_to", 1) self.assert_linkedres_contains( payload, "linked_to", ({"pk": self.dataset.id, "title": self.dataset.title},) @@ -2757,10 +2740,6 @@ def test_linked_resource_for_dataset(self): self.assertEqual(response.status_code, 200) payload = response.json() - self.assert_linkedres_size(payload, "resources", 1) - self.assert_linkedres_contains( - payload, "resources", ({"pk": self.map.id, "title": "<<< " + self.map.title},) - ) self.assert_linkedres_size(payload, "linked_to", 0) self.assert_linkedres_size(payload, "linked_by", 1) self.assert_linkedres_contains(payload, "linked_by", ({"pk": self.map.id, "title": self.map.title},)) @@ -2791,15 +2770,6 @@ def test_linked_resource_for_datasets_mixed(self): # validation self.assertEqual(response.status_code, 200) payload = response.json() - self.assert_linkedres_size(payload, "resources", 2) - self.assert_linkedres_contains( - payload, - "resources", - ( - {"pk": self.doc.id, "title": "<<< " + self.doc.title}, - {"pk": self.map.id, "title": "<<< " + self.map.title}, - ), - ) self.assert_linkedres_size(payload, "linked_to", 0) self.assert_linkedres_size(payload, "linked_by", 2) self.assert_linkedres_contains( @@ -2848,6 +2818,82 @@ def test_linked_resource_deprecated_pagination(self): for d in _d: d.delete() + def 
test_linked_resource_filter_one_resource_type(self): + _d = [] + try: + # data preparation + _d.append(LinkedResource.objects.create(source_id=self.doc.id, target_id=self.dataset.id)) + _d.append(LinkedResource.objects.create(source_id=self.doc.id, target_id=self.map.id)) + resource_type_param = "dataset" + # call api with single resource_type param + url = reverse("base-resources-linked_resources", args=[self.doc.id]) + response = self.client.get(f"{url}?resource_type={resource_type_param}") + + # validation + self.assertEqual(response.status_code, 200) + payload = response.json() + + res_types_orig = resource_type_param.split(",") + res_types_payload = [res["resource_type"] for res in payload["linked_to"]] + for r in res_types_payload: + self.assertTrue(r in res_types_orig) + + finally: + for d in _d: + d.delete() + + def test_linked_resource_filter_multiple_resource_type_linktype(self): + _d = [] + try: + # data preparation + _d.append(LinkedResource.objects.create(source_id=self.doc.id, target_id=self.dataset.id)) + _d.append(LinkedResource.objects.create(source_id=self.doc.id, target_id=self.map.id)) + resource_type_param = "map" + link_type = "linked_to" + # call the API w/ both parameters + url = reverse("base-resources-linked_resources", args=[self.doc.id]) + response = self.client.get(f"{url}?resource_type={resource_type_param}&link_type={link_type}") + + # validation + self.assertEqual(response.status_code, 200) + payload = response.json() + + res_types_orig = resource_type_param.split(",") + res_types_payload = [res["resource_type"] for res in payload["linked_to"]] + for type in res_types_payload: + self.assertTrue(type in res_types_orig) + self.assertTrue({"linked_to", "WARNINGS"} == set(payload.keys())) + + finally: + for d in _d: + d.delete() + + def test_linked_resource_filter_multiple_resource_type_without_linktype(self): + _d = [] + try: + # data preparation + _d.append(LinkedResource.objects.create(source_id=self.doc.id, target_id=self.dataset.id)) + _d.append(LinkedResource.objects.create(source_id=self.doc.id, target_id=self.map.id)) + resource_type_param = "dataset,map" + # call the API w/ resource_type + url = reverse("base-resources-linked_resources", args=[self.doc.id]) + response = self.client.get(f"{url}?resource_type={resource_type_param}") + + # validation + self.assertEqual(response.status_code, 200) + payload = response.json() + + res_types_orig = resource_type_param.split(",") + res_types_payload = [res["resource_type"] for res in payload["linked_to"]] + for type in res_types_payload: + self.assertTrue(type in res_types_orig) + payload_keys = {"linked_by", "linked_to", "WARNINGS"} + self.assertTrue(payload_keys == set(payload.keys())) + + finally: + for d in _d: + d.delete() + class TestApiAdditionalBBoxCalculation(GeoNodeBaseTestSupport): @classmethod diff --git a/geonode/base/api/views.py b/geonode/base/api/views.py index 2721fd7c195..6132b54a412 100644 --- a/geonode/base/api/views.py +++ b/geonode/base/api/views.py @@ -17,6 +17,7 @@ # ######################################################################### import ast +import functools import json import re @@ -33,7 +34,7 @@ from django.shortcuts import get_object_or_404 from django.urls import reverse from django.conf import settings -from django.db.models import Subquery +from django.db.models import Subquery, QuerySet from django.http.request import QueryDict from django.contrib.auth import get_user_model @@ -97,7 +98,6 @@ ) from .serializers import ( FavoriteSerializer, - SimpleResourceSerializer, 
UserSerializer, PermSpecSerialiazer, GroupProfileSerializer, @@ -1493,47 +1493,70 @@ def linked_resources(self, request, pk, *args, **kwargs): def base_linked_resources(instance, user, params): + try: - visibile_resources = get_visible_resources( - ResourceBase.objects, + resource_type = params.get("resource_type") + link_type = params.get("link_type") + type_list = resource_type.split(",") if resource_type else [] + + warnings = {} + + if "page_size" in params or "page" in params: + warnings["PAGINATION"] = "Pagination is not supported on this call" + + ret = {"WARNINGS": warnings} + + get_visible_resources_p = functools.partial( + get_visible_resources, user=user, admin_approval_required=settings.ADMIN_MODERATE_UPLOADS, unpublished_not_visible=settings.RESOURCE_PUBLISHING, private_groups_not_visibile=settings.GROUP_PRIVATE_RESOURCES, - ).order_by("-pk") - visible_ids = [res.id for res in visibile_resources] + ) - linked_resources = [lres for lres in instance.get_linked_resources() if lres.target.id in visible_ids] - linked_by = [lres for lres in instance.get_linked_resources(as_target=True) if lres.source.id in visible_ids] + if not link_type or link_type == "linked_to": + # list of linked resources, probably extended by ResourceBase's child class - may be loopable only once + linked_to_over = instance.get_linked_resources() - warnings = { - "DEPRECATION": "'resources' field is deprecated, please use 'linked_to'", - } + # resolve the ids of linked resources - using either e QuerySet (preferred) or a list + if isinstance(linked_to_over, QuerySet): + linked_to_over_loopable = linked_to_over + linked_to_id_values = linked_to_over.values("target_id") + else: + linked_to_over_loopable = [lr for lr in linked_to_over] + linked_to_id_values = [lr.target_id for lr in linked_to_over_loopable] - if "page_size" in params or "page" in params: - warnings["PAGINATION"] = "Pagination is not supported on this call" + # filter resources by visibility / permissions + linked_to_visib = get_visible_resources_p(ResourceBase.objects.filter(id__in=linked_to_id_values)).order_by( + "-pk" + ) + # optionally filter by resource type + linked_to_visib = linked_to_visib.filter(resource_type__in=type_list) if type_list else linked_to_visib + linked_to_visib_ids = linked_to_visib.values_list("id", flat=True) + linked_to = [lres for lres in linked_to_over_loopable if lres.target.id in linked_to_visib_ids] + + ret["linked_to"] = LinkedResourceSerializer(linked_to, embed=True, many=True).data + + if not link_type or link_type == "linked_by": + linked_by_over = instance.get_linked_resources(as_target=True) + if isinstance(linked_by_over, QuerySet): + linked_by_over_loopable = linked_by_over + linked_by_id_values = linked_by_over.values("source_id") + else: + linked_by_over_loopable = [lr for lr in linked_by_over] + linked_by_id_values = [lr.source_id for lr in linked_by_over_loopable] - # "resources" will be deprecated, so next block is temporary - # "resources" at the moment it's the only element rendered, so we want to add there both the linked_resources and the linked_by - # we want to tell them apart, so we're adding an attr to store this info, that will be used in the SimpleResourceSerializer - resources = [] - for lres in linked_resources: - res = lres.target - setattr(res, "is_target", True) - resources.append(res) - for lres in linked_by: - res = lres.source - setattr(res, "is_target", False) - resources.append(res) - - ret = { - "WARNINGS": warnings, - "resources": SimpleResourceSerializer(resources, embed=True, 
many=True).data, # deprecated - "linked_to": LinkedResourceSerializer(linked_resources, embed=True, many=True).data, - "linked_by": LinkedResourceSerializer( + linked_by_visib = get_visible_resources_p(ResourceBase.objects.filter(id__in=linked_by_id_values)).order_by( + "-pk" + ) + + linked_by_visib = linked_by_visib.filter(resource_type__in=type_list) if type_list else linked_by_visib + linked_by_visib_ids = linked_by_visib.values_list("id", flat=True) + linked_by = [lres for lres in linked_by_over_loopable if lres.source.id in linked_by_visib_ids] + + ret["linked_by"] = LinkedResourceSerializer( instance=linked_by, serialize_source=True, embed=True, many=True - ).data, - } + ).data return Response(ret) diff --git a/geonode/geoserver/signals.py b/geonode/geoserver/signals.py index 70b96060303..e8b36dcde0f 100644 --- a/geonode/geoserver/signals.py +++ b/geonode/geoserver/signals.py @@ -18,6 +18,7 @@ ######################################################################### import errno import logging +from requests.exceptions import ConnectionError from deprecated import deprecated from geoserver.layer import Layer as GsLayer @@ -90,10 +91,13 @@ def geoserver_pre_save_maplayer(instance, sender, **kwargs): try: instance.local = isinstance(gs_catalog.get_layer(instance.name), GsLayer) + except ConnectionError as e: + logger.warning(f"Could not connect to catalog to verify if layer {instance.name} was local: {e}") except OSError as e: + logger.warning(f"***** OSERROR TYPE:{type(e)} ERR:{e} ERRNO:{e.errno}") if e.errno == errno.ECONNREFUSED: msg = f"Could not connect to catalog to verify if layer {instance.name} was local" - logger.warn(msg) + logger.warning(msg) else: raise e From 650deb4ce1f476db39c7115290a48536b16da266 Mon Sep 17 00:00:00 2001 From: Giovanni Allegri Date: Thu, 21 Mar 2024 17:08:05 +0100 Subject: [PATCH 7/8] [Backport #12050] [Fixes #12049] Adapt the SLD and XML upload forms to the new progress API (#12075) * [Fixes #12049] Adapt the SLD and XML upload forms to the new progress API (#12050) * Adapt SLD and XML forms to the new API * change source name and delete execution when completed * Fixed LayerInfo.js --- geonode/geoserver/context_processors.py | 1 + .../datasets/dataset_metadata_upload.html | 1 + .../datasets/dataset_style_upload.html | 1 + geonode/settings.py | 4 +- geonode/static/geonode/js/upload/LayerInfo.js | 44 ++++++++++++++++--- 5 files changed, 44 insertions(+), 7 deletions(-) diff --git a/geonode/geoserver/context_processors.py b/geonode/geoserver/context_processors.py index 464cfffa772..572a02469a1 100644 --- a/geonode/geoserver/context_processors.py +++ b/geonode/geoserver/context_processors.py @@ -30,6 +30,7 @@ def geoserver_urls(request): GEOSERVER_BASE_URL=ogc_server_settings.public_url, UPLOADER_URL=reverse("importer_upload"), LAYER_ANCILLARY_FILES_UPLOAD_URL=reverse("importer_upload"), + EXECUTION_STATUS_ENDPOINT=reverse("executionrequest-list"), MAPFISH_PRINT_ENABLED=getattr(ogc_server_settings, "MAPFISH_PRINT_ENABLED", False), PRINT_NG_ENABLED=getattr(ogc_server_settings, "PRINT_NG_ENABLED", False), GEONODE_SECURITY_ENABLED=getattr(ogc_server_settings, "GEONODE_SECURITY_ENABLED", False), diff --git a/geonode/layers/templates/datasets/dataset_metadata_upload.html b/geonode/layers/templates/datasets/dataset_metadata_upload.html index 6ef4d3d2e55..b5238bdf372 100644 --- a/geonode/layers/templates/datasets/dataset_metadata_upload.html +++ b/geonode/layers/templates/datasets/dataset_metadata_upload.html @@ -80,6 +80,7 @@
{% trans "Files to be uploaded" %}
csrf_token = "{{ csrf_token }}", form_target = "{{ LAYER_ANCILLARY_FILES_UPLOAD_URL }}", + executions_status_endpoint = "{{EXECUTION_STATUS_ENDPOINT}}", time_enabled = false, mosaic_enabled = false, userLookup = "{% url "account_ajax_lookup" %}" diff --git a/geonode/layers/templates/datasets/dataset_style_upload.html b/geonode/layers/templates/datasets/dataset_style_upload.html index 6b873fab6c4..ac90cbd9d34 100644 --- a/geonode/layers/templates/datasets/dataset_style_upload.html +++ b/geonode/layers/templates/datasets/dataset_style_upload.html @@ -81,6 +81,7 @@
{% trans "Files to be uploaded" %}
csrf_token = "{{ csrf_token }}", form_target = "{{ LAYER_ANCILLARY_FILES_UPLOAD_URL }}", + executions_status_endpoint = "{{EXECUTION_STATUS_ENDPOINT}}", time_enabled = false, mosaic_enabled = false, userLookup = "{% url "account_ajax_lookup" %}" diff --git a/geonode/settings.py b/geonode/settings.py index 2224bce108f..d42d330a2cf 100644 --- a/geonode/settings.py +++ b/geonode/settings.py @@ -2347,7 +2347,9 @@ def get_geonode_catalogue_service(): 'importer.handlers.shapefile.handler.ShapeFileHandler',\ 'importer.handlers.kml.handler.KMLFileHandler',\ 'importer.handlers.csv.handler.CSVFileHandler',\ - 'importer.handlers.geotiff.handler.GeoTiffFileHandler'\ + 'importer.handlers.geotiff.handler.GeoTiffFileHandler',\ + 'importer.handlers.xml.handler.XMLFileHandler',\ + 'importer.handlers.sld.handler.SLDFileHandler',\ ]", ) ) diff --git a/geonode/static/geonode/js/upload/LayerInfo.js b/geonode/static/geonode/js/upload/LayerInfo.js index 4d4c4385172..cb6500cb0ab 100644 --- a/geonode/static/geonode/js/upload/LayerInfo.js +++ b/geonode/static/geonode/js/upload/LayerInfo.js @@ -404,6 +404,14 @@ define(function (require, exports) { }); }; + LayerInfo.prototype.markEnd = function () { + this.logStatus({ + msg: 'Your upload was succesfull!', + level: 'alert-success', + empty: 'true' + }); + }; + LayerInfo.prototype.doResume = function (event) { $(this).text(gettext('Finalizing')).attr('disabled', 'disabled').after(''); var id = (new Date()).getTime(); @@ -479,13 +487,36 @@ define(function (require, exports) { }); }; - LayerInfo.prototype.startPolling = function() { + LayerInfo.prototype.startPolling = function(execution_id) { var self = this; + const baseUrl = executions_status_endpoint; if (self.polling) { - $.ajax({ url: updateUrl(siteUrl + "upload/progress", 'id', self.id), type: 'GET', success: function(data){ + $.ajax({ + url: baseUrl + "?import&filter{source}=resource_file_upload&page=1&page_size=99999", type: 'GET', success: function(data){ // TODO: Not sure we need to do anything here? //console.log('polling'); - }, dataType: "json", complete: setTimeout(function() {self.startPolling()}, 3000), timeout: 30000 }); + }, + dataType: "json", + success: function(resp, code) { + if (resp.requests && resp.requests.length>0) { + const execution_data = resp.requests.find((req) => req.exec_id === execution_id); + if (execution_data.status == 'finished'){ + self.polling = false; + self.markEnd(); + $.ajax({url: baseUrl + "/" + execution_id, type: "DELETE"}); + if (execution_data.output_params && execution_data.output_params['detail_url']) { + const detail_url = execution_data.output_params['detail_url']; + if (detail_url != '') { + window.location = detail_url; + } + } + + } + } + setTimeout(function() {self.startPolling(execution_id)}, 3000) + }, + timeout: 30000 + }) } }; @@ -675,8 +706,6 @@ define(function (require, exports) { }, beforeSend: function () { self.markStart(); - self.polling = true; - self.startPolling(); }, error: function (jqXHR) { self.polling = false; @@ -713,13 +742,16 @@ define(function (require, exports) { callback(array); }, success: function (resp, status) { - self.logStatus({ + /*self.logStatus({ msg: '
<p>' + gettext('Layer files uploaded, configuring in GeoServer') + '</p>
', level: 'alert-success', empty: 'true' }); self.id = resp.id; self.doStep(resp, callback, array); + */ + self.polling = true; + self.startPolling(resp.execution_id); } }); }; From d5d7b6e8cf13620f35b76e842cfba46c43b9d85d Mon Sep 17 00:00:00 2001 From: Giovanni Allegri Date: Wed, 27 Mar 2024 12:30:43 +0100 Subject: [PATCH 8/8] Release 4.2.3 (#12093) --- docker-compose-dev.yml | 4 ++-- docker-compose-test.yml | 4 ++-- docker-compose.yml | 9 +++------ geonode/__init__.py | 2 +- requirements.txt | 4 ++-- setup.cfg | 4 ++-- 6 files changed, 12 insertions(+), 15 deletions(-) diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 19b81b0985a..b1973f64aab 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -79,7 +79,7 @@ services: # Geoserver backend geoserver: - image: geonode/geoserver:2.23.3-v2 + image: geonode/geoserver:2.24.2-v1 container_name: geoserver4${COMPOSE_PROJECT_NAME} healthcheck: test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://geoserver:8080/geoserver/ows" @@ -105,7 +105,7 @@ services: condition: service_healthy data-dir-conf: - image: geonode/geoserver_data:2.23.3-v1 + image: geonode/geoserver_data:2.24.2-v1 container_name: gsconf4${COMPOSE_PROJECT_NAME} entrypoint: sleep infinity volumes: diff --git a/docker-compose-test.yml b/docker-compose-test.yml index 2d649b8df67..7b085c5e6d0 100644 --- a/docker-compose-test.yml +++ b/docker-compose-test.yml @@ -92,7 +92,7 @@ services: # Geoserver backend geoserver: - image: geonode/geoserver:2.24.2-latest + image: geonode/geoserver:2.24.2-v1 container_name: geoserver4${COMPOSE_PROJECT_NAME} healthcheck: test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://geoserver:8080/geoserver/ows" @@ -118,7 +118,7 @@ services: condition: service_healthy data-dir-conf: - image: geonode/geoserver_data:2.24.2-latest + image: geonode/geoserver_data:2.24.2-v1 container_name: gsconf4${COMPOSE_PROJECT_NAME} entrypoint: sleep infinity volumes: diff --git a/docker-compose.yml b/docker-compose.yml index d0529c178f4..ca203e8c858 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,7 @@ version: '3.9' # Common Django template for GeoNode and Celery services below x-common-django: &default-common-django - image: geonode/geonode:4.2.x + image: geonode/geonode:4.2.3 restart: unless-stopped env_file: - .env @@ -22,9 +22,6 @@ services: # Our custom django application. It includes Geonode. 
django: << : *default-common-django - build: - context: ./ - dockerfile: Dockerfile container_name: django4${COMPOSE_PROJECT_NAME} healthcheck: test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://django:8000/" @@ -91,7 +88,7 @@ services: # Geoserver backend geoserver: - image: geonode/geoserver:2.24.2-latest + image: geonode/geoserver:2.24.2-v1 container_name: geoserver4${COMPOSE_PROJECT_NAME} healthcheck: test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://geoserver:8080/geoserver/ows" @@ -117,7 +114,7 @@ services: condition: service_healthy data-dir-conf: - image: geonode/geoserver_data:2.24.2-latest + image: geonode/geoserver_data:2.24.2-v1 container_name: gsconf4${COMPOSE_PROJECT_NAME} entrypoint: sleep infinity volumes: diff --git a/geonode/__init__.py b/geonode/__init__.py index 9b1e32fbb4d..08d6c164079 100644 --- a/geonode/__init__.py +++ b/geonode/__init__.py @@ -19,7 +19,7 @@ import os -__version__ = (4, 2, 0, "dev", 0) +__version__ = (4, 2, 3, "final", 0) default_app_config = "geonode.apps.AppConfig" diff --git a/requirements.txt b/requirements.txt index 70f7db16c82..25559512627 100644 --- a/requirements.txt +++ b/requirements.txt @@ -88,8 +88,8 @@ pinax-notifications==6.0.0 pinax-ratings==4.0.0 # GeoNode org maintained apps. --e git+https://github.com/GeoNode/geonode-mapstore-client.git@4.2.x#egg=django_geonode_mapstore_client --e git+https://github.com/GeoNode/geonode-importer.git@master#egg=geonode-importer +django-geonode-mapstore-client==4.2.0 +geonode-importer==1.0.8 django-avatar==7.1.1 geonode-oauth-toolkit==2.2.2 geonode-user-messages==2.0.2 diff --git a/setup.cfg b/setup.cfg index 01c83ee8f1f..99d74f19ed4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -114,8 +114,8 @@ install_requires = pinax-ratings==4.0.0 # GeoNode org maintained apps. - django-geonode-mapstore-client>=4.1.1,<5.0.0 - geonode-importer>=1.0.6 + django-geonode-mapstore-client==4.2.0 + geonode-importer==1.0.8 django-avatar==7.1.1 geonode-oauth-toolkit==2.2.2 geonode-user-messages==2.0.2
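
Note on the linked_resources changes in PATCH 6/8: the new tests exercise a `resource_type` filter (comma-separated list) and a `link_type` filter ("linked_to" or "linked_by") on the `/api/v2/resources/{pk}/linked_resources` endpoint. A minimal client-side sketch follows; the host, credentials, and resource id are illustrative placeholders, not values taken from this patch set.

```python
import requests

# Placeholders -- adjust for your own deployment.
BASE_URL = "https://geonode.example.org"
RESOURCE_ID = 42

# resource_type accepts a comma-separated list; when link_type is given,
# only that key (plus WARNINGS) is present in the response payload, as
# asserted by the new tests in geonode/base/api/tests.py.
resp = requests.get(
    f"{BASE_URL}/api/v2/resources/{RESOURCE_ID}/linked_resources",
    params={"resource_type": "dataset,map", "link_type": "linked_to"},
    auth=("admin", "admin"),
    timeout=30,
)
resp.raise_for_status()
payload = resp.json()

# Pagination is not supported on this call; the API reports it in WARNINGS.
for res in payload.get("linked_to", []):
    print(res["pk"], res["resource_type"])
```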