From 95e60b385f7e940fac46dc59683e4485ca52e7c7 Mon Sep 17 00:00:00 2001 From: bencomp Date: Mon, 3 Oct 2022 01:58:47 +0200 Subject: [PATCH 01/63] Overload DatasetField method to remove type check I spotted a TODO near a type check based on a string comparison of the class name. --- .../edu/harvard/iq/dataverse/DatasetField.java | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java index 31d08f84c02..772538aa5cb 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java @@ -54,15 +54,18 @@ public int compare(DatasetField o1, DatasetField o2) { o2.getDatasetFieldType().getDisplayOrder() ); }}; - public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, Object dsv) { + public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, DatasetVersion dsv) { DatasetField dsfv = createNewEmptyDatasetField(dsfType); - //TODO - a better way to handle this? 
- if (dsv.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")){ - dsfv.setDatasetVersion((DatasetVersion)dsv); - } else { - dsfv.setTemplate((Template)dsv); - } + dsfv.setDatasetVersion(dsv); + + return dsfv; + } + + public static DatasetField createNewEmptyDatasetField(DatasetFieldType dsfType, Template dsv) { + + DatasetField dsfv = createNewEmptyDatasetField(dsfType); + dsfv.setTemplate(dsv); return dsfv; } From daccc0e3f4d4efee653489f249f8a1db9e3828bd Mon Sep 17 00:00:00 2001 From: bencomp Date: Mon, 3 Oct 2022 12:32:13 +0200 Subject: [PATCH 02/63] Compare types using instanceof --- src/main/java/edu/harvard/iq/dataverse/DatasetField.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java index 772538aa5cb..e6a4ca21fea 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java @@ -563,7 +563,7 @@ private DatasetField copy(Object version, DatasetFieldCompoundValue parent) { dsf.setDatasetFieldType(datasetFieldType); if (version != null) { - if (version.getClass().getName().equals("edu.harvard.iq.dataverse.DatasetVersion")) { + if (version instanceof DatasetVersion) { dsf.setDatasetVersion((DatasetVersion) version); } else { dsf.setTemplate((Template) version); From bdb62163d8ad4ed01b94943ad3c45f13406a6dae Mon Sep 17 00:00:00 2001 From: bencomp Date: Mon, 3 Oct 2022 13:07:46 +0200 Subject: [PATCH 03/63] Create type-specific copy methods in DatasetField Both `copy(DatasetVersion)` and `copy(Template)` still refer to `copy(Object, DatasetFieldCompoundValue)`, because copying that method would make the code less DRY. 
--- .../edu/harvard/iq/dataverse/DatasetField.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java index e6a4ca21fea..35a8184e45b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetField.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetField.java @@ -548,9 +548,12 @@ public String toString() { return "edu.harvard.iq.dataverse.DatasetField[ id=" + id + " ]"; } - public DatasetField copy(Object version) { + public DatasetField copy(DatasetVersion version) { return copy(version, null); } + public DatasetField copy(Template template) { + return copy(template, null); + } // originally this was an overloaded method, but we renamed it to get around an issue with Bean Validation // (that looked t overloaded methods, when it meant to look at overriden methods @@ -558,15 +561,15 @@ public DatasetField copyChild(DatasetFieldCompoundValue parent) { return copy(null, parent); } - private DatasetField copy(Object version, DatasetFieldCompoundValue parent) { + private DatasetField copy(Object versionOrTemplate, DatasetFieldCompoundValue parent) { DatasetField dsf = new DatasetField(); dsf.setDatasetFieldType(datasetFieldType); - if (version != null) { - if (version instanceof DatasetVersion) { - dsf.setDatasetVersion((DatasetVersion) version); + if (versionOrTemplate != null) { + if (versionOrTemplate instanceof DatasetVersion) { + dsf.setDatasetVersion((DatasetVersion) versionOrTemplate); } else { - dsf.setTemplate((Template) version); + dsf.setTemplate((Template) versionOrTemplate); } } From 4579283f8e5039cbbc6c8df43ac73ee5fd63b967 Mon Sep 17 00:00:00 2001 From: bencomp Date: Mon, 18 Sep 2023 00:07:16 +0200 Subject: [PATCH 04/63] Add basic DatasetFieldTest --- .../iq/dataverse/DatasetFieldTest.java | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 
src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java new file mode 100644 index 00000000000..23ba2d69fff --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java @@ -0,0 +1,41 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; +import edu.harvard.iq.dataverse.mocks.MocksFactory; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + + +class DatasetFieldTest { + @Test + void testCreateNewEmptyDatasetField_withEmptyTemplate() { + Template template = new Template(); + + DatasetField field = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), template); + assertTrue(field.getTemplate() == template); + assertTrue(template.getDatasetFields().isEmpty()); + } + + @Test + void testNotEqualDatasetFields() { + DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + field1.setId(MocksFactory.nextId()); + DatasetField field2 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + field2.setId(MocksFactory.nextId()); + + assertNotEquals(field1, field2); + } + + @Test + void testEqualDatasetFields() { + DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + field1.setId(100L); + DatasetField field2 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + field2.setId(100L); + + assertEquals(field1, field2); + } +} \ No newline at end of file From 
87bfe16c6a4390075bd0b481071ebddf2f1c1e5d Mon Sep 17 00:00:00 2001 From: bencomp Date: Mon, 18 Sep 2023 00:25:52 +0200 Subject: [PATCH 05/63] Test DatasetField identities --- .../iq/dataverse/DatasetFieldTest.java | 27 +++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java index 23ba2d69fff..97999af3244 100644 --- a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldTest.java @@ -7,6 +7,7 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNull; class DatasetFieldTest { @@ -21,12 +22,17 @@ void testCreateNewEmptyDatasetField_withEmptyTemplate() { @Test void testNotEqualDatasetFields() { - DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + DatasetFieldType type1 = new DatasetFieldType("subject", FieldType.TEXT, false); + Template template1 = new Template(); + DatasetField field1 = DatasetField.createNewEmptyDatasetField(type1, template1); field1.setId(MocksFactory.nextId()); - DatasetField field2 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + DatasetFieldType type2 = new DatasetFieldType("subject", FieldType.TEXT, false); + Template template2 = new Template(); + DatasetField field2 = DatasetField.createNewEmptyDatasetField(type2, template2); field2.setId(MocksFactory.nextId()); assertNotEquals(field1, field2); + assertNotEquals(field1, template2); } @Test @@ -34,8 +40,25 @@ void testEqualDatasetFields() { DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, 
false), new Template()); field1.setId(100L); DatasetField field2 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + + // Fields are not equal before both have IDs set + assertNotEquals(field1, field2); + field2.setId(100L); assertEquals(field1, field2); } + + @Test + void testCopyDatasetFields() { + DatasetField field1 = DatasetField.createNewEmptyDatasetField(new DatasetFieldType("subject", FieldType.TEXT, false), new Template()); + field1.setId(100L); + DatasetField field2 = field1.copy(field1.getTemplate()); + + assertNull(field2.getId()); + // A copy of a field should not be equal + assertNotEquals(field1, field2); + + assertEquals(field2.getDatasetFieldType(), field1.getDatasetFieldType()); + } } \ No newline at end of file From 963a00e96f71936903809e0aa6eb9deb3d312def Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Thu, 2 Nov 2023 10:22:36 +0100 Subject: [PATCH 06/63] Return valid JSON from API blocking filters --- .../edu/harvard/iq/dataverse/api/ApiBlockingFilter.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java index 0e5b8226310..b51b1aa2612 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiBlockingFilter.java @@ -49,7 +49,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro @Override public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) throws IOException, ServletException { HttpServletResponse httpResponse = (HttpServletResponse) sr1; - httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint blocked. Please contact the dataverse administrator\"}" ); + httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint blocked. 
Please contact the dataverse administrator\"}" ); httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); httpResponse.setContentType("application/json"); } @@ -67,7 +67,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro fc.doFilter(sr, sr1); } else { HttpServletResponse httpResponse = (HttpServletResponse) sr1; - httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint available from localhost only. Please contact the dataverse administrator\"}" ); + httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint available from localhost only. Please contact the dataverse administrator\"}" ); httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); httpResponse.setContentType("application/json"); } @@ -102,7 +102,7 @@ public void doBlock(ServletRequest sr, ServletResponse sr1, FilterChain fc) thro if ( block ) { HttpServletResponse httpResponse = (HttpServletResponse) sr1; - httpResponse.getWriter().println("{ status:\"error\", message:\"Endpoint available using API key only. Please contact the dataverse administrator\"}" ); + httpResponse.getWriter().println("{ \"status\":\"error\", \"message\":\"Endpoint available using API key only. 
Please contact the dataverse administrator\"}" ); httpResponse.setStatus(HttpServletResponse.SC_SERVICE_UNAVAILABLE); httpResponse.setContentType("application/json"); } else { From cd154813eea9a4f937d6b71645097f3d04867275 Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Thu, 2 Nov 2023 23:52:49 +0100 Subject: [PATCH 07/63] Add PR release notes --- doc/release-notes/api-blocking-filter-json.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 doc/release-notes/api-blocking-filter-json.md diff --git a/doc/release-notes/api-blocking-filter-json.md b/doc/release-notes/api-blocking-filter-json.md new file mode 100644 index 00000000000..337ff82dd8b --- /dev/null +++ b/doc/release-notes/api-blocking-filter-json.md @@ -0,0 +1,3 @@ +* When any `ApiBlockingFilter` policy applies to a request, the JSON in the body of the error response is now valid JSON. + In case an API client did any special processing to allow it to parse the body, that is no longer necessary. + The status code of such responses has not changed. From 7d17b9a496f52ae7adb979471dcf6c782bb7c400 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch Date: Mon, 3 Jun 2024 14:21:00 -0400 Subject: [PATCH 08/63] Dataverse with WSL --- .../source/developers/windows.rst | 88 +++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 53578fe980c..91e3d783177 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -16,3 +16,91 @@ See the `post + +You will be asked to create a linux user. +After installation of Linux check that you have internet connection: + +.. code-block:: bash + + ping www.google.com + +If you do not have internet connection try add in ``/etc/wsl.conf`` + +.. code-block:: bash + + [network] + generateResolvConf = false + +Also in /etc/resolv.conf add + +.. 
code-block:: bash + + nameserver 1.1.1.1 + +Now you can install all the tools one usually uses in Linux. For example, it is good idea to run update: + +.. code-block:: bash + + sudo apt update + sudo apt full-upgrade -y + +Install Dataverse +~~~~~~~~~~~~~~~~~ + +Now you can install Dataverse in WSL following the instructions for :doc:`classic-dev-env` +At the end check that you have ``-Ddataverse.pid.default-provider=fake`` in jvm-options. + +Now you can access dataverse in your windows browser + + - http://localhost:8080 + - username: dataverseAdmin + - password: admin + +IDE for Dataverse in Windows +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Files in WSL are accessible from Windows for editing using ``\\wsl.localhost`` or ``\\wsl$`` path. Windows files are accessible in linux in ``/mnt/c/`` directory. Therefore one can use one's favorite editor or IDE to edit dataverse project files. Then one can build using ``mvn`` in WSL and deploy manually in WSL using ``asadmin``. + +It is still though possible to use full strength of IDE, the following instructions are for Intelij users. + +- Install Intelij in Windows. + +You can open the project through ``\\wsl.localhost`` and navigate to dataverse project. +You can try to build the project in Intelij. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by firewall).`` In that case you can try +to disable WSL Hyperviser from firewall. +After that you should be able to build the project in Intelij. +It seems that at present it is impossible to deploy the glassfish application in Intelij. You can try to add Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domian one, but it may fail since Intelij confuses the Windows and Linux paths. 
+ +To use the full strength of Intelij with build, deployment and debugging, one will need to use Intelij ``Remote development``. Close all the projects in InteliJ and go to ``Remote development->WSL`` and press ``New Project``. In WSL instance choose your linux distribution and press ``Next``. In ``Prpject Directory`` navigate to WSL dataverse project.Then press ``Download IDE and Connect``. This will install InteliJ in WSL in ``~/.cache/JetBrains/``. Now in InteliJ you should see your project opened in a new InteliJ window. After adding Glassfish plugin and editing configuration you should be able to build the project and run the project. + +PgAdmin in Windows for Dataverse +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can access dataverse database from Windows. Install pgAdmin https://www.pgadmin.org/download/pgadmin-4-windows/ In pgAdmin register server using 127.0.0.1 with port 5432, database dvndb and dvnapp as username with secret password. Now you will be able to access and update dataverse database. + From 8eeaa31829a2e980cc13387f50a691319d9fb37d Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:44:29 -0400 Subject: [PATCH 09/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 91e3d783177..d72ec0e50d2 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -38,7 +38,7 @@ See the list of possible distributions: wsl --list --online -Choose the distribution you would like. Then run the following command. Notice that this installation of dataverse was tried with ubuntu distribution. +Choose the distribution you would like. Then run the following command. 
These instructions were tested with Ubuntu. .. code-block:: powershell From 88d9854cd0328349c0acf0b96c767b04100d7ec7 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:44:53 -0400 Subject: [PATCH 10/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index d72ec0e50d2..6a2fc9155fc 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -45,7 +45,7 @@ Choose the distribution you would like. Then run the following command. These in wsl --install -d You will be asked to create a linux user. -After installation of Linux check that you have internet connection: +After the installation of Linux is complete, check that you have an internet connection: .. code-block:: bash From 9b5b29cf0eac6fc21f666d05c74f8406e1ddbdbf Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:45:14 -0400 Subject: [PATCH 11/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 6a2fc9155fc..8986cc2e875 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -51,7 +51,7 @@ After the installation of Linux is complete, check that you have an internet con ping www.google.com -If you do not have internet connection try add in ``/etc/wsl.conf`` +If you do not have an internet connection try adding it in ``/etc/wsl.conf`` .. 
code-block:: bash From 87edbcba66f8c293e2074cfbd758b338da0ad057 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:45:28 -0400 Subject: [PATCH 12/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 8986cc2e875..f689b172cd6 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -58,7 +58,7 @@ If you do not have an internet connection try adding it in ``/etc/wsl.conf`` [network] generateResolvConf = false -Also in /etc/resolv.conf add +Also in ``/etc/resolv.conf`` add .. code-block:: bash From 74254affb19eba566699675fae4d677ce6417ef2 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:45:45 -0400 Subject: [PATCH 13/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index f689b172cd6..ccba42274b8 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -64,7 +64,7 @@ Also in ``/etc/resolv.conf`` add nameserver 1.1.1.1 -Now you can install all the tools one usually uses in Linux. For example, it is good idea to run update: +Now you can install all the tools one usually uses in Linux. For example, it is good idea to run an update: .. 
code-block:: bash From 93c452d6bec71a402e73b4670db68cc0f95918fc Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:46:18 -0400 Subject: [PATCH 14/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index ccba42274b8..dda34a22719 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -102,5 +102,5 @@ To use the full strength of Intelij with build, deployment and debugging, one wi PgAdmin in Windows for Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can access dataverse database from Windows. Install pgAdmin https://www.pgadmin.org/download/pgadmin-4-windows/ In pgAdmin register server using 127.0.0.1 with port 5432, database dvndb and dvnapp as username with secret password. Now you will be able to access and update dataverse database. +You can access The Dataverse database from Windows. Install pgAdmin https://www.pgadmin.org/download/pgadmin-4-windows/ In pgAdmin register a server using 127.0.0.1 with port 5432, database dvndb and dvnapp as username with secret password. Now you will be able to access and update Dataverse database. 
From fc57ddffd12c75798fd83f2a99da20128da0821b Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:46:46 -0400 Subject: [PATCH 15/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index dda34a22719..81b518333f9 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -30,7 +30,7 @@ If you have Docker already installed, you should already have WSL installed, oth wsl --install -If you already had WSL installed you can install specific linux distribution: +If you already had WSL installed you can install a specific Linux distribution: See the list of possible distributions: From fc25adf11a78f405045fdf64aee37f044bde4973 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:46:57 -0400 Subject: [PATCH 16/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 81b518333f9..4ff309bbc9f 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -44,7 +44,7 @@ Choose the distribution you would like. Then run the following command. These in wsl --install -d -You will be asked to create a linux user. +You will be asked to create a Linux user. After the installation of Linux is complete, check that you have an internet connection: .. 
code-block:: bash From 841ffd06ddc475f5168549171b97c1b1288673cd Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:47:13 -0400 Subject: [PATCH 17/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 4ff309bbc9f..fdc6641eab1 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -94,7 +94,7 @@ It is still though possible to use full strength of IDE, the following instructi You can open the project through ``\\wsl.localhost`` and navigate to dataverse project. You can try to build the project in Intelij. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by firewall).`` In that case you can try to disable WSL Hyperviser from firewall. -After that you should be able to build the project in Intelij. +After that you should be able to build the project in Intellij. It seems that at present it is impossible to deploy the glassfish application in Intelij. You can try to add Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domian one, but it may fail since Intelij confuses the Windows and Linux paths. To use the full strength of Intelij with build, deployment and debugging, one will need to use Intelij ``Remote development``. Close all the projects in InteliJ and go to ``Remote development->WSL`` and press ``New Project``. In WSL instance choose your linux distribution and press ``Next``. In ``Prpject Directory`` navigate to WSL dataverse project.Then press ``Download IDE and Connect``. 
This will install InteliJ in WSL in ``~/.cache/JetBrains/``. Now in InteliJ you should see your project opened in a new InteliJ window. After adding Glassfish plugin and editing configuration you should be able to build the project and run the project. From 185c31cac6aee4869772736bda0ea9879be61a42 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:47:26 -0400 Subject: [PATCH 18/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index fdc6641eab1..5d74072f264 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -92,7 +92,7 @@ It is still though possible to use full strength of IDE, the following instructi - Install Intelij in Windows. You can open the project through ``\\wsl.localhost`` and navigate to dataverse project. -You can try to build the project in Intelij. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by firewall).`` In that case you can try +You can try to build the project in Intellij. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by firewall).`` In that case you can try to disable WSL Hyperviser from firewall. After that you should be able to build the project in Intellij. It seems that at present it is impossible to deploy the glassfish application in Intelij. You can try to add Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domian one, but it may fail since Intelij confuses the Windows and Linux paths. 
From f9fb940228106221b08f4cf60139cdd829171a6f Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:48:08 -0400 Subject: [PATCH 19/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 5d74072f264..e877f4612e1 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -77,7 +77,7 @@ Install Dataverse Now you can install Dataverse in WSL following the instructions for :doc:`classic-dev-env` At the end check that you have ``-Ddataverse.pid.default-provider=fake`` in jvm-options. -Now you can access dataverse in your windows browser +Now you can access Dataverse in your Windows browser (Edge, Chrome, etc.): - http://localhost:8080 - username: dataverseAdmin From d336f45154425cd0e49526807b3d087e268233d5 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:53:15 -0400 Subject: [PATCH 20/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index e877f4612e1..16063272966 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -85,7 +85,7 @@ Now you can access Dataverse in your Windows browser (Edge, Chrome, etc.): IDE for Dataverse in Windows ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Files in WSL are accessible from Windows for editing using ``\\wsl.localhost`` or ``\\wsl$`` path. 
Windows files are accessible in linux in ``/mnt/c/`` directory. Therefore one can use one's favorite editor or IDE to edit dataverse project files. Then one can build using ``mvn`` in WSL and deploy manually in WSL using ``asadmin``. +Files in WSL are accessible from Windows for editing using ``\\wsl.localhost`` or ``\\wsl$`` path. Windows files are accessible under Linux in the ``/mnt/c/`` directory. Therefore one can use one's favorite editor or IDE to edit Dataverse project files. Then one can build using ``mvn`` in WSL and deploy manually in WSL using ``asadmin``. It is still though possible to use full strength of IDE, the following instructions are for Intelij users. From 4e4af12890c00b1b484d1d98d7428c16c32faa1c Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:53:32 -0400 Subject: [PATCH 21/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 16063272966..ec92adf7099 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -87,7 +87,7 @@ IDE for Dataverse in Windows ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Files in WSL are accessible from Windows for editing using ``\\wsl.localhost`` or ``\\wsl$`` path. Windows files are accessible under Linux in the ``/mnt/c/`` directory. Therefore one can use one's favorite editor or IDE to edit Dataverse project files. Then one can build using ``mvn`` in WSL and deploy manually in WSL using ``asadmin``. -It is still though possible to use full strength of IDE, the following instructions are for Intelij users. +It is still though possible to use a full-strength IDE. The following instructions are for Intellij users. - Install Intelij in Windows. 
From 6a34b97b7e67e163553f483ab12a55e1379d4b92 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:53:48 -0400 Subject: [PATCH 22/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index ec92adf7099..b16e86b5c2d 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -91,7 +91,7 @@ It is still though possible to use a full-strength IDE. The following instructio - Install Intelij in Windows. -You can open the project through ``\\wsl.localhost`` and navigate to dataverse project. +You can open the project through ``\\wsl.localhost`` and navigate to Dataverse project. You can try to build the project in Intellij. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by firewall).`` In that case you can try to disable WSL Hyperviser from firewall. After that you should be able to build the project in Intellij. 
From 3b9e691c476f9cc50ca75065c80f1a08526ff386 Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:54:05 -0400 Subject: [PATCH 23/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index b16e86b5c2d..09060b801ad 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -95,7 +95,7 @@ You can open the project through ``\\wsl.localhost`` and navigate to Dataverse p You can try to build the project in Intellij. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by firewall).`` In that case you can try to disable WSL Hyperviser from firewall. After that you should be able to build the project in Intellij. -It seems that at present it is impossible to deploy the glassfish application in Intelij. You can try to add Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domian one, but it may fail since Intelij confuses the Windows and Linux paths. +It seems that at present it is impossible to deploy the Glassfish application in Intellij. You can try to add Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domain1, but it may fail since Intellij confuses the Windows and Linux paths. To use the full strength of Intelij with build, deployment and debugging, one will need to use Intelij ``Remote development``. 
Close all the projects in InteliJ and go to ``Remote development->WSL`` and press ``New Project``. In WSL instance choose your linux distribution and press ``Next``. In ``Prpject Directory`` navigate to WSL dataverse project.Then press ``Download IDE and Connect``. This will install InteliJ in WSL in ``~/.cache/JetBrains/``. Now in InteliJ you should see your project opened in a new InteliJ window. After adding Glassfish plugin and editing configuration you should be able to build the project and run the project. From 2e497a65aa609eff3934a914951b18da217dac9d Mon Sep 17 00:00:00 2001 From: Victoria Lubitch <43550154+lubitchv@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:54:23 -0400 Subject: [PATCH 24/63] Update doc/sphinx-guides/source/developers/windows.rst Co-authored-by: Philip Durbin --- doc/sphinx-guides/source/developers/windows.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 09060b801ad..54a30e95aef 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -97,7 +97,7 @@ to disable WSL Hyperviser from firewall. After that you should be able to build the project in Intellij. It seems that at present it is impossible to deploy the Glassfish application in Intellij. You can try to add Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domain1, but it may fail since Intellij confuses the Windows and Linux paths. -To use the full strength of Intelij with build, deployment and debugging, one will need to use Intelij ``Remote development``. Close all the projects in InteliJ and go to ``Remote development->WSL`` and press ``New Project``. In WSL instance choose your linux distribution and press ``Next``. 
In ``Prpject Directory`` navigate to WSL dataverse project.Then press ``Download IDE and Connect``. This will install InteliJ in WSL in ``~/.cache/JetBrains/``. Now in InteliJ you should see your project opened in a new InteliJ window. After adding Glassfish plugin and editing configuration you should be able to build the project and run the project.
+To use the full strength of Intellij with build, deployment and debugging, one will need to use Intellij ``Remote development``. Close all the projects in Intellij and go to ``Remote development->WSL`` and press ``New Project``. In WSL instance choose your Linux distribution and press ``Next``. In ``Project Directory`` navigate to WSL Dataverse project. Then press ``Download IDE and Connect``. This will install Intellij in WSL in ``~/.cache/JetBrains/``. Now in Intellij you should see your project opened in a new Intellij window. After adding Glassfish plugin and editing configuration you should be able to build the project and run the project.
PgAdmin in Windows for Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From 40503758427f6c2bae790f7ead45844af20293e9 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 5 Jun 2024 13:46:40 -0400 Subject: [PATCH 25/63] adding fix to file name for download guestbook responses --- .../edu/harvard/iq/dataverse/GuestbookResponsesPage.java | 5 +++-- .../java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java index c53df93def8..93ba8028fa8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java @@ -101,8 +101,9 @@ public String init() { private String getFileName(){ // The fix below replaces any spaces in the name of the dataverse with underscores; // without it, the filename was chopped off (by the browser??), and the user - // was getting the file name "Foo", instead of "Foo and Bar in Social Sciences.csv". -- L.A. - return dataverse.getName().replace(' ', '_') + "_" + guestbook.getId() + "_GuestbookReponses.csv"; + // was getting the file name "Foo", instead of "Foo and Bar in Social Sciences.csv". -- L.A. 
+ // Also removing some chars that have been reported to cause issues with certain browsers + return dataverse.getName().replace(' ', '_').replaceAll("[\\\\/:*?\"<>|,;]", "") + "_" + guestbook.getId() + "_GuestbookResponses.csv"; } public void streamResponsesByDataverseAndGuestbook(){ diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java index cc89cfd9d56..94c36a40794 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java @@ -220,7 +220,8 @@ private String getFileName(){ // The fix below replaces any spaces in the name of the dataverse with underscores; // without it, the filename was chopped off (by the browser??), and the user // was getting the file name "Foo", instead of "Foo and Bar in Social Sciences.csv". -- L.A. - return dataverse.getName().replace(' ', '_') + "_GuestbookReponses.csv"; + // Also removing some chars that have been reported to cause issues with certain browsers + return dataverse.getName().replace(' ', '_').replaceAll("[\\\\/:*?\"<>|,;]", "") + "_GuestbookResponses.csv"; } public void deleteGuestbook() { From c129474b2f90bfe1087b9c62660fed02c0577eb3 Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 5 Jun 2024 14:27:13 -0400 Subject: [PATCH 26/63] #8796 fix display when no custom terms entered --- .../dataverse/DatasetVersionServiceBean.java | 17 +++++++++++++ .../iq/dataverse/TermsOfUseAndAccess.java | 24 +++++++++++++++++++ .../iq/dataverse/dataset/DatasetUtil.java | 18 ++++++++++++++ .../iq/dataverse/search/IndexServiceBean.java | 12 ++++++++++ src/main/java/propertyFiles/Bundle.properties | 2 ++ src/main/webapp/dataset-license-terms.xhtml | 4 ++-- .../search/IndexServiceBeanTest.java | 1 + 7 files changed, 76 insertions(+), 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java 
b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index ab23fa779d5..f99b3ee1b53 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -315,6 +315,23 @@ private void msg(String s){ //logger.fine(s); } + public boolean isVersionDefaultCustomTerms(DatasetVersion datasetVersion) { + + if (datasetVersion.getId() != null) { + try { + TermsOfUseAndAccess toua = (TermsOfUseAndAccess) em.createNamedQuery("TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms") + .setParameter("id", datasetVersion.getId()).setParameter("defaultTerms", TermsOfUseAndAccess.DEFAULT_NOTERMS).getSingleResult(); + if (toua != null && datasetVersion.getTermsOfUseAndAccess().getLicense() == null) { + return true; + } + + } catch (NoResultException e) { + return false; + } + } + return false; + } + /** * Does the version identifier in the URL ask for a "DRAFT"? * diff --git a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java index ee865770dbe..9e48c6c0165 100644 --- a/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java +++ b/src/main/java/edu/harvard/iq/dataverse/TermsOfUseAndAccess.java @@ -17,6 +17,28 @@ import jakarta.persistence.Transient; import edu.harvard.iq.dataverse.license.License; +import jakarta.persistence.NamedQueries; +import jakarta.persistence.NamedQuery; + +@NamedQueries({ + // TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms + // is used to determine if the dataset terms were set by the multi license support update + // as part of the 5.10 release. 
+ + @NamedQuery(name = "TermsOfUseAndAccess.findByDatasetVersionIdAndDefaultTerms", + query = "SELECT o FROM TermsOfUseAndAccess o, DatasetVersion dv WHERE " + + "dv.id =:id " + + "AND dv.termsOfUseAndAccess.id = o.id " + + "AND o.termsOfUse =:defaultTerms " + + "AND o.confidentialityDeclaration IS null " + + "AND o.specialPermissions IS null " + + "AND o.restrictions IS null " + + "AND o.citationRequirements IS null " + + "AND o.depositorRequirements IS null " + + "AND o.conditions IS null " + + "AND o.disclaimer IS null " + ) +}) /** * @@ -26,6 +48,8 @@ @Entity @ValidateTermsOfUseAndAccess public class TermsOfUseAndAccess implements Serializable { + + public static final String DEFAULT_NOTERMS = "This dataset is made available without information on how it can be used. You should communicate with the Contact(s) specified before use."; @Id @GeneratedValue(strategy = GenerationType.IDENTITY) diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index 98bd26b51d6..060b8694e9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -653,6 +653,15 @@ public static License getLicense(DatasetVersion dsv) { } public static String getLicenseName(DatasetVersion dsv) { + + DatasetVersionServiceBean datasetVersionService = CDI.current().select(DatasetVersionServiceBean.class).get(); + /* + Special case where there are default custom terms indicating that no actual choice has been made... 
+ */ + if (datasetVersionService.isVersionDefaultCustomTerms(dsv)) { + return BundleUtil.getStringFromBundle("license.none.chosen"); + } + License license = DatasetUtil.getLicense(dsv); return getLocalizedLicenseName(license); } @@ -683,7 +692,16 @@ public static String getLicenseIcon(DatasetVersion dsv) { } public static String getLicenseDescription(DatasetVersion dsv) { + + DatasetVersionServiceBean datasetVersionService = CDI.current().select(DatasetVersionServiceBean.class).get(); + /* + Special case where there are default custom terms indicating that no actual choice has been made... + */ + if (datasetVersionService.isVersionDefaultCustomTerms(dsv)) { + return BundleUtil.getStringFromBundle("license.none.chosen.description"); + } License license = DatasetUtil.getLicense(dsv); + return license != null ? getLocalizedLicenseDetails(license,"DESCRIPTION") : BundleUtil.getStringFromBundle("license.custom.description"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index e61b93a741f..cbe6c532a65 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -53,7 +53,9 @@ import jakarta.inject.Named; import jakarta.json.JsonObject; import jakarta.persistence.EntityManager; +import jakarta.persistence.NoResultException; import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Query; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -96,6 +98,8 @@ public class IndexServiceBean { @EJB DatasetServiceBean datasetService; @EJB + DatasetVersionServiceBean datasetVersionService; + @EJB BuiltinUserServiceBean dataverseUserServiceBean; @EJB PermissionServiceBean permissionService; @@ -468,11 +472,13 @@ public void indexDvObject(DvObject objectIn) throws SolrServerException, IOExce public void indexDataset(Dataset dataset, 
boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { doIndexDataset(dataset, doNormalSolrDocCleanUp); + System.out.print("indexed: " + dataset.getId()); updateLastIndexedTime(dataset.getId()); } private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { logger.fine("indexing dataset " + dataset.getId()); + System.out.print("indexing dataset " + dataset.getId()); /** * @todo should we use solrDocIdentifierDataset or * IndexableObject.IndexableTypes.DATASET.getName() + "_" ? @@ -1694,6 +1700,12 @@ private List getDataversePathsFromSegments(List dataversePathSeg private void addLicenseToSolrDoc(SolrInputDocument solrInputDocument, DatasetVersion datasetVersion) { if (datasetVersion != null && datasetVersion.getTermsOfUseAndAccess() != null) { + //test to see if the terms of use are the default set in 5.10 - if so and there's no license then don't add license to solr doc. + //fixes 10513 + if (datasetVersionService.isVersionDefaultCustomTerms(datasetVersion)){ + return; + } + String licenseName = "Custom Terms"; if(datasetVersion.getTermsOfUseAndAccess().getLicense() != null) { licenseName = datasetVersion.getTermsOfUseAndAccess().getLicense().getName(); diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index 0441853eee9..c32adb352a8 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -1445,6 +1445,8 @@ dataset.exportBtn.itemLabel.json=JSON dataset.exportBtn.itemLabel.oai_ore=OAI_ORE dataset.exportBtn.itemLabel.dataciteOpenAIRE=OpenAIRE dataset.exportBtn.itemLabel.html=DDI HTML Codebook +license.none.chosen=No license or custom terms chosen +license.none.chosen.description=No custom terms have been entered for this dataset license.custom=Custom Dataset Terms license.custom.description=Custom terms specific to this dataset metrics.title=Metrics diff --git 
a/src/main/webapp/dataset-license-terms.xhtml b/src/main/webapp/dataset-license-terms.xhtml index c54d94442ea..88bd75947cb 100644 --- a/src/main/webapp/dataset-license-terms.xhtml +++ b/src/main/webapp/dataset-license-terms.xhtml @@ -46,8 +46,8 @@

- - + +

diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java index 92b06e5936f..c062f63e264 100644 --- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java @@ -51,6 +51,7 @@ public void setUp() { indexService.settingsService = Mockito.mock(SettingsServiceBean.class); indexService.dataverseService = Mockito.mock(DataverseServiceBean.class); indexService.datasetFieldService = Mockito.mock(DatasetFieldServiceBean.class); + indexService.datasetVersionService = Mockito.mock(DatasetVersionServiceBean.class); BrandingUtil.injectServices(indexService.dataverseService, indexService.settingsService); Mockito.when(indexService.dataverseService.findRootDataverse()).thenReturn(dataverse); From 7cb45960dfcad5b9e6f41c052769a174f2e8704a Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 5 Jun 2024 15:19:41 -0400 Subject: [PATCH 27/63] #8796 code cleanup --- .../edu/harvard/iq/dataverse/search/IndexServiceBean.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 02f04d38bd1..129e3f5af16 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -54,9 +54,7 @@ import jakarta.inject.Named; import jakarta.json.JsonObject; import jakarta.persistence.EntityManager; -import jakarta.persistence.NoResultException; import jakarta.persistence.PersistenceContext; -import jakarta.persistence.Query; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; @@ -476,13 +474,11 @@ public void indexDvObject(DvObject objectIn) throws SolrServerException, IOExce public void indexDataset(Dataset dataset, boolean 
doNormalSolrDocCleanUp) throws SolrServerException, IOException { doIndexDataset(dataset, doNormalSolrDocCleanUp); - System.out.print("indexed: " + dataset.getId()); updateLastIndexedTime(dataset.getId()); } private void doIndexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp) throws SolrServerException, IOException { logger.fine("indexing dataset " + dataset.getId()); - System.out.print("indexing dataset " + dataset.getId()); /** * @todo should we use solrDocIdentifierDataset or * IndexableObject.IndexableTypes.DATASET.getName() + "_" ? From 88877a44f5ac0e52168c0b86841fae5bd9d09ca6 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 5 Jun 2024 16:25:03 -0400 Subject: [PATCH 28/63] move like method to FileUtil static method and added tests --- .../harvard/iq/dataverse/GuestbookResponsesPage.java | 3 ++- .../edu/harvard/iq/dataverse/ManageGuestbooksPage.java | 3 ++- .../java/edu/harvard/iq/dataverse/util/FileUtil.java | 10 +++++++++- .../edu/harvard/iq/dataverse/util/FileUtilTest.java | 7 +++++++ 4 files changed, 20 insertions(+), 3 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java index 93ba8028fa8..4276eb02882 100644 --- a/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/GuestbookResponsesPage.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import java.util.List; import java.util.logging.Logger; @@ -103,7 +104,7 @@ private String getFileName(){ // without it, the filename was chopped off (by the browser??), and the user // was getting the file name "Foo", instead of "Foo and Bar in Social Sciences.csv". -- L.A. 
// Also removing some chars that have been reported to cause issues with certain browsers - return dataverse.getName().replace(' ', '_').replaceAll("[\\\\/:*?\"<>|,;]", "") + "_" + guestbook.getId() + "_GuestbookResponses.csv"; + return FileUtil.sanitizeFileName(dataverse.getName() + "_" + guestbook.getId() + "_GuestbookResponses.csv"); } public void streamResponsesByDataverseAndGuestbook(){ diff --git a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java index 94c36a40794..d1cc515fd01 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/ManageGuestbooksPage.java @@ -5,6 +5,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseGuestbookRootCommand; import edu.harvard.iq.dataverse.util.BundleUtil; +import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import static edu.harvard.iq.dataverse.util.JsfHelper.JH; import java.util.LinkedList; @@ -221,7 +222,7 @@ private String getFileName(){ // without it, the filename was chopped off (by the browser??), and the user // was getting the file name "Foo", instead of "Foo and Bar in Social Sciences.csv". -- L.A. 
// Also removing some chars that have been reported to cause issues with certain browsers - return dataverse.getName().replace(' ', '_').replaceAll("[\\\\/:*?\"<>|,;]", "") + "_GuestbookResponses.csv"; + return FileUtil.sanitizeFileName(dataverse.getName() + "_GuestbookResponses.csv"); } public void deleteGuestbook() { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java index 6c427672e6d..a0c32d5c8ce 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java @@ -1816,5 +1816,13 @@ public static String getStorageDriver(DataFile dataFile) { String storageIdentifier = dataFile.getStorageIdentifier(); return storageIdentifier.substring(0, storageIdentifier.indexOf(DataAccess.SEPARATOR)); } - + + /** + * Replace spaces with "_" and remove invalid chars + * @param fileNameIn - Name before sanitization NOTE: not full path since this method removes '/' and '\' + * @return filename without spaces or invalid chars + */ + public static String sanitizeFileName(String fileNameIn) { + return fileNameIn == null ? 
null : fileNameIn.replace(' ', '_').replaceAll("[\\\\/:*?\"<>|,;]", ""); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java index ce8698c95eb..46359d7b02c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/FileUtilTest.java @@ -434,4 +434,11 @@ public void testDetermineFileTypeROCrate() { assertEquals("Code", FileUtil.getIndexableFacetFileType(dockerDataFile)); } + @Test + public void testSanitizeFileName() { + assertEquals(null, FileUtil.sanitizeFileName(null)); + assertEquals("with_space", FileUtil.sanitizeFileName("with space")); + assertEquals("withcomma", FileUtil.sanitizeFileName("with,comma")); + assertEquals("with.txt", FileUtil.sanitizeFileName("with,\\?:;,.txt")); + } } From 3d9592cdc20ae20c339fdd59f975cdfcfe09081c Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 5 Jun 2024 16:44:01 -0400 Subject: [PATCH 29/63] #8796 add release note --- doc/release-notes/8796-fix-license-display-indexing.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/8796-fix-license-display-indexing.md diff --git a/doc/release-notes/8796-fix-license-display-indexing.md b/doc/release-notes/8796-fix-license-display-indexing.md new file mode 100644 index 00000000000..ebded088875 --- /dev/null +++ b/doc/release-notes/8796-fix-license-display-indexing.md @@ -0,0 +1 @@ +When datasets have neither a license nor custom terms of use the display will indicate this. Also, these datasets will no longer be indexed as having custom terms. 
From 584dad757bbb5b9e438466b50e2b6fefdfac8a2e Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Wed, 5 Jun 2024 16:58:45 -0400 Subject: [PATCH 30/63] #8796 fix popups --- src/main/webapp/datasetLicenseInfoFragment.xhtml | 2 +- src/main/webapp/guestbook-terms-popup-fragment.xhtml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/webapp/datasetLicenseInfoFragment.xhtml b/src/main/webapp/datasetLicenseInfoFragment.xhtml index 257f6b3b12f..a1bd604aa6a 100644 --- a/src/main/webapp/datasetLicenseInfoFragment.xhtml +++ b/src/main/webapp/datasetLicenseInfoFragment.xhtml @@ -22,7 +22,7 @@ xmlns:jsf="http://xmlns.jcp.org/jsf"> target="_blank">#{DatasetUtil:getLicenseName(DatasetPage.workingVersion)} -

diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml index 5948047d845..d53c4bf4709 100644 --- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml +++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml @@ -48,7 +48,7 @@ - From f7b95656df437ff3086b13a88e5434092f1ee737 Mon Sep 17 00:00:00 2001 From: Thomas van Erven Date: Thu, 6 Jun 2024 15:59:24 +0200 Subject: [PATCH 31/63] Docs update. --- doc/sphinx-guides/source/installation/config.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 8fb9460892b..6b2ef571101 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1292,8 +1292,8 @@ Reported Working S3-Compatible Storage Note that for direct uploads and downloads, Dataverse redirects to the proxy-url but presigns the urls based on the ``dataverse.files..custom-endpoint-url``. Also, note that if you choose to enable ``dataverse.files..download-redirect`` the S3 URLs expire after 60 minutes by default. You can change that minute value to reflect a timeout value that’s more appropriate by using ``dataverse.files..url-expiration-minutes``. `Surf Object Store v2019-10-30 `_ - Set ``dataverse.files..payload-signing=true`` and ``dataverse.files..chunked-encoding=false`` to use Surf Object - Store. + Set ``dataverse.files..payload-signing=true``, ``dataverse.files..chunked-encoding=false`` and ``dataverse.files..path-style-request=true`` to use Surf Object + Store. You will need the Swift client (documented at ) to create the access key and secret key for the S3 interface. Note that the ``dataverse.files..proxy-url`` setting can be used in installations where the object store is proxied, but it should be considered an advanced option that will require significant expertise to properly configure. 
For direct uploads and downloads, Dataverse redirects to the proxy-url but presigns the urls based on the ``dataverse.files..custom-endpoint-url``. From 7b9319eea4f26ad7d2b6986cfdc1976b7f433e9a Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 12 Jun 2024 11:49:38 -0400 Subject: [PATCH 32/63] ignore shapefiles if they are under a hidden directory in the zip file --- .../iq/dataverse/util/ShapefileHandler.java | 42 +++++++++++------- .../util/shapefile/ShapefileHandlerTest.java | 8 +++- src/test/resources/hiddenShapefiles.zip | Bin 0 -> 53764 bytes 3 files changed, 33 insertions(+), 17 deletions(-) create mode 100644 src/test/resources/hiddenShapefiles.zip diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java index 9786fda4217..0c77e33712b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java @@ -15,6 +15,7 @@ import java.util.*; import java.nio.file.Files; +import java.nio.file.Paths; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; import java.util.logging.Level; import java.util.logging.Logger; @@ -695,33 +696,42 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ this.filesListInDir.clear(); this.filesizeHash.clear(); this.fileGroups.clear(); - - try{ + + try{ ZipInputStream zipStream = new ZipInputStream(zip_file_stream); ZipEntry entry; - + List hiddenDirectories = new ArrayList<>(); while((entry = zipStream.getNextEntry())!=null){ + String zentryFileName = entry.getName(); + boolean isDirectory = entry.isDirectory(); + + Boolean skip = isDirectory || this.isFileToSkip(zentryFileName); + + // check if path is hidden + if (isDirectory && Files.isHidden(Paths.get(zentryFileName))) { + hiddenDirectories.add(zentryFileName); + logger.info("Ignoring files under hidden directory: " + 
zentryFileName); + } else { + // check if the path was already found to be hidden + for (String hidden : hiddenDirectories) { + if (zentryFileName.startsWith(hidden)) { + skip = true; + break; + } + } + } - String zentryFileName = entry.getName(); - //msg("zip entry: " + entry.getName()); - // Skip files or folders starting with __ - if (this.isFileToSkip(zentryFileName)){ - continue; - } - - if (entry.isDirectory()) { - //String dirpath = outputFolder + "/" + zentryFileName; - //createDirectory(dirpath); - continue; + if (skip) { + continue; } - + String unzipFileName = this.getFileBasename(zentryFileName); if (unzipFileName==null){ logger.warning("Zip Entry Basename is an empty string: " + zentryFileName); continue; } String unzipFolderName = this.getFolderName(zentryFileName); - + String unzipFilePath = unzipFileName; if (unzipFolderName != null) { unzipFilePath = unzipFolderName + "/" + unzipFileName; diff --git a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java index f0e538616b2..3c5b4797b0a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/shapefile/ShapefileHandlerTest.java @@ -282,8 +282,14 @@ public void testZippedShapefileWithExtraFiles() throws IOException{ msg("Passed!"); } - + @Test + public void testHiddenFiles() { + // test with shapefiles in hidden directory + ShapefileHandler shp_handler = new ShapefileHandler("src/test/resources/hiddenShapefiles.zip"); + shp_handler.DEBUG= true; + assertFalse(shp_handler.containsShapefile()); + } diff --git a/src/test/resources/hiddenShapefiles.zip b/src/test/resources/hiddenShapefiles.zip new file mode 100644 index 0000000000000000000000000000000000000000..64ed1721eb20f9c5bcaff7e205ec866a967b90d2 GIT binary patch literal 53764 zcmcG#cUTkQn=Y&(qNt!Kpi~tF6%py3sEDX2ReFi^PAEcvkVHj6sUiqcBO)TbcSuBf 
z2Z4khAV4SyAp{7d9)G)gojtqfoZY?7x8Gdz%r$?^d*AmwbI&{PyfcrHKF3k+L;qNe zpQ4}t)6TzajvW#?6yWFO7AR}>{KO#+PoXNudzBupK_Mp(9X?7ueCW{8^M8*t`k(FR z&H63>*Yv;A!+cNZrwKahv-%YVkgReQMN|Hl6nE92j35&b`6$={K`BXdVZM&Zss z!|wfO5?i0so_A*6{CMiG&VR(s{|xrFw?p7d2PY?&z(5!0e@ep>2G&vge^0}|N;%E_ zuPOQWDZTuk@p*eV{ui_T@1Yg{E6^dT%0OjB8Svlm@$hv6E6D#B*-EVb?X^|^e`Hg< zedqSSa0)bexC;G^|EmYK{vD^z|0hl#2fvpfkoUisIq5HP(SJl9WAkrM_{o0{d7!(4 zpUb~!_>A;$P5m4HhXnlN#yWoJh!D-_;@#Zz(?<>+ddqR>kja0mhCmPJz}tcDA^+s+ zY8g==;D2|ukEzyEQ-||f3DE*aY8-Pp4@+OtEIG0*^_sipN$VSZhv&In)b?edzE`)5j_8;K>u9b`;)IIa{h1YpT-_>xu*N{dKn>Mv#+h)qWxBA zLDiLx{c}ba9T>;_d=%!6#+UKA9R?Sz&tkuCv|H?!S)0uENc~w}JA;yXoOxw4F#&3z z<-&OHz79B&pR`ErRI~^(X;uo{UN}XO5jUczJYuiczO)DJsK2g~w9gsXXYzPG{F6Ne!j=MbKCcX+|0 zhS{l%NiXKs(Wul@1k>m~@l{n_ojomsP1ENzW>07Ipccsa4+cAWXKiCYH!Aj)u(XFI z04qyiQlHvT=ls<~yxr5=TI0!TgiXvTnwgtCdNyjwPd#;XdrO1|p(+*CLe^?+`ZaP5 z8Za^K7o45yJl81oO7GMQMs6!hs_P9_s3*i2v|qr@<0|^OZSZiLJE}z*S8|ct9(n2dQrLPU)qTetu=KH~TtSB6|M#sc067LH}#c*3I$3mIb%h?1I{dqz)R3@pzTq6lFKGb)65_8O8Qfhtd z3{%?o>2#ma$lmSLpm+~%AmX+DkP(T1>xJ&h|Y?LkuSeF{Zc~(i8AfnQ*Rt z{LCaznDCB@=7LdY5obL00*jkaO}tBwIAyVR9LXm#uAK?B8Q3R->=Y`uQr@%(PnXZd z>7{3?A$&Dl(xvNGLS*o_i;wYSOf?jAg=HN?sxZ=W^*~>=5c^!r z>B$Cfw|xyXKA84y4=?@*BAD4b6Y3A);le!6WA51X1A2EVu)Z{~GL$sLAm_@g%F-8Z zOUs8&(@?WXUL?qTNNgNnS(OHU1tgtixG7Gf;TM8czVPgE?M+hm^iG#$4Qw*1~CFo0*Ll~7g3)a+~;T9t(0EeD^4pH_v5YT$e{5xekH z@jnur8D*ytns-nZJ*q&SyD+Etd)gWC5y;u9-SRrJ#TG8%?PhI+c%2KJ1_>Ub3y2`5 zvsg73u;OmP<~t7TKctKTltMb-bk-?)&CvRItHETK-OCFB6Rti$HT6+UXuTk%+EEOA zG`s<*JR>FEn_sGfDF`r9D-baobJ1KDWp+M1C5|O?MXN3>Vda! 
z=J`Z?4w`rRJSivK7n;LO3$4t`_wSgW9w>u9{~Y9_5>B``3~|Kc@I|fD1Flpz3pY_O z0t_7d%+D*St>juaS+j}6G!}$Gf}NV<;?h8p0frc{v2v)j+s=xs#W?8$@HwggX2vX~ zx$I7_W6b70f7152<)O}}_0Dix*7plgzY;W_Rhh6+KIncM*O^fL02nD(3doIHE1Q*l zGUd|Alf8M|9z>NL0gMkFVe&K0nHb6f}}4m?{5d0`(v60Js{9@^x*$DFq8x-iWl+;?yWAbtwThs@rvQLU}wg|PSY zhV{Mes(wgB=_EE)1s$Uh7sN=JXPra9De^$eSX7X7|NT;l);WgP8dkjN=f>lJ3(V2_ z(amZH-S8p^)R;Ax(e1)+w*AeiJ6Rhbj;2?dnq<>o<{{H6m=+&1;w|!aG!$C^?8?Iz zedMsi6d80#A=ylB$YKhO%7oZe-+8DV^o%8Ju|nU}LQ77g;z4dJ+pegDW*#}z6`UZc z!>#cc3X7!sfKf8f6ZZndowRysLcEW%v|BD8 zvT)_Vjxps|$?}Y3plCzX=PgrmAD1wE zzo7g*#9?Mre!rnnnQ@xNg%jtQmB~H{S7xz)b?2l|6g?tp1g`&Vm1VRKd;{x9eKT9jwd@xb z{6=&&w+g+;i2qR*0OJ$f4iCdVA$evzqc%QwF6ptmtrm6d?^DwIH&Z*?nVR~Z-h#TQ zAy`s!8v3zEcM}>Wd#hgMN%&WCM5mw-+&%moIi^!Zi0vMJK3U`A0l2!myHnhORb;*# zOIYmf$A>%Njh4)E$5^wUdYOWr+H)N6WpDFzr-5bUxd!o+)Fy>k%`nB&5dQ3kbDEZx zJdAUp0d>d2Y$+kDF<{R~1eXhlqp~EqY!#;Sb_KCtd+5f1+U{AgnF2s1LM0fWJqWh8F!?ix|xut2*-_l}K&eBbd#FJ>TXizmWcUQcG z*Ms?JL*E+yhn9>kt1kT-#8skyeoUsi^l}hyNsRmKgN>Wk;E~2yv`=+u?g3&?r@|s` zzXfr%#N79r=phg$1p^Hlol>*dPjlfXDG?U7M-YOJU9-&jj;wBI*9NziJ0_VNYk`v9 z`|Sfg+YEavzMb3CoyV`Lj4gpMh^(sxO`EFh z6{aK39!QotSPLNplG=_pSr+@lcbkcZdj%6c@D!Ea>eHRd9f;jd;5+d|@%I^4TE9B? 
z5S9BI$NCayUu>j=zoCil>aW+08RR!d-3~B0Ez;_=|0puLPJjtgD;YGY>Uw+Wxs(B~W(+rxCGrvNdRHBikjIosQlTpVXqAK?`R0 zHyF5_8=r4pqa~dWw1x)-+r8U_vh@e0I5(v5L!ZWgS@sIU)7ee#8XNLcVhu@_izcg^ zqNoMz=3zR9^`vgviagUyY`7%<+8j#?-TGY_LFdEHJZoD(kgaJBw3Nd}+fxoqKaf3jEnL z4Pf2Pyu8oL(AD{Xdc$3e33XA~PGUZJ8k|ltfa<)Qg>6lBY}-Eev8BuoDGgQ!rJHb3 z(XYmbev1E5uS2)i7?cLREUEGmwcps`3<@uxY-qfC13t1He;0ev-A?0kmJL_ShaYbu zR4T9wrf79b3NwY{dLka{JTI72{I4+m$|R{9*;*|o333l zC$!IyTKl>cjyk~}qCBy!mh7oW7XMHl+jcO2+DnY7N9ui+74=ZnK1Kg;)kMOS|9Pr?4Slpgtn_3 zEFFLIPcSFtZBmguT3?yu^8#MbaWx+!-{c;e;`#zChF2FelWNoVbJW_&yTY73E`o6q z&jgQjxhwg<7tcYBs0H&f&N1VaGwk@8rp)I|nPJcf`wb{V3X%=f zy;s*i!xHd&F07peDuxAf2KWzs*WQeEL4HN)(Q^RuK%$d7>If^72l@rYM>~T($&=7J z7piB<4#slmNf)!ULN#{MBfg3s)~e?rlpT%IRzIKH z2Wtxdn4Q)-{AWscGxi$(Yo+O)|Mab>GBb4T+(sHQY;H$$J#?H>9K=_WyUK`rf;u@G zbX=x%&k_l$j2B{esXL36pw5rAe7|jOq>|KVGO>GN)s(0$NAlkbEF3?F{#~ST*tU~? zzsN^~izk|#vFS_W%313R>*Ot*Vy@`!2TD-5nh*~MPY-i}x=zMoj}&ja6P74aYj#&c z*a}nDGFQOVkBrCl(BL0FGqi_OEJ1=TZ?G&~1UeaXZhujBf~%o!wdEm6%i3*N{Vn=N zb$m_`Pf3J()`N}f)@~!}v1pI#c${*>aRU2AFx7FZZ2z=WHa8%y0bdwC+cJ0XZeVk9 zbNgmxU>&Q?@w3o75@R#F13Irn(ZiMY?d(#ynzUq6kOQlLT*C!=RV!}>{7VoAPL!gt zVRiLbG%BE>i{A#O_Idos=Ic4$Kr0eQFz?J@_yUIjwV4y%nl}GpJ}z`p5*O~e zP5ZsGD7m!3Qx_K9Nfl!2!@gdEUpn9%0DOlK=OUO1Ly?H_EIqer3{G+yt=2EKmafIQx&q8AQ4k_m8`PS9GuDLj-8~DC3c`c;3Oy0XL61Ys!>0;t%aJ zx~`g)w<}kI@PNt`zYcFZNRfq{C4j1SYb$6bT8k0;!G6`d!jyB9Ob_#Qm6 zS7&%yjBk2|7m)Ki^Mz2J;YH-K&MqNjGq+>ouV77yTj<4>68 zTg<+l7JpsIAKfhn=TQ9Y=A+2<=4Un=8=K*&L89IjThB+^ZoB};9~gefRUEwezD>~} zN!0+S8ONbyFs5pPdm5*+%gkMAP~t5O4H(Gv%C@6yrRQVzZ#VC1uaw1Voro6eBV=tF z_L)FTZ!ojkTKdYL*#Ua@KdCXNmjh(8Auz@9UUO(!=GWe{2B9cWar=pN2>38%R&x05wA%nDoG ze2Z6rETFC5gin>KK$?^H0*Q=xWm`mG@@5{Kn?nD*CN5L1X>w=}*a2II)!t-kC$5Zu z_i9>VEi=aV13nv!N=uUEq;Tv@MMWkF~7z1f#!Lcm05k;f%R|O zo4VMJ^UWEFVQwMrkg3$V{X)JCFsoC$DBN0#?Q(COR8r>FAYA6+6Qb6&G;>E5|5LB+ z>GV(8AOVy(mARiLIqWr$&7x;0kS+&<4l1uXQ7h~5uhHFW4~rza)n8_D1_2As@sA~b zAN(G4y~LD%tRf7p48^7;#!-(4%gvdk?ut4bOy%wHsY_@!5SV^I|j&lv1xINK2ciUY5gic*f|D<*^TB$KsYrW|aRHXoK 
zIFz3aw1ZEZJgKP`bAj+NqpDKSTLCrb2k~(f#~mSyuSC8o)02hHw1>EYwn$8i3#?u! zoE>ks9bfi3v()Z1Lj3bEj2o{#cPL)+OP1$r?S~rLllj``S7TO_)-%>~*NfJx)|;}q zv}D7wcbeBi$0&t(s|~a@2HjYP?x^uC-JN_xD6pa0W$OTt+tr~B zk%1;rlIPAZM7M?rs(BgbQyg-Hj8N$b=6nq`gq3%SpzopA1~feEpHR%xG27se7=Ky@9R8a6krg4G1ALs_ z&E7w~A0+uo{}kXjU{1hq?4Y=Ru9Nii%_gdc!G58|x=hHYo|sUJ4!Oq2+U?^10+dof|^BIR(iFp-56tID(=EL*aMBSe9iiL)wtP8*2zu+lwSiceCSl zYq$bW>3lK55qoXQ`s~4Xtd+&`ED}}!Wclgr)`{)?8Bn*-kVkVq`Dgb=3CR>Y=<64OI^S#k>|4%75cLP_Mtd{6HP%D0MNN!$VM8h=vX@{Z26wgXpo zBqyn!BZ#|(X*K?|0IfJg27a-J1kPRZ>C;Pa zfBcK=-xTNx(CxPw&6VbGu&eh1H=V@_x4dd}D|>R3&5RU2|XADJ-9_GZ3M8 z@kMYntp}RmOiS_RREg;_-7I{a#3>yDR~JA{%P-B&zu8n?8OZWdUFTy|2)_~`97?sA zdFJ0B;wx33y$Dld>Fr-$PDlp!E~B2nidTj_VI{N^yAtbeW4z0Z6n~@!_kl%w|~^9EqX?8)bCLSc($}UsJDKVR{!FS#KT1#<}&=J##HRGLdu03>(?%rINte5ud6=8SnH`^7xn3a9o1;}S!bCz5P2Mhy0 zIfL;bWR2+7i>if>pr7$?-(#o=;ESaU(fZ8XRlKIfO<;F<{P*3av6>5ouP%4Pq<)5Y z|0rF&5}k2fEGOjM;>Bq2b+Ksrv<3k{suReGtw z;zoy^$d?KIY=3fp%^m-A4)U2isyC32)ee|IP$f$ia7pKgP3xd-d^nh86&z3Xr1KQ{ z`KlDY$0lSiiujp;CSe#GUoCE6X?Wu{QBB+aAZ&s1nPg$FA1b@tk*s2Bf3;y`$-X#Z z)f!h)c!i5sqRUViVS`x*9FE^0Jq(We%4@+f+4MRHH&g@pVx1wn>9$?Y>%Cd#91dtk zSe;xDop1oBj6kvrOi8OrZ+5|#78!hlgM_#ln%= zYt-TE@x}g5w@Z`jO>rh21LrMF|JwIPM;?Ejxvh;qNYs#MvUhozyZP|ytyvAhfJ2I; zu&>K@HCcTSNh?_Ez(m%P=yvU!hos>lxTAUQbyj< z`R6*sJxOfFvtF*L{S0HVtO%_@yN_B!i`f!xcjnp#;tkUrf@l0t+gtX=z{U7_+%5Eo zO!$zRm|jS>2=7(R#O+7D3LeO)l?Zp_`;{bj2TfChCG7{M7 z_~$IVidKxO*E$egFkjKffJ80Uk$4`zEOMk=TFaBy!k?Mp;Q)NXqOfFT=`jg<*NfCo z_UFn24cWZRnf;qSSu|xIi&xY*n0)xe98soj|859IM7F58WQpi$U$(+r*-NYB9fuZf zIt7Uf)(A!aooi@o3(=l=mBl-~tqn%3NtLsLbmP4=2ls{2wGE^4_qr}Y*l)XdKKW|` z)(-k@-wpY(`cUq-h&_x<6lN}u?6N<`jbPhfE^!(FrLr}S@-KD0y|geOon z_MA35T)GWyn#0=Z&1_vp9UyjlNo^W|jU=Qv!e=7HFD0nVIb2*O)68N)1p>cJ-Jzuli;o0Lk%~?O=4;8wEp83Bzb^q(fbnd0=HGl+p&F=QpdmzB z26Z4CE4x-Hms_=TX+=7%@il%(XdrC5hi)pWcX~L=V&U4HxDi0}g(e9VC^`gee2F5V z{K~Qlnm{NIH8=6*U)C4eXI0;27JNFnYM5ugaOY$9W+Wzc3UE-%diVP#>Qvu0@xIX+;2K}><$%*6 z7nwAl2^`HU6u8ywV7W_R>P^*wJ%ieA_U}crm;&QJQAC#|V91=;(A0ZUVNx3QtCU)} 
z!@zqQpR~uVyowYgU?y6dxm)F4mM~khHMv120%u#iWLugf3U)MtU5I@bQAXv{*R(fp z)sbYI_f5S`52ngFLTg#q@*T`KfAT2es#w!*8ign@+;F`pi0)hZ=Y8=qb}(nK=$H8leKd+pzAmc z`f@NTCIO@MYV+(%-W*8mjM!4na`Mna18oPxdF4raFh6rQJ;N|OxMod|8C8Ndf-7KM zULL{4J*=>Q*o(VuUm{IJY$^j9nPb>yM}Pq#FLWoD7-CY!Udgx%UN&aY?F>e|cGU9vpj zCu#4$m0q+v)~7ao4)}rjc&?gUceII||5~O=K(mb2GQm$e?yrOyPMtlsT0}n0$-29F zoc!Zf-&h=SIo8wUAZXg@*Xg$wZ|>=4EgV~1`4QFOci8^g&Y3KZJ%o3g{deezjjfh0 z$aN}GZ>X1lss8A)(AyZJ()$gkmerF_8kat3IRAJ2Ya8pQ)Vq8glS?bLpn3OtF8ZU0 z3bC`>m$?s42Pk)FoHV#o=nN@R^NSyGc2p)=4~`uWf6*3gj*(}&g~^Vn zNFl0s<=|}0^7I#PC`DjV>WwiPvpf+R$K)3_BHXRu><|mctn&NQYbBjul35q9s5WS; z7SwZX#P-~P?uae_fXib^OW(nR_Co)lshFs8#0g+IbsU~ZC2nS0oh0AFg)~WQFn}gE zEtKN$ZF7dMKA;nZ-x~_Ge(^Dc-7#=JwJ-WjF!8qgLL;&r+~@U;itkk%@3?us&!||Z z;>>b$^0_{L*F}pl(odjClMuT{_19-KRxdGk)^G%ypsJ7+uKtN^KOvP%|0(WPJj=|z zj+!2H=3NM-9&@Y3X(e+hXZvLh1ojrVuxEh=-0 zS;MOq!|zmoSKezYIU*5g@#Y`|qB?yR@sNUx2kQO09cWT&D(g{qZ~H8(%xDWmtG4R_7c+{O)liW+O=20tPuuPk{C zX0Pnr0OP`rP2`ELBv3Ck9jvg$$~E}wENw$>sYThVA?udjrPLyn;tQ2Gc=z-(o)yXeWin*uIOU611k5Bsh4XcyV`M6-?~ts7UT_Tj&UVFI$*eP=p)yFBE%j zsK)bj(0bJ>wi+}N9@7+G&MGdie@4T+CS**HB8du_sC1a=JHG$q|HJZe4+LPF zS+HS^1d&@M`r#6H9+GPGp7D0KX0)w>^%p;vFJMDh0*ijhXYWmaZxDHaR+*+;T(+;0`UpBCG1qi=1QS5R zu{7WBZN?P<*BsXS!WxQq(k}Lqwo`ce_g~!?`?#s|sjQ;xFf)Kzx8?0b5ga&(I}XYu zYFq_B4UAPR9j@2GeX^bT<{#IoB_#NGCOM|49Z`TXh5uyzY5Mh2WzVCtF8r~p`26P5 z!FQhJ&FcX(i}FJXpezehR54NCfO$njWfYwY?`rO<`=af2S&szMg|r&2m)+yp02n~i zP<{GYyMFlAGRSF5iG^X)=-Ow4VeRAda}frF{BM^t)lMXcdTb1By3RSXwnA_#(%O0l zm~JwtRyH)LDZTAOWP(yd zo-W^5xH)+Ipou_FCfhDVhZv+M2<(X_9-z5v4N2k9Pa?Bho;$*_71*2UZ|&Z17-h$< zwD{^<^@_W|SDBFnyC}ek4N5br$1mSi66joS&O1m8nLW)W`dz@;zO@^-ZM0=oO!(JO z)Bu-)CBs1q!Jx*;s2-aeWt}0+3N*}u@Es5$NF2}CEZmh2-h{+p!-%aszzD(*wVmAy zjfR7ocN7E*Nz&A*7MyAG*ova6lk>w=PP^p(3@W?dMr}t3+Fl=_!EKu7UK=(HbpDgkA*<*93kisCQI}JEZn0&NzQN~4UmF7-} zRbSZoAar{FQ^H5PpuLS= zotuUQnm@KPdn0ZdrsIr1yYY)Dca3r<6lsFuxdgH;@%Ta$I_Z4XDT5aYsVhu@dYMVM zG_qhne=)aL{{|?pIc3|vH|7TDM|1kNZ*SiXP`+A!I<8o4*q%3FBEq1X#oa{NR9dWm zaja5UZVoM{wsnugCclKhQ`gKz%`&zh$4ZGjv|Xve^8io`BTCh* 
ze4ya7#ldG~9|l}Zcf^%1-i1p~gKlBmP-4&sk(oA@4W8<0(H=880S9`6YIEmL9+eH2 z?%w6T$tQLF_SgE?i`>x~*Kg<4zgrZFhPZEjrOE9Eu5*nW7K)o`tiyXmRa?#4QLL37 zO~B0Jf{r~NBJS=#YPsact!fpXv|Q`Mp=S!U$3?q+Hz}5jdp&xg_LWxe#Ty+O{Jdt( z&&gvVb2meZ7i*#;Je!Nj)t%Eq30}>m0GP(b$>lMNC<&i|i}}{` z50(i56MdTxvV8XYqtBvC*A5bm(`v^^8^}v_ppVdZn;HR(;_bTMq?_y_hF?5D0A}0; zvAdBe>;4)P3nO-=D?BZK)yh4XACg1$_!NpB!2Sy7RfYs>9BB4D+KSwAb0)n+o+F7c z{QjKR;8S`KKXrlHz|`2Oejg%3^pfu(5LJ7VOuYv;V1`&I+G0l$rnG+W8^-j$4b&RJadi8+Ma@S311*goYsVp1v1`=%(HX zxYF{V->0wQ%X4|fIKL|atEYehFq4Bm5!4O0Lp!mrRyf_~Uajz72W5FzZqo;Vz7o8b zGM)A*z>m3mE$*OSep7%Zj$L@ZCUQx#7h8_~qE;zG`?QO0|4e%}B-~&qACl4V9zto# zU?r4Q`sy>46EJfyX_gdi8LSg8_=i+o5|VO&Xk1DQ>$+giR71&-#9hKd((n--^r66U zP0N9#an11;LF0~C{Uy1jUD}3}bEiOwoaQ_LJ?9^(-(Q7&lo8dk1s;aItRUYL9s1sa zo51U;jQ6bGt{>ZxdlfKlVQR4+%g*F(_hZsryuB`WChaS{?==NSb+m$0rMp=iwIm*1 z*>1A+4xi@BGt0C6s6lC+gVfc>xW{oSN@C+7W;m-ju#y-$#ESWzc5zo{J%22Lv%~{1 z_nIJi2G!VGO9Di)j&Yq*g6s$f479G|d4z$63|~JrTmqu1;iA8vYw4c`{cJgeEM>HK zjCIM@hbry8_Nn6YyBWS&@%N%}hKnjqI0X9%KsK`{Onr}M%Cl?sRH=^z%_CH5t?8R@ zpjBCJ()vq!eGxwwVJ}=j7TsDj||G4K~UW?3<0++@}5mi9-@ea?j z5LNnLd!Plg$}0MX46MM_Z9v#MJPeVgXY~%c1;eYnW{7`e#!Hs9l}dF5WJ|8Ou)-86 z3kG)p#*y1w>!oQ8hWNU7(sO1N2}#Y-4bK{)j?_tk&91`>7(ccVz2|P0eQl1|=Igb& zUY64wwSA>G<9Zoixb0{`n3@A zy?#+TI4s$lWm~Vv1GVBriZHh8eoV#`@gYE5LQi5iRV8t6@n-kJ2LZiZRvKyH^wu2< zyx#Cs5guvg8_*bOK0jb%3@>DvGb?BtelwN*mO+{&do;xMns)nO z_W^LqUU_mbwEvSr;5D~nrfes?Wq{lKQfy{+&Zt6g*fGPQ)#c5iBsw90I@<#cX`pW} z!PG#V_qqX7lh@P$g?Rm4M*c>~p+Nswhczc2H%8ShsR9l34jvLXu`sjnt#Tu4|pp(Et7 z{RAN|NZzR)KrE(l47%YFzQzXG;Du~-&vuaCNqcZSsHnbT;t*;%6|HOJtVPa_HCGEMKiO&@`ovftLy0`i*(<&65d^#74<=jrIdyQGO4GMwt zk7X-u|I7h<5)#{9#7C%|k<$BJb$-aYwr<2AYrGyI{c!xx57^a0>(4b&@51=T{&Y&@ zlP=DtC}${VDI>IBYe#6m(~i>qpdF(fr=6gks6ATn&Y|R#r-!9io3%*3IaiHl@vei+ z881Gos_yiMXw5%$9cney&h&)25;8*|YcistO9_D(u(&-ISaz?%XlMLH3%ty6-1&z) zsBX}x=UKkDmO$HG`%PH}B762sLVkUy++0@+4wl!k*K|u+57+?zQiHI8c!Km3eRM1(PdQ#gff|->K8m61Eaa?!3$jp^HKmz)SBG?uG$AIPmd^ zaWxONQeIKd@?N!n6n-o7d;N87clkE(o1sj>_9rO7 
z2N~+5OM#o$aiXsExLEL4a=ap*M`=!Yc>EG8-Y#{LJzWHa`?+d;ZI`%W++)Hgwn%t-|O0Twq6xv+ulS9n$ z`9}QalS|C?`A+=q<6_%dAA#TZd$V||3~UxDc(7m)7qtsm`n-)V0Prd+>*uFD(u&OUIh_fDgOsmUp}r zeN=rs{fdn#Wu%c1sq&$CsT9{9YorO~G{DAwk9lBx0d_O$0JfF}Zr`kxo@f5rq9CjXk>H#u z`})5keun6iX61k+HRp!dA;Gkn>w;%D&x#W3Xm4a>gh=2j*}5+SA;#(p&`DeK&H|3H zv+HMqV%58C&j*?rJUirh%+mB%ItTh{x$A4C)5W1Lo*ng)%uJ7t*dtOf0!8p9sQy;! zr7FzG-;c-irhqZ9&7X?J6M+h_-1q+TO)})^-vCDX!xpOjg6K$wq?6P;V2J29Tv<@m z-R(keNHe-TU4ha&7(-cUtoPmSDA2uE5^dUdL^CRcmwahamnZwJ!f~iy{jS`IUYP=- zBzx;epB}Rwv9c)A7k0t{y4A=s99yjxsrSUWp~0xKYzQ-o0Us?^tTQMKyfq5Z0dPv+~u$L zWUK4$4;iTT&{0i=t*5ODTtinitG1r%KpsGJArB#XkVlZmkSBltOZ_@$RQ40x8ueq; zCf@@lmO1f7c7_a!JnvF?>j{;v7 zkRQMNm~mNK%Vpb6N?ncFJQf61u&)zvFT>pMwC#L`VJ$?6&lOJ17 zBpRDICp=Ro@aPYkGk zNmh=#UwSgk=_;^lxO^$&pe*alBMQP2oqFR(cb$UrzR>8sw&Bgm*=zX{N~|u&Q~L_3 zEh%H)-@hLOai@tT88+Cq-e*%moLQ>uf%E+*IZ`R{g9LmC9koWvQ+K=ulwg z=pBeUqmUna=UqRaib(Tp`+f6x_>e!Gvg{msXXA^j=M75YeL=ohwg>BNRRczj`7%q* z%D!LBUAxk2Ec<@;po9WXT*WXiIx_yvR#hh9W&e*mwmihX9HZ=!6g`(&_GEFYWh!r# zqi#%l&CVBhw9uv?Sl(*2H!akFo^UL%lJVtpB1}2i>1ocHXA5-t1lYLn8r5PnCMoOn zV6(w_e`t==h{Dm6*i; ztN5qmbT-)MJl}3x9T1%)-m`;v#@nxvEMy| zt=@e>1;=ymaj&tJAGyENs^`RWYp+Y}-4U!xB5z)nJ$+*gfBlbK>m6O&wGlH8n-hR@ zzX^X5^&g3cLq}K0IQWZMt;@)vPi(%8!8Y$?CVEA0`TSJFSH{Pc>nE9vgV~7(tEZ$i z#H;)<$fJTZqwCk3F)c^-zJ8_3(0ngBU^#bS??5NQWL>^}Tj$W0nAcG08f><{aF^h4 z*|o*GSNgNtoy6WE?Bl%2TRWijU8cujo=n1({?Y>XN1qR8OhWDcY<*h=uy?Pa?P9Hp z5QhW*q$rH^30%I`b4p-)H__Sr8ej1p*EIXKY@1eBcicL$V;)6!wT{tvd`|nu8!JSI z22b3MvHR{;yH2i{K$vn+R7kqSSwa0fUNYzSTUuY=AKEr_1-_XG_ikP&vJpR<@$E}i z%KpNc-1qlMq8bucTj9F-GxDEMy6rzMxZ;3fH$0C5=nlUL8Ap}j589t5%El(~=aY!` zwxL1<47!&G}220}p<0NAqM3Sdh1hu~El^ueqM?AGo+&dcN_x zw|Wa<6z(P+xa@qRe~uel6LNM;;F;R885~tUEc?*-6fj##s;`~kJT8#GJb1ov<#Cg+ z)$tA2r*_*#nU-LFSFoK;`4mz4UAt&qR0onX*aGo#+xlB=aVG&uvs3?cCSxz{^Xmc$ zqo_*mSo`L^1?OvW^w=78I2aBFvu!Y~9(o{_uyZc<=e!zY*V^t-9D`owXv zq~s7(PW#IX%nS1^|EO?%sdw37AoW3W+~(z_`A247 zrYC)F^}b)y@@Z8-Qog)bRwb%ANm&|0yDFPlgmF?H4m8J{+Y0LY?JF4NDqwB0Rqi_& 
zxESvuC#@;{I1GFI-IHU`e)$D&-2~RMZ`Q@$9^YTZJ&xEsCi6UZy)uMqR?B9=ZmuI9 zPdw$R%1HGQk#mqAK_+b6_Km3Wc%ZEHh4G+0L(_HOVuW_r#*Op&S)#&+H(ZM@*qNWY z;#qveRmLaMn;&b`O?Ul3(C8j_X!;%euJ&j^W%L&)B3tDhXDI%CF|j**=G@z}p?W9& z2W{^e)l}Doi`r2@P^yX$0qN2e1cWGvNbe#wfE4LUl@em3OYc%b5s;2lX$ei~1nHp# zkP-+GAcUG^-@Na4zI)HPXN>djjAx9DnYEJawdQ>0oNMo_Qggh;|3ScpQQ<(oDZP^m zF^gz^ekXS7lazyte8+qRpp&b*m7XX-_YzBd89KG#-od~jd3s>=H1J53R_HR;Om)0^ z_?HmLypG*+)lSx?egMY7-EpV{X)vv$clv=j23V3x)yH&s${^*B`*?zm1Lp37_j zqk9HpmaM&4v((^RJxQa7Mb$2zAGrt|qt$tI-G7z!_l74|>zVV{9qFQB4!J2z-PS)Q zKSS#zagKofDNheyL*Y!R;(L<9XcyYFBIKnXhlBqn`%dB7lbD2^0;0tm^`otl82n*~SwY9cx$=@{!>? zK8gd=NWm{`B#qqZbThBX+8=tahv08nFU6zwB^4GW4;}QmGY4P2TUW-S>L1M1)07De zIlM>z@o~Aj^(*}~?gQ(5=K`a}$5$K^I@||UoHa0MUkjc0Q=3!2>L^b{=!$pDdLEDu z7VP}2eanXM%Uam>wX!4@$ax>a=mBe(2*WjBi7wZOjIaGyU;5dPnDZ+3FlOt{{xbg* zp?Je)V$?rik?1j8BxWABLMxXnu0!^VNCFGEd_gmVDOP!Q>CX!rH2oBoeIO#Z$Jy^X zCbcD8-kMnMtFeaM50;rlUwS($ENCDcua+4dg08KV6lUL8mGs!Akww?3rkpCclZ9NvLFzsqhw|Q|-Y)uWc?>M1eV{BadUa?0M_z_jrsWTvR5Gca+ za_}jSwuc&F>agC(XGU0a$+cM`&_ET#&u;tOD{)5e-hhgQ@rpFd>}8ll_hvqc_xzYI zf5K6Y-p^|R=aLoGeyu88d|S*+BFJI-=G6U3lP4^HfbI6pOR0Kbbst*A+a{3i_dbm+w(mx}B7*AKYc94og9;k- zzMFbt3%-kiY3yc!D^G>q%7_fOPlNZWCoh~ZaS%qxd_MucDRSDJ-&?cdeR=+qp9w#N zKW!xft(I@9Xe7*K2YG64Z1BV8=%PV!r`6-mG8tc)5wD`r;ezf(#NpJbF^TAcOQz4) zy|;teRq`Cr)-s#@E#19rEM~XA3(Ew43}HaJ*O%ND*nD%XCSy+bR9sG#yUxe`5&s(* z2oj4vn2D%C+aCNz<#20aorw4evzE~8He(-ktrbaION9Z1$JLkI`D(cwZm)2@ZA~}h zRRUTR!WEQ0qt6HZ8Le5I|F~2l7Tz9Q{_U61yTJ$_##b-ar^` z`#^3^CXWl$?pk!wuX}`>ws^kGbc*4W$hluB^_}0j-iXdSMO5j0w zph5&XObR7-{FqZpq~u2AFUpK&SKN8Gu>5q#B_<(n)6)KTlYwj}AP@JLv@H4IY>~;c zig|>0!#!%VyiCUfS8DVNO2(M3@XO1SOwH*XG8?n?*Uo1%y?skt!%Aw=_sGTcn(p9K zmA>gxcj3*#DOz#hz644kT=lpR_WR< zu+{8Va--M= zz=iN}*(*TdGUTw$_1pUNUQyqIBZMbEb1~)>LwNm0RAQl-De>rSE zeD~FEd?nv!Ai(AT-M>|#mdjAr@?~u?R6gncxccI*(vs_ZT2_q}zFV#bs_`IM+oYWV z-KS8@@#g~Wlj)ztSN?1Zi3z$1qS}2gUuzqDs7~Utfj|b74%S4YaRqU6jx}PR*w_rN ziA!swtL!H_N^s=7>riB0En~#UG(QTN`oc9sP+68)#094*a`D={W!n4p7(NW 
z)BBT5jtXID*j4>(HVKu&H2!E$mEiE}Z+6l7&yNDmI7BNw_wdrU%}X|P34ia2NRU& zKOp1Iq*?n}QR1D!D0wkPttrp5gB&g=a-*gx>zRiIMjtEwix1cop>qP6?+VUGU`xfV zCuXdPOY)52$FM8P)IYWdoC9pDvT-~Df!Ax!PDg5gc)-#G^o)@nW^PT#t#?+gbfLv; zUL4HcNBFHsAf4`?CSU!r)_||O((*K$zP)Vv3t#(6svyg5t=6WFB3(h&39~o$-j6W7 zflDT+n%<2 zuc4rBo=4(?<(l+Iv?pE@*N->(9b%8agb6YjVof%GiRo9NSL}12sQN#(nC)Xl8Pv?4m-(8!k(*UoBzET?VP?W_5ExyKjSx-#TdNNk(}Bl-@GP|#A)6AOc#a$ z)3Ez29NBgxKX%P4r_XE;_TbYw4r#HfYGGk$KW_KLgW?!Fy!Aw1^KK;9togyi(SU?4 zM&!@f3U%%Sju3QNuzD;bD~rJ-(nR_Kf(v-?dOgXi!!J)#;kKha*SDUhpXRoxfA*YO zHWW9HW=^TlJ1DZ`4OX`^Oeo(}7O|etG%G*o7K*TbC22l(q$gf@g|xZuX(+3YWs0iF#< z(-U@DIrvy=iMu1Ww(2XgYGhaQ9`C>&T+(tVc%}lV>3(`^^Uc$|hrY?zY-;WfOmqdt zxD|i0*it(E-4Kk`{Mq~^De?+f^Y^B6MSDEwF!N4L(xJKC(jg~L>Z9MACX?wN{nitw zvM?*0LmIJK949v){CsoqiUU!$yk6mg^oa+0(FHW+v6@|@Sqi>}Q~Kpv*!`!N@r9(m zx@|l7$-zCco=afDxgXoo12GJ<)rYf;UXQWWW{1z%nz&YVZbEr%DRK^f#j?I!W&7Az zV2q?~=aiKxCrFLm-F|qnM&RN-=E^Yx?ECiWza`f)XdKJ;U)eEvd=r;wHlBCCLpT>A zmD=K_5&EviqDo#G;(^wX3Z3ou zN4REx_Oj;2ft~H+ppMB=$qcbPQ-C5GQcpCxss4#GGKtQ|IV0i_k($N{X+3H>1btV@ z)dtwwxn2~L<+df%Hy2qQn$3SZ-)d*@Ndx+(2Qa?%uzz(&M)f$O#kJoQ`w|=9 z^a*(x+~2WT$IFH^dLf~IxDR?E-VP%#KQn+XT?Ph!Z;F;Wi;C{}^wzPHSgjeyzP~z% zeQrB0hfbIN%(*AYk^Z2@;9SrMTF`LuT;P1rhCoU6 z*O!N!4nHRT4&snAM%Ijk{iDnx&X$f|M7Jvi@!Coko6JkoT*>jIGFQJtVaFn)2MLmB zZZNUYOnt<>OR}D$3vGFYOO=QMuGrMXrYW+_2=Wudl{K0)2XsGyNi#e^mRda4kWQks z=8v&&isICwPq3xrz?=zYOcTG!8??vlRzo}X`~9QdS%?JooFqn=grD6wozHaTc6c!B z>Astt7gzM+cfWR31vz5#FJUDj)9f=k_T=KwL^4ZB$wUO2mW(gi`djm<@|vIi7j(jt zBGJQKjAW_0QVQ`RlKEPV;T4C#c?Nk-HitZ$(j|)`Reln zVR$vbY{nqAvmqqBp3*z2gNoh$D2T>a0{!t=2~=K->VP5+y4C(5`7gWtv(=k%a>9D}~;?ylq zD%Zj3b0Pi_7xga7alnckC~(NN`6S1`LnygiTwR0AF|D3iezv9%HWDNA#Fc?_K1hRp zb*fQhC~pzkb$ce<^ug#%nOKkGuHS0m(~t1->}!oN)#0;~pG^heDRJzfEG0LSd#c2K zVb&sk9(_bV`1%)z>S!`2X@*UzWPz6h-n6{`a_le=>}WZ|;d}uK$?WsK>Kn84gV95s zXTq(0bJZ?=sN9(4mRu@pQD=5RrR=5h0luXzkycDXXfhfuXtWcG{zZ}D-N#y?wcTy! 
zUJM|Ef|6M7Fe@HJ`X<1s_^Go~4gT^6MtF<3|b#o>QDO^IoDY8OcRQ&$I+j@_* za%9FFlhV`NIZG7dy)zl)llc)(oX{c?v;)3F*{{eqd4+i()noF$|mCNEi9 z?6&63ymXZMYnM?Q|FY+<-CyDJd!hsy2R=Xc3n3o>%4;9TyB~m*-&$XJtAG32_-2@v z9pG$fv(`s%v4jQIC_ECD8+n|$CE(CdZ~1P7zNU7r^YUMoDUVO^uOBKn7m-Mb=B$Pq z-HJqI!q?QS;m>5xUPUap>dTahti$Of8)&s5cUYr91DKU$+f($E3LLkxUEOMToitgQ zW=bP|H6})7e}X>v?O;-Fr95>g>%f>kHTdR3jZin`&o~K#S7=R(ZoTt}4X=_*3XP3z zYWFkXG;{AaxrP`GL(^dbEo%e>jcf;K-9P)7_r<8G;jWWS!4CNF3kNi9!Hm<{(yg7_ zjZ1eY>Q%3ua<-Mh(|7vivC&7HDS@9=wo?A=)>O&zZc2osYm(FJ+cnd}QFzO@9An#6 z_OIbbS2_?XX>J>EbVj@ew^}RufdlV7Zw1UE&IN3X>`c?9U#PaVq&`z0H(F_k12HwQ zXT|CXIJaGPa046rl=DW`8y9V+8dB7~g@yoUC)7(W(OF_FzW3osr)FUSA zEu9B*V*q`PLs4L?N0TadvGfnGi-?bdnSe%ne0n$*mk;$)hC_3a@OZ+o`~bjV?*==`|e=yT1rQD4vWW% z%@L#>_8M^m27KD|S)34MScEinD|j;bKSJy5bi1Tm1Y0{!j;9n?ff03%plK}tpA4nmi$Bn0r__R;-?8< zdMHCqw>V4LlXuWPSVF})spPJ++w!b9wJOwzPXQHn+n)VzDUstqZ|8Kq!#R+?NQ(eT{M@}%M_Ob}kFtpQo%+#3AV4MKRSZn?#3e79^nv6fb z3oU+PPy6F%wDgn*E8-O5uCXYv9X$bLw-Eo98Q%$6ostspXro7awvgCtv)Ak#tqTIx==rJ3Ft=7JTKLmQi^}X@C;@$ynW@{HXF}7`4X7 z^DU#ixPH%aaXNhmx$zZ89fib zoj3d{+k%&7M?J9o!_?$b7y32XBcowrdpLtdn0ZIa=%eDBIn&(yTRNK`CV*R-m6x4D zr;?xqgCXW^TZg4fm0gd9g0rYi7N3n>NTxZZ{M9v^1=2$hPPD%^TK|$5dZ{g&Tno9> z)I_Us1}+ zMRMJh%o6JSuP$R;<99d?kP+|liO9-V128{iq25c_(zASbwk2p{wXch!Bd6`~31%yg z3}I+6=$U-q?QwHCA*{URB%ULjOy$t23QEVA}^l=(4 zNXYq0R@B^o8X({Zw7rZ(A8dDu@AV0JAB6Tg3w-~faZAYOS@*-We`s3X$emmPT-})A zGOvq?9S&UGWOW|o*?df;NP*Qy=|wUAuR2j*p8&&mZKqwWRI6$!4aeBtL6A+TYwY#! 
zYXOW0ZfWLF!+#opKbDg}_$}e*m+kOHVX``Ihb!Ju;e{sxFY%gOx&Otd;BBOq-E5L{ z*I=;P{WSjafV6NrGXUrF_Nn`W{R>g_Z1Q}I583Y8CQbkq$?N+-W12FZ2J;tsZa{d- z^nTf2{!L9BtRbp@#hIFjO9CV(6D@0$y0O}vJV99#z;6%H)r8cXVIAgTDEl4ofJSC^ zN2>NR#0<#T*$>)$RMj~!li_Zuo2P=@;?f~c*2+cnQ z{i0IC<=q4>;0dE)S(ST&qRI(>W&ECbJNlC}mGH6JX-yIXI1 zl&a0wF*{_@^#$DT*pU0&V#4P@Lk838Q(qn>Sj4WHXJj&MtdOIpH}gPX6V4x5x+hi{dth8 z|ARVBj}){)PgO9s`^@Z*72L{iagS@K1w>@n-E)NTcE4{3xM;;E(lBN-d)Ha`Iy;rM zr-5ysSHx-80}w8A=DSFQz9jXBdiREppZ#W0ZknN`Z`9I1eteis`15q`8xc9sRDYWW z`{2TPUhs|g_(!)7{Ux8i4!BuMIV3h4D&!0C%hx4$2Xc@mCx!&i?wf3_)H<{`V_NvSN~tgavEo z`2_EsLFtG-b)`B)LiQ>*^ylADX>^Ui6p}Hnwm4~wdpm6c@D0zN8)0}L^K{jRP9X-| zd+Xwb_yD1e;e9U zOjTCod@2cZ>hwak<~(z`!BDi@G(m1d;rAQ1~ncQ=`8!0 z0W-@FjWy{#iOj^Y*Pw$_o$bu86Baz7)PypmI!stY98ev} zyjAukZD;T+519>L73kkGS`Yra$#!=yI(ueSU3$xPuH-UmjFO?PqpR;>z1+X7}# zvi(`gk@Z&#H{bmo@gZar`(qQS43e2Uz1k5VWT)t6B=)h~m>Dk9ZyWbAbpl{Z!_-Sm zspdZQ3K10e*Vc)ahOzxwhm+DoorkR_i-Xu&R+o$T5q!S zu`e(}&nOOzh(Y1YB2O7ra4@1#{mc7PdV;?-t55#kpu5@^h8g1V0+j>W^$369O4sh~ z^us_3S=W+-8r*dr8JRcyIO7SQKFf)Fi@g#|Hf3IZmPt*e^JR3k7H1kg#=WA$;ujc4 z+514rcW@lkjLoIh@BQcrH==9f^Wf&;h7xflC?i~vYQ_+iiBXRU!yYZ-8Lu^Qwx$X2 z=?%WeM|Y_Xx8wIDh~$setahB44M9xj^%CA5LGvmLiOA$ziVh^{wC7uG8^aDiqq;vj zbj~?OSSaIQFsF#=8K6KPu;)QwA=`Ib9GB<=9TO}jf41z{-{=2a5TAOb;%+`E-I?mJG!Ow zxT2kE%31ilxK&Qa)5`7+8G#m?-0Ve8-A%gi04kZlAmk0eFe&0-+3r`lEeu?tD6sSX zNb#Q`lEY-*W~QZ9Za^SP(Z&2_Px(Q(iTTC1|3rlK=DmvR;jdN)?kC>zwK9p3el9S7 z{pO1_Xw_iP(hq?#w%-|L#Xs@9!J2K<%hTzW84M~s=dPnI`Xsi1Q+^^$3!S`z&dzA+&3KqL(3rk&X4D=EQzm=h;0~0>TiN#q5;7s2FQ`Jq~dzY#s@N@LkfZ*o5 zxZkMPoG}-JJFIft4;QNbDD-c(7AeDP8TdIQn{%RI!_f(t-IA`d5Yr^o@rI{SdtnAr3Y2IOG$Q{nbo^3&zVXDh zyIgr`KE~D;H@jZ|;x0lY4lTmZ`HxJxc({}0mc*!uVEi#hr7>Fwt%)}#IfUy^!z0mE z6R>_PHGP-+ggI}@>BV6 zFlBnM-agRg*Zh#@>e3c#tY@e==!aN2VC=O9DW$K04JH+r$8VxB@@upZzD0#v%H-)S zR#Ez6E&CKmB6oAqxKR1qJNpt;BQ_gVXc8BkXx-MGIoRG$C%6IXW6#5PsPP{6Fk>9g zafxW#`pt@OAkIR^5FhljyLi-|+8;&Rg7LGWEF%e+LB)|iW$GSdh3KEx{HQFZV_Dst z0_o(!1@14=dcEvvwhrq}WBO;5-R#L4@uQGs!*Z$iN5WR(#p03Xv40~^zO2gKpd}%1 
zVBs?%;}Tmyo*lukCJviSwLnDgOk--3P?BgClf%@&L?vh<2B~sHO{=XSHa+Zl;=}rP z`NLhO{ zD8s+h!CrHfc9wnUiIrbZj1NFhKB*k?8i%b!{4<=Rtef9mHn4ftfK2~3OIPjlh~kh+z<@Q|cly*u*{s7%7D5CeI#SZ-}wD6Gr|vQ?9d4Y;82 z9d)WMPYqGxOq(LcdD|9aUCI&4Fw{J6uuB)!C80srQ7Z>}&-vYpJ;e4UjO_;>(Sew~ z*vbJ`V>Ib=fLd;;lt5)>H?sE&_dmleH8sDRAg*t5+6`*{7X~ue#Ae0VP2L25a%_(Zw_a&6Edm^tq@Xh+UVf_6Eb^GxOSPl2aBkKLq zAq4Nc&JiN!ZnN75wc$4Y1XbZhJf*b0N3UGHdbQuf5Ib=ll$Cjpl$DAVpsLwsSJ8#T zk~aP5vOL`fJoZJ;RUa~cP{IUo5?CATW{nh@{bBt;Wk5WWv>SXY5xYgTqA;=zuT9c2 z%B>$FuOdLqmJrTL2g=R+<=VV}SG+-}=- z!n-&7I%NB=raNY*JO1-}vvE+PI^iz5{I2>?Q^H*la>BRz%tSlBUiE67ghWw+UiI=! z*W%>6>eLT8@|mg+^{E@U9kx(f7(sYE1cOwn)UXdI@tjY$n4UK*Hu8g%FCY?&4mi`m z37Mu@rr`T9-__28lfR>cNgxB}8L(7;ys|YJ%asxAy1j57pVh+vf-9gLYnnN>wzUHq zlycR4jCI#dP7*1m2Ol4cPm~gK<|}pABM}84T*HYAIwF!kpxCO&56gQ}hA6>rtexa< zC(YGByjqn#CR7#6#IYPFAA-xlIEejWXu#vjKNq2T)V0_r_3l`|6LU0_d3wBm|5B?V zXnbw|XT>^ufEggDAXgA#RosA840s9|V{WRzV8u`T&~&`5PbG0zm@`@4v{mUjZ-u2gqNrD1Z$RrI?c%Qj-2F)h7XI{~O*J z9>*Sg5HPDq#UfI}f22WJ1|S2tQJASnG+hBN{tHv%e+!r?3M9WH{0?Rj*$E2+h9dX= zhXkNbEspVfm=$C{EENy~EU8QN{QfDJ6Ux|w{|92ifYAR0MM@y)@CYv|2>cJp?H}os zba;#>VAhZ*7#vUnpp;XWzJH~gfN3tt@qYs&&kC#!cn_QfKH2|QI#2QXe?j0|0k8cF zi~%K##CQza124mRfH;5~Q2D1oHkm42q~cfR9`$g9{|VqFSQqg6#W862Ka%l(r8Gbi zctz}(zWPiK)tXDL*;^~PAj@2CtjN-g)w zi@pp8imrI}G2N-T@SSu{L+3$$)37EVE~vyAp_)C;7AwPfl(-M!rCdY zNIqR5kYNCYd~!^Iosds~{yWG z0&2#xyKJB)Yv!{dgxN_#|A6j{f4!6@nnD^ssd1SwIjn|g5qkY=6Nm(b%EPrunYELE zU~~U+$8kBenJjT1s1W{S({$ukzIHZvM*4-0(>IF%I35L(gPtE)~|~3*#o!E-hf>*vQP`iFf%% zzAT4Xrl~XvqiH^fnKcCd+N(iO_l!YKn}1xYkOk}4F?dYfCq{U4-&23-R^W9tN>WV+ z>Nuh1u}gDMT&{(|gZE#Sqo3Er+%Hi?4vqcJUvcbYLUmi23t57+G))3_D6NMcr9(O~bxokBS|j0f`B?rBBRIhY zC-tcZyezIzPqu7Sa;m@ks&Z!1(Y>{@tnr}HRoZ8`Hq^_wQ6ekZ8b+8tJW0JbV=mQ| z8?vAn#R@DvdDKpeLT!mwDSdn*18L(rN zT&ZmYsV(9B)GGWisn{8|BIYwZt7Ko<*uS5l^Jbq^f7Dn>OfTrKFG_K07>g>8?Pw47 z{j;91`xThHwJp|d>NUN0t^K5j&uZ6FMEtbali3sSZ-k9P0uE9P$IqH~Q*aty?2U1^ zenAwGuu5H70O>XU^>uXN43cZNCw;l(!4Co-vcP)Or>luUW1m9%!Y_#Bv406=;Dz2U zfe+r$Fl1p*HvrzDAi+4|vDXle 
zLnfvsLGUX;+56H4cB9%uynOy1U=}kXsz{THi08w_3Q14>A*_%IM2b2!uUE#21Sv($z{dSqd6MxxF(~FE?n*>di!0qyzRbc! zaF2KoxaY;aF)dbb5>#GT!Q`N%`M~Q^y~8tobWqW1};@Bk{C$>)>&rrS^bQWp2X z9g4A5TKdLFr`>p#f^{fsLaN@iU6TX3_wVZZPAl7#MB%M}qlRL3P9oxO&339!TNXr*_%q+jl~?^?XwDS?e#J{>!n+my?x&JKy}v zVd5#w9`8$+4tpUibjvl;Y7Ep_efW|ab`dUY35?m5_S&gXhhP)L&75rTu4+tpkfl?b zO!bq8`|;~8LKlB>CLqu-0(wtG;+1t-wcVKospYx+S# zWEUR>PA8a}Hx>`{58-cbhSisR{L@KL*x14>IPR<2sjWPM*y!WMGw;B@-h*``clDF~ z`Wc_?x-6a(r|794)+D2`NKvo9qQ8?`1<<_Duag?x$YFP0g+l0eir%yjqIzb)^2rCy zZd^d2*TbR%Q&Mm} z1S5WbEYkBV*NwXe;}#cJu0O%+PtzVR`W`#FxrkMR8t-4$=>!KvPCyU_*GEAzUwAc6 zKFhb%sgaUrmt_&j>dvT@^aY_Pg2&vuXGeOAH_buzqC1-Mclw@9(;qE@8sqQ;I(IgN zM&PZ(kw<{>#OayB7nZ97ZBS38z3+WFxR_lPvK6d#$hkv?m9DNw%_1rPT~a?k_mxh8 zsjB&M=9{0l&jGyNlH$N)nO*uAQ>$}1^{I9=DY3%*5qCvJc$9W zD9DF}I?5lJIw~tbh#yLh9k>2WP_Vq%SI>?$i~T4JxR2zXn$8R9R8QjJl6e*G_<%$b zARWYI9RC(b{wWs3$C!EX*?^MIbO0-Y>D(>E6laX`kzRMY#Iz82(Lso!kUO2@1~`#yIFl*YqYbXk1W|h}UA~38 zJC9@k_Ge+7TnPDuK1;h_cn|0et)nG>ucImFbxfAZrSFMcMnujjo!Cbl^1^jcRook|1N$n z|NEo!0546J7>W0SW)NCgcrONl)GKAYZ}FFuPR5dp>fy?Iu}5CVYf+%>-k?;OXULbU zG(ZwPMf}s(lEs<5$BukVE4Q~i^1HnMg0dP*l1%E;T9v97rar@(9wj{Pjg@rSn|voT z(#Ev72K@`fvpjt_DU2J$EhCrxOncsL)hiLG5Q?>rNUvzgsWKM%7xs|c_bsC zWo)v^)5qRdLn#!A%nV(kLvJui48RGeGvbKJZhIS9K%IR3X~j3&4Cuz4eyjbPNkCIg z@o#nYRHF^wgzW+f^)>;?4b(a`Z=5dVgc>-Sxb4s&_>r`$JN#04*Ne?E zXL*BI3a0nem6y)3u3|pxvx-Q6QPa_<@DLD3#{O0->{-idmkfp|g$AIwRC;#>rAdE7 zyRxGk)?bx8LC|&GYSNdgve}GvfgrNxcI;-dXHJBv8HJauqJ110YrgjEh^FX83+20N z5&t9FS%~wHu5h*xJIiX`bLJyQn~;D-W~ocg&C^z5$!|oAxEA z(#w@H!DJlyN$7I+g~=^b%5PRWz{u_*zk}%*InGk>sZh8o9`&b@AYQAd4P3lz<_@0O zOI3a&L$Tps`kP?R%0)|XF_?6LDLgR#Wtt@Dl1=7gWY1*2EjUX4wDi{CNb5vPsDsf{ z`?zCG&~6y+y^yiSt}zkNPZ`WQ7Hq6(de%GhmFM;(ISWv=b1GE$$lPhjh1cczD>?@JcU?aiDx-$Fh7t@^Mkb*xgPXsUM zy`F1Q3aadcGa3c%@OKOj!SWpYazM``1QB}X0mY~eVn248=f@jT@fqNCLtQFYnauIu z&wR;YaFK^d&#=GpkWuIQ!l5$XizagpQBR=V=!zStf9(pnFp8wq90a*0Z zMNXJ;1t?JTheTm|wQD=UR~L$cRB~9<3py;4K&6>}+h?w@kO(yeFELOEUZ zr@HfyX0J(@&FUXDedfybOvusMbnr#UD3D}->&AsAE(hNckD;V$kUz^J&EhT2tj0{h 
z=-uLRwWyyzc(1af+5BdbtxD7mnAG+QY*;Wny|Jr!^X3`wV@1PUllOvnUGi6^$QPP1 zk2=D}V%>wyIp|Z%U){U6hSL=s^AA}(I4Lu5lc;~|LCa!8LEsNjSWFVshlvohAI(fI zXBgKI^W}AqeI}-Xuv-twTC07RTdo_0? z`eFUM*+*9fq?+*+?F&L-@&47bYQIJ8rf7swmys*x@Z!uTN-Av?FEWZcIW<<71Pb6<>q|_4dVrk4qxzr?y22X%x{MQi;vP_;w?!=;b=oa^~4V2|L(+|1khHv^rNQ zF)YTvGUi)mvJX2+e4ADIM2@{e!*{D!eoeDyr=9)NQ8e7*$P*Y_eNk?>Q!3YGY6l0Pgi+@0~MK2#fjlvnBgH{K%!b#L%^P5X}; zJmOPadGH4%HL%keI!=f+T#$m8RFkk#G}JM27nKX<%A#C+6_b2@>UgBFD>S2nm!hx) zvf4o26f>Y`md*z#9n%GAO`hf2yl~_^;&F#jATVR`_~T>bQmRZeIi%T0W|>LJm6sZE;mr8|= zPJ_J@a^L$x=3 zU`h%6rR3h2I64r?DF|g523pj*2*Pp3;q>l1;jjehM=@63cSeqrA$}wors6Xgv&XIQ$}|fn-m=Tqv9LQu zPW<2Al34@T3tl^&!{)CUf?M~cnS+0Jva+!=Uq{x0lj)rP6_$CRL`}tz6DnG_j$GsI5TDT7`A8l;m@;Y z%d&g`Z{2ku=(Dm-5;z;FcGm_PYeOL2}aBE-~gqZ0hq7)!4d2&1_!g=<7 zQ^-lJ=U?5DkA2WLPWf=k$`hR&SMU4nAn8Z@jAri|uN$%Jljb~4Fce5s;mbU~yzliJ zy|55dY|4@rNAReE;4StY9zJ|Hd=oc=OkQbqmZwQl(h17te~jY$!%be2{o*!l6weDB zPe2k&h>?lKvTFj(Z&-b+zP@DwSAhR44)*UAe}~s`G zlOOEt^ikw_v(quzi#hiI)#44iPsCS-4?r)9VzZH_oG^AW`%_Q<$g1BujJmuS{< z`C1+ubc5{D*6u9I*S4T0bN{uCjUNx-i<8~IfWG%3^V$Lm&+_qmV{T!UZA`s|%k$=w zYnQIz6bh`!$?{-2(K3s#N(datdpUt<{Vcr!tY^%Z#|p!{Y=M3^uGL_Vo@?9oxXVv%ca*%3sTO7oPx_cBN@U zQxV?laRzjdLRmkkb)JLH!=m)@WB*Rv)wfwo%3Y}bXv|@vu@t0rLp#=oObodd}ua#LGTkpvjUcA`XD! zN=x_6_dUP*R4r=zEf_E~^FkGlurt7ATi@WxlT%kIItnQ3kf-(u26klrfqu&Q!}52e z)#6DtNNToQq~KN7iI6d9XDDZ`hgdL{_GVGTw3D(#|G1Il^^D__l=Xz?36A(+iT%Dy z9l_<~8K=datSZ$cAjb0$FFq=7^hLKDLx#y_sfF$PN}l`qSnX`6J^me{-Q5qyTpUt$ zv+udbv%BD}J#dEY*Lz88;``jMuhgEGvjEv&W>-?fm@hEO^_Fr4v$pVhCx|2@fbalk zkIhcC3Oc3Z&>dq6THIpAl zxmopW9>32)+=OpueYM)B6wIC|3cugaz8hHsKR&~uU16I`+1R15uRalM(0l0aggOIG zcFs{k5^qg2Ruu!$|Px0{Kp$#i=q}u%(#`)0NBN;-==1?Gg||UPh3v! 
za6V(UBgU5#`-JkLF7wrtCZ(|~#5}T-lRHy+GNl!Wx+#kF;D~hoi(v|QDu1^dvhU%{ z=s^=n5`|{6Dq52UwI#(gup6pn!m6Bnk>Bl9R+CBN-&; zoRj1{0|J5q0-}-`$vK1MBsq@coO2E{2$Pu0*|YoaIlFuJ-{;=_;rU>ghp+1Gs_LHp z>h0>*dMr1@u52<*aI@<4K;2BxxriPUxBF0zDdLzL)VPl{u_q6HI8ioV#KlJ@V>U51 z@ks^n^25Wl>1S!5$un1)+tVBXKO(0k%dO{;?9Z@r7)CGn4^}Z4YtW3i*(-f$z6yI=!0BU86llVDsQCn46EI~JtX1$Wq* z#vQ+1R#VJYY6gh}k&eD@;ef^Y_SK&&PzQJa@a{wDthhmQL{PKl*K1=C#-5;fB|ODR zk9ECU4(x`Tm2E0>$ZCizgukgc#*csQns79!h3LX~1E88DREOc;x?pQ(?rViAh?B3} zC2dD^x{#;YKBUYAYNOK1+V1_UD(*%?9(j;s1aE$ zX1Kod(TSLt*L#7ntsr=dt3xM)OJrPG^&{x#!&wzxC6TYr0^683r5DOI^5`tOSDtIQ z9wv>}q$8pdQY)YO8GOaBRuE~$BMoe23aHEjg0@V+ZiFNi7Uw}b0=cOb;q8q288@?{ z9NIDCPjFzi%j|Z|7b_1`ir9oH9DFl?Ow4X(=Y!%+CYnldOWRi~Y}-!xQtxbwrXcpT zz=JS_7e9J@f}9Yw))7^uCLcz*-29=DAMO*p&6-@lR^X0I;d~W+p|KGeiGrl+zI0>X zw3}~&5q&7Nk5f(+Ed2qeZn+4H!5(Q|V@JoFyO5o}YGVpw!aCzM`#i4{5d<5c?Dj3Y z{VQiRQyi#?krreR5e(=i^2)p7g7%#XY=2H83_~*5%Il^{Y(uwp+o{^bV0&`#F{OHm z^mi_nLihUnpTGu&&!qeY^>fS+j#BJfUp}W}qgGrvdoGb(vD}kVRN1gDbzac3tZ9iOZw0e*A772ff_*Wgy9%$T}h0J%NOo9py@P%Pg^*Z@!q zAljmtF7%2v6n$A+eo^$rIAb%l3R?nT$1EO1jJUTF0KyF*9Ay8wAa2O8crX-1wch+I z=9UpI_|0vv%+06C$jc4%%)~qs^*ZTDyG`nB1d)yVpgO(_{l;W_kQ&8=-dykvBXDPB zu+tb(j~AxqY^fpd8{CW~0d=FKS-$peqp+%i2?aK^zt|yPVNZp<^}5s94u!n5fVq04 zbzQi@pz$(1>-Mq;riK$_&={0;gL-%G+)jcB3VL3njj|cTsMo$Z?+^$AuDBtfwewnK zu`WWFkpA2?*#t^eW|~@$;g?g_Ucukq0QK6Dp7#}pGe3orjHrWXQ@Lz)Ag$4m73Mz5 zH_Sy9ni%gjRPaI^cfWMt01SMDD37haEaIQh#tr*&B~jLMhXG9qANyj)UDa!;#dM@J z$~8dB2A-H_$E`u($F9hxurZjwKptGbwro5Vt`TIU@XwsN^nR4O2JO`748BTqj__pC z%q1%7y|`HeJDM-e`01MVT2+xPfI0_6046(cl}*DkVb=_zz8I|d0?h_wt*e#9_W2%6 zaBB2U^n^&iBZ6{0e8OwTTB&Z(y9D6$z4|$3y_9Fy_F;6|s-Mkf$V|nzgFnd`yHDoY z&J5jdpWT61E#J%9aQ$`gBn;M=0r$`aq{h}*-jq=TE0&<3JLb4`d$P|rQFhF-JSWBv z$Db4og4l(lMsb4-b0ONF0>NCuCKg++-dBeR(6*O9P^qR4x^rs7o5`cvLfq5m19b}= z0yxW0KfHYiXDf2qP8_jQV{Ow%J5G#iVIw11nno$D^uVLL_}ke)m@>?h`=NA*0O)*W z1$^Pt7SOmqW1sMa!6oe?XxpuUMJ+j8*s~@BrFRTomX%w31^S3MK894M)_kYFIXeI{ z=AwSe`#K2BVE^dcnRJF6hbj0j!PyqrCgKBK>4;+{$os>iuO~iq+;Q!S;7IxoMRosh 
z@Ima#J*MdSMV78Y28>Lw4__-{7f|mMgPiAXJwt8M04Anhamwb56tuH+_F4Bv@D}$S zYF*#MiO=tO(?*Lu=oSpDhG$1%por5Q-FEKdsU3y6MPFLSoDu83i=>iv`T!5Up#|xd zgPiADoAe?9FGA?973^X?LcT;`V}vhbE7%qi513vELy0VagE=+h?>=eHK%CPGxNi0> zkViDdh^+VSB%<`@?vLy$tc(r^`UlS6P6y@75BN#wms9L}PMct!M{U!im^L5~)a%)R z9)2sW0;nFgc$F$Nmw*%w4%$-i*pY+Z<)uuoy-8Jm*$Y?#%sIz)7=3G#d*fsRXS}brCpzLZd|nyMfs(kw%`%z4|B*$@HRtPTeQDm zB!l^d;twr-FnOt>9{S^bd7=kH-)go{`rlL7^9kK6`BJKklYXe#{!*>HCL84pT39oU z3eQi6qVk?Z-%p=(K-G-+uIJv9f{@Jtpn+xxe4P0}2Jhnwvwf$oV_I&*AL}`1C5VLT zpbs5L#7r~jmhh8SFu!RqY$^ge)uj6T40@pe1>@h2I@uW-NJMg9mSn&+$pl^6G_<@1 zMls*UNN|Y1+dtcnyXsspy}~OS4q2l5j0p-}WmUG@?A&8eh#QT!zZHI@XW6nH2bTk! zpvMSy&j#kp&DMe8O3VeguBXNp^0ADLF9ElJ*O@E#K)3H_VK1sBw%vEgj>9J3x!*3>siB>{br(Y3zNzO~a-n5b zxOx>U5f58E?Fu{V#=|Y@MJvFMPn6$$^%I7tADunKd56e&djhj$7T*?3SBR<_y=+HWh+qp5(!37mdBU*T%=rfE{ecORetq_5h*ZqRIoTJn2z5R_>r+tm}6 zQ`HJ^_6+!T(Nt7nCC1ehs{V>{6=6W$8F+b<``l|Bu2rZYy4LpLBi`$U>ipa>JCM2K zZ%;eV-vQbWOM!d$%bB$A^S!NosWNvmhQ@zc9tiZSLDJ#K=ru#yAAOF`P=BPrASbc7 z+m8d2zwR8wo`K$VvZ3*?br^QP^5^E7#cP_|Nxb<~4E^N(LKDPqCF+WKf2}ppBTE^$ z9XT{((AJO@GqDyaRId>7-f^~_8RId-&-oAl z^+BkCy7oyYlq+)pjr#S=pGlTz-Z}TvBc{h1KU3J(LGo=#cAj5^Gco~w$8kR=$Roc$ zK>Jkjd4|OYcpG;_!QA-WTVQc7CM*e`Aog}iAMr6i`v^IP(nRk?UlCf_GXbakidMiU zHQTshjMO21+h9JZt#rN`!`GkVVAu+e-==7r#%pcV)35j!w81aRIOib}Svgy2$quE95B}#s^|Ic=OO!aEu>JKrAQBS6QYAe!?Y_Nn=1>@?P4j*qT?S*cM#`4R zqg4s)12+Kgw?ss1g@9K)(_kM=MqdRgV4>T?_N5#G6cFWhovVp&$6ZI{DUE4oL2J=A zSzN_ayBgbS&4!VTtjM#R_?^=Opk&}H?uc)UgQj}riX6i>p z?w49AM|#2ODf*2$$(v}Q>w9w_6^F7>CIe{l6^pavSjdvo6;qeq_R71nGw2LDEJ9S) z`JU<~Xp@;MnDX^a0f4xuPGRR6xt8|sNxtUpDHy&MlA}HZhQ%$65E#4%7ERK?48+&sGZ0SDvo^axhe6X)@gw_O z)2X8<>TS0-xQTI%_@@I+2h1Y@67dd(=;aHdm~t70WSp=tP>gW4Sp?BQQ2Q%0h1>2( zb&rJsZW470)pk6)ftJc+Xx@9S*}PKo3i7>@EIPL!CqujqXUG^={G^KSYBctVwf_oZTbwx z0fw6|VhYh&fF22(ZzruVtEtK#vT1-&X*)D|`}Z>eL6XsR$|NTZg(sT#mO+cCTvY+k z2U+15*>q-=`JzJzcgERKV|>43(U{C9Bc$g+g=e27Sw?eWGMf<5Vh79G404+%p9zw;d}Z_=gtt^1?TIi zCx$Y}tNI~XIg|ek>r4qikB!3Fw&r3!1uSFzFmqn;Jp}GQZN~%lW4^kFOqvh}r)QFS 
zXtLPoRzT)wefG_~y8SVwL5xs2;~J)wQz0b~olC>FW+^Ps@z7KivH*`v{*IJx$l zV$e_m=D`ZHW5A$bv%IUH)Iug4bI7HlILLCpZbyb7gjYUpe+WCqzYI{bf0+N0^jXk4 z7b0|cjT`9kQzl^*y=pFvq~4lVw>_2}e5uTr6$UxFo)dPqJcxtzb(~(jAEXwB6>};1 zf|v-COm}0Ct7(HG+B7W$d3=7HmR)pph-SIQz^uA2kwE@!?0(oq?Q1p6LoEr01J2}S zg3g`MMGL($MUendMIh!^(960rDsh~|btni2fy*9|W6cYEF_f7vmqj~z!s|W}0Vw*y zOZRfGgFcwWlmSB&&(?lgwq!J4bS{T-xydwWqy$i*6*o`e+zsl8v5Sngu1^?epo3M+ zI=NXXt5FJT_R-em!uVbmns%CDAJL0uT&o&`aip%ONh5hdB(96SZAj}kX>o%Y<6#c> zp^Gb@Ppj}R>|7p#?#8~EQcx+8+Q>_Xyo6d%Y3E-?>=zYCWK-d>w;J@F0++pKRy6xP zg$B*fwDLyMBVx{1MWB%WMUAHd0j1@x48 z*Ui(BJVkbcbdT?zT}w>V5Q;@E89ap6bD`c>-ur>&MXF#XH1jTcAoSf>U~5NT;2{o;@Z-%WUylJyoCQk z2%o+xVD3O%?ww7tYXB99#646k7baJ z7v!fB_H}8CbM|(?hkUF*>xW1)kj+JQ*8E>%${w6>u~W85zmP<0x-A0F_+q94Qbz`o zbVyX zyqnjNaGHLcF}83h-%bO1DBH>e7mAsSMZ$18&^qT*vKg8~6mpm5;3UsEC(2ByLI=NBv{?+P!uOJkWe}K4)+` zA;1gNz>>_|&frT-zE~}{ClAl)FxcYSN3nu70OG;(4MHST4+=9H$WPc^_?a)vtp7d{|XRR0*bfw zvRxV<;5kph41c@Fk4&NVe+L%22A6zu2#rI`^fQ+4q-|%YWGsLe-nZ}GqD~C&<6v!z zU!yFS;?e|igm}*pA%@StF(nI1|2#8t9FD7Y2hUw_EFG&SHM8d_KpCj)Js>-F`H#ZJ z9^aL?#8ALqVxJ{P1c3q5BpVD3xjgHvtw|^KJ2Ess?y>^wn%lCIYZ%qVB z&1i`ipx-ov0^SJ-O4en&qpHdB!R%liyj3;k!-V%4BMc?4nOX>a){bcALl4(hGTKDL zIF3e798L4(!a`{L)32v35Y+FoXZyc1r0q=kuH&72??U(m?3wjXW;e8@m87DspS3bw z`g+D$Dgi+Z(2)4R1d~ZNkZc6}2L!+QIlM=E6y5)GXOi#X;T1P`Bf&%=!Mo82Xv!Wq zV2uGbx0eFxp5FYW2>zbUO!=`sF8KPo^b$>{7ev<4{aw|tZO{%#=$oo4CM6EdIFxvN z5O0tp52qCRVREHZo`RfF5f6aLir_r;S9&hgB z!2>RY9!$^cMH8C4A~v+TmOCcO%tqqqB4~rRqi5=v@D%Y(EJ}LY(!6k?Ad|?g%kCa> z;|&ql1y?{{sV5^f)D`+*-jDbRuheuV1Vzy*qSvmi#K0PsR*SDf@uy(9J{$ zV5MI?UNbNfWD2nPmTTukvbNC#(@*I0_*nqc-_$hYefrjk~e8?L_$}(cIvDnV7jv-@> zO(l7)2yXv?W^Z(xz8%jL3^cezIkGE{Ar~9=e_Y((y)zc+3IA-SJAe+!ec7xLM!18i zH^ST$gn!ffdVYL+0EQ5rX{I9*;7U!dMM3>^QN!%3-Qd>tia5CXa%PsF z0!C|$O9xMWpr>-Ue{0?H75}68oU4oR_KD<_&O(*#n^YCp)_Hr*(>vlaPOq6O9buZq zLVOm9r$GrYzogUV z)o9>>Pu1>ixq8XK1$ObyP87yiL``ti@<`yYw%3%L_4_o%3uOa?4?=E}Gy7?unH8X8 z>*>s9cXHL8$I>6e&zK>;zAKoGU%R`zAQ0jt;i^eOM5Haq7o{r7g+bjMQG)R}J3jB4 zYmPhN#LWb!gHRWM!yQb(q~G=0(JB`EV 
zI)&vEQg_{t4R_ekDF{G=A8ObjwIH8ywI;!H;Qk{~IujPEzH-NgW4#i0(Eesb=71O8$dZC? zShn)vb3s%}dZN#xuR8|3)rWZ9B^T!w!&%mP{nD&j-jdHJU--pmKf3hHwz9t>^)ZCJ zO;1TzLlo@n1xZ*IRO{}Fyq2#wy&Bq3bKEF{6}WYao|E)6+eT)j_%*ipCvy}si4!6w z6teon+()OpYM~D^%HppX_H0 zy{IqQ?e;7S`sAaFCABl!5jmmKl3bpN^BOe@8pY3$HVtG5r;}pNQ|NwZ$$F)`Bzvv> ze$FBJ>ICS{hm))n*YDqMfrE71^E^b_L~CRvwO5i9HV`iKvs3{l)<3HiL3x4;JyW3o(50uVmpZ z*nKJcoHB%8b~4Lcu1GKWY68d_{lwE42ElW98S!CLYRl?5#m&zAQ)-(`R-26_-3Kh` z$z-GzNbmN>gT6x*gWZ(#mb>q){3Mt@uBz*@(_Cx!%hA!tyQjV7n>v>8r^9it-FXeM#F;WyxrMOjPSdKmY@@%qWu3-n%f z_v7asYB>ozH5ThnweMdei%o-UC4Wrc?MSf`&| zR;^*OCa;EH+Exd*r;HH66au|#cCBrH3M@OlJ>UGebopv-fbAZv>p@Fy0)XQ!Yfktl zKd}>rKBb^F^4dvdc1qtqlg>VNW6CbhkNhNIE^Cl*M0s$Xu-?rfO>TuN>9JS|BrB%- zMVtN|xgQP)!hSV{0`{6?kpt(iZPj8_P!a39u9Ipmtv_Y5W9Xi=#oV*ou_OWEM#RMU zVi{NS@q$=b+LlUjs!&3hyX;<0_s087uC)8+><>&d$_ESgIGb_QaDE=%mJE^~WIbyE zJtF)?iAiDeDrQt$i@9~Gk<%jIk2RCRhE(06&YaO1T|IaumR9C=tnjGOGKnI<;mCoyqIhu}I= zoXEvug_wG&_LR=B1ix{JTi>IN9}}`!T79a1645WKt}^-5<_&969S$I3^pEn1{Pi2OiiL4jLjnSWzEUg!tjrV-HmuD z-LAN901~BfT8+lE{KVh?D0MDKYidjS2*MgGF*Y7Am=f(8zBcjq+@9+owLrYi$-59)d{ zyoy5s|9*Y58qjUAfP+YjtiFl zJ=APSqX(Lwx@MW-_Le|sTJW58NLhlpk(JZ*#3|OM9HYc%OIQmWSA4Ad~o_zcIo6%$H6c(HcBh-qX1#Hss zQ#Al#?=OJsi?N^32~vk%@5wJdr(8{(1JC&NW54gT^evEO0MkYcU<#kBctoSTYdha5 zP1kL^N9A;^v*EI2{xSn64M{z69txDpns5_*R7RL)maWrg{BDg{8piBDz?<2*iXl65 zX&0%b+-&`+>@?yi?D2s6lkMX!|9N?kc4*F#BwnKX$M6PrAGWp>VA zoVD`o*3~3_?UNXJ@nUSOi8;fA2NLyBdJwdJb7K#k?(Wv}JHsaNeVudA<;1BGd0HL< zbzw(eKWkb(;TAL1f@pVB9^0%Wqt8FvnWYT2o7gVt*h&Pi^034zc+>4~lY%lonnGNH z0<)6HS;B&8Hq}@C6EilISsKUs9QZp-Vw!UuI=aZtJZjp&U}s{-ynmZ@!F_^mI7)Cwn!_wpu?7bD^qrzv88S< zX4B|5UqLaEKw@^DN7=+)BK1af1d8W!cgw%@{gm=SP?uV7q+f)x0lbMRbGX1P-}ClR zm(TGLA6bT>aFXb$#p9acXUR4%F+0>K<|M2D*3Hs87tTRORO@BUTT(17#K0-$k*Vnd74it^2M|uX?=k@-_Nm>D>LzJ+sj{Jn`h}Bo?G1WtHd7m|L|N_z{$u zz*v+r86ZwtF^#sA7=@gSL`SR5b?I~Y<3y*refa`qz8yBM7Gav>nJ_m{?kJW)@!I|6 zy0R%mn(NtQpL{wuLnIr#g(Nr!o~UFkFpA64W_A&|{ca4uRRcvR9JOZ6l0PjW;}n-V zV$q6Cct7(!K%H`Ej#yFDf8aoKS399B$y<}Pmp!$qJ$DY6=ydkDh5m74WPf*D3GfW& 
zeVu1EoI2v(*<-ppSxwSb%k+_@<7)m{BjnZ6tBpkiQ;*p^z0gL%)|0-bktEM)>H5(4 zCw{Yo3s-otg~sp1&FlPZ8Raltb{JA26dJPW;w}*)Xu3*zIz&s88($ zy~SgcHNwtOFAuGg{iau98a<6br9bg)7(AT-_r>$&~&Mj&!P&eulnPTt)U28;A89tHD2T@k_cdl4Zavs8(jcIkWUF4-LBO(;&hGRZ>|Vt`SOzay_|IR@C96FU^~Q#TC7$S?h6u5(5412C zsV|>1=?gv=d5z{m$YxvU;i_OpBR3yXO z9>jQtzHfh#xjnSQ1KMxBZ2Uk!=+*qo`O5+?&i6*F^cdx#uN@^BICoV0-r>%(vzT%M zREbubTKyHqitzfhkvVuVLGnfkIwN@<&`=Ic7x+s*w~2I3)|lHTzwwIci77AV<*G{w z=dr!-q)lIft9qK=)_Jt=O$|+)veqQBGJj?vSs6ykoTMBd&A42sT(G2|k-~~EthK}1 zXATKF)XcBc0;zD*Rn~jFtoSme3q+>?1G+_s_Y9)BDUI!dqBanK#$}$xcphJRJYsPAo3*%F$um zWD%E8zUomjQYQiBqILb#(0Nrk>B@SSj`zfTd>~Lc=euY8(Zr*0kK-KI;)$vwMQYYh z@B5Q>%HxyfLaa>{?Z4{IIc_{1nrn)T0d($bro^)Fzi;*&OWdqmm}YR$+PaU!e^l%r zPLJZY1iosF@)5fyyfbXoC3(QY^*UzoMF@OsBTU2X+YavuH~n#8b|qB5fhdU6uOobR zgL$@1w*WS~r8Ac}`n)D6%tVsHTw%*rIX)__;Z<|(-au};kC|RPRCQE$rS4|GdS-d< z(PG8&8sj=?Oolt=zJa_zXiJfMY~S~A;z-uje3nD3$@SVs(no#={!+=_E;Q#t&bJ7; z$$Za$;s4n;-#-3eb-M9O_H8UIo&RcHNZZEK%fZ!-)5*p8-#I5XVaW9dH?iEzDS7|W zTQhfh`LLUVg-SS|lS?q-vB%NybSa!s^>?hXryGKae5GxHp{-tIJSvk8U8zy&pUZ%->%?h^+UF z9r!to#X}H>Cu_urljcU>3+x5CS%If(pL;ijli9qG>mMM7Miu#_mHuT@^7Rs^dy#!1 z1PQA_S8h3WQ?3yKG|&7>O6lL?;n^AAFycx`Z0pINI-($r+x>pGJd)0jhc8#=tZQZV zd=n{~M$@c_<2Xzg(j*rq7J3T{ma#piT7@ZGrRBL;SG^Y;tvo8ou{{cK>fe;#XMfP7 z6aCpT<@~eqfUuR~*WtT;57irpi{hnidDAB5m~8aB(|K9^WNj8@978ce~vZ_*-Bw5Hx zZc&Ct_zSY`sgQ-Y4qslolRCDBJT%cRdjCv--&QFxRqoj)R={JTb$^vqy;pT64UR=8 zVQb3UT>jR{U@R;r^+k~oLM&Pj_D7}RR)~nVx)a^i5{svEzPYfu{f|*cG8wFt&QTql zz;@mki#sw@lDpk&{bzZJ+dvWK6@2EdD@!FsjuRcf2t9bt1(Owfb&lff_r9gWW5J_d z1LK9!jG4GVMCTU*rUiV&hCepss6dDQxV7VpyOG*$p+gG1hTghNpjgH<*Jwd<~1?$h33*Sy$r3P}=fO(1XoDdLpBkRy0E>@I_xO%cPJK?G-OoN02P(9jQaM zmtAN3wq{|BQ5;Xw81Br=vNlqGWw)>UDX)NE`(Dk1ue9Ks4!^>IeW>7=qHB6B6}<$N zM9SLzh`pP4Fl4+^C>o_km@~TF6m-pc&oa`&v2VZD0?ifYTt@B?V8|b`6^pf9q%UOj zAr23I+a)`3vGyTjllwYntyWP z-=pBS+a0l@T&izw|1An`9P<0^e-4G;e$Ho0Z?Ig=0K9TjX< z?zQMcBOOJ@3nq#v_wu9x2o6QQ4V zpjYvtd*#iWEjSWjA%MYu45W(Q@TNT_yfZ{t;d8M6?m}7e6dY0D>YT;CwAgg-UKP3e 
z-wjv(VM=>WAp9!)rscm~M1O0!ubGd9hbO0pwdY?D)JCExkRn=Y9uDj*Etk|?Xqtz{#B-i=N1oK1;CZG=7a zgS&J$NwhK*m40%_?HG;w(=FYLla`Fwa~cvtmHv3;lB)2=FB+P_o$+j@ZwdB4#BNDy ztyvNfV6fo5J^M{J+W+px-x@Qiv)(rU01K;%5DSayZ;kQtb@1}C@i4P;wy|)vvHss$ z)v6OML>hzCYt6>jlE%||{v{|PckwINYd6ZG%}0u*sPK+wCiLbP1dcWFh5WkcNDQ28UT@%n z_ukn#-J|%qPfO3Ne)0Db5%-c~XDyC+uTEs$8Ef=6*H@D=0f{bqcRJ?7J_qs_BDIju zAnM))0oU{2K1u3f@J5@Z_d7Bzt-}pF2VI}_^!5J21a7=@dh%hwmHrgUYJlq=5x%KHzOpL9%+sC-mwEyURc8on($;r6v|P@fO0UcNr69nbu9 z(U%NwFu5X4li)t7wi#nM==`NwDD_x?sXL%E^EA-AgSs-V-Ls&w^7NMO)0G(ydSffv z0AAwK5EdmaFH_3Cy}XYUf}rL!>bdsi$s8m7Gkx$o%l5CO^xziJe!kCNX_Z7p8g*>~ z+xZ>V`k_}E2Yaa32JgS)tV(6YUG2BZo0?vZhS${12lpnxlv zWE{|QXluVqZ$6*GJSejsZ5v{$H@ zv{>d}8zLF=h4=$KkI1L<$_?HhDDoG(ibq$55#vl%{ug^!4X3|&$Jx8LXMhLG-`6(!W>{Wx*_hNiIWjLxUd z#M*5~XPLT8fNQPs?K=YsE%J2jx3yY|4Zx8=@>>g~k?^R6^FX74E(NI8w z!h+%td)*{CYXFw4OPf5s>?MHp4wloj+WOJPcZ(?w8WBrR+}=M=JfZ1h{iavM%3G?H zz>mpKNNuJx>9w$(mMEftSc}Z6qI7HNjRUYa=ibS?P`uCfDZsx9mdPFa{j zwU|c-2g`>0>61Ju4(&|mFA)_?ES|kt-S$)4mxsKm(=}xB5uz^;F}Esd5iU0NsEpO@ zZk?p2Y8W44qV&_3(pB{xY|zzbm+eBS<`AN?6o6N3ZcGD9=H$J6>0&st zSmFBK06#3gMni6f8|8fKcs;xb?8+R4Pun2?-x%#toPRR8%S98_v73(~zM}l>NNUe6 z49OM!B%gGzbx;ze$F1r|!cINaViM3w&dsLlAe1j(RKa-h)1FbRUeUeF8O>E%I)p@f zZY~Reb!98;zc{r5{5O&!ab}5dLH1R4u`MCDnAJR)rtXqSPM_);dibTJ&aW)alhyR7 zbcl*Ln)RPE@BYH~QCbabpppHaeF|Lwz*IC>_6)DR+`phUMJn%ZQQ|Yr%gnh3wOoBR z7iqmi@kI0k-Oy($Y4F6-FBigoJLkIyCObjfEXfYs_pWDJ!}{#;Tl~I0!$H1FM9mW6 zZDIn^ywJpR8Eqryw~LM=+qYB?Ro|%&E%8NrpFQ;WXriZ5oV?AAYf@9j*&4m~Qiz8) z5*~x@l~u)J;UQ*JzG0W5B7Rz=UnFHRt2=MYsnf5gqcg;JZ%Q!ES{}XEm^?7GAAdqr z5F+KwJr*t!eHt?DyyBOGorrG3;+@h7d{R47F!~YsBWBuf&L&nI{duX>`R%YD)=_?F zYD>n;LuZ1I``+rY?_WI;wu5w)RD=grK~*>IeDt8#(IGL4zZ|Vd=yFjh904xi-*xSY zW^wV6Q0zzQ7Q4>lx7~lhLjkIA`WWEm5M*8Dg+vXfq4S!BQ$&RdgiU8tb#fvG@#Pp4 zCDw^!RWB)+WB>);4muxG*U-xXlV(NFyi}}N13&72eke_9RSwHxw+~NT$cu|kK?U%@ ztt}HYr`B<`3_m5#w9gaQ=)?w&efhYa7?GECzeDu$jayT|i?0hY$ookB@G@UVqe}V9 zMF=Se={FgoXSfX#3vcE?ZsA<41G$1j$eop`&FTG4SEEwhT0x#4QCnvX)V{HUHoN}s zR|I}${qzqvDac(Fxx=4upVAgHTOAo2+~$w+yQ;2mJ~W%&#^1} 
zpu|@7HebIp+U@Q*=WykLu#AgFd$h$Yjd{vA&V%0S)Q`W+`qA`0^z3FLn@0PLkJ&?T zlBW!LwWx2^OO|1WUD?;%Y$|$ipPeJ^JBfkEgcF`L#F;}<&B-a?nqb!Dr^9gF_D=sx zHW18iF{Z_QE|t7J{N%@}`{FL$uEFHglX)p{0%^lr29~ag5<}xSl_arA{3HTt$pOT0 zqv;Moo@(gXqKyhWG<)bxDB;(GSDifnUDf zm47&nB$_2ONDgG7CERypS`YC2B&qrY_nu@r)|?j^rrTH{k;H%3&|qV&8cUkmsenE5 zA}XD;TI)sEp+}w*v9&36A&$-0s+@*Z9M2wIn~Xwue}Mgo@#pW7>-hpbp_1vu*n_cMz_r#w@TD8EyOR;5F7stQn{G=oUpDRrg#^XC?Ly z^N1;pQs5JpY_6mErXFuHg9gNw2qURJUHyOq-qXD?TU`_cK}!f|@SdCic+zIezn@Pn7doi_eQW+k9HC1KE-px2b+b7I;>M-n zF!PD(Tgn4W2MF^N2((D;FMKU33!jm{I+4{b9SGdV?E9-G_hS@V9txNm?OIg^68CB> zsX4rZ)4ok|n-sk|+w?-*)lj~5n;!f3M2)7(wD#{AgTLktZXVKq|02dhOa1oBufP;^5={UB$0|54}?xq1~JQJpSfH2dv+&=YLgk6O353p&S33 z3Qp&nK((;4vhnn^vHpvxlu#@G&Q$+s_Q!u}lGcsd-_8CaAc{9X#03G#FJ-drv>NpoL5-v3B*nZci9LF7-GX>U6C|JuNRp_!kPoAa;wDVqOn z%$)v%ewjb%kN^Lr-^JpMmz$gOUpqzOzXuX+^k+jT{Rzl}n`1=&zZ>FzfOy(lys`N! zo-b|xFNXOoOMVaQ_9KJ_wPa4}gBufs-)MjRKX`tN^o0K^p1&->^|Ze^%;aY9sO6wD z`y3Y3e*T}~`ZF2*JFee9eBsin^E_l8Ja%|&8q*a z(0|F-{O{8Lc~1Yk^xva;q~|7f{yxLM5%=2+|DV9#y&3wkuqbc7OK%Jz-S^vH{|}hJ BF?s+1 literal 0 HcmV?d00001 From 2eea8e6a46cf22273ea2f272ac87c4517ed444e1 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Wed, 12 Jun 2024 15:42:04 -0400 Subject: [PATCH 33/63] added doc and updated zip file --- ...pefiles-under-hidden-directories-in-zip.md | 5 +++++ src/test/resources/hiddenShapefiles.zip | Bin 53764 -> 54468 bytes 2 files changed, 5 insertions(+) create mode 100644 doc/release-notes/8945-ignore-shapefiles-under-hidden-directories-in-zip.md diff --git a/doc/release-notes/8945-ignore-shapefiles-under-hidden-directories-in-zip.md b/doc/release-notes/8945-ignore-shapefiles-under-hidden-directories-in-zip.md new file mode 100644 index 00000000000..145ae5f6d55 --- /dev/null +++ b/doc/release-notes/8945-ignore-shapefiles-under-hidden-directories-in-zip.md @@ -0,0 +1,5 @@ +### Shapefile Handling will now ignore files under a hidden directory within the zip file + +Directories that are 
hidden will be ignored when determining if a zip file contains Shapefile files. + +For more information, see #8945. \ No newline at end of file diff --git a/src/test/resources/hiddenShapefiles.zip b/src/test/resources/hiddenShapefiles.zip index 64ed1721eb20f9c5bcaff7e205ec866a967b90d2..c733a2083ae601f27a236284bacd877542e36c78 100644 GIT binary patch delta 1530 zcmZwFUuaTM90%}oy-rAm90$hGIxpM9tPPhqT~=cz*;oscE!UImGPbadw#A@_U}QuT zL3DQ@2!e!sX%MU(wlF7MLYvPFGtP>mS3 z0i}}Dn7}MDgzX@sl5|u}yRlo$XK@b<7o>4K2*c8rYgy;U0WtT|MP3Ms1wZ}83t_P^ zLT}109DxNTH^yYWPuHvxVowd{Ja_^=qvpe`e<0k|tTM{V)8FN0l@RNgNj04y$Q*Gy zZvM=x)$d%8(G{5LiTR6%Ot+) z=ch@@BkojtZM!gf@2lKuH>{<)Vbq*GPFK<<{%y5_&ZoVEp#Mz{e9Mr$O$UfwLw-QQ zcaNUPxQM&PBo30Xi~O%~9KNF6^hL%=dNkd#kS$0Z)R=XWK@IEGfo^7A^L1T<_# zWL~P_Es;o_R8HyFgHB5B9w2)LfrRLAzK=|3Iz1WyOwHd1 zWcaE;M(NTv?DS+tU?F;V#{=eCjRU6Sg47Xjp=`%le{uC&_P+*H&4=hv!9$eV5vuIV z8f1kB#L{reMC@Ag&sZV$%CLcv$NS`<6icC-|v0%92(sebw6b`yI!HLlBkIUYF3ITZB(cyiDFAl(+-+a;3rL~HuRn&6_&oJByL0f zqLGS&xUZ6w4ZYxqejtj{HsqCzR2nYpDw(nn+pyF|4Sl2sMV)4S2O7ESr7dSL@V1)x1Jv%SNbNJYx2p<~Z?n*_C2zgWB886bahw Date: Wed, 12 Jun 2024 19:36:44 -0400 Subject: [PATCH 34/63] Update src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java Co-authored-by: Philip Durbin --- .../java/edu/harvard/iq/dataverse/util/ShapefileHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java index 0c77e33712b..f1440cc3c02 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/ShapefileHandler.java @@ -710,7 +710,7 @@ private boolean examineZipfile(FileInputStream zip_file_stream){ // check if path is hidden if (isDirectory && Files.isHidden(Paths.get(zentryFileName))) { hiddenDirectories.add(zentryFileName); - logger.info("Ignoring files under hidden directory: " + zentryFileName); + logger.fine("Ignoring 
files under hidden directory: " + zentryFileName); } else { // check if the path was already found to be hidden for (String hidden : hiddenDirectories) { From 71098cb9e54c66a2af620b74d44514e663612cd8 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 17 Jun 2024 14:34:51 -0400 Subject: [PATCH 35/63] adding sql script to convert custom license CC0 Waiver to CC0 1.0 --- .../9081-CC0-waiver-turned-into-custom-license.md | 6 ++++++ src/main/resources/db/migration/V6.2.0.2.sql | 10 ++++++++++ 2 files changed, 16 insertions(+) create mode 100644 doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md create mode 100644 src/main/resources/db/migration/V6.2.0.2.sql diff --git a/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md new file mode 100644 index 00000000000..fed38c73e6c --- /dev/null +++ b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md @@ -0,0 +1,6 @@ +In an earlier Dataverse release, Datasets with only 'CC0 Waiver' in termsofuse field were converted to 'Custom License' instead of CC0 1.0 licenses during an automated process. A new process was added to correct this. Only Datasets with no terms other than the one create by the previous process will be modified. +- The existing 'Terms of Use' must be equal to 'This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver' +- The following terms fields must be empty: Confidentiality Declaration, Special Permissions, Restrictions, Citation Requirements, Depositor Requirements, Conditions, and Disclaimer. +- The License ID must not be assigned. + +This process will set the License ID to that of the CC0 1.0 license and change the termsofuse field back to 'CC0 Waiver', which will be hidden in the UI do to the License ID being set. 
diff --git a/src/main/resources/db/migration/V6.2.0.2.sql b/src/main/resources/db/migration/V6.2.0.2.sql new file mode 100644 index 00000000000..f5905d586de --- /dev/null +++ b/src/main/resources/db/migration/V6.2.0.2.sql @@ -0,0 +1,10 @@ +UPDATE termsofuseandaccess SET license_id = (SELECT license.id FROM license WHERE license.name = 'CC0 1.0'), termsofuse = 'CC0 Waiver' +WHERE termsofuse = 'This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver' + AND license_id IS null + AND confidentialitydeclaration IS null + AND specialpermissions IS null + AND restrictions IS null + AND citationrequirements IS null + AND depositorrequirements IS null + AND conditions IS null + AND disclaimer IS null; From 116262c0fdad3a919ad88e76879cd0a1d253cee3 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 17 Jun 2024 15:01:04 -0400 Subject: [PATCH 36/63] change new contents of termofuse field to null --- doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md | 2 +- src/main/resources/db/migration/V6.2.0.2.sql | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md index fed38c73e6c..042b2ec39fd 100644 --- a/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md +++ b/doc/release-notes/9081-CC0-waiver-turned-into-custom-license.md @@ -3,4 +3,4 @@ In an earlier Dataverse release, Datasets with only 'CC0 Waiver' in termsofuse f - The following terms fields must be empty: Confidentiality Declaration, Special Permissions, Restrictions, Citation Requirements, Depositor Requirements, Conditions, and Disclaimer. - The License ID must not be assigned. 
-This process will set the License ID to that of the CC0 1.0 license and change the termsofuse field back to 'CC0 Waiver', which will be hidden in the UI do to the License ID being set. +This process will set the License ID to that of the CC0 1.0 license and remove the contents of termsofuse field. diff --git a/src/main/resources/db/migration/V6.2.0.2.sql b/src/main/resources/db/migration/V6.2.0.2.sql index f5905d586de..fd9cd823868 100644 --- a/src/main/resources/db/migration/V6.2.0.2.sql +++ b/src/main/resources/db/migration/V6.2.0.2.sql @@ -1,4 +1,4 @@ -UPDATE termsofuseandaccess SET license_id = (SELECT license.id FROM license WHERE license.name = 'CC0 1.0'), termsofuse = 'CC0 Waiver' +UPDATE termsofuseandaccess SET license_id = (SELECT license.id FROM license WHERE license.name = 'CC0 1.0'), termsofuse = NULL WHERE termsofuse = 'This dataset is made available under a Creative Commons CC0 license with the following additional/modified terms and conditions: CC0 Waiver' AND license_id IS null AND confidentialitydeclaration IS null From b42222fd35006c24e57eccece81d3496dcac31b8 Mon Sep 17 00:00:00 2001 From: Guillermo Portas Date: Tue, 2 Jul 2024 15:59:19 +0100 Subject: [PATCH 37/63] Fix metadata field type display condition in dataverses/{id}/metadatablocks API endpoint (#10642) * Fixed: metadata field type display condition in dataverses/{id}/metadatablocks * Changed: json object builder instantiation * Added: extended test coverage for testUpdateInputLevels and testFeatureDataverse * Added: release notes for #10637 * Fixed: JsonPrinter metadata blocks dataset field type isRequired logic * Refactor: simpler conditions in jsonPrinter * Refactor: reordered condition in jsonPrinter * Fixed: displayCondition in jsonPrinter --- .../10637-fix-dataverse-metadatablocks-api.md | 2 + .../edu/harvard/iq/dataverse/Dataverse.java | 18 ++++--- .../iq/dataverse/util/json/JsonPrinter.java | 18 +++++-- .../iq/dataverse/api/DataversesIT.java | 52 +++++++++++++------ 
.../edu/harvard/iq/dataverse/api/UtilIT.java | 21 ++++---- 5 files changed, 76 insertions(+), 35 deletions(-) create mode 100644 doc/release-notes/10637-fix-dataverse-metadatablocks-api.md diff --git a/doc/release-notes/10637-fix-dataverse-metadatablocks-api.md b/doc/release-notes/10637-fix-dataverse-metadatablocks-api.md new file mode 100644 index 00000000000..c8c9c4fa66f --- /dev/null +++ b/doc/release-notes/10637-fix-dataverse-metadatablocks-api.md @@ -0,0 +1,2 @@ +dataverses/{id}/metadatablocks API endpoint has been fixed, since the fields returned for each metadata block when returnDatasetTypes query parameter is set to true was not correct. + diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java index 78b1827c798..978c716e058 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataverse.java @@ -412,12 +412,18 @@ public List getDataverseFieldTypeInputLevels() { } public boolean isDatasetFieldTypeRequiredAsInputLevel(Long datasetFieldTypeId) { - for(DataverseFieldTypeInputLevel dataverseFieldTypeInputLevel : dataverseFieldTypeInputLevels) { - if (dataverseFieldTypeInputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId) && dataverseFieldTypeInputLevel.isRequired()) { - return true; - } - } - return false; + return dataverseFieldTypeInputLevels.stream() + .anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId) && inputLevel.isRequired()); + } + + public boolean isDatasetFieldTypeIncludedAsInputLevel(Long datasetFieldTypeId) { + return dataverseFieldTypeInputLevels.stream() + .anyMatch(inputLevel -> inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId) && inputLevel.isInclude()); + } + + public boolean isDatasetFieldTypeInInputLevels(Long datasetFieldTypeId) { + return dataverseFieldTypeInputLevels.stream() + .anyMatch(inputLevel -> 
inputLevel.getDatasetFieldType().getId().equals(datasetFieldTypeId)); } public Template getDefaultTemplate() { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 95f14b79ece..c72dfc1d127 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -640,18 +640,26 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder(); Set datasetFieldTypes = new TreeSet<>(metadataBlock.getDatasetFieldTypes()); + for (DatasetFieldType datasetFieldType : datasetFieldTypes) { - boolean requiredInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(datasetFieldType.getId()); - boolean displayCondition = !printOnlyDisplayedOnCreateDatasetFieldTypes || - datasetFieldType.isDisplayOnCreate() || - requiredInOwnerDataverse; + Long datasetFieldTypeId = datasetFieldType.getId(); + boolean requiredAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeRequiredAsInputLevel(datasetFieldTypeId); + boolean includedAsInputLevelInOwnerDataverse = ownerDataverse != null && ownerDataverse.isDatasetFieldTypeIncludedAsInputLevel(datasetFieldTypeId); + boolean isNotInputLevelInOwnerDataverse = ownerDataverse != null && !ownerDataverse.isDatasetFieldTypeInInputLevels(datasetFieldTypeId); + + DatasetFieldType parentDatasetFieldType = datasetFieldType.getParentDatasetFieldType(); + boolean isRequired = parentDatasetFieldType == null ? datasetFieldType.isRequired() : parentDatasetFieldType.isRequired(); + + boolean displayCondition = printOnlyDisplayedOnCreateDatasetFieldTypes + ? 
(datasetFieldType.isDisplayOnCreate() || isRequired || requiredAsInputLevelInOwnerDataverse) + : ownerDataverse == null || includedAsInputLevelInOwnerDataverse || isNotInputLevelInOwnerDataverse; + if (displayCondition) { fieldsBuilder.add(datasetFieldType.getName(), json(datasetFieldType, ownerDataverse)); } } jsonObjectBuilder.add("fields", fieldsBuilder); - return jsonObjectBuilder; } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index b072a803aa4..79cc46cfa79 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -702,8 +702,10 @@ public void testListMetadataBlocks() { Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); - String[] testInputLevelNames = {"geographicCoverage", "country"}; - Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, apiToken); + String[] testInputLevelNames = {"geographicCoverage", "country", "city"}; + boolean[] testRequiredInputLevels = {false, true, false}; + boolean[] testIncludedInputLevels = {false, true, true}; + Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken); updateDataverseInputLevelsResponse.then().assertThat().statusCode(OK.getStatusCode()); // Dataverse not found @@ -769,6 +771,21 @@ public void testListMetadataBlocks() { assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName2)); assertThat(expectedAllMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName3)); + // Check dataset fields for the updated input levels are retrieved 
+ int geospatialMetadataBlockIndex = actualMetadataBlockDisplayName1.equals("Geospatial Metadata") ? 0 : actualMetadataBlockDisplayName2.equals("Geospatial Metadata") ? 1 : 2; + + // Since the included property of geographicCoverage is set to false, we should retrieve the total number of fields minus one + listMetadataBlocksResponse.then().assertThat() + .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(10)); + + String actualMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex)); + String actualMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex)); + String actualMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex)); + + assertNull(actualMetadataField1); + assertNotNull(actualMetadataField2); + assertNotNull(actualMetadataField3); + // Existent dataverse and onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, true, true, apiToken); listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); @@ -785,16 +802,18 @@ public void testListMetadataBlocks() { assertThat(expectedOnlyDisplayedOnCreateMetadataBlockDisplayNames, hasItemInArray(actualMetadataBlockDisplayName2)); // Check dataset fields for the updated input levels are retrieved - int geospatialMetadataBlockIndex = actualMetadataBlockDisplayName2.equals("Geospatial Metadata") ? 1 : 0; + geospatialMetadataBlockIndex = actualMetadataBlockDisplayName2.equals("Geospatial Metadata") ? 
1 : 0; listMetadataBlocksResponse.then().assertThat() - .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(2)); + .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(1)); - String actualMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex)); - String actualMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex)); + actualMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.geographicCoverage.name", geospatialMetadataBlockIndex)); + actualMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.country.name", geospatialMetadataBlockIndex)); + actualMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data[%d].fields.city.name", geospatialMetadataBlockIndex)); - assertNotNull(actualMetadataField1); + assertNull(actualMetadataField1); assertNotNull(actualMetadataField2); + assertNull(actualMetadataField3); // User has no permissions on the requested dataverse Response createSecondUserResponse = UtilIT.createRandomUser(); @@ -898,12 +917,16 @@ public void testUpdateInputLevels() { // Update valid input levels String[] testInputLevelNames = {"geographicCoverage", "country"}; - Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, apiToken); + boolean[] testRequiredInputLevels = {true, false}; + boolean[] testIncludedInputLevels = {true, false}; + Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken); + String actualInputLevelName = 
updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); + int geographicCoverageInputLevelIndex = actualInputLevelName.equals("geographicCoverage") ? 0 : 1; updateDataverseInputLevelsResponse.then().assertThat() - .body("data.inputLevels[0].required", equalTo(true)) - .body("data.inputLevels[0].include", equalTo(true)) - .body("data.inputLevels[1].required", equalTo(true)) - .body("data.inputLevels[1].include", equalTo(true)) + .body(String.format("data.inputLevels[%d].include", geographicCoverageInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].required", geographicCoverageInputLevelIndex), equalTo(true)) + .body(String.format("data.inputLevels[%d].include", 1 - geographicCoverageInputLevelIndex), equalTo(false)) + .body(String.format("data.inputLevels[%d].required", 1 - geographicCoverageInputLevelIndex), equalTo(false)) .statusCode(OK.getStatusCode()); String actualFieldTypeName1 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[0].datasetFieldTypeName"); String actualFieldTypeName2 = updateDataverseInputLevelsResponse.then().extract().path("data.inputLevels[1].datasetFieldTypeName"); @@ -913,15 +936,14 @@ public void testUpdateInputLevels() { // Update input levels with an invalid field type name String[] testInvalidInputLevelNames = {"geographicCoverage", "invalid1"}; - updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInvalidInputLevelNames, apiToken); + updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInvalidInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken); updateDataverseInputLevelsResponse.then().assertThat() .body("message", equalTo("Invalid dataset field type name: invalid1")) .statusCode(BAD_REQUEST.getStatusCode()); // Update invalid empty input levels testInputLevelNames = new String[]{}; - updateDataverseInputLevelsResponse = 
UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, apiToken); - updateDataverseInputLevelsResponse.prettyPrint(); + updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken); updateDataverseInputLevelsResponse.then().assertThat() .body("message", equalTo("Error while updating dataverse input levels: Input level list cannot be null or empty")) .statusCode(INTERNAL_SERVER_ERROR.getStatusCode()); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 257610dbc32..0216859b869 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -3960,22 +3960,25 @@ static Response requestGlobusUploadPaths(Integer datasetId, JsonObject body, Str .post("/api/datasets/" + datasetId + "/requestGlobusUploadPaths"); } - static Response updateDataverseInputLevels(String dataverseAlias, String[] inputLevelNames, String apiToken) { - JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); - for(String inputLevelName : inputLevelNames) { - contactArrayBuilder.add(Json.createObjectBuilder() - .add("datasetFieldTypeName", inputLevelName) - .add("required", true) - .add("include", true) - ); + public static Response updateDataverseInputLevels(String dataverseAlias, String[] inputLevelNames, boolean[] requiredInputLevels, boolean[] includedInputLevels, String apiToken) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for (int i = 0; i < inputLevelNames.length; i++) { + inputLevelsArrayBuilder.add(createInputLevelObject(inputLevelNames[i], requiredInputLevels[i], includedInputLevels[i])); } return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .body(contactArrayBuilder.build().toString()) + .body(inputLevelsArrayBuilder.build().toString()) .contentType(ContentType.JSON) 
.put("/api/dataverses/" + dataverseAlias + "/inputLevels"); } + private static JsonObjectBuilder createInputLevelObject(String name, boolean required, boolean include) { + return Json.createObjectBuilder() + .add("datasetFieldTypeName", name) + .add("required", required) + .add("include", include); + } + public static Response getOpenAPI(String accept, String format) { Response response = given() .header("Accept", accept) From eda4def4b4c3279e5517883daa37842a8254dfbd Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Tue, 2 Jul 2024 16:29:21 -0400 Subject: [PATCH 38/63] Bump version to 6.3 --- doc/sphinx-guides/source/conf.py | 4 ++-- doc/sphinx-guides/source/versions.rst | 3 ++- modules/dataverse-parent/pom.xml | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/sphinx-guides/source/conf.py b/doc/sphinx-guides/source/conf.py index 6478f15655e..c719fb05e3c 100755 --- a/doc/sphinx-guides/source/conf.py +++ b/doc/sphinx-guides/source/conf.py @@ -68,9 +68,9 @@ # built documents. # # The short X.Y version. -version = '6.2' +version = '6.3' # The full version, including alpha/beta/rc tags. -release = '6.2' +release = '6.3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/doc/sphinx-guides/source/versions.rst b/doc/sphinx-guides/source/versions.rst index 850702d823e..952eba72616 100755 --- a/doc/sphinx-guides/source/versions.rst +++ b/doc/sphinx-guides/source/versions.rst @@ -7,7 +7,8 @@ Dataverse Software Documentation Versions This list provides a way to refer to the documentation for previous and future versions of the Dataverse Software. In order to learn more about the updates delivered from one version to another, visit the `Releases `__ page in our GitHub repo. - pre-release `HTML (not final!) `__ and `PDF (experimental!) `__ built from the :doc:`develop ` branch :doc:`(how to contribute!) 
` -- 6.2 +- 6.3 +- `6.2 `__ - `6.1 `__ - `6.0 `__ - `5.14 `__ diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 4a67e301f5b..62efbf62317 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -131,7 +131,7 @@ - 6.2 + 6.3 17 UTF-8 From ecb5e94489aad1592e0f48e0f4d3bab3a6ca623f Mon Sep 17 00:00:00 2001 From: Juan Pablo Tosca Villanueva <142103991+jp-tosca@users.noreply.github.com> Date: Tue, 2 Jul 2024 16:54:29 -0400 Subject: [PATCH 39/63] 6.3 Release Notes (#10662) * Initial changes * Update with 4th lvl H * Table of contet change * MD format test * External voc info update * Update 1:04 * First version for PR * a few quick changes to the upgrade instruction #10646 * Update 6.3-release-notes.md * keywordTermURI heading #10646 * removed reExportAll from the upgrade instruction; it's only needed if they choose to perform the optional step of migrating some metadata fields; and it is already mentioned in the instruction on how to do that. #10646 * simplified the jvm options part of the payara upgrade instruction #10646 * moved payara upgrade into the main upgrade instruction #10646 * typos #10646 * added the Solr upgrade instruction #10646 * cosmetic #10646 * cosmetic #10646 * Deleted the solr upgrade note. #10646 * Initial changes * #10646 add note about file access request email update * Add issue number test * add featured collection link * Test issue link * Batch of issues # * Bug fixes issues no * #10646 add issue link * normalize line endings #10646 * various improvments #10646 * reorder #10646 * add Contributor Guide to 6.3 release notes #10646 * incorporate #10637 into release note #10646 * Update 6.3-release-notes.md * Make Hidden HTML fields one more element in the ext. 
vocab updates * Removing snippets * small tweaks to upgrade instructions #10646 * no need to mention #10637 and #10642 as they were pre-release #10646 * s/safe/supported/ and remove dupe #10646 --------- Co-authored-by: Leonid Andreev Co-authored-by: qqmyers Co-authored-by: Stephen Kraffmiller Co-authored-by: Philip Durbin --- .../10015-RO-Crate-metadata-file.md | 10 - .../10022_upload_redirect_without_tagging.md | 5 - ...10116-incomplete-metadata-label-setting.md | 1 - .../10137-2-add-disable-reason-flag.md | 6 - .../10236-openapi-definition-endpoint.md | 8 - doc/release-notes/10242-add-feature-dv-api | 1 - ...-add-term_uri-metadata-in-keyword-block.md | 53 -- doc/release-notes/10316_cvoc_http_headers.md | 5 - .../10330-api-change-latest-version-status.md | 1 - doc/release-notes/10339-workflow.md | 3 - .../10389-metadatablocks-api-extension.md | 6 - ...pi-performance-issues-on-large-datasets.md | 4 - doc/release-notes/10425-add-MIT-License.md | 3 - .../10464-add-name-harvesting-client-facet.md | 3 - .../10466-math-challenge-403-error-page.md | 1 - .../10468-doc-datalad-integration.md | 1 - ...tadatablocks-api-extension-input-levels.md | 3 - ...dd-isreleased-to-get-dataverse-response.md | 22 - doc/release-notes/10494-payara-upgrade.md | 119 ----- .../10503-cvoc-hidden-html-fields.md | 11 - doc/release-notes/10531-contrib.md | 1 - doc/release-notes/10547-solr-updates.md | 1 - .../10554-avoid-solr-join-guest.md | 5 - doc/release-notes/10561-3dviewer.md | 1 - .../10565-banner-test-improvements.md | 1 - doc/release-notes/10568-Fix File Reingest.md | 1 - .../10570-extra-facet-settings.md | 4 - doc/release-notes/10579-avoid-solr-deletes.md | 9 - .../10611-harvested-origin-facet.md | 10 - .../10637-fix-dataverse-metadatablocks-api.md | 2 - .../5621_dataset image in header.md | 1 - doc/release-notes/6.3-release-notes.md | 470 ++++++++++++++++++ .../8243-improve-language-controlled-vocab.md | 11 - ...655-re-add-cell-counting-biomedical-tsv.md | 12 - 
...8936-more-than-50000-entries-in-sitemap.md | 11 - doc/release-notes/8985-deprecate-rsync.md | 8 - ...ow-flexible-params-in-retrievaluri-cvoc.md | 14 - doc/release-notes/9276-doc-cvoc-index-in.md | 18 - doc/release-notes/9375-retention-period.md | 8 - doc/release-notes/9729-release-notes.md | 1 - doc/release-notes/9739-url-validator.md | 7 - .../9887-new-superuser-status-endpoint.md | 1 - doc/release-notes/solr-9.4.1.md | 14 - doc/sphinx-guides/source/api/native-api.rst | 2 + 44 files changed, 472 insertions(+), 407 deletions(-) delete mode 100644 doc/release-notes/10015-RO-Crate-metadata-file.md delete mode 100644 doc/release-notes/10022_upload_redirect_without_tagging.md delete mode 100644 doc/release-notes/10116-incomplete-metadata-label-setting.md delete mode 100644 doc/release-notes/10137-2-add-disable-reason-flag.md delete mode 100644 doc/release-notes/10236-openapi-definition-endpoint.md delete mode 100644 doc/release-notes/10242-add-feature-dv-api delete mode 100644 doc/release-notes/10288-add-term_uri-metadata-in-keyword-block.md delete mode 100644 doc/release-notes/10316_cvoc_http_headers.md delete mode 100644 doc/release-notes/10330-api-change-latest-version-status.md delete mode 100644 doc/release-notes/10339-workflow.md delete mode 100644 doc/release-notes/10389-metadatablocks-api-extension.md delete mode 100644 doc/release-notes/10415-fix-api-performance-issues-on-large-datasets.md delete mode 100644 doc/release-notes/10425-add-MIT-License.md delete mode 100644 doc/release-notes/10464-add-name-harvesting-client-facet.md delete mode 100644 doc/release-notes/10466-math-challenge-403-error-page.md delete mode 100644 doc/release-notes/10468-doc-datalad-integration.md delete mode 100644 doc/release-notes/10477-metadatablocks-api-extension-input-levels.md delete mode 100644 doc/release-notes/10491-add-isreleased-to-get-dataverse-response.md delete mode 100644 doc/release-notes/10494-payara-upgrade.md delete mode 100644 
doc/release-notes/10503-cvoc-hidden-html-fields.md delete mode 100644 doc/release-notes/10531-contrib.md delete mode 100644 doc/release-notes/10547-solr-updates.md delete mode 100644 doc/release-notes/10554-avoid-solr-join-guest.md delete mode 100644 doc/release-notes/10561-3dviewer.md delete mode 100644 doc/release-notes/10565-banner-test-improvements.md delete mode 100644 doc/release-notes/10568-Fix File Reingest.md delete mode 100644 doc/release-notes/10570-extra-facet-settings.md delete mode 100644 doc/release-notes/10579-avoid-solr-deletes.md delete mode 100644 doc/release-notes/10611-harvested-origin-facet.md delete mode 100644 doc/release-notes/10637-fix-dataverse-metadatablocks-api.md delete mode 100644 doc/release-notes/5621_dataset image in header.md create mode 100644 doc/release-notes/6.3-release-notes.md delete mode 100644 doc/release-notes/8243-improve-language-controlled-vocab.md delete mode 100644 doc/release-notes/8655-re-add-cell-counting-biomedical-tsv.md delete mode 100644 doc/release-notes/8936-more-than-50000-entries-in-sitemap.md delete mode 100644 doc/release-notes/8985-deprecate-rsync.md delete mode 100644 doc/release-notes/9276-allow-flexible-params-in-retrievaluri-cvoc.md delete mode 100644 doc/release-notes/9276-doc-cvoc-index-in.md delete mode 100644 doc/release-notes/9375-retention-period.md delete mode 100644 doc/release-notes/9729-release-notes.md delete mode 100644 doc/release-notes/9739-url-validator.md delete mode 100644 doc/release-notes/9887-new-superuser-status-endpoint.md delete mode 100644 doc/release-notes/solr-9.4.1.md diff --git a/doc/release-notes/10015-RO-Crate-metadata-file.md b/doc/release-notes/10015-RO-Crate-metadata-file.md deleted file mode 100644 index 4b018a634f7..00000000000 --- a/doc/release-notes/10015-RO-Crate-metadata-file.md +++ /dev/null @@ -1,10 +0,0 @@ -Detection of mime-types based on a filename with extension and detection of the RO-Crate metadata files. 
- -From now on, filenames with extensions can be added into `MimeTypeDetectionByFileName.properties` file. Filenames added there will take precedence over simply recognizing files by extensions. For example, two new filenames are added into that file: -``` -ro-crate-metadata.json=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" -ro-crate-metadata.jsonld=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate" -``` - -Therefore, files named `ro-crate-metadata.json` will be then detected as RO-Crated metadata files from now on, instead as generic `JSON` files. -For more information on the RO-Crate specifications, see https://www.researchobject.org/ro-crate diff --git a/doc/release-notes/10022_upload_redirect_without_tagging.md b/doc/release-notes/10022_upload_redirect_without_tagging.md deleted file mode 100644 index 7ff17f08f4c..00000000000 --- a/doc/release-notes/10022_upload_redirect_without_tagging.md +++ /dev/null @@ -1,5 +0,0 @@ -If your S3 store does not support tagging and gives an error if you configure direct uploads, you can disable the tagging by using the ``dataverse.files..disable-tagging`` JVM option. For more details see https://dataverse-guide--10029.org.readthedocs.build/en/10029/developers/big-data-support.html#s3-tags #10022 and #10029. - -## New config options - -- dataverse.files..disable-tagging diff --git a/doc/release-notes/10116-incomplete-metadata-label-setting.md b/doc/release-notes/10116-incomplete-metadata-label-setting.md deleted file mode 100644 index 769100c3804..00000000000 --- a/doc/release-notes/10116-incomplete-metadata-label-setting.md +++ /dev/null @@ -1 +0,0 @@ -Bug fixed for the ``incomplete metadata`` label being shown for published dataset with incomplete metadata in certain scenarios. 
This label will now be shown for draft versions of such datasets and published datasets that the user can edit. This label can also be made invisible for published datasets (regardless of edit rights) with the new option ``dataverse.ui.show-validity-label-when-published`` set to `false`. diff --git a/doc/release-notes/10137-2-add-disable-reason-flag.md b/doc/release-notes/10137-2-add-disable-reason-flag.md deleted file mode 100644 index ee5257466ee..00000000000 --- a/doc/release-notes/10137-2-add-disable-reason-flag.md +++ /dev/null @@ -1,6 +0,0 @@ -## Release Highlights - -### Feature flag to remove the required "reason" field in the "Return To Author" dialog - -A reason field, that is required to not be empty, was added in v6.2. Installations that handle author communications through email or another system may prefer to not be required to use this new field. v6.2 includes a new -disable-return-to-author-reason feature flag that can be enabled to drop the reason field from the dialog and make sending a reason optional in the api/datasets/{id}/returnToAuthor call. diff --git a/doc/release-notes/10236-openapi-definition-endpoint.md b/doc/release-notes/10236-openapi-definition-endpoint.md deleted file mode 100644 index 60492c29d78..00000000000 --- a/doc/release-notes/10236-openapi-definition-endpoint.md +++ /dev/null @@ -1,8 +0,0 @@ -In Dataverse 6.0 Payara was updated, which caused the url `/openapi` to stop working: - -- https://github.com/IQSS/dataverse/issues/9981 -- https://github.com/payara/Payara/issues/6369 - -When it worked in Dataverse 5.x, the `/openapi` output was generated automatically by Payara, but in this release we have switched to OpenAPI output produced by the [SmallRye OpenAPI plugin](https://github.com/smallrye/smallrye-open-api/tree/main/tools/maven-plugin). This gives us finer control over the output. 
- -For more information, see the section on [OpenAPI](https://dataverse-guide--10328.org.readthedocs.build/en/10328/api/getting-started.html#openapi) in the API Guide. diff --git a/doc/release-notes/10242-add-feature-dv-api b/doc/release-notes/10242-add-feature-dv-api deleted file mode 100644 index 5c786554ff9..00000000000 --- a/doc/release-notes/10242-add-feature-dv-api +++ /dev/null @@ -1 +0,0 @@ -New api endpoints have been added to allow you to add or remove featured collections from a dataverse collection. diff --git a/doc/release-notes/10288-add-term_uri-metadata-in-keyword-block.md b/doc/release-notes/10288-add-term_uri-metadata-in-keyword-block.md deleted file mode 100644 index eb3a79dbf25..00000000000 --- a/doc/release-notes/10288-add-term_uri-metadata-in-keyword-block.md +++ /dev/null @@ -1,53 +0,0 @@ -### New keywordTermURI Metadata in keyword Metadata Block - -Adding a new metadata `keywordTermURI` to the `keyword` metadata block to facilitate the integration of controlled vocabulary services, in particular by adding the possibility of saving the "term" and its associated URI. For more information, see #10288 and PR #10371. - -## Upgrade Instructions - -1\. Update the Citation metadata block - -- `wget https://github.com/IQSS/dataverse/releases/download/v6.3/citation.tsv` -- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @citation.tsv -H "Content-type: text/tab-separated-values"` - -2\. Update your Solr `schema.xml` to include the new field. - - For details, please see https://guides.dataverse.org/en/latest/admin/metadatacustomization.html#updating-the-solr-schema - - -3\. Reindex Solr. - - Once the schema.xml is updated, Solr must be restarted and a reindex initiated. - For details, see https://guides.dataverse.org/en/latest/admin/solr-search-index.html but here is the reindex command: - - `curl http://localhost:8080/api/admin/index` - - -4\. Run ReExportAll to update dataset metadata exports. 
Follow the instructions in the [Metadata Export of Admin Guide](https://guides.dataverse.org/en/latest/admin/metadataexport.html#batch-exports-through-the-api). - - -## Notes for Dataverse Installation Administrators - -### Data migration to the new `keywordTermURI` field - -You can migrate your `keywordValue` data containing URIs to the new `keywordTermURI` field. -In case of data migration, view the affected data with the following database query: - -``` -SELECT value FROM datasetfieldvalue dfv -INNER JOIN datasetfield df ON df.id = dfv.datasetfield_id -WHERE df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') -AND value ILIKE 'http%'; -``` - -If you wish to migrate your data, a database update is then necessary: - -``` -UPDATE datasetfield df -SET datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordTermURI') -FROM datasetfieldvalue dfv -WHERE dfv.datasetfield_id = df.id -AND df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') -AND dfv.value ILIKE 'http%'; -``` - -A ['Reindex in Place'](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) will be required and ReExportAll will need to be run to update the metadata exports of the dataset. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/latest/admin/metadataexport.html#batch-exports-through-the-api). \ No newline at end of file diff --git a/doc/release-notes/10316_cvoc_http_headers.md b/doc/release-notes/10316_cvoc_http_headers.md deleted file mode 100644 index 4b557383a2e..00000000000 --- a/doc/release-notes/10316_cvoc_http_headers.md +++ /dev/null @@ -1,5 +0,0 @@ -You are now able to add HTTP request headers required by the External Vocabulary Services you are implementing. - -A combined documentation can be found on pull request [#10404](https://github.com/IQSS/dataverse/pull/10404). 
- -For more information, see issue [#10316](https://github.com/IQSS/dataverse/issues/10316) and pull request [gddc/dataverse-external-vocab-support#19](https://github.com/gdcc/dataverse-external-vocab-support/pull/19). diff --git a/doc/release-notes/10330-api-change-latest-version-status.md b/doc/release-notes/10330-api-change-latest-version-status.md deleted file mode 100644 index 6e6a018fe12..00000000000 --- a/doc/release-notes/10330-api-change-latest-version-status.md +++ /dev/null @@ -1 +0,0 @@ -The API endpoint for getting the Dataset version has been extended to include latestVersionPublishingStatus. \ No newline at end of file diff --git a/doc/release-notes/10339-workflow.md b/doc/release-notes/10339-workflow.md deleted file mode 100644 index 90d08dabb1f..00000000000 --- a/doc/release-notes/10339-workflow.md +++ /dev/null @@ -1,3 +0,0 @@ -The computational workflow metadata block has been updated to present a clickable link for the External Code Repository URL field. - -Release notes should include the usual instructions, for those who have installed this optional block, to update the computational_workflow block. (PR#10441) \ No newline at end of file diff --git a/doc/release-notes/10389-metadatablocks-api-extension.md b/doc/release-notes/10389-metadatablocks-api-extension.md deleted file mode 100644 index 9b14100d33c..00000000000 --- a/doc/release-notes/10389-metadatablocks-api-extension.md +++ /dev/null @@ -1,6 +0,0 @@ -New optional query parameters added to ``api/metadatablocks`` and ``api/dataverses/{id}/metadatablocks`` endpoints: - -- ``returnDatasetFieldTypes``: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. -- ``onlyDisplayedOnCreate``: Whether or not to return only the metadata blocks that are displayed on dataset creation. If ``returnDatasetFieldTypes`` is true, only the dataset field types shown on dataset creation will be returned within each metadata block. 
If not set, the default value is false. - -Added new ``displayOnCreate`` field to the MetadataBlock and DatasetFieldType payloads. diff --git a/doc/release-notes/10415-fix-api-performance-issues-on-large-datasets.md b/doc/release-notes/10415-fix-api-performance-issues-on-large-datasets.md deleted file mode 100644 index e8840e9d4f7..00000000000 --- a/doc/release-notes/10415-fix-api-performance-issues-on-large-datasets.md +++ /dev/null @@ -1,4 +0,0 @@ -For scenarios involving API calls related to large datasets (Numerous files, for example: ~10k) it has been optimized: - -- The search API endpoint. -- The permission checking logic present in PermissionServiceBean. diff --git a/doc/release-notes/10425-add-MIT-License.md b/doc/release-notes/10425-add-MIT-License.md deleted file mode 100644 index 95d6fb38ded..00000000000 --- a/doc/release-notes/10425-add-MIT-License.md +++ /dev/null @@ -1,3 +0,0 @@ -A new file has been added to import the MIT License to Dataverse: licenseMIT.json. - -Documentation has been added to explain the procedure for adding new licenses to the guides. diff --git a/doc/release-notes/10464-add-name-harvesting-client-facet.md b/doc/release-notes/10464-add-name-harvesting-client-facet.md deleted file mode 100644 index 1fc0bb47caf..00000000000 --- a/doc/release-notes/10464-add-name-harvesting-client-facet.md +++ /dev/null @@ -1,3 +0,0 @@ -The Metadata Source facet has been updated to show the name of the harvesting client rather than grouping all such datasets under 'harvested' - -TODO: for the v6.13 release note: Please add a full re-index using http://localhost:8080/api/admin/index to the upgrade instructions. 
diff --git a/doc/release-notes/10466-math-challenge-403-error-page.md b/doc/release-notes/10466-math-challenge-403-error-page.md deleted file mode 100644 index 160c760dc9d..00000000000 --- a/doc/release-notes/10466-math-challenge-403-error-page.md +++ /dev/null @@ -1 +0,0 @@ -On forbidden access error page, also know as 403 error page, the math challenge is now correctly display to submit the contact form. diff --git a/doc/release-notes/10468-doc-datalad-integration.md b/doc/release-notes/10468-doc-datalad-integration.md deleted file mode 100644 index cd4d2d53a5f..00000000000 --- a/doc/release-notes/10468-doc-datalad-integration.md +++ /dev/null @@ -1 +0,0 @@ -DataLad has been integrated with Dataverse. For more information, see https://dataverse-guide--10470.org.readthedocs.build/en/10470/admin/integrations.html#datalad diff --git a/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md b/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md deleted file mode 100644 index 77cc7f59773..00000000000 --- a/doc/release-notes/10477-metadatablocks-api-extension-input-levels.md +++ /dev/null @@ -1,3 +0,0 @@ -Changed ``api/dataverses/{id}/metadatablocks`` so that setting the query parameter ``onlyDisplayedOnCreate=true`` also returns metadata blocks with dataset field type input levels configured as required on the General Information page of the collection, in addition to the metadata blocks and their fields with the property ``displayOnCreate=true`` (which was the original behavior). - -A new endpoint ``api/dataverses/{id}/inputLevels`` has been created for updating the dataset field type input levels of a collection via API. 
diff --git a/doc/release-notes/10491-add-isreleased-to-get-dataverse-response.md b/doc/release-notes/10491-add-isreleased-to-get-dataverse-response.md deleted file mode 100644 index 5293c7267d0..00000000000 --- a/doc/release-notes/10491-add-isreleased-to-get-dataverse-response.md +++ /dev/null @@ -1,22 +0,0 @@ -The Dataverse object returned by /api/dataverses has been extended to include "isReleased": {boolean}. -```javascript -{ - "status": "OK", - "data": { - "id": 32, - "alias": "dv6f645bb5", - "name": "dv6f645bb5", - "dataverseContacts": [ - { - "displayOrder": 0, - "contactEmail": "54180268@mailinator.com" - } - ], - "permissionRoot": true, - "dataverseType": "UNCATEGORIZED", - "ownerId": 1, - "creationDate": "2024-04-12T18:05:59Z", - "isReleased": true - } -} -``` \ No newline at end of file diff --git a/doc/release-notes/10494-payara-upgrade.md b/doc/release-notes/10494-payara-upgrade.md deleted file mode 100644 index 23ee0e698f7..00000000000 --- a/doc/release-notes/10494-payara-upgrade.md +++ /dev/null @@ -1,119 +0,0 @@ -# Upgrade Payara to v6.2024.6 - -With this version of Dataverse, we encourage you to upgrade to version 6.2024.6. -This will address security issues accumulated since the release of 6.2023.8, which was required since Dataverse release 6.0. - -## Instructions for Upgrading - -If you are using GDCC containers, this upgrade is included when pulling new release images. -No manual intervention is necessary. - -We recommend you ensure you followed all update instructions from the past releases regarding Payara. -(Latest Payara update was for [v6.0](https://github.com/IQSS/dataverse/releases/tag/v6.0)) - -Upgrading requires a maintenance window and downtime. Please plan ahead, create backups of your database, etc. - -The steps below are a simple matter of reusing your existing domain directory with the new distribution. 
-But we also recommend that you review the Payara upgrade instructions as it could be helpful during any troubleshooting: -[Payara Release Notes](https://docs.payara.fish/community/docs/Release%20Notes/Release%20Notes%206.2024.6.html) -We assume you are already on a Dataverse 6.x installation, using a Payara 6.x release. - -```shell -export PAYARA=/usr/local/payara6 -``` - -(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) - -1\. Undeploy the previous version - -```shell - $PAYARA/bin/asadmin list-applications - $PAYARA/bin/asadmin undeploy dataverse<-version> -``` - -2\. Stop Payara - -```shell - service payara stop - rm -rf $PAYARA/glassfish/domains/domain1/generated - rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache - rm -rf $PAYARA/glassfish/domains/domain1/lib/databases -``` - -3\. Move the current Payara directory out of the way - -```shell - mv $PAYARA $PAYARA.MOVED -``` - -4\. Download the new Payara version (6.2024.6), and unzip it in its place - -5\. Replace the brand new payara/glassfish/domains/domain1 with your old, preserved domain1 - -6\. Make sure that you have the following `--add-opens` options in your domain.xml. 
If not present, add them: - -```diff ---- payara-6.2023.8/glassfish/domains/domain1/config/domain.xml -+++ payara-6.2024.6/glassfish/domains/domain1/config/domain.xml -@@ -212,12 +212,16 @@ - --add-opens=java.naming/javax.naming.spi=ALL-UNNAMED - --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED - --add-opens=java.logging/java.util.logging=ALL-UNNAMED -+ --add-opens=java.management/javax.management=ALL-UNNAMED -+ --add-opens=java.management/javax.management.openmbean=ALL-UNNAMED - [17|]--add-exports=java.base/sun.net.www=ALL-UNNAMED - [17|]--add-exports=java.base/sun.security.util=ALL-UNNAMED - [17|]--add-opens=java.base/java.lang.invoke=ALL-UNNAMED - [17|]--add-opens=java.desktop/java.beans=ALL-UNNAMED - [17|]--add-exports=jdk.naming.dns/com.sun.jndi.dns=ALL-UNNAMED - [17|]--add-exports=java.naming/com.sun.jndi.ldap=ALL-UNNAMED -+ [17|]--add-opens=java.base/java.io=ALL-UNNAMED -+ [21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED - -Xmx512m - -XX:NewRatio=2 - -XX:+UnlockDiagnosticVMOptions -@@ -447,12 +451,16 @@ - --add-opens=java.naming/javax.naming.spi=ALL-UNNAMED - --add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED - --add-opens=java.logging/java.util.logging=ALL-UNNAMED -+ --add-opens=java.management/javax.management=ALL-UNNAMED -+ --add-opens=java.management/javax.management.openmbean=ALL-UNNAMED - [17|]--add-exports=java.base/sun.net.www=ALL-UNNAMED - [17|]--add-exports=java.base/sun.security.util=ALL-UNNAMED - [17|]--add-opens=java.base/java.lang.invoke=ALL-UNNAMED - [17|]--add-opens=java.desktop/java.beans=ALL-UNNAMED - [17|]--add-exports=jdk.naming.dns/com.sun.jndi.dns=ALL-UNNAMED - [17|]--add-exports=java.naming/com.sun.jndi.ldap=ALL-UNNAMED -+ [17|]--add-opens=java.base/java.io=ALL-UNNAMED -+ [21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED - -Xmx512m - -XX:NewRatio=2 - -XX:+UnlockDiagnosticVMOptions -``` -(You can also save this as a patch file and try to apply it.) 
- -TODO: For the combined 6.3 release note, I would consider replacing the patch format above with just the 4 specific options, for clarity etc. (L.A.) As in: -``` - --add-opens=java.management/javax.management=ALL-UNNAMED - --add-opens=java.management/javax.management.openmbean=ALL-UNNAMED - [17|]--add-opens=java.base/java.io=ALL-UNNAMED - [21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED -``` - -7\. Start Payara - -```shell - service payara start -``` - -8\. Deploy this version. - -```shell - $PAYARA/bin/asadmin deploy dataverse-6.3.war -``` - -9\. Restart payara - -```shell - service payara stop - service payara start diff --git a/doc/release-notes/10503-cvoc-hidden-html-fields.md b/doc/release-notes/10503-cvoc-hidden-html-fields.md deleted file mode 100644 index e3ea0463fb8..00000000000 --- a/doc/release-notes/10503-cvoc-hidden-html-fields.md +++ /dev/null @@ -1,11 +0,0 @@ -## Release Highlights - -### Updates on Support for External Vocabulary Services - -#### Hidden HTML Fields - -External Controlled Vocabulary scripts, configured via [:CVocConf](https://guides.dataverse.org/en/6.3/installation/config.html#cvocconf), can now access the values of managed fields as well as the term-uri-field for use in constructing the metadata view for a dataset. - -Those values are hidden and can be found with the html attribute `data-cvoc-metadata-name`. - -For more information, see [#10503](https://github.com/IQSS/dataverse/pull/10503). diff --git a/doc/release-notes/10531-contrib.md b/doc/release-notes/10531-contrib.md deleted file mode 100644 index 6cfbe988992..00000000000 --- a/doc/release-notes/10531-contrib.md +++ /dev/null @@ -1 +0,0 @@ -A new [Contributor Guide](https://dataverse-guide--10532.org.readthedocs.build/en/10532/contributor/index.html) has been added by the UX Working Group (#10531 and #10532). 
diff --git a/doc/release-notes/10547-solr-updates.md b/doc/release-notes/10547-solr-updates.md deleted file mode 100644 index a21809c6369..00000000000 --- a/doc/release-notes/10547-solr-updates.md +++ /dev/null @@ -1 +0,0 @@ -Multiple improvements have ben made to they way Solr indexing and searching is done. Response times should be significantly improved. See the individual PRs in this release for details. \ No newline at end of file diff --git a/doc/release-notes/10554-avoid-solr-join-guest.md b/doc/release-notes/10554-avoid-solr-join-guest.md deleted file mode 100644 index 956c658dbed..00000000000 --- a/doc/release-notes/10554-avoid-solr-join-guest.md +++ /dev/null @@ -1,5 +0,0 @@ -Two experimental features flag called "add-publicobject-solr-field" and "avoid-expensive-solr-join" have been added to change how Solr documents are indexed for public objects and how Solr queries are constructed to accommodate access to restricted content (drafts, etc.). It is hoped that it will help with performance, especially on large instances and under load. - -Before the search feature flag ("avoid-expensive...") can be turned on, the indexing flag must be enabled, and a full reindex performed. Otherwise publicly available objects are NOT going to be shown in search results. - -For details see https://dataverse-guide--10555.org.readthedocs.build/en/10555/installation/config.html#feature-flags and #10555. 
diff --git a/doc/release-notes/10561-3dviewer.md b/doc/release-notes/10561-3dviewer.md deleted file mode 100644 index 47da10f8837..00000000000 --- a/doc/release-notes/10561-3dviewer.md +++ /dev/null @@ -1 +0,0 @@ -3DViewer by openforestdata.pl has been added to the list of external tools: https://preview.guides.gdcc.io/en/develop/admin/external-tools.html#inventory-of-external-tools diff --git a/doc/release-notes/10565-banner-test-improvements.md b/doc/release-notes/10565-banner-test-improvements.md deleted file mode 100644 index d9030f2a0c3..00000000000 --- a/doc/release-notes/10565-banner-test-improvements.md +++ /dev/null @@ -1 +0,0 @@ -The endpoint `api/admin/bannerMessage` has been extended so the ID is returned when created \ No newline at end of file diff --git a/doc/release-notes/10568-Fix File Reingest.md b/doc/release-notes/10568-Fix File Reingest.md deleted file mode 100644 index 354aa847f01..00000000000 --- a/doc/release-notes/10568-Fix File Reingest.md +++ /dev/null @@ -1 +0,0 @@ -A bug that prevented the Ingest option in the File page Edit File menu from working has been fixed \ No newline at end of file diff --git a/doc/release-notes/10570-extra-facet-settings.md b/doc/release-notes/10570-extra-facet-settings.md deleted file mode 100644 index 9d68defc9a3..00000000000 --- a/doc/release-notes/10570-extra-facet-settings.md +++ /dev/null @@ -1,4 +0,0 @@ -Extra settings have been added giving an instance admin more choices in -selectively limiting the availability of search facets on the Collection and Dataset pages. -See the [Disable Solr Facets](https://guides.dataverse.org/en/6.3/installation/config.html#DisableSolrFacets) sections of the Config Guide for more info. 
- diff --git a/doc/release-notes/10579-avoid-solr-deletes.md b/doc/release-notes/10579-avoid-solr-deletes.md deleted file mode 100644 index 1062a2fb78f..00000000000 --- a/doc/release-notes/10579-avoid-solr-deletes.md +++ /dev/null @@ -1,9 +0,0 @@ -A features flag called "reduce-solr-deletes" has been added to improve how datafiles are indexed. When the flag is enabled, -Dataverse wil avoid pre-emptively deleting existing solr documents for the files prior to sending updated information. This -should improve performance and will allow additional optimizations going forward. - -The /api/admin/index/status and /api/admin/index/clear-orphans calls -(see https://guides.dataverse.org/en/latest/admin/solr-search-index.html#index-and-database-consistency) -will now find and remove (respectively) additional permissions related solr documents that were not being detected before. -Reducing the overall number of documents will improve solr performance and large sites may wish to periodically call the -clear-orphans API. \ No newline at end of file diff --git a/doc/release-notes/10611-harvested-origin-facet.md b/doc/release-notes/10611-harvested-origin-facet.md deleted file mode 100644 index 89ab6eb7639..00000000000 --- a/doc/release-notes/10611-harvested-origin-facet.md +++ /dev/null @@ -1,10 +0,0 @@ -NOTE that this release note supercedes the 10464-add-name-harvesting-client-facet.md note from the PR 10464. - -An option has been added to index the name of the Harvesting Client as the "Metadata Source" of harvested datasets and files; if enabled, the Metadata Source facet will be showing separate entries for the content harvested from different sources, instead of the current, default behavior where there is one "Harvested" facet for all such content. 
- - -TODO: for the v6.3 release note: -If you choose to enable the extended "Metadata Souce" facet for harvested content, set the optional feature flage (jvm option) `dataverse.feature.index-harvested-metadata-source=true` before reindexing. - -[Please note that the upgrade instruction in 6.3 will contain a suggestion to run full reindex, as part of the Solr upgrade, so the sentence above will need to be added to that section] - diff --git a/doc/release-notes/10637-fix-dataverse-metadatablocks-api.md b/doc/release-notes/10637-fix-dataverse-metadatablocks-api.md deleted file mode 100644 index c8c9c4fa66f..00000000000 --- a/doc/release-notes/10637-fix-dataverse-metadatablocks-api.md +++ /dev/null @@ -1,2 +0,0 @@ -dataverses/{id}/metadatablocks API endpoint has been fixed, since the fields returned for each metadata block when returnDatasetTypes query parameter is set to true was not correct. - diff --git a/doc/release-notes/5621_dataset image in header.md b/doc/release-notes/5621_dataset image in header.md deleted file mode 100644 index 34b445fd9e1..00000000000 --- a/doc/release-notes/5621_dataset image in header.md +++ /dev/null @@ -1 +0,0 @@ -Dataverse will use the Dataset thumbnail, if one is defined, rather than the generic Dataverse logo in the Open Graph metadata header. This means the image will be seen when, for example, the dataset is referenced in Facebook. diff --git a/doc/release-notes/6.3-release-notes.md b/doc/release-notes/6.3-release-notes.md new file mode 100644 index 00000000000..6cad513690d --- /dev/null +++ b/doc/release-notes/6.3-release-notes.md @@ -0,0 +1,470 @@ +# Dataverse 6.3 + +Please note: To read these instructions in full, please go to https://github.com/IQSS/dataverse/releases/tag/v6.3 rather than the list of releases, which will cut them off. + +This release brings new features, enhancements, and bug fixes to Dataverse. 
Thank you to all of the community members who contributed code, suggestions, bug reports, and other assistance across the project. + +# Table of Contents +- [Release Highlights](#release-highlights) +- [Features](#features) +- [Bug Fixes](#bug-fixes) +- [API](#api) +- [Settings](#settings) +- [Complete List of Changes](#complete-list-of-changes) +- [Getting Help](#getting-help) +- [Upgrade instructions](#upgrade-instructions) + +## Release Highlights + +### Solr Search and Indexing Improvements + +Multiple improvements have been made to the way Solr indexing and searching is done. Response times should be significantly improved. + +- Two experimental features flag called "add-publicobject-solr-field" and "avoid-expensive-solr-join" have been added to change how Solr documents are indexed for public objects and how Solr queries are constructed to accommodate access to restricted content (drafts, etc.). It is hoped that it will help with performance, especially on large instances and under load. + +- Before the search feature flag ("avoid-expensive...") can be turned on, the indexing flag must be enabled, and a full reindex performed. Otherwise publicly available objects are NOT going to be shown in search results. + +- A feature flag called "reduce-solr-deletes" has been added to improve how datafiles are indexed. When the flag is enabled, Dataverse will avoid pre-emptively deleting existing Solr documents for the files prior to sending updated information. This +should improve performance and will allow additional optimizations going forward. + +- The /api/admin/index/status and /api/admin/index/clear-orphans calls +(see https://guides.dataverse.org/en/latest/admin/solr-search-index.html#index-and-database-consistency) +will now find and remove (respectively) additional permissions related Solr documents that were not being detected before. 
+Reducing the overall number of documents will improve Solr performance and large sites may wish to periodically call the "clear-orphans" API. + +- Dataverse now relies on the autoCommit and autoSoftCommit settings in the Solr configuration instead of explicitly committing documents to the Solr index. This improves indexing speed. + +See also #10554, #10654, and #10579. + +### File Retention Period + +Dataverse now supports file-level retention periods. The ability to set retention periods, with a minimum duration (in months), can be configured by a Dataverse installation administrator. For more information, see the [Retention Periods section](https://guides.dataverse.org/en/6.3/user/dataset-management.html#retention-periods) of the User Guide. + +- Users can configure a specific retention period, defined by an end date and a short reason, on a set of selected files or an individual file, by selecting the "Retention Period" menu item and entering information in a popup dialog. Retention periods can only be set, changed, or removed before a file has been published. After publication, only Dataverse installation administrators can make changes, using an API. + +- After the retention period expires, files can not be previewed or downloaded (as if restricted, with no option to allow access requests). The file (landing) page and all the metadata remains available. + +## Features + +### Large Datasets Improvements + +For scenarios involving API calls related to large datasets (numerous files, for example: ~10k) the following have been been optimized: + +- The Search API endpoint. +- The permission checking logic present in PermissionServiceBean. + +See also [#10415](https://github.com/IQSS/dataverse/pull/10415). 
+
+### Improved Controlled Vocabulary for Citation Block
+
+The Controlled Vocabulary Values list for the "Language" metadata field in the citation block has been improved, with some missing two- and three-letter ISO 639 codes added, as well as more alternative names for some of the languages, making all these extra language identifiers importable. See also [#8243](https://github.com/IQSS/dataverse/pull/8243).
+
+### Updates on Support for External Vocabulary Services
+
+Multiple extensions of the external vocabulary mechanism have been added. These extensions allow interaction with services based on the Ontoportal software and are expected to be generally useful for other service types.
+
+These changes include:
+
+- *Improved Indexing with Compound Fields:* When using an external vocabulary service with compound fields, you can now specify which field(s) will include additional indexed information, such as translations of an entry into other languages. This is done by adding the `indexIn` in `retrieval-filtering`. See also [#10505](https://github.com/IQSS/dataverse/pull/10505) and [GDCC/dataverse-external-vocab-support documentation](https://github.com/gdcc/dataverse-external-vocab-support/tree/main/docs).
+
+- *Broader Support for Indexing Service Responses:* Indexing of the results from `retrieval-filtering` responses can now handle additional formats including JSON arrays of strings and values from arbitrary keys within a JSON Object. See [#10505](https://github.com/IQSS/dataverse/pull/10505).
+
+- *HTTP Headers:* You are now able to add HTTP request headers required by the service you are implementing. See [#10331](https://github.com/IQSS/dataverse/pull/10331).
+
+- *Flexible params in retrievalUri:* You can now use `managed-fields` field names as well as the `term-uri-field` field name as parameters in the `retrieval-uri` when configuring an external vocabulary service.
`{0}` as an alternative to using the `term-uri-field` name is still supported for backward compatibility. Also you can specify if the value must be url encoded with `encodeUrl:`. See [#10404](https://github.com/IQSS/dataverse/pull/10404).
+
+ For example: `"retrieval-uri": "https://data.agroportal.lirmm.fr/ontologies/{keywordVocabulary}/classes/{encodeUrl:keywordTermURL}"`
+
+- *Hidden HTML Fields* External controlled vocabulary scripts, configured via [:CVocConf](https://guides.dataverse.org/en/6.3/installation/config.html#cvocconf), can now access the values of managed fields as well as the term-uri-field for use in constructing the metadata view for a dataset. These values are now added as hidden elements in the HTML and can be found with the HTML attribute `data-cvoc-metadata-name`. See also [#10503](https://github.com/IQSS/dataverse/pull/10503).
+
+### A Contributor Guide is now available
+
+A new [Contributor Guide](https://guides.dataverse.org/en/6.3/contributor/index.html) has been added by the UX Working Group (#10531 and #10532).
+
+### URL Validation Is More Permissive
+
+URL validation now allows two slashes in the path component of the URL.
+Among other things, this allows metadata fields of `url` type to be filled with more complex URLs such as https://archive.softwareheritage.org/browse/directory/561bfe6698ca9e58b552b4eb4e56132cac41c6f9/?origin_url=https://github.com/gem-pasteur/macsyfinder&revision=868637fce184865d8e0436338af66a2648e8f6e1&snapshot=1bde3cb370766b10132c4e004c7cb377979928d1
+
+See also #9750 and [#9739](https://github.com/IQSS/dataverse/pull/9739)
+
+### Improved Detection of RO-Crate Files
+
+Detection of mime-types based on a filename with extension and detection of the RO-Crate metadata files.
+
+From now on, filenames with extensions can be added into `MimeTypeDetectionByFileName.properties` file. Filenames added there will take precedence over simply recognizing files by extensions.
For example, two new filenames are added into that file:
+```
+ro-crate-metadata.json=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate"
+ro-crate-metadata.jsonld=application/ld+json; profile="http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate"
+```
+
+Therefore, files named `ro-crate-metadata.json` will then be detected as RO-Crate metadata files, instead of as generic `JSON` files.
+For more information on the RO-Crate specifications, see https://www.researchobject.org/ro-crate
+
+See also [#10015](https://github.com/IQSS/dataverse/pull/10015).
+
+### New S3 Tagging Configuration Option
+
+If your S3 store does not support tagging and gives an error if you configure direct upload, you can disable the tagging by using the `dataverse.files..disable-tagging` JVM option. For more details, see the section on [S3 tags](https://guides.dataverse.org/en/6.3/developers/big-data-support.html#s3-tags) in the guides, #10022 and #10029.
+
+### Feature Flag To Remove the Required "Reason" Field in the "Return to Author" Dialog
+
+A reason field, required to be non-empty, was added to the "Return to Author" dialog in v6.2. Installations that handle author communications through email or another system may prefer to not be required to use this new field. v6.3 includes a new
+disable-return-to-author-reason feature flag that can be enabled to drop the reason field from the dialog and make sending a reason optional in the api/datasets/{id}/returnToAuthor call. See also #10655.
+
+### Improved Use of Dataverse Thumbnail
+
+Dataverse will use the dataset thumbnail, if one is defined, rather than the generic Dataverse logo in the Open Graph metadata header. This means the image will be seen when, for example, the dataset is referenced on Facebook. See also [#5621](https://github.com/IQSS/dataverse/pull/5621).
+
+### Improved Email Notifications When Guestbook is Used for File Access Requests
+
+Multiple improvements to guestbook response emails making it easier to organize and process them. The subject line of the notification email now includes the name and user identifier of the requestor. Additionally, the body of the email now includes the user id of the requestor. Finally, the guestbook responses have been sorted and spaced to improve readability. See also [#10581](https://github.com/IQSS/dataverse/issues/10581).
+
+### New keywordTermURI Metadata Field in the Citation Metadata Block
+
+A new metadata field - `keywordTermURI`, has been added in the citation metadata block (as a fourth child field under the `keyword` parent field). This has been done to improve usability and to facilitate the integration of controlled vocabulary services, adding the possibility of saving the "term" and/or its associated URI. For more information, see #10288 and PR #10371.
+
+### Updated Computational Workflow Metadata Block
+
+The computational workflow metadata block has been updated to present a clickable link for the External Code Repository URL field. See also [#10339](https://github.com/IQSS/dataverse/pull/10339).
+
+### Metadata Source Facet Added
+
+An option has been added to index the name of the harvesting client as the "Metadata Source" of harvested datasets and files; if enabled, the Metadata Source facet will show separate entries for the content harvested from different sources, instead of the current, default behavior where there is one "Harvested" facet for all such content.
+
+To enable this feature, set the optional feature flag (jvm option) `dataverse.feature.index-harvested-metadata-source=true` before reindexing.
+
+See also [#10611](https://github.com/IQSS/dataverse/pull/10611) and #10651.
+ +### Additional Facet Settings + +Extra settings have been added giving an instance admin more choices in selectively limiting the availability of search facets on the collection and dataset pages. + +See [Disable Solr Facets](https://guides.dataverse.org/en/6.3/installation/config.html#DisableSolrFacets) under the configuration section of the Installation Guide for more info as well as [#10570](https://github.com/IQSS/dataverse/pull/10570). + +### Sitemap Now Supports More Than 50k Items + +Dataverse can now handle more than 50,000 items when generating sitemap files, splitting the content across multiple files to comply with the Sitemap protocol. For details, see the [sitemap section](https://guides.dataverse.org/en/6.3/installation/config.html#creating-a-sitemap-and-submitting-it-to-search-engines) of the Installation Guide. See also [#8936](https://github.com/IQSS/dataverse/pull/8936) and [#10321](https://github.com/IQSS/dataverse/pull/10321). + +### MIT and Apache 2.0 Licenses Added + +New files have been added to import the MIT and Apache 2.0 Licenses to Dataverse: + +- licenseMIT.json +- licenseApache-2.0.json + +Guidance has been added to the [guides](https://guides.dataverse.org/en/6.2/installation/config.html#adding-custom-licenses) to explain the procedure for adding new licenses to Dataverse. + +See also [#10425](https://github.com/IQSS/dataverse/pull/10425). + +### 3D Viewer by Open Forest Data + +3DViewer by openforestdata.pl has been added to the [list of external tools](https://guides.dataverse.org/en/6.3/admin/external-tools.html#inventory-of-external-tools). See also [#10561](https://github.com/IQSS/dataverse/pull/10561). + +### Datalad Integration With Dataverse + +DataLad has been integrated with Dataverse. For more information, see the [integrations](https://guides.dataverse.org/en/6.3/admin/integrations.html#datalad) section of the guides. See also [#10468](https://github.com/IQSS/dataverse/pull/10468). 
+ +### Rsync Support Has Been Deprecated + +Support for rsync has been deprecated. Information has been removed from the guides for rsync and related software such as Data Capture Module (DCM) and Repository Storage Abstraction Layer (RSAL). You can still find this information in [older versions](https://guides.dataverse.org/en/6.2/developers/big-data-support.html#data-capture-module-dcm) of the guides. See [Settings](#database-settings), below, for deprecated settings. See also [#8985](https://github.com/IQSS/dataverse/pull/8985). + +[↑ Table of Contents](#table-of-contents) + +## Bug Fixes + +### OpenAPI Re-Enabled + +In Dataverse 6.0 when Payara was updated it caused the url `/openapi` to stop working: + +- https://github.com/IQSS/dataverse/issues/9981 +- https://github.com/payara/Payara/issues/6369 + +In addition to fixing the `/openapi` URL, we are also making some changes on how we provide the OpenAPI document: + +When it worked in Dataverse 5.x, the `/openapi` output was generated automatically by Payara, but in this release we have switched to OpenAPI output produced by the [SmallRye OpenAPI plugin](https://github.com/smallrye/smallrye-open-api/tree/main/tools/maven-plugin). This gives us finer control over the output. + +For more information, see the section on [OpenAPI](https://guides.dataverse.org/en/6.3/getting-started.html#openapi) in the API Guide and #10328. + +### Re-Addition of "Cell Counting" to Life Sciences Block + +In the Life Sciences metadata block under the "Measurement Type" field the value `cell counting` was accidentally removed in v5.1. It has been restored. See also #8655 and #9735. + +### Math Challenge Fixed on 403 Error Page + +On the "forbidden" (403) error page, the math challenge now correctly displays so that the contact form can be submitted. See also [#10466](https://github.com/IQSS/dataverse/pull/10466). 
+ +### Ingest Option Bug Fixed + +A bug that prevented the "Ingest" option in the file page "Edit File" menu from working has been fixed. See also [#10568](https://github.com/IQSS/dataverse/pull/10568). + +### Incomplete Metadata Bug Fix + +A bug was fixed where the `incomplete metadata` label was being shown for published dataset with incomplete metadata in certain scenarios. This label will now be shown for draft versions of such datasets and published datasets that the user can edit. This label can also be made invisible for published datasets (regardless of edit rights) with the new option ``dataverse.ui.show-validity-label-when-published`` set to `false`. See also [#10116](https://github.com/IQSS/dataverse/pull/10116). + +### Identical Role Error Message + +An error is now correctly reported when an attempt is made to assign an identical role to the same collection, dataset, or file. See also [#9729](https://github.com/IQSS/dataverse/pull/9729) and #10465. + +[↑ Table of Contents](#table-of-contents) + +## API + +### Superuser Endpoint + +The existing API endpoint for toggling the superuser status of a user has been deprecated in favor of a new API endpoint that allows you to explicitly and idempotently set the status as true or false. For details, see the [API Guide](https://guides.dataverse.org/en/6.3/api/native-api.html#set-superuser-status), [#9887](https://github.com/IQSS/dataverse/pull/9887) and [#10440](https://github.com/IQSS/dataverse/pull/10440). + +### New Featured Collections Endpoints + +New API endpoints have been added to allow you to add or remove featured collections from a collection. + +See also the sections on [listing, setting, and removing](https://guides.dataverse.org/en/6.3/api/native-api.html#list-featured-collections-for-a-dataverse-collection) featured collections in the API Guide, [#10242](https://github.com/IQSS/dataverse/pull/10242) and #10459. 
+ +### Dataset Version Endpoint Extended + +The API endpoint for getting the Dataset version has been extended to include latestVersionPublishingStatus. See also [#10330](https://github.com/IQSS/dataverse/pull/10330). + +### New Optional Query Parameters for Metadatablocks Endpoints + +New optional query parameters have been added to `api/metadatablocks` and `api/dataverses/{id}/metadatablocks` endpoints: + +- `returnDatasetFieldTypes`: Whether or not to return the dataset field types present in each metadata block. If not set, the default value is false. +- Setting the query parameter `onlyDisplayedOnCreate=true` also returns metadata blocks with dataset field type input levels configured as required on the General Information page of the collection, in addition to the metadata blocks and their fields with the property ``displayOnCreate=true``. + +See also [#10389](https://github.com/IQSS/dataverse/pull/10389) + +### Dataverse Payload Includes Release Status + +The Dataverse object returned by /api/dataverses has been extended to include "isReleased": {boolean}. See also [#10491](https://github.com/IQSS/dataverse/pull/10491). + +### New Field Type Input Level Endpoint + +A new endpoint ``api/dataverses/{id}/inputLevels`` has been created for updating the dataset field type input levels of a collection via API. See also [#10477](https://github.com/IQSS/dataverse/pull/10477). + +### Banner Message Endpoint Extended + +The endpoint `api/admin/bannerMessage` has been extended so the ID is returned when created. See also [#10565](https://github.com/IQSS/dataverse/pull/10565). 
+ +[↑ Table of Contents](#table-of-contents) + +## Settings + +### Database Settings: + +***New:*** + +- :DisableSolrFacets + +***Deprecated (used with rsync):*** + +- :DataCaptureModuleUrl +- :DownloadMethods +- :LocalDataAccessPath +- :RepositoryStorageAbstractionLayerUrl + +### New Configuration Options + +- `dataverse.files..disable-tagging` +- `dataverse.feature.add-publicobject-solr-field` +- `dataverse.feature.avoid-expensive-solr-join` +- `dataverse.feature.reduce-solr-deletes` +- `dataverse.feature.disable-return-to-author-reason` +- `dataverse.feature.index-harvested-metadata-source` +- `dataverse.ui.show-validity-label-when-published` + +[↑ Table of Contents](#table-of-contents) + +## Complete List of Changes + +For the complete list of code changes in this release, see the [6.3 Milestone](https://github.com/IQSS/dataverse/issues?q=milestone%3A6.3+is%3Aclosed) in GitHub. + +[↑ Table of Contents](#table-of-contents) + +## Getting Help + +For help with upgrading, installing, or general questions please post to the [Dataverse Community Google Group](https://groups.google.com/g/dataverse-community) or email support@dataverse.org. + +[↑ Table of Contents](#table-of-contents) + +## Upgrade Instructions + +Upgrading requires a maintenance window and downtime. Please plan accordingly, create backups of your database, etc. + +These instructions assume that you've already upgraded through all the 5.x releases and are now running Dataverse 6.2. + +0\. These instructions assume that you are upgrading from the immediate previous version. If you are running an earlier version, the only supported way to upgrade is to progress through the upgrades to all the releases in between before attempting the upgrade to this version. + +If you are running Payara as a non-root user (and you should be!), **remember not to execute the commands below as root**. Use `sudo` to change to that user first. 
For example, `sudo -i -u dataverse` if `dataverse` is your dedicated application user. + +In the following commands, we assume that Payara 6 is installed in `/usr/local/payara6`. If not, adjust as needed. + +`export PAYARA=/usr/local/payara6` + +(or `setenv PAYARA /usr/local/payara6` if you are using a `csh`-like shell) + +1\. Undeploy the previous version. + +- `$PAYARA/bin/asadmin undeploy dataverse-6.2` + +2\. Stop Payara and remove the following directories: + +```shell +service payara stop +rm -rf $PAYARA/glassfish/domains/domain1/generated +rm -rf $PAYARA/glassfish/domains/domain1/osgi-cache +rm -rf $PAYARA/glassfish/domains/domain1/lib/databases +``` + +3\. Upgrade Payara to v6.2024.6 + +With this version of Dataverse, we encourage you to upgrade to version 6.2024.6. +This will address security issues accumulated since the release of 6.2023.8. + +Note that if you are using GDCC containers, this upgrade is included when pulling new release images. +No manual intervention is necessary. + +The steps below are a simple matter of reusing your existing domain directory with the new distribution. +But we recommend that you review the Payara upgrade instructions as it could be helpful during any troubleshooting: +[Payara Release Notes](https://docs.payara.fish/community/docs/Release%20Notes/Release%20Notes%206.2024.6.html). +We also recommend you ensure you followed all update instructions from the past releases regarding Payara. +(The latest Payara update was for [v6.0](https://github.com/IQSS/dataverse/releases/tag/v6.0).) 
+
+Move the current Payara directory out of the way:
+
+```shell
+mv $PAYARA $PAYARA.6.2023.8
+```
+
+Download the new Payara version 6.2024.6 (from https://www.payara.fish/downloads/payara-platform-community-edition/), and unzip it in its place:
+
+```shell
+cd /usr/local
+unzip payara-6.2024.6.zip
+```
+
+Replace the brand new `payara6/glassfish/domains/domain1` with your old, preserved domain1:
+
+```shell
+mv payara6/glassfish/domains/domain1 payara6/glassfish/domains/domain1_DIST
+mv payara6.6.2023.8/glassfish/domains/domain1 payara6/glassfish/domains/
+```
+
+Make sure that you have the following `--add-opens` options in your `payara6/glassfish/domains/domain1/config/domain.xml`. If not present, add them:
+
+```
+--add-opens=java.management/javax.management=ALL-UNNAMED
+--add-opens=java.management/javax.management.openmbean=ALL-UNNAMED
+[17|]--add-opens=java.base/java.io=ALL-UNNAMED
+[21|]--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED
+```
+
+(Note that you likely already have the `java.base/java.io` option there, but without the `[17|]` prefix. Make sure to replace it with the version above)
+
+Start Payara:
+
+```shell
+sudo service payara start
+```
+
+4\. Deploy this version.
+
+```shell
+$PAYARA/bin/asadmin deploy dataverse-6.3.war
+```
+
+5\. For installations with internationalization:
+
+- Please remember to update translations via [Dataverse language packs](https://github.com/GlobalDataverseCommunityConsortium/dataverse-language-packs).
+
+6\. Restart Payara
+
+```shell
+service payara stop
+service payara start
+```
+
+7\.
Update the following metadata blocks to reflect the incremental improvements made to the handling of core metadata fields: + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/citation.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv + +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/biomedical.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file biomedical.tsv + +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/computational_workflow.tsv + +curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file computational_workflow.tsv + +``` + +8\. Upgrade Solr + +Solr 9.4.1 is now the version recommended in our Installation Guide and used with automated testing. There is a known security issue in the previously recommended version 9.3.0: https://nvd.nist.gov/vuln/detail/CVE-2023-36478. While the risk of an exploit should not be significant unless the Solr instance is accessible from outside networks (which we have always recommended against), we recommend to upgrade. + +Install Solr 9.4.1 following the [instructions](https://guides.dataverse.org/en/6.3/installation/prerequisites.html#solr) from the Installation Guide. + +The instructions in the guide suggest to use the config files from the installer zip bundle. Upgrading an existing instance, it may be easier to download them from the source tree: + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/conf/solr/solrconfig.xml +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/conf/solr/schema.xml +cp solrconfig.xml schema.xml /usr/local/solr/solr-9.4.1/server/solr/collection1/conf +``` + +8a\. 
For installations with custom or experimental metadata blocks: + +- Stop Solr instance (usually `service solr stop`, depending on Solr installation/OS, see the [Installation Guide](https://guides.dataverse.org/en/6.3/installation/prerequisites.html#solr-init-script)). + +- Run the `update-fields.sh` script that we supply, as in the example below (modify the command lines as needed to reflect the correct path of your Solr installation): + +```shell +wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/conf/solr/update-fields.sh +chmod +x update-fields.sh +curl "http://localhost:8080/api/admin/index/solr/schema" | ./update-fields.sh /usr/local/solr/solr-9.4.1/server/solr/collection1/conf/schema.xml +``` + +- Start Solr instance (usually `service solr start` depending on Solr/OS). + +9\. Enable the Metadata Source facet for harvested content (Optional): + +If you choose to enable this new feature, set the optional feature flag (jvm option) `dataverse.feature.index-harvested-metadata-source=true` before reindexing. + +10\. Reindex Solr, if you upgraded Solr (recommended), or chose to enable any options that require a reindex: + +```shell +curl http://localhost:8080/api/admin/index +``` + +Note: if you choose to perform a migration of your `keywordValue` metadata fields (section below), that will require a reindex as well, so do that first. + +## Notes for Dataverse Installation Administrators + +### Data migration to the new `keywordTermURI` field + +You can migrate your `keywordValue` data containing URIs to the new `keywordTermURI` field. 
+In case of data migration, view the affected data with the following database query: + +```sql +SELECT value FROM datasetfieldvalue dfv +INNER JOIN datasetfield df ON df.id = dfv.datasetfield_id +WHERE df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') +AND value ILIKE 'http%'; +``` + +If you wish to migrate your data, a database update is then necessary: + +```sql +UPDATE datasetfield df +SET datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordTermURI') +FROM datasetfieldvalue dfv +WHERE dfv.datasetfield_id = df.id +AND df.datasetfieldtype_id = (SELECT id FROM datasetfieldtype WHERE name = 'keywordValue') +AND dfv.value ILIKE 'http%'; +``` + +A [reindex in place](https://guides.dataverse.org/en/latest/admin/solr-search-index.html#reindex-in-place) will be required. ReExportAll will need to be run to update the metadata exports of the dataset. Follow the directions in the [Admin Guide](http://guides.dataverse.org/en/latest/admin/metadataexport.html#batch-exports-through-the-api). + +[↑ Table of Contents](#table-of-contents) diff --git a/doc/release-notes/8243-improve-language-controlled-vocab.md b/doc/release-notes/8243-improve-language-controlled-vocab.md deleted file mode 100644 index 15b2b46c02d..00000000000 --- a/doc/release-notes/8243-improve-language-controlled-vocab.md +++ /dev/null @@ -1,11 +0,0 @@ -The Controlled Vocabuary Values list for the metadata field Language in the Citation block has been improved, with some missing two- and three-letter ISO 639 codes added, as well as more alternative names for some of the languages, making all these extra language identifiers importable. 
- -To be added to the 6.3 release instructions: - -Update the Citation block, to incorporate the improved controlled vocabulary for language [plus whatever other improvements may be made to the block in other PRs]: - -``` -wget https://raw.githubusercontent.com/IQSS/dataverse/v6.3/scripts/api/data/metadatablocks/citation.tsv -curl http://localhost:8080/api/admin/datasetfield/load -H "Content-type: text/tab-separated-values" -X POST --upload-file citation.tsv -``` - diff --git a/doc/release-notes/8655-re-add-cell-counting-biomedical-tsv.md b/doc/release-notes/8655-re-add-cell-counting-biomedical-tsv.md deleted file mode 100644 index 295f206871f..00000000000 --- a/doc/release-notes/8655-re-add-cell-counting-biomedical-tsv.md +++ /dev/null @@ -1,12 +0,0 @@ -## Release Highlights - -### Life Science Metadata - -Re-adding value `cell counting` to Life Science metadatablock's Measurement Type vocabularies accidentally removed in `v5.1`. - -## Upgrade Instructions - -### Update the Life Science metadata block - -- `wget https://github.com/IQSS/dataverse/releases/download/v6.3/biomedical.tsv` -- `curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @biomedical.tsv -H "Content-type: text/tab-separated-values"` \ No newline at end of file diff --git a/doc/release-notes/8936-more-than-50000-entries-in-sitemap.md b/doc/release-notes/8936-more-than-50000-entries-in-sitemap.md deleted file mode 100644 index 7b367e328c1..00000000000 --- a/doc/release-notes/8936-more-than-50000-entries-in-sitemap.md +++ /dev/null @@ -1,11 +0,0 @@ -Dataverse can now handle more than 50,000 items when generating sitemap files, splitting the content across multiple files to comply with the Sitemap protocol. - -For details see https://dataverse-guide--10321.org.readthedocs.build/en/10321/installation/config.html#creating-a-sitemap-and-submitting-it-to-search-engines #8936 and #10321. 
- -## Upgrade instructions - -If your installation has more than 50,000 entries, you should re-submit your sitemap URL to Google or other search engines. The file in the URL will change from ``sitemap.xml`` to ``sitemap_index.xml``. - -As explained at https://dataverse-guide--10321.org.readthedocs.build/en/10321/installation/config.html#creating-a-sitemap-and-submitting-it-to-search-engines this is the command for regenerating your sitemap: - -`curl -X POST http://localhost:8080/api/admin/sitemap` diff --git a/doc/release-notes/8985-deprecate-rsync.md b/doc/release-notes/8985-deprecate-rsync.md deleted file mode 100644 index 44563f292fd..00000000000 --- a/doc/release-notes/8985-deprecate-rsync.md +++ /dev/null @@ -1,8 +0,0 @@ -Support for rsync has been deprecated. Information has been removed from the guides for rsync and related software such as Data Capture Module (DCM) and Repository Storage Abstraction Layer (RSAL). You can still find this information in [older versions](https://guides.dataverse.org/en/6.2/developers/big-data-support.html#data-capture-module-dcm) of the guides. 
- -The following related database settings have been deprecated as well: - -- :DataCaptureModuleUrl -- :DownloadMethods -- :LocalDataAccessPath -- :RepositoryStorageAbstractionLayerUrl diff --git a/doc/release-notes/9276-allow-flexible-params-in-retrievaluri-cvoc.md b/doc/release-notes/9276-allow-flexible-params-in-retrievaluri-cvoc.md deleted file mode 100644 index 5e18007e8ae..00000000000 --- a/doc/release-notes/9276-allow-flexible-params-in-retrievaluri-cvoc.md +++ /dev/null @@ -1,14 +0,0 @@ -## Release Highlights - -### Updates on Support for External Vocabulary Services - -#### HTTP Headers - -You are now able to add HTTP request headers required by the service you are implementing (#10331) - -#### Flexible params in retrievalUri - -You can now use `managed-fields` field names as well as the `term-uri-field` field name as parameters in the `retrieval-uri` when configuring an external vocabulary service. `{0}` as an alternative to using the `term-uri-field` name is still supported for backward compatibility. -Also you can specify if the value must be url encoded with `encodeUrl:`. (#10404) - -For example : `"retrieval-uri": "https://data.agroportal.lirmm.fr/ontologies/{keywordVocabulary}/classes/{encodeUrl:keywordTermURL}"` \ No newline at end of file diff --git a/doc/release-notes/9276-doc-cvoc-index-in.md b/doc/release-notes/9276-doc-cvoc-index-in.md deleted file mode 100644 index 78289201511..00000000000 --- a/doc/release-notes/9276-doc-cvoc-index-in.md +++ /dev/null @@ -1,18 +0,0 @@ -## Release Highlights - -### Updates on Support for External Vocabulary Services - -Multiple extensions of the External Vocabulary mechanism have been added. These extensions allow interaction with services based on the Ontoportal software and are expected to be generally useful for other service types. 
- -These changes include: - -#### Improved Indexing with Compound Fields - -When using an external vocabulary service with compound fields, you can now specify which field(s) will include additional indexed information, such as translations of an entry into other languages. This is done by adding the `indexIn` in `retrieval-filtering`. (#10505) -For more information, please check [GDCC/dataverse-external-vocab-support documentation](https://github.com/gdcc/dataverse-external-vocab-support/tree/main/docs). - -#### Broader Support for Indexing Service Responses - -Indexing of the results from `retrieval-filtering` responses can now handle additional formats including Json Arrays of Strings and values from arbitrary keys within a JSON Object. (#10505) - -**** This documentation must be merged with 9276-allow-flexible-params-in-retrievaluri-cvoc.md (#10404) \ No newline at end of file diff --git a/doc/release-notes/9375-retention-period.md b/doc/release-notes/9375-retention-period.md deleted file mode 100644 index a088cabf138..00000000000 --- a/doc/release-notes/9375-retention-period.md +++ /dev/null @@ -1,8 +0,0 @@ -The Dataverse Software now supports file-level retention periods. The ability to set retention periods, with a minimum duration (in months), can be configured by a Dataverse installation administrator. For more information, see the [Retention Periods section](https://guides.dataverse.org/en/6.3/user/dataset-management.html#retention-periods) of the Dataverse Software Guides. - -- Users can configure a specific retention period, defined by an end date and a short reason, on a set of selected files or an individual file, by selecting the 'Retention Period' menu item and entering information in a popup dialog. Retention Periods can only be set, changed, or removed before a file has been published. After publication, only Dataverse installation administrators can make changes, using an API. 
- -- After the retention period expires, files can not be previewed or downloaded (as if restricted, with no option to allow access requests). The file (landing) page and all the metadata remains available. - - -Release notes should mention that a Solr schema update is needed. diff --git a/doc/release-notes/9729-release-notes.md b/doc/release-notes/9729-release-notes.md deleted file mode 100644 index 9dc27995405..00000000000 --- a/doc/release-notes/9729-release-notes.md +++ /dev/null @@ -1 +0,0 @@ -An error is now correctly reported when an attempt is made to assign an identical role to the same collection, dataset, or file. #9729 #10465 \ No newline at end of file diff --git a/doc/release-notes/9739-url-validator.md b/doc/release-notes/9739-url-validator.md deleted file mode 100644 index ad149c54459..00000000000 --- a/doc/release-notes/9739-url-validator.md +++ /dev/null @@ -1,7 +0,0 @@ -## Release Highlights - -### URL validation is more permissive - -Url validation now allows two slashes in the path component of the URL. (#9750) -Among other things, this allows metadata fields of `url` type to be filled with more complex url such as https://archive.softwareheritage.org/browse/directory/561bfe6698ca9e58b552b4eb4e56132cac41c6f9/?origin_url=https://github.com/gem-pasteur/macsyfinder&revision=868637fce184865d8e0436338af66a2648e8f6e1&snapshot=1bde3cb370766b10132c4e004c7cb377979928d1 - diff --git a/doc/release-notes/9887-new-superuser-status-endpoint.md b/doc/release-notes/9887-new-superuser-status-endpoint.md deleted file mode 100644 index 01b1f539f7a..00000000000 --- a/doc/release-notes/9887-new-superuser-status-endpoint.md +++ /dev/null @@ -1 +0,0 @@ -The existing API endpoint for toggling the superuser status of a user has been deprecated in favor of a new API endpoint that allows you to explicitly and idempotently set the status as true or false. 
For details, see [the guides](https://dataverse-guide--10440.org.readthedocs.build/en/10440/api/native-api.html), #9887 and #10440. \ No newline at end of file diff --git a/doc/release-notes/solr-9.4.1.md b/doc/release-notes/solr-9.4.1.md deleted file mode 100644 index 13624a272ab..00000000000 --- a/doc/release-notes/solr-9.4.1.md +++ /dev/null @@ -1,14 +0,0 @@ -Solr 9.4.1 is now the version recommended in our installation guides and used with automated testing. There is a known security issue in the previously recommended version 9.3.0: https://nvd.nist.gov/vuln/detail/CVE-2023-36478. While the risk of an exploit should not be significant unless the Solr instance is accessible from the outside networks (which we have always recommended against), existing Dataverse installations should consider upgrading. - -For the upgrade instructions section: - -[note that 6.3 will contain other solr-related changes, so the instructions may need to contain information merged from multiple release notes!] - -If you are upgrading Solr: - - Install solr-9.4.1 following the instructions from the Installation guide. - - Run a full reindex to populate the search catalog. - - Note that it may be possible to skip the reindexing step by simply moving the existing `.../server/solr/collection1/` under the new `solr-9.4.1` installation directory. This however has not been thoroughly tested and is not officially supported. - - - - diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 2d0dc714132..75ee5a51f90 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -585,6 +585,8 @@ The fully expanded example above (without environment variables) looks like this Note: you must have "Add Dataset" permission in the given collection to invoke this endpoint. +.. 
_featured-collections: + List Featured Collections for a Dataverse Collection ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ From c9538ab20fd745d5ffce65300a60f7c74d36cc31 Mon Sep 17 00:00:00 2001 From: diptechexpo <78333467+diptechexpo@users.noreply.github.com> Date: Sat, 6 Jul 2024 10:17:32 -0500 Subject: [PATCH 40/63] Added dataverse.db.parameters from long list of JVM options Addressing documentation issue #10669. Updated the config.rst file under doc/sphinx-guides/source/installation to add missing documentation about dataverse.db.parameters under JVM Options section --- doc/sphinx-guides/source/installation/config.rst | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 9e4a5e0ee7b..57bf2d72c81 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -781,7 +781,7 @@ Basic Database Settings - | ``dataverse`` | (installer sets to ``dvndb``) * - dataverse.db.parameters - - Connection parameters, such as ``sslmode=require``. See `Postgres JDBC docs `_ + - Connection parameters, such as ``sslmode=require``. See `Postgres JDBC docs ` Note: you don't need to provide the initial "?". - *Empty string* @@ -2442,6 +2442,15 @@ Defaults to ``5432``, the default PostgreSQL port. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_DB_PORT``. +dataverse.db.parameters ++++++++++++++++++++++++ + +The PostgreSQL server connection parameters. + +Defaults to *Empty string* + +Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_DB_PARAMETERS``. + .. 
_dataverse.solr.host: dataverse.solr.host From 5fc0df576344e3d6707a5b95a297a3ff39f2a615 Mon Sep 17 00:00:00 2001 From: diptechexpo <78333467+diptechexpo@users.noreply.github.com> Date: Sat, 6 Jul 2024 14:48:30 -0500 Subject: [PATCH 41/63] Update doc/sphinx-guides/source/installation/config.rst Co-authored-by: Oliver Bertuch --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 57bf2d72c81..4b1ac1f1521 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -2447,7 +2447,7 @@ dataverse.db.parameters The PostgreSQL server connection parameters. -Defaults to *Empty string* +Defaults to *empty string* Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_DB_PARAMETERS``. From 86815ecd3d0c1aa62286d9112eeedbc31dc22709 Mon Sep 17 00:00:00 2001 From: diptechexpo <78333467+diptechexpo@users.noreply.github.com> Date: Sat, 6 Jul 2024 14:56:05 -0500 Subject: [PATCH 42/63] Update config.rst Added back underscore(_) character to maintain the link. --- doc/sphinx-guides/source/installation/config.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 4b1ac1f1521..ba92a4180ae 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -781,7 +781,7 @@ Basic Database Settings - | ``dataverse`` | (installer sets to ``dvndb``) * - dataverse.db.parameters - - Connection parameters, such as ``sslmode=require``. See `Postgres JDBC docs ` + - Connection parameters, such as ``sslmode=require``. See `Postgres JDBC docs `_ Note: you don't need to provide the initial "?". 
- *Empty string* From d35a474caa81c1642f06b786092b87f8e64106c6 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 8 Jul 2024 09:30:51 +0200 Subject: [PATCH 43/63] refactor(ct): migrate to useradd style commands in base image The Ubuntu specific wrappers "adduser" and "addgroup" have been removed with Ubuntu 24.04. Also, lets be more compatible with LSB this way. --- modules/container-base/src/main/docker/Dockerfile | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index 93f9fa4f0c1..4f7b25aaa3b 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -78,16 +78,18 @@ RUN < Date: Mon, 8 Jul 2024 09:32:00 +0200 Subject: [PATCH 44/63] fix(ct): security fix to use the unprivileged user in base image It was left as "root" by accident, but should obviously be not. --- modules/container-base/src/main/docker/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index 4f7b25aaa3b..05e09267f5f 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -226,6 +226,7 @@ USER root RUN true && \ chgrp -R 0 "${DOMAIN_DIR}" && \ chmod -R g=u "${DOMAIN_DIR}" +USER payara # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] From 7213c43424c05e20faa00d3082580c55ac1674a8 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 8 Jul 2024 09:32:59 +0200 Subject: [PATCH 45/63] refactor(ct): use pkg names compatible with Ubuntu 22 and 24 Also delete some unused packages like gpg and dirmngr --- modules/container-base/src/main/docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/container-base/src/main/docker/Dockerfile 
b/modules/container-base/src/main/docker/Dockerfile index 05e09267f5f..d4e830f3ab3 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -96,7 +96,7 @@ ARG JATTACH_VERSION="v2.1" ARG JATTACH_CHECKSUM="07885fdc782e02e7302c6d190f54c3930afa10a38140365adf54076ec1086a8e" ARG WAIT_FOR_VERSION="v2.2.3" ARG WAIT_FOR_CHECKSUM="70271181be69cd2c7265b2746f97fccfd7e8aa1059894138a775369c23589ff4" -ARG PKGS="jq imagemagick curl unzip wget acl dirmngr gpg lsof procps netcat dumb-init" +ARG PKGS="jq imagemagick curl unzip wget acl lsof procps netcat-openbsd dumb-init" # Installing the packages in an extra container layer for better caching RUN < Date: Mon, 8 Jul 2024 15:19:06 +0200 Subject: [PATCH 46/63] fix(ct): solve pre/post boot command file trouble - The entrypoint now defines two locations, which can be overridden by a user _without_ implicitely trying to execute these scripts. - The entrypoint now _removes_ any files found at these locations to _always_ start with a clean slate. Otherwise stale files might be looped over and over again. - A consequence of this: any kind of commands to be included must be provided via a script and cannot be provided by some initial file. - The configuration scripts no longer leave temporary files dangling and avoid these files if possible. Instead, we are injecting statements into these files while checking for duplicates as a safety measure. 
--- .../src/main/docker/scripts/entrypoint.sh | 12 ++-- .../init_1_generate_deploy_commands.sh | 17 +++-- .../init_1_generate_devmode_commands.sh | 67 ++++++++++--------- .../main/docker/scripts/startInForeground.sh | 10 +-- src/main/docker/scripts/init_2_configure.sh | 35 +++++++--- 5 files changed, 82 insertions(+), 59 deletions(-) diff --git a/modules/container-base/src/main/docker/scripts/entrypoint.sh b/modules/container-base/src/main/docker/scripts/entrypoint.sh index bd7031db9f0..ed3b8ea9aa4 100644 --- a/modules/container-base/src/main/docker/scripts/entrypoint.sh +++ b/modules/container-base/src/main/docker/scripts/entrypoint.sh @@ -12,10 +12,14 @@ # We do not define these variables within our Dockerfile so the location can be changed when trying to avoid # writes to the overlay filesystem. (CONFIG_DIR is defined within the Dockerfile, but might be overridden.) -${PREBOOT_COMMANDS:="${CONFIG_DIR}/pre-boot-commands.asadmin"} -export PREBOOT_COMMANDS -${POSTBOOT_COMMANDS:="${CONFIG_DIR}/post-boot-commands.asadmin"} -export POSTBOOT_COMMANDS +PREBOOT_COMMANDS_FILE=${PREBOOT_COMMANDS:-"${CONFIG_DIR}/pre-boot-commands.asadmin"} +export PREBOOT_COMMANDS_FILE +POSTBOOT_COMMANDS_FILE=${POSTBOOT_COMMANDS:-"${CONFIG_DIR}/post-boot-commands.asadmin"} +export POSTBOOT_COMMANDS_FILE + +# Remove existing POSTBOOT/PREBOOT files if they exist. 
Anything to be done needs to be injected by a script +rm -rf "$POSTBOOT_COMMANDS_FILE" || exit 1 +rm -rf "$PREBOOT_COMMANDS_FILE" || exit 1 # Execute any scripts BEFORE the appserver starts for f in "${SCRIPT_DIR}"/init_* "${SCRIPT_DIR}"/init.d/*; do diff --git a/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh b/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh index 161f10caebf..622ea82d6f6 100644 --- a/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh +++ b/modules/container-base/src/main/docker/scripts/init_1_generate_deploy_commands.sh @@ -35,12 +35,11 @@ set -euo pipefail # Check required variables are set if [ -z "$DEPLOY_DIR" ]; then echo "Variable DEPLOY_DIR is not set."; exit 1; fi -if [ -z "$PREBOOT_COMMANDS" ]; then echo "Variable PREBOOT_COMMANDS is not set."; exit 1; fi -if [ -z "$POSTBOOT_COMMANDS" ]; then echo "Variable POSTBOOT_COMMANDS is not set."; exit 1; fi - -# Create pre and post boot command files if they don't exist -touch "$POSTBOOT_COMMANDS" -touch "$PREBOOT_COMMANDS" +if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; exit 1; fi +if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi +# Test if files are writeable for us, exit otherwise +touch "$PREBOOT_COMMANDS_FILE" || exit 1 +touch "$POSTBOOT_COMMANDS_FILE" || exit 1 deploy() { @@ -50,14 +49,14 @@ deploy() { fi DEPLOY_STATEMENT="deploy $DEPLOY_PROPS $1" - if grep -q "$1" "$POSTBOOT_COMMANDS"; then - echo "post boot commands already deploys $1"; + if grep -q "$1" "$POSTBOOT_COMMANDS_FILE"; then + echo "Post boot commands already deploys $1, skip adding"; else if [ -n "$SKIP_DEPLOY" ] && { [ "$SKIP_DEPLOY" = "1" ] || [ "$SKIP_DEPLOY" = "true" ]; }; then echo "Skipping deployment of $1 as requested."; else echo "Adding deployment target $1 to post boot commands"; - echo "$DEPLOY_STATEMENT" >> 
"$POSTBOOT_COMMANDS"; + echo "$DEPLOY_STATEMENT" >> "$POSTBOOT_COMMANDS_FILE"; fi fi } diff --git a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh index 016151168d5..77b37abac30 100644 --- a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh +++ b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh @@ -11,39 +11,49 @@ set -euo pipefail # for the parent shell before executing Payara. ###### ###### ###### ###### ###### ###### ###### ###### ###### ###### ###### +if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; exit 1; fi +# Test if preboot file is writeable for us, exit otherwise +touch "$PREBOOT_COMMANDS_FILE" || exit 1 + # 0. Init variables ENABLE_JMX=${ENABLE_JMX:-0} ENABLE_JDWP=${ENABLE_JDWP:-0} ENABLE_RELOAD=${ENABLE_RELOAD:-0} -DV_PREBOOT=${CONFIG_DIR}/dataverse_preboot -echo "# Dataverse preboot configuration for Payara" > "${DV_PREBOOT}" +function inject() { + if [ -z "$1" ]; then echo "No line specified"; exit 1; fi + # If the line is not yet in the file, try to add it + if ! grep -q "$1" "$PREBOOT_COMMANDS_FILE"; then + # Check if the line is still not in the file when splitting at the first = + if ! grep -q "$(echo "$1" | cut -f1 -d"=")" "$PREBOOT_COMMANDS_FILE"; then + echo "$1" >> "$PREBOOT_COMMANDS_FILE" + fi + fi +} # 1. Configure JMX (enabled by default on port 8686, but requires SSL) # See also https://blog.payara.fish/monitoring-payara-server-with-jconsole # To still use it, you can use a sidecar container proxying or using JMX via localhost without SSL. if [ "${ENABLE_JMX}" = "1" ]; then echo "Enabling unsecured JMX on 0.0.0.0:8686, enabling AMX and tuning monitoring levels to HIGH. You'll need a sidecar for this, as access is allowed from same machine only (without SSL)." 
- { \ - echo "set configs.config.server-config.amx-configuration.enabled=true" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jvm=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-service=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-connection-pool=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jdbc-connection-pool=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-services-container=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.ejb-container=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.thread-pool=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.http-service=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.security=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jms-service=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jersey=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.transaction-service=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.jpa=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-container=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.orb=HIGH" - echo "set configs.config.server-config.monitoring-service.module-monitoring-levels.deployment=HIGH" - echo "set configs.config.server-config.admin-service.jmx-connector.system.security-enabled=false" - } >> "${DV_PREBOOT}" + inject "set configs.config.server-config.amx-configuration.enabled=true" + inject "set 
configs.config.server-config.monitoring-service.module-monitoring-levels.jvm=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-service=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.connector-connection-pool=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jdbc-connection-pool=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-services-container=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.ejb-container=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.thread-pool=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.http-service=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.security=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jms-service=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jersey=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.transaction-service=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.jpa=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.web-container=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.orb=HIGH" + inject "set configs.config.server-config.monitoring-service.module-monitoring-levels.deployment=HIGH" + inject "set configs.config.server-config.admin-service.jmx-connector.system.security-enabled=false" fi # 2. Enable JDWP via debugging switch @@ -55,17 +65,14 @@ fi # 3. Enable hot reload if [ "${ENABLE_RELOAD}" = "1" ]; then echo "Enabling hot reload of deployments." 
- echo "set configs.config.server-config.admin-service.das-config.dynamic-reload-enabled=true" >> "${DV_PREBOOT}" - echo "set configs.config.server-config.admin-service.das-config.autodeploy-enabled=true" >> "${DV_PREBOOT}" export DATAVERSE_JSF_PROJECT_STAGE=${DATAVERSE_JSF_PROJECT_STAGE:-"Development"} export DATAVERSE_JSF_REFRESH_PERIOD=${DATAVERSE_JSF_REFRESH_PERIOD:-"0"} + inject "set configs.config.server-config.admin-service.das-config.dynamic-reload-enabled=true" + inject "set configs.config.server-config.admin-service.das-config.autodeploy-enabled=true" fi # 4. Add the commands to the existing preboot file, but insert BEFORE deployment -TMP_PREBOOT=$(mktemp) -cat "${DV_PREBOOT}" "${PREBOOT_COMMANDS}" > "${TMP_PREBOOT}" -mv "${TMP_PREBOOT}" "${PREBOOT_COMMANDS}" -echo "DEBUG: preboot contains the following commands:" +echo "DEBUG: preboot contains now the following commands:" +echo "--------------------------------------------------" +cat "${PREBOOT_COMMANDS_FILE}" echo "--------------------------------------------------" -cat "${PREBOOT_COMMANDS}" -echo "--------------------------------------------------" \ No newline at end of file diff --git a/modules/container-base/src/main/docker/scripts/startInForeground.sh b/modules/container-base/src/main/docker/scripts/startInForeground.sh index 4843f6ae055..c19bd66b8be 100644 --- a/modules/container-base/src/main/docker/scripts/startInForeground.sh +++ b/modules/container-base/src/main/docker/scripts/startInForeground.sh @@ -34,8 +34,8 @@ # Check required variables are set if [ -z "$ADMIN_USER" ]; then echo "Variable ADMIN_USER is not set."; exit 1; fi if [ -z "$PASSWORD_FILE" ]; then echo "Variable PASSWORD_FILE is not set."; exit 1; fi -if [ -z "$PREBOOT_COMMANDS" ]; then echo "Variable PREBOOT_COMMANDS is not set."; exit 1; fi -if [ -z "$POSTBOOT_COMMANDS" ]; then echo "Variable POSTBOOT_COMMANDS is not set."; exit 1; fi +if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; 
exit 1; fi +if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi if [ -z "$DOMAIN_NAME" ]; then echo "Variable DOMAIN_NAME is not set."; exit 1; fi # Check if dumps are enabled - add arg to JVM_ARGS in this case @@ -50,12 +50,12 @@ fi # - remove lines before and after the command line and squash commands on a single line # Create pre and post boot command files if they don't exist -touch "$POSTBOOT_COMMANDS" -touch "$PREBOOT_COMMANDS" +touch "$POSTBOOT_COMMANDS_FILE" || exit 1 +touch "$PREBOOT_COMMANDS_FILE" || exit 1 # shellcheck disable=SC2068 # -- Using $@ is necessary here as asadmin cannot deal with options enclosed in ""! -OUTPUT=$("${PAYARA_DIR}"/bin/asadmin --user="${ADMIN_USER}" --passwordfile="${PASSWORD_FILE}" start-domain --dry-run --prebootcommandfile="${PREBOOT_COMMANDS}" --postbootcommandfile="${POSTBOOT_COMMANDS}" $@ "$DOMAIN_NAME") +OUTPUT=$("${PAYARA_DIR}"/bin/asadmin --user="${ADMIN_USER}" --passwordfile="$PASSWORD_FILE" start-domain --dry-run --prebootcommandfile="${PREBOOT_COMMANDS_FILE}" --postbootcommandfile="${POSTBOOT_COMMANDS_FILE}" $@ "$DOMAIN_NAME") STATUS=$? if [ "$STATUS" -ne 0 ] then diff --git a/src/main/docker/scripts/init_2_configure.sh b/src/main/docker/scripts/init_2_configure.sh index b31cfac37b7..d2da393f22c 100755 --- a/src/main/docker/scripts/init_2_configure.sh +++ b/src/main/docker/scripts/init_2_configure.sh @@ -26,13 +26,28 @@ fi DV_POSTBOOT=${PAYARA_DIR}/dataverse_postboot echo "# Dataverse postboot configuration for Payara" > "${DV_POSTBOOT}" -# 2. Domain-spaced resources (JDBC, JMS, ...) -# TODO: This is ugly and dirty. It should be replaced with resources from -# EE 8 code annotations or at least glassfish-resources.xml -# NOTE: postboot commands is not multi-line capable, thus spaghetti needed. 
+# Check prerequisites for commands handling +if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi +# Test if postboot file is writeable for us, exit otherwise +touch "$POSTBOOT_COMMANDS_FILE" || exit 1 +# Copy and split the postboot contents to manipulate them +EXISTING_DEPLOY_COMMANDS=$(mktemp) +NEW_POSTBOOT_COMMANDS=$(mktemp) +grep -e "^deploy " "$POSTBOOT_COMMANDS_FILE" > "$EXISTING_DEPLOY_COMMANDS" || true +grep -v -e "^deploy" "$POSTBOOT_COMMANDS_FILE" > "$NEW_POSTBOOT_COMMANDS" || true -# 3. Domain based configuration options -# Set Dataverse environment variables +function inject() { + if [ -z "$1" ]; then echo "No line specified"; exit 1; fi + # If the line is not yet in the file, try to add it + if ! grep -q "$1" "$NEW_POSTBOOT_COMMANDS"; then + # Check if the line is still not in the file when splitting at the first = + if ! grep -q "$(echo "$1" | cut -f1 -d"=")" "$NEW_POSTBOOT_COMMANDS"; then + echo "$1" >> "$NEW_POSTBOOT_COMMANDS" + fi + fi +} + +# Domain based configuration options - set from Dataverse environment variables echo "INFO: Defining system properties for Dataverse configuration options." #env | grep -Ee "^(dataverse|doi)_" | sort -fd env -0 | grep -z -Ee "^(dataverse|doi)_" | while IFS='=' read -r -d '' k v; do @@ -47,14 +62,12 @@ env -0 | grep -z -Ee "^(dataverse|doi)_" | while IFS='=' read -r -d '' k v; do v=$(echo "${v}" | sed -e 's/:/\\\:/g') echo "DEBUG: Handling ${KEY}=${v}." - echo "create-system-properties ${KEY}=${v}" >> "${DV_POSTBOOT}" + inject "create-system-properties ${KEY}=${v}" done # 4. 
Add the commands to the existing postboot file, but insert BEFORE deployment -TMPFILE=$(mktemp) -cat "${DV_POSTBOOT}" "${POSTBOOT_COMMANDS}" > "${TMPFILE}" && mv "${TMPFILE}" "${POSTBOOT_COMMANDS}" +cat "$NEW_POSTBOOT_COMMANDS" "$EXISTING_DEPLOY_COMMANDS" > "${POSTBOOT_COMMANDS_FILE}" echo "DEBUG: postboot contains the following commands:" echo "--------------------------------------------------" -cat "${POSTBOOT_COMMANDS}" +cat "${POSTBOOT_COMMANDS_FILE}" echo "--------------------------------------------------" - From bd0832ac58c820155398e5ee5337c828c2030fba Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 8 Jul 2024 15:20:19 +0200 Subject: [PATCH 47/63] refactor(ct): move Dataverse specific tweaks for reload from base to application Setting these env vars when reloading are highly application specific and shall not reside in the base image. --- .../docker/scripts/init_1_generate_devmode_commands.sh | 2 -- src/main/docker/scripts/init_2_configure.sh | 9 ++++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh index 77b37abac30..608113d1cf7 100644 --- a/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh +++ b/modules/container-base/src/main/docker/scripts/init_1_generate_devmode_commands.sh @@ -65,8 +65,6 @@ fi # 3. Enable hot reload if [ "${ENABLE_RELOAD}" = "1" ]; then echo "Enabling hot reload of deployments." 
- export DATAVERSE_JSF_PROJECT_STAGE=${DATAVERSE_JSF_PROJECT_STAGE:-"Development"} - export DATAVERSE_JSF_REFRESH_PERIOD=${DATAVERSE_JSF_REFRESH_PERIOD:-"0"} inject "set configs.config.server-config.admin-service.das-config.dynamic-reload-enabled=true" inject "set configs.config.server-config.admin-service.das-config.autodeploy-enabled=true" fi diff --git a/src/main/docker/scripts/init_2_configure.sh b/src/main/docker/scripts/init_2_configure.sh index d2da393f22c..5c1075f01f3 100755 --- a/src/main/docker/scripts/init_2_configure.sh +++ b/src/main/docker/scripts/init_2_configure.sh @@ -22,9 +22,12 @@ if [ "${dataverse_files_storage__driver__id}" = "local" ]; then export dataverse_files_local_directory="${dataverse_files_local_directory:-${STORAGE_DIR}/store}" fi -# 0. Define postboot commands file to be read by Payara and clear it -DV_POSTBOOT=${PAYARA_DIR}/dataverse_postboot -echo "# Dataverse postboot configuration for Payara" > "${DV_POSTBOOT}" +# If reload is enable via ENABLE_RELOAD=1, set according Jakarta Faces options +ENABLE_RELOAD=${ENABLE_RELOAD:-0} +if [ "${ENABLE_RELOAD}" = "1" ]; then + export DATAVERSE_JSF_PROJECT_STAGE=${DATAVERSE_JSF_PROJECT_STAGE:-"Development"} + export DATAVERSE_JSF_REFRESH_PERIOD=${DATAVERSE_JSF_REFRESH_PERIOD:-"0"} +fi # Check prerequisites for commands handling if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi From db802f8f83f0fcd32e627dc7612baaf39fef9af4 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 8 Jul 2024 15:26:35 +0200 Subject: [PATCH 48/63] feat(ct): base image infrastructure to set passwords - Provide env vars for admin, linux user and domain master password. - These are set to the publicly known values, good enough for development or demo purposes. - For production purposes, these variables will be used to change passwords at run/boottime of the container. - As of this commit, do _not_ leave any password lying around in files, which could be exploited. 
--- .../container-base/src/main/docker/Dockerfile | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index d4e830f3ab3..df13777e94e 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -41,11 +41,16 @@ ENV PAYARA_DIR="${HOME_DIR}/appserver" \ STORAGE_DIR="/dv" \ SECRETS_DIR="/secrets" \ DUMPS_DIR="/dumps" \ - PASSWORD_FILE="${HOME_DIR}/passwordFile" \ ADMIN_USER="admin" \ + # This is a public default, easy to change via this env var at runtime ADMIN_PASSWORD="admin" \ DOMAIN_NAME="domain1" \ - PAYARA_ARGS="" + # This is the public default as per https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password + # Can be changed at runtime via this env var + DOMAIN_MASTER_PASSWORD="changeit" \ + PAYARA_ARGS="" \ + # This is a public default and can be changed at runtime using this env var + LINUX_USER_PASSWORD="payara" ENV PATH="${PATH}:${PAYARA_DIR}/bin:${SCRIPT_DIR}" \ DOMAIN_DIR="${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}" \ DEPLOY_PROPS="" \ @@ -84,7 +89,7 @@ RUN <> /tmp/password-change-file.txt - echo "AS_ADMIN_PASSWORD=${ADMIN_PASSWORD}" >> ${PASSWORD_FILE} asadmin --user=${ADMIN_USER} --passwordfile=/tmp/password-change-file.txt change-admin-password --domain_name=${DOMAIN_NAME} + + # Prepare shorthand + PASSWORD_FILE=$(mktemp) + echo "AS_ADMIN_PASSWORD=${ADMIN_PASSWORD}" >> ${PASSWORD_FILE} + ASADMIN="${PAYARA_DIR}/bin/asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE}" + # Start domain for configuration ${ASADMIN} start-domain ${DOMAIN_NAME} # Allow access to admin with password only @@ -215,6 +222,7 @@ RUN < Date: Mon, 8 Jul 2024 15:28:48 +0200 Subject: [PATCH 49/63] feat(ct): introduce runtime password changing A new init 
script allows to set passwords at boot time of the container. If the passwords are not changed, there will be warnings logged about the default in use. Slightly modifying the startInForeground.sh script to avoid keeping password files or sensitive passwords around after starting the server. --- .../docker/scripts/init_1_change_passwords.sh | 43 +++++++++++++++++++ .../main/docker/scripts/startInForeground.sh | 30 +++++++------ 2 files changed, 59 insertions(+), 14 deletions(-) create mode 100644 modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh diff --git a/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh b/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh new file mode 100644 index 00000000000..07dd90a1b98 --- /dev/null +++ b/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh @@ -0,0 +1,43 @@ +#!/bin/bash +set -euo pipefail + +# NOTE: ALL PASSWORD ENV VARS WILL BE SCRAMBLED IN startInForeground.sh FOR SECURITY! +# This is to avoid possible attack vectors where someone could extract the sensitive information +# from within an env var dump inside an application! + +# Someone set the env var for passwords - get the new password in. Otherwise print warning. +# https://docs.openshift.com/container-platform/4.14/openshift_images/create-images.html#avoid-default-passwords +if [ "$LINUX_USER_PASSWORD" != "payara" ]; then + echo -e "payara\n$LINUX_USER_PASSWORD\n$LINUX_USER_PASSWORD" | passwd +else + echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR USER payara! ('payara')" + echo " To change the password, set the LINUX_USER_PASSWORD env var." 
+fi + +# Change the domain admin password if necessary +if [ "$ADMIN_PASSWORD" != "admin" ]; then + PASSWORD_FILE=$(mktemp) + echo "AS_ADMIN_PASSWORD=admin" > "$PASSWORD_FILE" + echo "AS_ADMIN_NEWPASSWORD=${ADMIN_PASSWORD}" >> "$PASSWORD_FILE" + asadmin --user="${ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-admin-password --domain_name="${DOMAIN_NAME}" + rm "$PASSWORD_FILE" +else + echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR PAYARA ASADMIN! ('admin')" + echo " To change the password, set the ADMIN_PASSWORD env var." +fi + +# Change the domain master password if necessary +# > The master password is not tied to a user account, and it is not used for authentication. +# > Instead, Payara Server strictly uses the master password to ONLY encrypt the keystore and truststore used to store keys and certificates for the DAS and instances usage. +# It will be requested when booting the application server! +# https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password +if [ "$DOMAIN_MASTER_PASSWORD" != "changeit" ]; then + PASSWORD_FILE=$(mktemp) + echo "AS_ADMIN_MASTERPASSWORD=changeit" >> "$PASSWORD_FILE" + echo "AS_ADMIN_NEWMASTERPASSWORD=${DOMAIN_MASTER_PASSWORD}" >> "$PASSWORD_FILE" + asadmin --user="${ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-master-password --savemasterpassword false "${DOMAIN_NAME}" + rm "$PASSWORD_FILE" +else + echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT MASTER PASSWORD FOR THE DOMAIN! ('changeit')" + echo " To change the password, set the DOMAIN_MASTER_PASSWORD env var." 
+fi diff --git a/modules/container-base/src/main/docker/scripts/startInForeground.sh b/modules/container-base/src/main/docker/scripts/startInForeground.sh index c19bd66b8be..262cadd9aca 100644 --- a/modules/container-base/src/main/docker/scripts/startInForeground.sh +++ b/modules/container-base/src/main/docker/scripts/startInForeground.sh @@ -33,7 +33,8 @@ # Check required variables are set if [ -z "$ADMIN_USER" ]; then echo "Variable ADMIN_USER is not set."; exit 1; fi -if [ -z "$PASSWORD_FILE" ]; then echo "Variable PASSWORD_FILE is not set."; exit 1; fi +if [ -z "$ADMIN_PASSWORD" ]; then echo "Variable ADMIN_PASSWORD is not set."; exit 1; fi +if [ -z "$DOMAIN_MASTER_PASSWORD" ]; then echo "Variable DOMAIN_MASTER_PASSWORD is not set."; exit 1; fi if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; exit 1; fi if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi if [ -z "$DOMAIN_NAME" ]; then echo "Variable DOMAIN_NAME is not set."; exit 1; fi @@ -43,6 +44,13 @@ if [ -n "${ENABLE_DUMPS}" ] && [ "${ENABLE_DUMPS}" = "1" ]; then JVM_ARGS="${JVM_DUMPS_ARG} ${JVM_ARGS}" fi +# For safety reasons, do no longer expose the passwords - malicious code could extract it! 
+# (We need to save the master password for booting the server though) +MASTER_PASSWORD="${DOMAIN_MASTER_PASSWORD}" +export LINUX_USER_PASSWORD="have-some-scrambled-eggs" +export ADMIN_PASSWORD="have-some-scrambled-eggs" +export DOMAIN_MASTER_PASSWORD="have-some-scrambled-eggs" + # The following command gets the command line to be executed by start-domain # - print the command line to the server with --dry-run, each argument on a separate line # - remove -read-string argument @@ -53,16 +61,22 @@ fi touch "$POSTBOOT_COMMANDS_FILE" || exit 1 touch "$PREBOOT_COMMANDS_FILE" || exit 1 +# This workaround is necessary due to limitations of asadmin +PASSWORD_FILE=$(mktemp) +echo "AS_ADMIN_MASTERPASSWORD=$MASTER_PASSWORD" > "$PASSWORD_FILE" # shellcheck disable=SC2068 # -- Using $@ is necessary here as asadmin cannot deal with options enclosed in ""! OUTPUT=$("${PAYARA_DIR}"/bin/asadmin --user="${ADMIN_USER}" --passwordfile="$PASSWORD_FILE" start-domain --dry-run --prebootcommandfile="${PREBOOT_COMMANDS_FILE}" --postbootcommandfile="${POSTBOOT_COMMANDS_FILE}" $@ "$DOMAIN_NAME") STATUS=$? +rm "$PASSWORD_FILE" if [ "$STATUS" -ne 0 ] then echo ERROR: "$OUTPUT" >&2 exit 1 fi +echo "Booting now..." 
+ COMMAND=$(echo "$OUTPUT"\ | sed -n -e '2,/^$/p'\ | sed "s|glassfish.jar|glassfish.jar $JVM_ARGS |g") @@ -72,18 +86,6 @@ echo "$COMMAND" | tr ' ' '\n' echo # Run the server in foreground - read master password from variable or file or use the default "changeit" password - -set +x -if test "$AS_ADMIN_MASTERPASSWORD"x = x -a -f "$PASSWORD_FILE" - then - # shellcheck disable=SC1090 - source "$PASSWORD_FILE" -fi -if test "$AS_ADMIN_MASTERPASSWORD"x = x - then - AS_ADMIN_MASTERPASSWORD=changeit -fi -echo "AS_ADMIN_MASTERPASSWORD=$AS_ADMIN_MASTERPASSWORD" > /tmp/masterpwdfile # shellcheck disable=SC2086 # -- Unquoted exec var is necessary, as otherwise things get escaped that may not be escaped (parameters for Java) -exec ${COMMAND} < /tmp/masterpwdfile +exec ${COMMAND} < <(echo "AS_ADMIN_MASTERPASSWORD=$MASTER_PASSWORD") From 07ecb4d62ecca139b21ed4eb0a84ef4dbbd96af4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 9 Jul 2024 17:04:45 -0400 Subject: [PATCH 50/63] improve "making releases" doc (post 6.3) #10675 - better intro - get issue numbers from snippet filename - add milestones to issues and PRs - fix instructions for "target" for tag/release --- .../source/developers/making-releases.rst | 28 ++++++++++++------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/doc/sphinx-guides/source/developers/making-releases.rst b/doc/sphinx-guides/source/developers/making-releases.rst index e7a59910e56..e436ba9e9d2 100755 --- a/doc/sphinx-guides/source/developers/making-releases.rst +++ b/doc/sphinx-guides/source/developers/making-releases.rst @@ -8,12 +8,12 @@ Making Releases Introduction ------------ -Note: See :doc:`making-library-releases` for how to publish our libraries to Maven Central. - -See :doc:`version-control` for background on our branching strategy. +This document is about releasing the main Dataverse app (https://github.com/IQSS/dataverse). See :doc:`making-library-releases` for how to release our various libraries. 
Other projects have their own release documentation. The steps below describe making both regular releases and hotfix releases. +Below you'll see branches like "develop" and "master" mentioned. For more on our branching strategy, see :doc:`version-control`. + .. _write-release-notes: Write Release Notes ------------------- Developers express the need for an addition to release notes by creating a "release The task at or near release time is to collect these snippets into a single file. - Create an issue in GitHub to track the work of creating release notes for the upcoming release. -- Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above. -- Delete the release note snippets as the content is added to the main release notes file. -- Include instructions to describe the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure. -- Take the release notes .md through the regular Code Review and QA process. +- Create a branch, add a .md file for the release (ex. 5.10.1 Release Notes) in ``/doc/release-notes`` and write the release notes, making sure to pull content from the release note snippets mentioned above. Snippets may not include any issue number or pull request number in the text so be sure to copy the number from the filename of the snippet into the final release note. +- Delete (``git rm``) the release note snippets as the content is added to the main release notes file. +- Include instructions describing the steps required to upgrade the application from the previous version. These must be customized for release numbers and special circumstances such as changes to metadata blocks and infrastructure. +- Take the release notes .md through the regular Code Review and QA process.
That is, make a pull request. Create a GitHub Issue and Branch for the Release ------------------------------------------------ @@ -70,6 +70,13 @@ Once important tests have passed (compile, unit tests, etc.), merge the pull req If this is a hotfix release, skip this whole "merge develop to master" step (the "develop" branch is not involved until later). +Add Milestone to Pull Requests and Issues +----------------------------------------- + +Often someone is making sure that the proper milestone (e.g. 5.10.1) is being applied to pull requests and issues, but sometimes this falls between the cracks. + +Check for merged pull requests that have no milestone by going to https://github.com/IQSS/dataverse/pulls and entering `is:pr is:merged no:milestone `_ as a query. If you find any, add the milestone to the pull request and any issues it closes. This includes the "merge develop into master" pull request above. + (Optional) Test Docker Images ----------------------------- @@ -106,7 +113,7 @@ Create a Draft Release on GitHub Go to https://github.com/IQSS/dataverse/releases/new to start creating a draft release. - Under "Choose a tag" you will be creating a new tag. Have it start with a "v" such as ``v5.10.1``. Click "Create new tag on publish". -- Under "Target" go to "Recent Commits" and select the merge commit from when you merged ``develop`` into ``master`` above. This commit will appear in ``/api/info/version`` from a running installation. +- Under "Target", choose "master". This commit will appear in ``/api/info/version`` from a running installation. - Under "Release title" use the same name as the tag such as ``v5.10.1``. - In the description, copy and paste the content from the release notes .md file created in the "Write Release Notes" steps above. - Click "Save draft" because we do not want to publish the release yet. 
@@ -153,6 +160,7 @@ ssh into the dataverse-internal server and do the following: - ``mkdir target`` - ``cp /tmp/dataverse-5.10.1.war target`` - ``cd scripts/installer`` +- ``make clean`` - ``make`` A zip file called ``dvinstall.zip`` should be produced. @@ -175,7 +183,7 @@ Upload the following artifacts to the draft release you created: Deploy on Demo -------------- -Now that you have the release ready to go, give it one final test by deploying it on https://demo.dataverse.org . Note that this is also an opportunity to re-test the upgrade checklist as described in the release note. +Now that you have the release ready to go, consider giving it one final test by deploying it on https://demo.dataverse.org. Note that this is also an opportunity to re-test the upgrade checklist as described in the release note. Publish the Release ------------------- @@ -194,7 +202,7 @@ ssh into the guides server and update the symlink to point to the latest release cd /var/www/html/en ln -s 5.10.1 latest - +This step could be done before publishing the release if you'd like to double check that links in the release notes work. 
Close Milestone on GitHub and Create a New One ---------------------------------------------- From bfcebb879e8e7d343464e73447084f22662591a4 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Wed, 10 Jul 2024 12:05:17 -0400 Subject: [PATCH 51/63] tweak docs and add release note #10606 --- .../10606-dataverse-in-windows-wsl.md | 1 + .../source/developers/windows.rst | 45 +++++++++++-------- 2 files changed, 27 insertions(+), 19 deletions(-) create mode 100644 doc/release-notes/10606-dataverse-in-windows-wsl.md diff --git a/doc/release-notes/10606-dataverse-in-windows-wsl.md b/doc/release-notes/10606-dataverse-in-windows-wsl.md new file mode 100644 index 00000000000..9501d6e3090 --- /dev/null +++ b/doc/release-notes/10606-dataverse-in-windows-wsl.md @@ -0,0 +1 @@ +New instructions have been added for developers on Windows trying to run a Dataverse development environment using Windows Subsystem for Linux (WSL). See https://dataverse-guide--10608.org.readthedocs.build/en/10608/developers/windows.html #10606 and #10608. diff --git a/doc/sphinx-guides/source/developers/windows.rst b/doc/sphinx-guides/source/developers/windows.rst index 54a30e95aef..699b64c1e1f 100755 --- a/doc/sphinx-guides/source/developers/windows.rst +++ b/doc/sphinx-guides/source/developers/windows.rst @@ -12,19 +12,23 @@ Running Dataverse in Docker on Windows See the `post `_ by Akio Sone for additional details, but please observe the following: -- In git, the line-ending setting should be set to always LF (line feed, ``core.autocrlf=input``) - You must have jq installed: https://jqlang.github.io/jq/download/ +- In git, the line-ending setting should be set to always LF (line feed, ``core.autocrlf=input``). Update: This should have been fixed by https://github.com/IQSS/dataverse/pull/10092. -One the above is all set you can move on to :doc:`/container/dev-usage` in the Container Guide. +Once the above is all set you can move on to :doc:`/container/dev-usage` in the Container Guide. 
+ +Generally speaking, if you're having trouble running a Dataverse dev environment in Docker on Windows, you are highly encouraged to post about it in the #containers channel on Zulip (https://chat.dataverse.org) and join a Containerization Working Group meeting (https://ct.gdcc.io). See also :doc:`/container/intro` in the Container Guide. Running Dataverse in Windows WSL -------------------------------- -It is possible to run Dataverse in Windows 10 and 11 through WSL (Windows subsystem for Linux) +It is possible to run Dataverse in Windows 10 and 11 through WSL (Windows Subsystem for Linux). + +Please note: these instructions have not been extensively tested. If you find any problems, please open an issue at https://github.com/IQSS/dataverse/issues. Install WSL ~~~~~~~~~~~ -If you have Docker already installed, you should already have WSL installed, otherwise open PowerShell and run: +If you have Docker already installed, you should already have WSL installed. Otherwise open PowerShell and run: .. code-block:: powershell @@ -45,13 +49,13 @@ Choose the distribution you would like. Then run the following command. These in wsl --install -d You will be asked to create a Linux user. -After the installation of Linux is complete, check that you have an internet connection: +After the installation of Linux is complete, check that you have an Internet connection: .. code-block:: bash ping www.google.com -If you do not have an internet connection try adding it in ``/etc/wsl.conf`` +If you do not have an Internet connection, try adding it in ``/etc/wsl.conf`` .. code-block:: bash @@ -75,32 +79,35 @@ Install Dataverse ~~~~~~~~~~~~~~~~~ Now you can install Dataverse in WSL following the instructions for :doc:`classic-dev-env` -At the end check that you have ``-Ddataverse.pid.default-provider=fake`` in jvm-options. +At the end, check that you have ``-Ddataverse.pid.default-provider=fake`` in jvm-options. 
Now you can access Dataverse in your Windows browser (Edge, Chrome, etc.): - - http://localhost:8080 - - username: dataverseAdmin - - password: admin +- http://localhost:8080 +- username: dataverseAdmin +- password: admin IDE for Dataverse in Windows ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Files in WSL are accessible from Windows for editing using ``\\wsl.localhost`` or ``\\wsl$`` path. Windows files are accessible under Linux in the ``/mnt/c/`` directory. Therefore one can use one's favorite editor or IDE to edit Dataverse project files. Then one can build using ``mvn`` in WSL and deploy manually in WSL using ``asadmin``. -It is still though possible to use a full-strength IDE. The following instructions are for Intellij users. +It is still though possible to use a full-strength IDE. The following instructions are for IntelliJ users. - Install Intelij in Windows. -You can open the project through ``\\wsl.localhost`` and navigate to Dataverse project. -You can try to build the project in Intellij. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by firewall).`` In that case you can try -to disable WSL Hyperviser from firewall. -After that you should be able to build the project in Intellij. -It seems that at present it is impossible to deploy the Glassfish application in Intellij. You can try to add Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domain1, but it may fail since Intellij confuses the Windows and Linux paths. +You can open the project through ``\\wsl.localhost`` and navigate to the Dataverse project. +You can try to build the project in IntelliJ. You may get a message ``Cannot establish network connection from WSL to Windows host (could be blocked by the firewall).`` In that case you can try +to disable WSL Hyperviser from the firewall. 
+After that you should be able to build the project in IntelliJ. +It seems that at present it is impossible to deploy the Glassfish application in IntelliJ. You can try to add a Glassfish plugin through Settings->Plugins and in Run->Edit Configurations configure Application Server from WSL ``/usr/localhost/payara6`` with URL http://localhost:8080 and Server Domain as domain1, but it may fail since IntelliJ confuses the Windows and Linux paths. -To use the full strength of Intelij with build, deployment and debugging, one will need to use Intelij ``Remote development``. Close all the projects in Intellij and go to ``Remote development->WSL`` and press ``New Project``. In WSL instance choose your Linux distribution and press ``Next``. In ``Prpject Directory`` navigate to WSL Dataverse project. Then press ``Download IDE and Connect``. This will install Intellij in WSL in ``~/.cache/JetBrains/``. Now in Intellij you should see your project opened in a new Intellij window. After adding Glassfish plugin and editing configuration you should be able to build the project and run the project. +To use the full strength of Intelij with build, deployment and debugging, one will need to use Intelij ``Remote development``. Close all the projects in IntelliJ and go to ``Remote development->WSL`` and press ``New Project``. In WSL instance choose your Linux distribution and press ``Next``. In ``Project Directory`` navigate to WSL Dataverse project. Then press ``Download IDE and Connect``. This will install IntelliJ in WSL in ``~/.cache/JetBrains/``. Now in IntelliJ you should see your project opened in a new IntelliJ window. After adding the Glassfish plugin and editing your configuration you should be able to build the project and run the project. -PgAdmin in Windows for Dataverse +pgAdmin in Windows for Dataverse ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can access The Dataverse database from Windows. 
Install pgAdmin https://www.pgadmin.org/download/pgadmin-4-windows/ In pgAdmin register a server using 127.0.0.1 with port 5432, database dvndb and dvnapp as username with secret password. Now you will be able to access and update Dataverse database. +You can access the Dataverse database from Windows. + +Install pgAdmin from https://www.pgadmin.org/download/pgadmin-4-windows/ +In pgAdmin, register a server using 127.0.0.1 with port 5432, database dvndb and dvnapp as username with secret password. Now you will be able to access and update the Dataverse database. From 5ba74e8a8e75fecec7f4b477abb920453212c53e Mon Sep 17 00:00:00 2001 From: Ben Companjen Date: Fri, 12 Jul 2024 16:28:28 +0200 Subject: [PATCH 52/63] Compare classes, not classnames in tests (#9014) * Use instanceof to compare types, not class names * Use instanceof in FileDataProviderFactoryTest Instead of comparing class names, the tests assert that `result instanceof Class` (for each appropriate class). --- src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java | 2 +- .../util/bagit/data/FileDataProviderFactoryTest.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java index 4df6c89411d..518431bfa2d 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SwordIT.java @@ -462,7 +462,7 @@ public void testCreateAndDeleteDatasetInRoot() { assertNull(attemptToGetFileId); } catch (Exception ex) { System.out.println("We expect an exception here because we can no longer find the file because deleted it: " + ex); - assertTrue(ex.getClass().getName().equals(ArrayIndexOutOfBoundsException.class.getName())); + assertTrue(ex instanceof ArrayIndexOutOfBoundsException); } String newTitle = "A New Hope"; diff --git a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java 
b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java index f43a0c78284..9fac4d42bcd 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/bagit/data/FileDataProviderFactoryTest.java @@ -23,21 +23,21 @@ public class FileDataProviderFactoryTest { public void should_return_FolderDataProvider_when_parameter_is_path() { FileDataProvider result = target.getFileDataProvider(Path.of(UUID.randomUUID().toString())); - MatcherAssert.assertThat(result.getClass().getName(), Matchers.is(FolderDataProvider.class.getName())); + MatcherAssert.assertThat("should return FolderDataProvider when parameter is path", result instanceof FolderDataProvider); } @Test public void should_return_ZipFileDataProvider_when_parameter_is_file() throws IOException { FileDataProvider result = target.getFileDataProvider(Path.of(FIXTURE_DIRECTORY, "FileDataProviderFactoryTest.zip").toFile()); - MatcherAssert.assertThat(result.getClass().getName(), Matchers.is(ZipFileDataProvider.class.getName())); + MatcherAssert.assertThat("should return ZipFileDataProvider when parameter is file", result instanceof ZipFileDataProvider); } @Test public void should_return_DataFileDataProvider_when_parameter_is_datafiles() { FileDataProvider result = target.getFileDataProvider("test-name", Collections.emptyList()); - MatcherAssert.assertThat(result.getClass().getName(), Matchers.is(DataFileDataProvider.class.getName())); + MatcherAssert.assertThat("should return DataFileDataProvider when parameter is datafiles", result instanceof DataFileDataProvider); } } \ No newline at end of file From 119d9eb42ef35565ec6cdfcb8f31d9465b3bf999 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 Apr 2024 14:14:56 +0200 Subject: [PATCH 53/63] chore(ct): replace wait-for with wait4x Aligning configbaker and base image with same tool. wait4x has much more features to wait for different services. 
--- doc/sphinx-guides/source/container/base-image.rst | 2 +- modules/container-base/src/main/docker/Dockerfile | 12 ++++++------ .../docker/scripts/init_3_wait_dataverse_db_host.sh | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/doc/sphinx-guides/source/container/base-image.rst b/doc/sphinx-guides/source/container/base-image.rst index c41250d48c5..29c357b91f6 100644 --- a/doc/sphinx-guides/source/container/base-image.rst +++ b/doc/sphinx-guides/source/container/base-image.rst @@ -46,7 +46,7 @@ The base image provides: - CLI tools necessary to run Dataverse (i. e. ``curl`` or ``jq`` - see also :doc:`../installation/prerequisites` in Installation Guide) - Linux tools for analysis, monitoring and so on - `Jattach `__ (attach to running JVM) -- `wait-for `__ (tool to "wait for" a service to be available) +- `wait4x `__ (tool to "wait for" a service to be available) - `dumb-init `__ (see :ref:`below ` for details) This image is created as a "multi-arch image", see :ref:`below `. 
diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index df13777e94e..eba765b41d4 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -99,8 +99,7 @@ EOF ARG JATTACH_VERSION="v2.1" ARG JATTACH_CHECKSUM="07885fdc782e02e7302c6d190f54c3930afa10a38140365adf54076ec1086a8e" -ARG WAIT_FOR_VERSION="v2.2.3" -ARG WAIT_FOR_CHECKSUM="70271181be69cd2c7265b2746f97fccfd7e8aa1059894138a775369c23589ff4" +ARG WAIT4X_VERSION="v2.14.0" ARG PKGS="jq imagemagick curl unzip wget acl lsof procps netcat-openbsd dumb-init" # Installing the packages in an extra container layer for better caching @@ -115,10 +114,11 @@ RUN < Date: Mon, 15 Apr 2024 14:17:55 +0200 Subject: [PATCH 54/63] build(ct): make target architecture available in base image build As per https://docs.docker.com/reference/dockerfile/#automatic-platform-args-in-the-global-scope BuildKit / buildx will expose the target architecture. It requires adding an ARG in the Dockerfile to inject the data. 
--- modules/container-base/src/main/docker/Dockerfile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index eba765b41d4..794dc4f1d59 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -74,6 +74,9 @@ ENV PATH="${PATH}:${PAYARA_DIR}/bin:${SCRIPT_DIR}" \ ### PART 1: SYSTEM ### ARG UID=1000 ARG GID=1000 +# Auto-populated by BuildKit / buildx +#ARG TARGETARCH="amd64" +ARG TARGETARCH USER root WORKDIR / SHELL ["/bin/bash", "-euo", "pipefail", "-c"] From 904229f33dde03df613e3c31887ef65666188900 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 Apr 2024 14:20:18 +0200 Subject: [PATCH 55/63] chore(ct): upgrade base image with jattach v2.2 jattach binary is now available for ARM64 and AMD64, but requires special handling with download URLs and checksums. --- .../container-base/src/main/docker/Dockerfile | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index 794dc4f1d59..f9360c13bb6 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -100,8 +100,9 @@ RUN < Date: Mon, 15 Jul 2024 14:56:56 +0200 Subject: [PATCH 56/63] refactor(ct): change security related variable names for clarity Variable names related to user, password, and domain in Dockerfile and scripts have been modified for better clarity and consistency. This includes changing the names of admin user and password, domain master password, and Linux password and user. 
--- .../container-base/src/main/docker/Dockerfile | 35 ++++++++++--------- .../docker/scripts/init_1_change_passwords.sh | 28 +++++++-------- .../main/docker/scripts/startInForeground.sh | 16 ++++----- 3 files changed, 41 insertions(+), 38 deletions(-) diff --git a/modules/container-base/src/main/docker/Dockerfile b/modules/container-base/src/main/docker/Dockerfile index f9360c13bb6..29078e6896c 100644 --- a/modules/container-base/src/main/docker/Dockerfile +++ b/modules/container-base/src/main/docker/Dockerfile @@ -41,16 +41,18 @@ ENV PAYARA_DIR="${HOME_DIR}/appserver" \ STORAGE_DIR="/dv" \ SECRETS_DIR="/secrets" \ DUMPS_DIR="/dumps" \ - ADMIN_USER="admin" \ + PAYARA_ADMIN_USER="admin" \ # This is a public default, easy to change via this env var at runtime - ADMIN_PASSWORD="admin" \ + PAYARA_ADMIN_PASSWORD="admin" \ DOMAIN_NAME="domain1" \ # This is the public default as per https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password # Can be changed at runtime via this env var - DOMAIN_MASTER_PASSWORD="changeit" \ + DOMAIN_PASSWORD="changeit" \ PAYARA_ARGS="" \ + LINUX_USER="payara" \ + LINUX_GROUP="payara" \ # This is a public default and can be changed at runtime using this env var - LINUX_USER_PASSWORD="payara" + LINUX_PASSWORD="payara" ENV PATH="${PATH}:${PAYARA_DIR}/bin:${SCRIPT_DIR}" \ DOMAIN_DIR="${PAYARA_DIR}/glassfish/domains/${DOMAIN_NAME}" \ DEPLOY_PROPS="" \ @@ -77,6 +79,7 @@ ARG GID=1000 # Auto-populated by BuildKit / buildx #ARG TARGETARCH="amd64" ARG TARGETARCH + USER root WORKDIR / SHELL ["/bin/bash", "-euo", "pipefail", "-c"] @@ -90,13 +93,13 @@ RUN <> /tmp/password-change-file.txt - asadmin --user=${ADMIN_USER} --passwordfile=/tmp/password-change-file.txt change-admin-password --domain_name=${DOMAIN_NAME} + echo "AS_ADMIN_NEWPASSWORD=${PAYARA_ADMIN_PASSWORD}" >> /tmp/password-change-file.txt + asadmin 
--user=${PAYARA_ADMIN_USER} --passwordfile=/tmp/password-change-file.txt change-admin-password --domain_name=${DOMAIN_NAME} # Prepare shorthand PASSWORD_FILE=$(mktemp) - echo "AS_ADMIN_PASSWORD=${ADMIN_PASSWORD}" >> ${PASSWORD_FILE} - ASADMIN="${PAYARA_DIR}/bin/asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE}" + echo "AS_ADMIN_PASSWORD=${PAYARA_ADMIN_PASSWORD}" >> ${PASSWORD_FILE} + ASADMIN="${PAYARA_DIR}/bin/asadmin --user=${PAYARA_ADMIN_USER} --passwordfile=${PASSWORD_FILE}" # Start domain for configuration ${ASADMIN} start-domain ${DOMAIN_NAME} @@ -243,7 +246,7 @@ USER root RUN true && \ chgrp -R 0 "${DOMAIN_DIR}" && \ chmod -R g=u "${DOMAIN_DIR}" -USER payara +USER ${LINUX_USER} # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh b/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh index 07dd90a1b98..0bf9d0b80fb 100644 --- a/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh +++ b/modules/container-base/src/main/docker/scripts/init_1_change_passwords.sh @@ -7,23 +7,23 @@ set -euo pipefail # Someone set the env var for passwords - get the new password in. Otherwise print warning. # https://docs.openshift.com/container-platform/4.14/openshift_images/create-images.html#avoid-default-passwords -if [ "$LINUX_USER_PASSWORD" != "payara" ]; then - echo -e "payara\n$LINUX_USER_PASSWORD\n$LINUX_USER_PASSWORD" | passwd +if [ "$LINUX_PASSWORD" != "payara" ]; then + echo -e "$LINUX_USER\n$LINUX_PASSWORD\n$LINUX_PASSWORD" | passwd else - echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR USER payara! ('payara')" - echo " To change the password, set the LINUX_USER_PASSWORD env var." + echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR USER \"${LINUX_USER}\"! ('payara')" + echo " To change the password, set the LINUX_PASSWORD env var." 
fi # Change the domain admin password if necessary -if [ "$ADMIN_PASSWORD" != "admin" ]; then +if [ "$PAYARA_ADMIN_PASSWORD" != "admin" ]; then PASSWORD_FILE=$(mktemp) echo "AS_ADMIN_PASSWORD=admin" > "$PASSWORD_FILE" - echo "AS_ADMIN_NEWPASSWORD=${ADMIN_PASSWORD}" >> "$PASSWORD_FILE" - asadmin --user="${ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-admin-password --domain_name="${DOMAIN_NAME}" + echo "AS_ADMIN_NEWPASSWORD=${PAYARA_ADMIN_PASSWORD}" >> "$PASSWORD_FILE" + asadmin --user="${PAYARA_ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-admin-password --domain_name="${DOMAIN_NAME}" rm "$PASSWORD_FILE" else - echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR PAYARA ASADMIN! ('admin')" - echo " To change the password, set the ADMIN_PASSWORD env var." + echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT PASSWORD FOR PAYARA ADMIN \"${PAYARA_ADMIN_USER}\"! ('admin')" + echo " To change the password, set the PAYARA_ADMIN_PASSWORD env var." fi # Change the domain master password if necessary @@ -31,13 +31,13 @@ fi # > Instead, Payara Server strictly uses the master password to ONLY encrypt the keystore and truststore used to store keys and certificates for the DAS and instances usage. # It will be requested when booting the application server! 
# https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password -if [ "$DOMAIN_MASTER_PASSWORD" != "changeit" ]; then +if [ "$DOMAIN_PASSWORD" != "changeit" ]; then PASSWORD_FILE=$(mktemp) echo "AS_ADMIN_MASTERPASSWORD=changeit" >> "$PASSWORD_FILE" - echo "AS_ADMIN_NEWMASTERPASSWORD=${DOMAIN_MASTER_PASSWORD}" >> "$PASSWORD_FILE" - asadmin --user="${ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-master-password --savemasterpassword false "${DOMAIN_NAME}" + echo "AS_ADMIN_NEWMASTERPASSWORD=${DOMAIN_PASSWORD}" >> "$PASSWORD_FILE" + asadmin --user="${PAYARA_ADMIN_USER}" --passwordfile="$PASSWORD_FILE" change-master-password --savemasterpassword false "${DOMAIN_NAME}" rm "$PASSWORD_FILE" else - echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT MASTER PASSWORD FOR THE DOMAIN! ('changeit')" - echo " To change the password, set the DOMAIN_MASTER_PASSWORD env var." + echo "IMPORTANT: THIS CONTAINER USES THE DEFAULT DOMAIN \"MASTER\" PASSWORD! ('changeit')" + echo " To change the password, set the DOMAIN_PASSWORD env var." 
fi diff --git a/modules/container-base/src/main/docker/scripts/startInForeground.sh b/modules/container-base/src/main/docker/scripts/startInForeground.sh index 262cadd9aca..fa7d533b0d1 100644 --- a/modules/container-base/src/main/docker/scripts/startInForeground.sh +++ b/modules/container-base/src/main/docker/scripts/startInForeground.sh @@ -32,9 +32,9 @@ ########################################################################################################## # Check required variables are set -if [ -z "$ADMIN_USER" ]; then echo "Variable ADMIN_USER is not set."; exit 1; fi -if [ -z "$ADMIN_PASSWORD" ]; then echo "Variable ADMIN_PASSWORD is not set."; exit 1; fi -if [ -z "$DOMAIN_MASTER_PASSWORD" ]; then echo "Variable DOMAIN_MASTER_PASSWORD is not set."; exit 1; fi +if [ -z "$PAYARA_ADMIN_USER" ]; then echo "Variable PAYARA_ADMIN_USER is not set."; exit 1; fi +if [ -z "$PAYARA_ADMIN_PASSWORD" ]; then echo "Variable PAYARA_ADMIN_PASSWORD is not set."; exit 1; fi +if [ -z "$DOMAIN_PASSWORD" ]; then echo "Variable DOMAIN_PASSWORD is not set."; exit 1; fi if [ -z "$PREBOOT_COMMANDS_FILE" ]; then echo "Variable PREBOOT_COMMANDS_FILE is not set."; exit 1; fi if [ -z "$POSTBOOT_COMMANDS_FILE" ]; then echo "Variable POSTBOOT_COMMANDS_FILE is not set."; exit 1; fi if [ -z "$DOMAIN_NAME" ]; then echo "Variable DOMAIN_NAME is not set."; exit 1; fi @@ -46,10 +46,10 @@ fi # For safety reasons, do no longer expose the passwords - malicious code could extract it! 
# (We need to save the master password for booting the server though) -MASTER_PASSWORD="${DOMAIN_MASTER_PASSWORD}" -export LINUX_USER_PASSWORD="have-some-scrambled-eggs" -export ADMIN_PASSWORD="have-some-scrambled-eggs" -export DOMAIN_MASTER_PASSWORD="have-some-scrambled-eggs" +MASTER_PASSWORD="${DOMAIN_PASSWORD}" +export LINUX_PASSWORD="have-some-scrambled-eggs" +export PAYARA_ADMIN_PASSWORD="have-some-scrambled-eggs" +export DOMAIN_PASSWORD="have-some-scrambled-eggs" # The following command gets the command line to be executed by start-domain # - print the command line to the server with --dry-run, each argument on a separate line @@ -66,7 +66,7 @@ PASSWORD_FILE=$(mktemp) echo "AS_ADMIN_MASTERPASSWORD=$MASTER_PASSWORD" > "$PASSWORD_FILE" # shellcheck disable=SC2068 # -- Using $@ is necessary here as asadmin cannot deal with options enclosed in ""! -OUTPUT=$("${PAYARA_DIR}"/bin/asadmin --user="${ADMIN_USER}" --passwordfile="$PASSWORD_FILE" start-domain --dry-run --prebootcommandfile="${PREBOOT_COMMANDS_FILE}" --postbootcommandfile="${POSTBOOT_COMMANDS_FILE}" $@ "$DOMAIN_NAME") +OUTPUT=$("${PAYARA_DIR}"/bin/asadmin --user="${PAYARA_ADMIN_USER}" --passwordfile="$PASSWORD_FILE" start-domain --dry-run --prebootcommandfile="${PREBOOT_COMMANDS_FILE}" --postbootcommandfile="${POSTBOOT_COMMANDS_FILE}" $@ "$DOMAIN_NAME") STATUS=$? 
rm "$PASSWORD_FILE" if [ "$STATUS" -ne 0 ] From 5c7a91c9be0d41f042612be7ac65f76c28d9c7e5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 15 Jul 2024 15:30:39 +0200 Subject: [PATCH 57/63] docs(ct): add documentation about changing passwords and some more Also includes a release note --- doc/release-notes/10508-base-image-fixes.md | 12 +++++++++++ .../source/container/base-image.rst | 20 ++++++++++++++++--- 2 files changed, 29 insertions(+), 3 deletions(-) create mode 100644 doc/release-notes/10508-base-image-fixes.md diff --git a/doc/release-notes/10508-base-image-fixes.md b/doc/release-notes/10508-base-image-fixes.md new file mode 100644 index 00000000000..148066435e8 --- /dev/null +++ b/doc/release-notes/10508-base-image-fixes.md @@ -0,0 +1,12 @@ +# Security and Compatibility Fixes to the Container Base Image + +- Switch "wait-for" to "wait4x", aligned with the Configbaker Image +- Update "jattach" to v2.2 +- Install AMD64 / ARM64 versions of tools as necessary +- Run base image as unprivileged user by default instead of `root` - this was an oversight from OpenShift changes +- Linux User, Payara Admin and Domain Master passwords: + - Print hints about the default, publicly known passwords in place + - Enable replacing these passwords at container boot time +- Enable building with updated Temurin JRE image based on Ubuntu 24.04 LTS +- Fix entrypoint script troubles with pre- and postboot script files +- Unify location of files at CONFIG_DIR=/opt/payara/config, avoid writing to other places \ No newline at end of file diff --git a/doc/sphinx-guides/source/container/base-image.rst b/doc/sphinx-guides/source/container/base-image.rst index 29c357b91f6..0005265fb1c 100644 --- a/doc/sphinx-guides/source/container/base-image.rst +++ b/doc/sphinx-guides/source/container/base-image.rst @@ -85,7 +85,7 @@ Some additional notes, using Maven parameters to change the build and use ...: (See also `Docker Hub search example `_) - ... 
a different Java Distribution: add ``-Djava.image="name:tag"`` with precise reference to an image available local or remote. -- ... a different UID/GID for the ``payara`` user/group: add ``-Dbase.image.uid=1234`` (or ``.gid``) +- ... a different UID/GID for the ``payara`` user/group (default ``1000:1000``): add ``-Dbase.image.uid=1234`` (or ``.gid``) Automated Builds & Publishing ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -151,12 +151,12 @@ provides. These are mostly based on environment variables (very common with cont - [preboot]_ - Abs. path - Provide path to file with ``asadmin`` commands to run **before** boot of application server. - See also `Pre/postboot script docs`_. + See also `Pre/postboot script docs`_. Must be writeable by Payara Linux user! * - ``POSTBOOT_COMMANDS`` - [postboot]_ - Abs. path - Provide path to file with ``asadmin`` commands to run **after** boot of application server. - See also `Pre/postboot script docs`_. + See also `Pre/postboot script docs`_. Must be writeable by Payara Linux user! * - ``JVM_ARGS`` - (empty) - String @@ -231,6 +231,18 @@ provides. These are mostly based on environment variables (very common with cont - See :ref:`:ApplicationServerSettings` ``http.request-timeout-seconds``. *Note:* can also be set using any other `MicroProfile Config Sources`_ available via ``dataverse.http.timeout``. + * - ``PAYARA_ADMIN_PASSWORD`` + - ``admin`` + - String + - Set to secret string to change `Payara Admin Console`_ Administrator User ("admin") password. + * - ``LINUX_PASSWORD`` + - ``payara`` + - String + - Set to secret string to change the Payara Linux User ("payara", default UID=1000) password. + * - ``DOMAIN_PASSWORD`` + - ``changeit`` + - String + - Set to secret string to change the `Domain Master Password`_. .. [preboot] ``${CONFIG_DIR}/pre-boot-commands.asadmin`` @@ -374,3 +386,5 @@ from `run-java-sh recommendations`_. .. 
_Pre/postboot script docs: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Micro%20Documentation/Payara%20Micro%20Configuration%20and%20Management/Micro%20Management/Asadmin%20Commands/Pre%20and%20Post%20Boot%20Commands.html .. _MicroProfile Config Sources: https://docs.payara.fish/community/docs/Technical%20Documentation/MicroProfile/Config/Overview.html .. _run-java-sh recommendations: https://github.com/fabric8io-images/run-java-sh/blob/master/TUNING.md#recommandations +.. _Domain Master Password: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/Security%20Guide/Administering%20System%20Security.html#to-change-the-master-password +.. _Payara Admin Console: https://docs.payara.fish/community/docs/Technical%20Documentation/Payara%20Server%20Documentation/General%20Administration/Overview.html#administration-console \ No newline at end of file From db19451409521f5c319a0ccefecc5ae2aa28a555 Mon Sep 17 00:00:00 2001 From: Steven Winship <39765413+stevenwinship@users.noreply.github.com> Date: Mon, 15 Jul 2024 11:21:30 -0400 Subject: [PATCH 58/63] renamed script from 6.2 to 6.3 --- src/main/resources/db/migration/{V6.2.0.2.sql => V6.3.0.1.sql} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename src/main/resources/db/migration/{V6.2.0.2.sql => V6.3.0.1.sql} (100%) diff --git a/src/main/resources/db/migration/V6.2.0.2.sql b/src/main/resources/db/migration/V6.3.0.1.sql similarity index 100% rename from src/main/resources/db/migration/V6.2.0.2.sql rename to src/main/resources/db/migration/V6.3.0.1.sql From 194b099902132d50b4ae961fbd427f2b69f05541 Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Wed, 17 Jul 2024 12:33:17 -0400 Subject: [PATCH 59/63] an extra null check on TermsOfUse - this should fix #10513 for datasets harvested in oai_dc, and not break anything else... I think? 
#8796 --- .../edu/harvard/iq/dataverse/search/IndexServiceBean.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 78bc80a798d..c91eb0bfa7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1849,8 +1849,12 @@ private void addLicenseToSolrDoc(SolrInputDocument solrInputDocument, DatasetVer } String licenseName = "Custom Terms"; - if(datasetVersion.getTermsOfUseAndAccess().getLicense() != null) { + if (datasetVersion.getTermsOfUseAndAccess().getLicense() != null) { licenseName = datasetVersion.getTermsOfUseAndAccess().getLicense().getName(); + } else if (datasetVersion.getTermsOfUseAndAccess().getTermsOfUse() == null) { + // this fixes #10513 for datasets harvested in oai_dc - these + // have neither the license id, nor any actual custom terms + return; } solrInputDocument.addField(SearchFields.DATASET_LICENSE, licenseName); } From a6c1c11bc60223ee4ff75b7be66ab6ccd2a8ba70 Mon Sep 17 00:00:00 2001 From: Benedikt Meier Date: Fri, 19 Jul 2024 12:05:31 +0200 Subject: [PATCH 60/63] add aws-cli to configbaker #10700 --- modules/container-configbaker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index dae4a3aa272..351425a17ba 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -21,7 +21,7 @@ ENV SCRIPT_DIR="/scripts" \ ENV PATH="${PATH}:${SCRIPT_DIR}" \ BOOTSTRAP_DIR="${SCRIPT_DIR}/bootstrap" -ARG APK_PACKAGES="curl bind-tools netcat-openbsd jq bash dumb-init wait4x ed postgresql-client" +ARG APK_PACKAGES="curl bind-tools netcat-openbsd jq bash dumb-init wait4x ed postgresql-client aws-cli" RUN true && \ # Install necessary software and 
tools From 4abfb245e396caaacbfc28e388a61962db51f269 Mon Sep 17 00:00:00 2001 From: Dimitri Szabo <46443753+DS-INRA@users.noreply.github.com> Date: Wed, 24 Jul 2024 16:46:52 +0200 Subject: [PATCH 61/63] #10069 add new suggestion template (#10091) * add new suggestion template * Update idea_proposal.md Add new ideas * Update feature_request.md * Update bug_report.md * Update feature_request.md * Update feature_request.md * Update bug_report.md * Update idea_proposal.md * Update PULL_REQUEST_TEMPLATE.md * Update idea_proposal.md changed "feature (request)" to idea or suggestion * Added "contributing" section to bug_report.md Also updated link to the contributor guide * update contributor guide link idea_proposal.md * remove unnecessary '"contributing" in PR template * Update contributor guide link in bug_report.md * Update feature_request.md Update contributor guide link Added section for contribution * remove punctuation spaces bug_report.md Co-authored-by: Philip Durbin * Update feature_request.md removed "Idea" Changed the PR paragraph * Update PR paragraph in bug_report.md * Update PR paragraph in idea_proposal.md * Add a space back --------- Co-authored-by: Martin Amouzou <85512093+martinAmouzou@users.noreply.github.com> Co-authored-by: Philip Durbin --- .github/ISSUE_TEMPLATE/bug_report.md | 7 ++-- .github/ISSUE_TEMPLATE/feature_request.md | 7 ++-- .github/ISSUE_TEMPLATE/idea_proposal.md | 40 +++++++++++++++++++++++ 3 files changed, 50 insertions(+), 4 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/idea_proposal.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 7e6995d76d9..3dba7d52109 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -14,7 +14,7 @@ Thank you for contributing to the Dataverse Project through the creation of a bu WARNING: If this is a security issue it should be reported privately to security@dataverse.org More information on bug issues and 
contributions can be found in the "Contributing to Dataverse" page: -https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#bug-reportsissues +https://guides.dataverse.org/en/latest/contributor/index.html Please fill out as much of the template as you can. Start below this comment section. @@ -44,7 +44,6 @@ Start below this comment section. **Any related open or closed issues to this bug report?** - **Screenshots:** No matter the issue, screenshots are always welcome. @@ -53,3 +52,7 @@ To add a screenshot, please use one of the following formats and/or methods desc * https://help.github.com/en/articles/file-attachments-on-issues-and-pull-requests * + + +**Are you thinking about creating a pull request for this issue?** +Help is always welcome, is this bug something you or your organization plan to fix? diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index d6248537418..7365cb4317c 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,7 +1,7 @@ --- name: Feature request about: Suggest an idea or new feature for the Dataverse software! -title: 'Feature Request/Idea:' +title: 'Feature Request:' labels: 'Type: Feature' assignees: '' @@ -11,7 +11,7 @@ assignees: '' Thank you for contributing to the Dataverse Project through the creation of a feature request! More information on ideas/feature requests and contributions can be found in the "Contributing to Dataverse" page: -https://github.com/IQSS/dataverse/blob/develop/CONTRIBUTING.md#ideasfeature-requests +https://guides.dataverse.org/en/latest/contributor/index.html Please fill out as much of the template as you can. Start below this comment section. @@ -34,3 +34,6 @@ Start below this comment section. 
**Any open or closed issues related to this feature request?** + +**Are you thinking about creating a pull request for this feature?** +Help is always welcome, is this feature something you or your organization plan to implement? diff --git a/.github/ISSUE_TEMPLATE/idea_proposal.md b/.github/ISSUE_TEMPLATE/idea_proposal.md new file mode 100644 index 00000000000..8cb6c7bfafe --- /dev/null +++ b/.github/ISSUE_TEMPLATE/idea_proposal.md @@ -0,0 +1,40 @@ +--- +name: Idea proposal +about: Propose a new idea for discussion to improve the Dataverse software! +title: 'Suggestion:' +labels: 'Type: Suggestion' +assignees: '' + +--- + + + +**Overview of the Suggestion** + + +**What kind of user is the suggestion intended for?** +(Example users roles: API User, Curator, Depositor, Guest, Superuser, Sysadmin) + + +**What inspired this idea?** + + +**What existing behavior do you want changed?** + + +**Any brand new behavior do you want to add to Dataverse?** + + +**Any open or closed issues related to this suggestion?** + + +**Are you thinking about creating a pull request for this issue?** +Help is always welcome, is this idea something you or your organization plan to implement? 
From 146c9273e26569bb0d598d1962f23ee2bfe6cba0 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 25 Jul 2024 09:17:08 -0400 Subject: [PATCH 62/63] bump to prevent "unknown flag: --driver" on Mac #10508 #9771 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 62efbf62317..036a47e43c4 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -198,7 +198,7 @@ 1.7.0 - 0.43.4 + 0.44.0 From 4258900ad94a3e59c877e3a63a08b1f0f10c08be Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Thu, 25 Jul 2024 10:56:20 -0400 Subject: [PATCH 63/63] Revert "bump to prevent "unknown flag: --driver" on Mac #10508 #9771" This reverts commit 146c9273e26569bb0d598d1962f23ee2bfe6cba0. We can't use 0.44 because of this error: "Unable to inspect image [solr:]" See https://github.com/fabric8io/docker-maven-plugin/issues/1756 --- modules/dataverse-parent/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index 036a47e43c4..62efbf62317 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -198,7 +198,7 @@ 1.7.0 - 0.44.0 + 0.43.4